#!/usr/bin/env python3

"""
Author: kymckay

Crawl function headers to produce appropriate documentation of public functions.

Supported header sections:
- Author(s) (with description below)
- Arguments
- Return Value
- Example(s)
- Public (by default a function will only be documented if set to "Yes")

EXAMPLES
    document_functions common --debug
        Crawls only the functions in addons/common and only reports debug messages.
"""

import os
import sys
import re
import argparse


class FunctionFile:
    def __init__(self, directory="."):
        self.directory = directory

        # False unless specified in processing
        self.debug = False
        self.lint_private = False

        # Empty until imported from file
        self.path = ""
        self.header = ""

        # Defaults until header is processed
        self.public = False
        self.authors = []
        self.description = ""
        self.arguments = []
        self.return_value = []
        self.example = ""

        # Filepath should only be logged once
        self.logged = False

        # Count parse results
        self.errors = 0

    def import_header(self, file_path):
        self.path = file_path

        with open(file_path, "r", encoding="utf-8") as file:
            code = file.read()

        # Grab the first block comment in the file (re.S lets "." span newlines)
        header_match = re.search(r"\s*/\*.+?\*/", code, re.S)
        if header_match:
            self.header = header_match.group(0)
        else:
            self.feedback("Missing header", 3)

    def has_header(self):
        return bool(self.header)
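
    # Sketch of the section-splitting idea used below: because the pattern has a
    # capturing group, re.split keeps the section names in the result, so the list
    # alternates between names and their bodies. For instance (illustrative values):
    #   re.split(r"^(Author|Public):\s?", "Author: Jane\nPublic: Yes\n", flags=re.M)
    #   -> ["", "Author", "Jane\n", "Public", "Yes\n"]
    # get_section() then looks up a name and returns the element right after it.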

    def process_header(self, debug=False, lint_private=False):
        # Detailed debugging occurs here so value is set
        self.debug = debug
        self.lint_private = lint_private

        for lineNumber, line in enumerate(self.header.splitlines()):
            if not (line.startswith(" * ") or line in ["", " *", "/*", "*/", " */"]):
                self.feedback(f"Header formatting on line {lineNumber+1}: ({line})", 1)

        # Preemptively cut away the comment characters (and leading/trailing whitespace)
        self.header_text = "\n".join([x[3:].strip() for x in self.header.splitlines()])

        # Split the header into expected sections
        self.sections = re.split(r"^(Author|Argument|Return Value|Example|Public)s?:\s?", self.header_text, flags=re.M)

        # If the public section is missing we can't continue
        public_raw = self.get_section("Public")
        if not public_raw:
            self.feedback("Public value undefined", 3)
            return self.errors

        # Determine whether the header is public
        self.public = self.process_public(public_raw)

        # Don't bother to process the rest if private
        # (unless private headers are being linted as well)
        if not self.public and not self.lint_private:
            return self.errors

        # Retrieve the raw section text for processing
        author_raw = self.get_section("Author")
        arguments_raw = self.get_section("Argument")
        return_value_raw = self.get_section("Return Value")
        example_raw = self.get_section("Example")

        # Author and description are stored in the first section
        if author_raw:
            self.authors = self.process_author(author_raw)
            self.description = self.process_description(author_raw)

        # Process arguments
        if arguments_raw:
            self.arguments = self.process_arguments(arguments_raw)

        # Process return value
        if return_value_raw:
            self.return_value = self.process_return_value(return_value_raw)

        # Process example
        if example_raw:
            self.example = self.process_example(example_raw)

        return self.errors

    def get_section(self, section_name):
        try:
            section_text = self.sections[self.sections.index(section_name) + 1]
            return section_text
        except ValueError:
            self.feedback("Missing \"{}\" header section".format(section_name), 2)
            return ""

    def process_public(self, raw):
        # Raw section text includes a trailing EOL character
        public_text = raw[:-1]

        if not re.match(r"(Yes|No)", public_text, re.I):
            self.feedback("Invalid public value \"{}\"".format(public_text), 2)

        return public_text.capitalize() == "Yes"

    def is_public(self):
        return self.public

    def process_author(self, raw):
        # Authors are listed on the first line
        authors_text = raw.splitlines()[0]

        # Separate authors are divided by commas
        return authors_text.split(", ")

    def process_description(self, raw):
        # Just use all the lines after the authors line
        description_text = "".join(raw.splitlines(True)[1:])

        return description_text
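
    # Illustrative examples (not from the repository) of argument lines that the
    # pattern in process_arguments() accepts; sub-arguments are prefixed with "- ":
    #   0: Unit <OBJECT>
    #   1: Target <OBJECT, STRING> (default: objNull)
    #   - 0: Position <ARRAY>
    # Any following line that carries no index is treated as a note attached to
    # the previous argument.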

    def process_arguments(self, raw):
        lines = raw.splitlines()

        if lines[0] == "None":
            return []

        if lines.count("") == len(lines):
            self.feedback("No arguments provided (use \"None\" where appropriate)", 2)
            return []

        if lines[-1] == "":
            lines.pop()
        else:
            self.feedback("No blank line after arguments list", 1)

        arguments = []
        expectedMainIndex = 0
        expectedSubIndex = 0
        for argument in lines:
            valid = re.match(r"^(- ){0,1}(\d+):\s(.+?)\<([\s\w,\|]+?)\>( )?(\s\(default: (.+)\))?$", argument)

            if valid:
                arg_isSubIndex = valid.group(1) is not None
                arg_index = valid.group(2)
                arg_name = valid.group(3)
                arg_types = valid.group(4)
                arg_default = valid.group(7)
                arg_notes = []

                # Sub-arguments keep their own running index; a main argument resets it
                if arg_isSubIndex:
                    expectedIndex = expectedSubIndex
                    expectedSubIndex = expectedSubIndex + 1
                else:
                    expectedIndex = expectedMainIndex
                    expectedMainIndex = expectedMainIndex + 1
                    expectedSubIndex = 0

                if int(arg_index) != expectedIndex:
                    # Echo the offending line verbatim to ease debugging
                    print(f"line|{argument}|")
                    self.feedback(f"Argument index {arg_index} does not match listed order {expectedIndex}", 1)

                if arg_default is None:
                    arg_default = ""

                # Flag argument types that headers should not use
                if ("SCALAR" in arg_types or "NUMVER" in arg_types):
                    self.feedback("Bad Arg Type \"{}\"".format(arg_types), 1)

                arguments.append([arg_index, arg_name, arg_types, arg_default, arg_notes])
            else:
                # Notes about the above argument won't start with an index
                # Only applies if there exists an above argument
                if re.match(r"^(\d+):", argument) or not arguments:
                    self.feedback("Malformed argument \"{}\"".format(argument), 2)
                    arguments.append(["?", "Malformed", "?", "?", []])
                else:
                    arguments[-1][-1].append(argument)

        return arguments
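
    # Illustrative example (not from the repository) of a return value line that
    # process_return_value() accepts:
    #   Success <BOOL>
    # "None" is also accepted and yields an empty list.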

    def process_return_value(self, raw):
        return_value = raw.strip()

        if return_value == "None":
            return []

        valid = re.match(r"^(.+?)\<([\s\w]+?)\>", return_value)

        if valid:
            return_name = valid.group(1)
            return_types = valid.group(2)
        else:
            self.feedback("Malformed return value \"{}\"".format(return_value), 2)
            return ["Malformed", ""]

        return [return_name, return_types]
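
    # The expected function name is derived from the file path; for example a file
    # at addons/common/functions/fnc_example.sqf (illustrative path) should have an
    # Example section that mentions ace_common_fnc_example, unless the example
    # starts with "Handled by" or "Called By".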

    def process_example(self, raw):
        example_text = raw.strip()
        if example_text == "None":
            return example_text

        path_match = re.match(r".*addons.(.*).functions.(.*).sqf", self.path)
        expected_func = f"ace_{path_match.group(1)}_{path_match.group(2)}"
        if expected_func.lower() not in example_text.lower() and not (
            example_text.startswith("Handled by") or example_text.startswith("Called By")
        ):
            self.feedback(f"Malformed example {example_text} should contain func {expected_func}", 2)

        return example_text
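
    # Rough shape of the markdown that document() appends for each function
    # (illustrative, assuming component "common" and file fnc_example.sqf):
    #   ## ace_common_fnc_example
    #   __Description__ ...
    #   __Parameters__ (table: Index | Description | Datatype(s) | Default Value)
    #   __Return Value__ (table: Description | Datatype(s))
    #   __Example__ (sqf code block)
    #   __Authors__ (bullet list), followed by a horizontal rule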

    def document(self, component):
        str_list = []

        # Title
        str_list.append("\n## ace_{}_fnc_{}\n".format(component, os.path.basename(self.path)[4:-4]))
        # Description
        str_list.append("__Description__\n\n" + self.description)
        # Arguments
        if self.arguments:
            str_list.append("__Parameters__\n\nIndex | Description | Datatype(s) | Default Value\n--- | --- | --- | ---\n")
            for argument in self.arguments:
                str_list.append("{} | {} | {} | {}\n".format(*argument))
            str_list.append("\n")
        else:
            str_list.append("__Parameters__\n\nNone\n\n")
        # Return Value
        if self.return_value:
            str_list.append("__Return Value__\n\nDescription | Datatype(s)\n--- | ---\n{} | {}\n\n".format(*self.return_value))
        else:
            str_list.append("__Return Value__\n\nNone\n\n")
        # Example
        str_list.append("__Example__\n\n```sqf\n{}\n```\n\n".format(self.example))
        # Authors
        str_list.append("\n__Authors__\n\n")
        for author in self.authors:
            str_list.append("- {}\n".format(author))
        # Horizontal rule
        str_list.append("\n---\n")

        return "".join(str_list)

    def log_file(self, error=False):
        # When in debug mode we only want to see the files with errors
        if not self.debug or error:
            if not self.logged:
                rel_path = os.path.relpath(self.path, self.directory)

                self.write("Processing... {}".format(rel_path), 1)
                self.logged = True
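
    # Feedback levels map to ["Info", "Warning", "Error", "Aborted"]; only the
    # "Error" and "Aborted" levels count towards the error total returned to the caller.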

    def feedback(self, message, level=0):
        priority_str = ["Info", "Warning", "Error", "Aborted"][level]

        self.log_file(level > 0)
        self.write("{0}: {1}".format(priority_str, message))

        if priority_str in ["Error", "Aborted"]:
            self.errors += 1

    def write(self, message, indent=2):
        to_print = [" "] * indent
        to_print.append(message)
        print("".join(to_print))

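
# The beautified component name is read from the component's script_component.hpp;
# an illustrative example of the line get_component_name() looks for (not necessarily
# verbatim from the repository):
#   #define COMPONENT_BEAUTIFIED Common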

def get_component_name(addons_dir, component):
    errors = 0

    script_component = os.path.join(addons_dir, component, 'script_component.hpp')
    with open(script_component, "r", encoding="utf-8") as file:
        code = file.read()

    name_match = re.search(r"#define COMPONENT_BEAUTIFIED (.*)", code)
    if name_match:
        name = name_match.group(1)
    else:
        name = component.title()
        print(" Warning: Missing COMPONENT_BEAUTIFIED")
        errors += 1

    return name, errors

def document_functions(addons_dir, components):
    errors = 0

    wiki_dir = os.path.abspath(os.path.join(addons_dir, '../docs/wiki/functions/'))
    os.makedirs(wiki_dir, exist_ok=True)
    print("Wiki: {}".format(wiki_dir))

    for component in components:
        print(" Documenting... {}.md".format(component))
        component_name, error = get_component_name(addons_dir, component)
        errors += error

        output = os.path.join(wiki_dir, component) + ".md"
        with open(output, "w", encoding="utf-8") as file:
            file.writelines([
                "---\n",
                "layout: wiki\n",
                "title: {} Functions\n".format(component_name),
                "description: List of functions in {} component.\n".format(component_name),
                "group: functions\n",
                "parent: wiki\n",
                "---\n",
            ])

            for function in components[component]:
                file.write(function.document(component))

    return errors
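
# The crawl assumes the usual addons/<component>/functions/fnc_*.sqf layout: the
# component key is taken from the grandparent directory of each fnc_*.sqf file.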

def crawl_dir(addons_dir, directory, debug=False, lint_private=False):
    components = {}
    errors = 0

    for root, dirs, files in os.walk(directory):
        for file in files:
            if file.endswith(".sqf") and file.startswith("fnc_"):
                file_path = os.path.join(root, file)

                # Attempt to import the header from file
                function = FunctionFile(directory)
                function.import_header(file_path)

                # Undergo data extraction and detailed debug
                if function.has_header():
                    errors += function.process_header(debug, lint_private)

                    if function.is_public() and not debug:
                        # Add function to its component key (initialise key if necessary)
                        component = os.path.basename(os.path.dirname(root))
                        components.setdefault(component, []).append(function)

                        function.feedback("Publicly documented")
                else:
                    errors += 1

    if not debug:
        print()
        errors += document_functions(addons_dir, components)

    if errors != 0:
        print("\n Unclean!\n {} errors".format(errors))
    else:
        print("\n Clean!")

    return errors
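
# Typical invocations from the tools directory (illustrative; the script path is
# assumed, only the CLI flags below are defined by the argument parser):
#   python3 document_functions.py                 # document every addon component
#   python3 document_functions.py common          # document only addons/common
#   python3 document_functions.py --debug         # header lint messages only, no wiki output
#   python3 document_functions.py --lint-private  # also lint private function headers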

def main():
    print("""
    #########################
    # Documenting Functions #
    #########################
    """)

    parser = argparse.ArgumentParser()
    parser.add_argument('directory', nargs="?", type=str, default=".", help='only crawl specified module addon folder')
    parser.add_argument('--debug', action="store_true", help='only check for header debug messages')
    parser.add_argument('--lint-private', action="store_true", help='lint private function headers as well')
    args = parser.parse_args()

    # Allow calling from anywhere and work our way to addons from this file
    addons_dir = os.path.abspath(os.path.join(__file__, '../../../addons/'))
    prospective_dir = os.path.abspath(os.path.join(addons_dir, args.directory))

    if os.path.isdir(prospective_dir):
        print("Directory: {}".format(prospective_dir))
        errors = crawl_dir(addons_dir, prospective_dir, args.debug, args.lint_private)
        return 0 if errors == 0 else 1
    else:
        print("Invalid directory: {}".format(prospective_dir))
        return 1


if __name__ == "__main__":
    sys.exit(main())