diff --git a/builder/build_cli.py b/builder/build_cli.py
index 3e17dfb..2dd96fd 100644
--- a/builder/build_cli.py
+++ b/builder/build_cli.py
@@ -13,6 +13,7 @@
 import sys
 import requests
 import json
+import time
 
 # Automatically watch the following extra directories when --serve is used.
 EXTRA_WATCH_DIRS = ["exts", "themes"]
@@ -20,10 +21,35 @@
 SPEC_CHECKSUM_URL = "https://spec.ferrocene.dev/paragraph-ids.json"
 SPEC_LOCKFILE = "spec.lock"
 
-def build_docs(root, builder, clear, serve, debug, spec_lock_consistency_check):
+def build_docs(
+    root: Path,
+    builder: str,
+    clear: bool,
+    serve: bool,
+    debug: bool,
+    offline: bool,
+    spec_lock_consistency_check: bool
+) -> Path:
+    """
+    Builds the Sphinx documentation with the specified options.
+
+    Args:
+        root: The root directory of the documentation.
+        builder: The builder to use (e.g., 'html', 'xml').
+        clear: Whether to disable incremental builds.
+        serve: Whether to start a local server with live reload.
+        debug: Whether to enable debug mode.
+        offline: Whether to build in offline mode.
+        spec_lock_consistency_check: Whether to check spec lock consistency.
+
+    Returns:
+        Path: The path to the generated documentation.
+    """
+
     dest = root / "build"
 
     args = ["-b", builder, "-d", dest / "doctrees"]
+
     if debug:
         # Disable parallel builds and show exceptions in debug mode.
         #
@@ -42,6 +68,8 @@ def build_docs(root, builder, clear, serve, debug, spec_lock_consistency_check):
     # Add configuration options as needed
     if not spec_lock_consistency_check:
         conf_opt_values.append("enable_spec_lock_consistency=0")
+    if offline:
+        conf_opt_values.append("offline=1")
     # Only add the --define argument if there are options to define
     if conf_opt_values:
         args.append("--define")
@@ -58,6 +86,9 @@ def build_docs(root, builder, clear, serve, debug, spec_lock_consistency_check):
     args += ["-W", "--keep-going"]
 
     try:
+
+        # Track the build time
+        timer_start = time.perf_counter()
         subprocess.run(
             [
                 "sphinx-autobuild" if serve else "sphinx-build",
@@ -73,6 +104,8 @@ def build_docs(root, builder, clear, serve, debug, spec_lock_consistency_check):
         print("\nhint: if you see an exception, pass --debug to see the full traceback")
         exit(1)
 
+    timer_end = time.perf_counter()
+    print(f"\nBuild finished in {timer_end - timer_start:.2f} seconds.")
     return dest / builder
 
 def update_spec_lockfile(spec_checksum_location, lockfile_location):
@@ -110,16 +143,21 @@ def main(root):
     parser.add_argument(
         "-c", "--clear", help="disable incremental builds", action="store_true"
     )
+    parser.add_argument(
+        "--offline",
+        help="build in offline mode",
+        action="store_true",
+    )
     group = parser.add_mutually_exclusive_group()
     parser.add_argument(
         "--ignore-spec-lock-diff",
-        help="ignore fls.lock file differences with live release -- for WIP branches only",
+        help="ignore spec.lock file differences with live release -- for WIP branches only",
         default=False,
         action="store_true"
     )
     parser.add_argument(
         "--update-spec-lock-file",
-        help="update fls.lock file",
+        help="update spec.lock file",
         action="store_true"
     )
     group.add_argument(
@@ -145,6 +183,6 @@ def main(root):
         update_spec_lockfile(SPEC_CHECKSUM_URL, root / "src" / SPEC_LOCKFILE)
 
     rendered = build_docs(
-        root, "xml" if args.xml else "html", args.clear, args.serve, args.debug, not args.ignore_spec_lock_diff
+        root, "xml" if args.xml else "html", args.clear, args.serve, args.debug, args.offline, not args.ignore_spec_lock_diff
    )
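For context on the mechanics: --offline travels from the CLI to the extension as an ordinary Sphinx config override. build_docs appends "offline=1" to the --define values, and because the extension registers offline with a boolean default, Sphinx coerces the override string ("1"/"0") to True/False. A minimal sketch of the equivalent direct invocation, with illustrative source and output paths rather than the repo's real ones:

    import subprocess

    # -D is the short form of --define; "offline=1" overrides the "offline"
    # config value registered by the coding_guidelines extension below.
    subprocess.run(
        ["sphinx-build", "-b", "html", "-D", "offline=1", "src", "build/html"],
        check=True,
    )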
diff --git a/exts/coding_guidelines/README.rst b/exts/coding_guidelines/README.rst
index b4b6ce2..af72c01 100644
--- a/exts/coding_guidelines/README.rst
+++ b/exts/coding_guidelines/README.rst
@@ -23,7 +23,7 @@
 Coverage of the coding guidlines over the FLS is calculated.
 Each coding guideline has its ``:fls:`` option turned into a hyperlink to the corresponding element within the FLS to be able to navigate there directly.
 
-Further an ``fls.lock`` file located at ``root/src/fls.lock`` is validated against the currently
+Further, a ``spec.lock`` file located at ``root/src/spec.lock`` is validated against the currently
 deployed version of the Ferrocene Language Spec and the build is failed if there is discrepency.
 
 Links to the Rust standard library
diff --git a/exts/coding_guidelines/__init__.py b/exts/coding_guidelines/__init__.py
index d903606..5d8161f 100644
--- a/exts/coding_guidelines/__init__.py
+++ b/exts/coding_guidelines/__init__.py
@@ -35,6 +35,11 @@ def merge_domaindata(self, docnames, other):
 
 def setup(app):
     app.add_domain(CodingGuidelinesDomain)
+    app.add_config_value(
+        name="offline",
+        default=False,
+        rebuild="env",
+    )  # register the offline option
     app.add_config_value(
         name="spec_std_docs_url",
         default="https://doc.rust-lang.org/stable/std",
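The registration above is standard Sphinx plumbing: a value added with app.add_config_value() in setup() is readable wherever the config is available, which is what lets fls_checks.py below branch on env.config.offline. A minimal self-contained sketch of the same pattern; the event wiring shown here is illustrative, not the repo's actual registration:

    from sphinx.application import Sphinx

    def setup(app: Sphinx):
        # rebuild="env" makes Sphinx re-read documents when the value changes,
        # so results cached by an online build are not silently reused offline.
        app.add_config_value(name="offline", default=False, rebuild="env")
        app.connect("env-check-consistency", on_check)

    def on_check(app, env):
        if env.config.offline:  # same value as app.config.offline
            return  # skip network-dependent validation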
+ error_message += "\nOnce resolved, you may run the following to update the local spec lock file:" + error_message += "\n\t./make.py --update-spec-lock-file" + logger.error(error_message) + raise FLSValidationError(error_message) # Check if all referenced FLS IDs exist check_fls_ids_correct(app, env, fls_ids) @@ -154,18 +153,20 @@ def check_fls_ids_correct(app, env, fls_ids): logger.info("All FLS references in guidelines are valid") -def gather_fls_paragraph_ids(json_url): +def gather_fls_paragraph_ids(app, json_url): """ - Gather all Ferrocene Language Specification paragraph IDs from the paragraph-ids.json file, - including both container section IDs and individual paragraph IDs. + Gather all Ferrocene Language Specification paragraph IDs from the paragraph-ids.json file + or from the lock file in offline mode, including both container section IDs and individual paragraph IDs. Args: + app: The Sphinx application json_url: The URL or path to the paragraph-ids.json file Returns: Dictionary mapping paragraph IDs to metadata AND the complete raw JSON data """ - logger.info("Gathering FLS paragraph IDs from %s", json_url) + offline = app.config.offline + lock_path = app.confdir / 'spec.lock' # Dictionary to store all FLS IDs and their metadata all_fls_ids = {} @@ -173,18 +174,30 @@ def gather_fls_paragraph_ids(json_url): try: # Load the JSON file - response = requests.get(json_url) - response.raise_for_status() # Raise exception for HTTP errors - - # Parse the JSON data - try: - raw_json_data = response.json() - data = raw_json_data # Keep reference to the original data - logger.debug("Successfully parsed JSON data") - except json.JSONDecodeError as e: - logger.error(f"Failed to parse JSON: {e}") - logger.debug(f"Response content preview: {response.text[:500]}...") - raise + if not offline: + logger.info("Gathering FLS paragraph IDs from %s", json_url) + response = requests.get(json_url) + response.raise_for_status() # Raise exception for HTTP errors + # Parse the JSON data + try: + raw_json_data = response.json() + data = raw_json_data # Keep reference to the original data + logger.debug("Successfully parsed JSON data") + except json.JSONDecodeError as e: + logger.error(f"Failed to parse JSON: {e}") + logger.debug(f"Response content preview: {response.text[:500]}...") + raise + + else : # if online mode is on read from the lock file + + if not lock_path.exists(): + logger.warning(f"No FLS lock file found at {lock_path}") # TODO: returns an error + return False, [] + logger.info("Gathering FLS paragraph IDs from lock file: %s", lock_path) + with open(lock_path, 'r', encoding='utf-8') as f: + raw_json_data=f.read() + data = json.loads(raw_json_data) + # Check if we have the expected document structure if 'documents' not in data: diff --git a/exts/coding_guidelines/fls_linking.py b/exts/coding_guidelines/fls_linking.py index 17a6abe..67f05fe 100644 --- a/exts/coding_guidelines/fls_linking.py +++ b/exts/coding_guidelines/fls_linking.py @@ -26,7 +26,7 @@ def load_fls_ids(app): """Load FLS IDs and their URLs.""" try: from . import fls_checks - fls_ids, _ = fls_checks.gather_fls_paragraph_ids(app.config.fls_paragraph_ids_url) + fls_ids, _ = fls_checks.gather_fls_paragraph_ids(app, app.config.fls_paragraph_ids_url ) return {fls_id: data['url'] for fls_id, data in fls_ids.items()} except Exception as e: logger.error(f"Failed to load FLS IDs: {e}")