diff --git a/.config/dictionaries/project.dic b/.config/dictionaries/project.dic
index 74723fad7..87a059c42 100644
--- a/.config/dictionaries/project.dic
+++ b/.config/dictionaries/project.dic
@@ -15,6 +15,7 @@ depgraph
 devenv
 dind
 dockerhub
+doseq
 doublecircle
 Earthfile
 Earthfiles
@@ -50,6 +51,7 @@ idents
 JDBC
 jorm
 jormungandr
+jsonlib
 junitreport
 Kroki
 kubeconfig
diff --git a/.github/workflows/validate-project-fields.yml b/.github/workflows/validate-project-fields.yml
index 732a44479..ad661ff5d 100644
--- a/.github/workflows/validate-project-fields.yml
+++ b/.github/workflows/validate-project-fields.yml
@@ -19,33 +19,31 @@ concurrency:
   group: ${{ github.workflow }}-${{ github.head_ref || github.ref }}
   cancel-in-progress: true
 
-env:
-  AWS_REGION: eu-central-1
-  AWS_ROLE_ARN: arn:aws:iam::332405224602:role/ci
-  EARTHLY_TARGET: docker
-  ECR_REGISTRY: 332405224602.dkr.ecr.eu-central-1.amazonaws.com
-
 jobs:
   validate-project-fields:
     runs-on: ubuntu-latest
+    env:
+      # Needs a classic PAT with the read:project scope
+      GITHUB_PROJECTS_PAT: ${{ secrets.PROJECTS_PAT }}
+      GITHUB_REPOSITORY: "${{ github.repository }}"
+      GITHUB_EVENT_NUMBER: "${{ github.event.number || '0' }}"
+      PROJECT_NUMBER: 102
    steps:
-      - uses: actions/checkout@v4
+      - name: Fetch Validation Script
+        uses: actions/checkout@v4
+        with:
+          repository: input-output-hk/catalyst-ci
+          ref: feat/validate-project-fields-in-prs-and-issues-sj
+          sparse-checkout: |
+            utilities/project-fields-validator/main.py
+          sparse-checkout-cone-mode: false
 
-      - name: Setup CI
-        uses: input-output-hk/catalyst-ci/actions/setup@master
+      - name: Set up Python
+        uses: actions/setup-python@v5
         with:
-          aws_role_arn: ${{ env.AWS_ROLE_ARN }}
-          aws_region: ${{ env.AWS_REGION }}
-          earthly_runner_secret: ${{ secrets.EARTHLY_RUNNER_SECRET }}
+          python-version: '3.13'
 
       - name: Run Project Fields Validation
-        uses: input-output-hk/catalyst-ci/actions/run@master
         if: always()
         continue-on-error: false
-        with:
-          earthfile: ./utilities/project-fields-validator
-          flags: --allow-privileged
-          targets: validate-project-fields
-          target_flags: --GITHUB_REPOSITORY="${{ github.repository }}" --GITHUB_EVENT_NUMBER="${{ github.event.number || '0' }}"
-          runner_address: ${{ secrets.EARTHLY_SATELLITE_ADDRESS }}
-          artifact: false
+        run: utilities/project-fields-validator/main.py
diff --git a/Justfile b/Justfile
index 17c1bbb6e..eb175b123 100644
--- a/Justfile
+++ b/Justfile
@@ -16,5 +16,15 @@ check-spelling:
     earthly +clean-spelling-list
     earthly +check-spelling
 
+# Fix and Check Python files
+format-python-code:
+    ruff check --select I --fix .
+    ruff format .
+
+# Lint Python files
+lint-python:
+    ruff check .
+
 # Pre Push Checks - intended to be run by a git pre-push hook.
-pre-push: check-markdown check-spelling
+pre-push: check-markdown check-spelling format-python-code lint-python
diff --git a/earthly/docs/common/macros/include.py b/earthly/docs/common/macros/include.py
index 4a9ca0b1b..94e615930 100644
--- a/earthly/docs/common/macros/include.py
+++ b/earthly/docs/common/macros/include.py
@@ -1,6 +1,7 @@
 import os
-import textwrap
 import re
+import textwrap
+
 
 def inc_file(env, filename, start_line=0, end_line=None, indent=None):
     """
@@ -10,7 +11,7 @@ def inc_file(env, filename, start_line=0, end_line=None, indent=None):
     project.
     indent = number of spaces to indent every line but the first.
""" - + try: full_filename = os.path.join(env.project_dir, filename) @@ -24,8 +25,8 @@ def inc_file(env, filename, start_line=0, end_line=None, indent=None): else: indent = " " * indent text = textwrap.indent(text, indent) - text = text[len(indent):] # First line should not be indented at all. - text = re.sub(r'\n$', '', text, count=1) + text = text[len(indent) :] # First line should not be indented at all. + text = re.sub(r"\n$", "", text, count=1) # print(text) return text except Exception as exc: diff --git a/earthly/docs/dev/local.py b/earthly/docs/dev/local.py index 4a528a53c..0414b43fa 100755 --- a/earthly/docs/dev/local.py +++ b/earthly/docs/dev/local.py @@ -2,13 +2,13 @@ # cspell: words gmtime +import argparse import subprocess +import sys import time +import urllib.request import webbrowser from dataclasses import dataclass, field -import argparse -import sys -import urllib.request class ProcessRunError(Exception): @@ -213,7 +213,7 @@ def main(): # Open the webpage in a browser (once) if not browsed: - browsed=True + browsed = True if not args.no_browser: webbrowser.open(f"http://localhost:{docs_container.exposed_port}") diff --git a/earthly/postgresql/scripts/std_checks.py b/earthly/postgresql/scripts/std_checks.py index 1164445f2..d55277f07 100755 --- a/earthly/postgresql/scripts/std_checks.py +++ b/earthly/postgresql/scripts/std_checks.py @@ -1,11 +1,10 @@ #!/usr/bin/env python3 +import argparse + import python.exec_manager as exec_manager import python.vendor_files_check as vendor_files_check -import argparse import rich -from rich import print -import os # This script is run inside the `check` stage for postgres database setup # to perform all high level non-compilation checks. @@ -32,7 +31,7 @@ def main(): # Force color output in CI rich.reconfigure(color_system="256") - parser = argparse.ArgumentParser(description="Postgres checks processing.") + argparse.ArgumentParser(description="Postgres checks processing.") results = exec_manager.Results("Postgres checks") diff --git a/earthly/postgresql/scripts/std_docs.py b/earthly/postgresql/scripts/std_docs.py index e64c7e3ad..32d234bf1 100755 --- a/earthly/postgresql/scripts/std_docs.py +++ b/earthly/postgresql/scripts/std_docs.py @@ -2,16 +2,17 @@ # cspell: words dbmigrations dbhost dbuser dbuserpw Tsvg pgsql11 -from typing import Optional -import python.exec_manager as exec_manager -import python.db_ops as db_ops import argparse -import rich -from rich import print import os import re from textwrap import indent +import python.db_ops as db_ops +import python.exec_manager as exec_manager +import rich +from rich import print + + def process_sql_files(directory): file_pattern = r"V(\d+)__(\w+)\.sql" migrations = {} @@ -32,11 +33,12 @@ def process_sql_files(directory): migrations[version] = { "version": version, "migration_name": migration_name, - "sql_data": sql_data + "sql_data": sql_data, } return migrations, largest_version + class Migrations: def __init__(self, args: argparse.Namespace): """ @@ -73,6 +75,7 @@ def create_markdown_file(self, file_path): print("Markdown file created successfully at: {}".format(file_path)) + def main(): # Force color output in CI rich.reconfigure(color_system="256") @@ -124,9 +127,7 @@ def main(): f"-o docs/database_schema/ " ) res = exec_manager.cli_run( - schemaspy_cmd, - name="Generate SchemaSpy Documentation", - verbose=True + schemaspy_cmd, name="Generate SchemaSpy Documentation", verbose=True ) results.add(res) @@ -135,7 +136,7 @@ def main(): exec_manager.cli_run( 'echo "hide: true" > 
docs/database_schema/.pages',
             name="Create .pages file",
-            verbose=True
+            verbose=True,
         )
 
     migrations.create_markdown_file("docs/migrations.md")
@@ -145,5 +146,6 @@ def main():
     if not results.ok():
         exit(1)
 
+
 if __name__ == "__main__":
-    main()
\ No newline at end of file
+    main()
diff --git a/earthly/python/Earthfile b/earthly/python/Earthfile
index 9a7f66d77..21425d4eb 100644
--- a/earthly/python/Earthfile
+++ b/earthly/python/Earthfile
@@ -35,9 +35,15 @@ python-base:
     # Adjust Poetry's configuration to prevent connection pool warnings.
     RUN poetry config installer.max-workers 10
 
+    # Extensions we use need Rust.
+    RUN curl https://sh.rustup.rs -sSf | bash -s -- -y
+    RUN echo 'source $HOME/.cargo/env' >> $HOME/.bashrc
+    ENV PATH="/root/.cargo/bin:${PATH}"
+
     # Install ruff for linting.
     RUN pip3 install ruff
     RUN pip3 install rich
+    RUN pip3 install third-party-imports
 
     # Universal build scripts we will always need and are not target dependent.
     COPY --dir scripts /scripts
@@ -58,9 +64,10 @@ BUILDER:
 
 CHECK:
     FUNCTION
+    ARG options
 
     # Execute the check script
-    RUN /scripts/std_checks.py
+    RUN /scripts/std_checks.py $options
 
 LINT_PYTHON:
     # Linting all Python code is done with ruff
diff --git a/earthly/python/scripts/std_checks.py b/earthly/python/scripts/std_checks.py
index e435976cb..44e74c621 100755
--- a/earthly/python/scripts/std_checks.py
+++ b/earthly/python/scripts/std_checks.py
@@ -4,44 +4,114 @@
 import subprocess
 import sys
 
-def check_pyproject_toml():
+
+def check_pyproject_toml(stand_alone):
     # Check if 'pyproject.toml' exists in the project root.
-    if not os.path.isfile('pyproject.toml'):
+    if not os.path.isfile("pyproject.toml"):
+        if stand_alone:
+            print("pyproject.toml check passed.")
+            return True
+
         print("Error: pyproject.toml not found.")
         return False
     else:
+        if stand_alone:
+            print("Error: pyproject.toml found in standalone python module.")
+            return False
+
         print("pyproject.toml check passed.")
         return True
-    
-def check_poetry_lock():
+
+
+def check_poetry_lock(stand_alone):
     # Check if 'poetry.lock' exists in the project root.
-    if not os.path.isfile('poetry.lock'):
+    if not os.path.isfile("poetry.lock"):
+        if stand_alone:
+            print("poetry.lock check passed.")
+            return True
+
         print("Error: poetry.lock not found.")
         return False
     else:
+        if stand_alone:
+            print("Error: poetry.lock found in standalone module.")
+            return False
+
         print("poetry.lock check passed.")
         return True
 
+
+def check_lint_with_ruff():
+    # Check Python code linting issues using 'ruff'.
+    result = subprocess.run(["ruff", "check", "."], capture_output=True)
+    if result.returncode != 0:
+        print("Code linting issues found.")
+        print(result.stdout.decode())
+        return False
+    else:
+        print("Code linting check passed.")
+        return True
+
+
 def check_code_format_with_ruff():
     # Check Python code formatting and linting issues using 'ruff'.
-    result = subprocess.run(['ruff', 'check', '.'], capture_output=True)
+    result = subprocess.run(["ruff", "format", "--check", "."], capture_output=True)
     if result.returncode != 0:
-        print("Code formatting and linting issues found.")
+        print("Code formatting issues found.")
         print(result.stdout.decode())
         return False
     else:
-        print("Code formatting and linting check passed.")
+        print("Code formatting check passed.")
         return True
 
-def main():
+
+def zero_third_party_packages_found(output):
+    lines = output.split("\n")  # Split the multiline string into individual lines
+
+    if len(lines) < 2:
+        return False  # The second line doesn't exist
+    else:
+        return lines[1].startswith("Found '0' third-party package imports")
+
+
+def check_no_third_party_imports():
+    # Check no third party imports have been used
+    result = subprocess.run(["third-party-imports", "."], capture_output=True)
+    output = result.stdout.decode()
+
+    if result.returncode != 0 or not zero_third_party_packages_found(output):
+        print("Checking third party imports failed.")
+        print(output)
+        return False
+    else:
+        print("Checking third party imports passed.")
+        return True
+
+
+def main(stand_alone):
+    if stand_alone:
+        print(
+            "Checking Standalone Python files (No third party imports or poetry project)"
+        )
     checks_passed = True
 
     # Perform checks
-    checks_passed &= check_pyproject_toml()
-    checks_passed &= check_poetry_lock()
+
+    # These must exist for python programs that use third party libraries, and must not exist otherwise
+    checks_passed &= check_pyproject_toml(stand_alone)
+    checks_passed &= check_poetry_lock(stand_alone)
+
+    # Always done
+    checks_passed &= check_lint_with_ruff()
     checks_passed &= check_code_format_with_ruff()
 
+    # Only done if the code should be able to run without third party libraries
+    if stand_alone:
+        checks_passed &= check_no_third_party_imports()
+
     if not checks_passed:
         sys.exit(1)
 
+
 if __name__ == "__main__":
-    main()
+    print(f"Current Working Directory: {os.getcwd()}")
+    main("--stand-alone" in sys.argv[1:])
diff --git a/earthly/rust/scripts/std_build.py b/earthly/rust/scripts/std_build.py
index 06e2b15cb..863255bd8 100755
--- a/earthly/rust/scripts/std_build.py
+++ b/earthly/rust/scripts/std_build.py
@@ -2,14 +2,11 @@
 
 # cspell: words lcov depgraph readelf sysroot
 
-import concurrent.futures
-import time
-import os
-
 import argparse
-import rich
+import os
 
 import python.exec_manager as exec_manager
+import rich
 from python.utils import fix_quoted_earthly_args
 
 # This script is run inside the `build` stage.
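(Aside on the std_checks.py hunk above: `zero_third_party_packages_found` keys off the second line of the tool's output only. A minimal sketch of that contract follows; the sample strings are hypothetical `third-party-imports` output shaped to match what the parser expects, not documented tool output.)

    # Sketch: what zero_third_party_packages_found() assumes about the output.
    # Only line 2 is inspected; the first line's content is irrelevant here.
    clean = "Elapsed time: 0.01s\nFound '0' third-party package imports\n"
    dirty = "Elapsed time: 0.01s\nFound '2' third-party package imports\n"
    assert zero_third_party_packages_found(clean)
    assert not zero_third_party_packages_found(dirty)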
@@ -98,14 +95,11 @@ def cargo_doc(verbose: bool = False) -> exec_manager.Result: env = os.environ env["RUSTDOCFLAGS"] = "-Z unstable-options --enable-index-page" return exec_manager.cli_run( - "cargo +nightly docs", - name="Documentation build", - verbose=verbose + "cargo +nightly docs", name="Documentation build", verbose=verbose ) def cargo_depgraph(runner: exec_manager.ParallelRunner, verbose: bool = False) -> None: - runner.run( exec_manager.cli_run, "cargo depgraph " @@ -250,7 +244,6 @@ def strip(results: exec_manager.Results, bin: str): ) ) -import sys def main(): # Force color output in CI @@ -322,8 +315,12 @@ def main(): ) args = parser.parse_args() - libs = filter(lambda lib: lib.strip() and len(lib.strip()) > 0, args.libs.split(",")) - bins = list(filter(lambda bin: bin.strip() and len(bin.strip()) > 0, args.bins.split(","))) + libs = filter( + lambda lib: lib.strip() and len(lib.strip()) > 0, args.libs.split(",") + ) + bins = list( + filter(lambda bin: bin.strip() and len(bin.strip()) > 0, args.bins.split(",")) + ) with exec_manager.ParallelRunner("Rust build") as runner: # Build the code. @@ -365,7 +362,9 @@ def main(): cargo_modules_lib(runner, lib, not args.disable_docs, args.verbose) for bin in bins: package, bin_name = bin.split("/") - cargo_modules_bin(runner, package, bin_name, not args.disable_docs, args.verbose) + cargo_modules_bin( + runner, package, bin_name, not args.disable_docs, args.verbose + ) results = runner.get_results() @@ -377,7 +376,6 @@ def main(): # Check if all documentation tests pass. results.add(cargo_doctest(args.doctest_flags, args.verbose)) - results.print() if not results.ok(): exit(1) diff --git a/earthly/rust/scripts/std_checks.py b/earthly/rust/scripts/std_checks.py index 615142400..7d5ee4594 100755 --- a/earthly/rust/scripts/std_checks.py +++ b/earthly/rust/scripts/std_checks.py @@ -2,12 +2,13 @@ # cspell: words stdcfgs +import argparse +import os + import python.exec_manager as exec_manager import python.vendor_files_check as vendor_files_check -import argparse import rich from rich import print -import os # This script is run inside the `check` stage for rust projects to perform all # high level non-compilation checks. @@ -22,12 +23,12 @@ def main(): - rust_toolchain_enabled=False - + rust_toolchain_enabled = False + # Force color output in CI rich.reconfigure(color_system="256") - parser = argparse.ArgumentParser( + argparse.ArgumentParser( description="Rust high level non-compilation checks processing." ) @@ -68,7 +69,7 @@ def main(): results.add( vendor_files_check.toml_diff_check( - f"/stdcfgs/cargo_config.toml", ".cargo/config.toml" + "/stdcfgs/cargo_config.toml", ".cargo/config.toml" ) ) if rust_toolchain_enabled: @@ -104,7 +105,10 @@ def main(): results.add(exec_manager.cli_run("cargo machete", name="Unused Dependencies Check")) # Check if we have any supply chain issues with dependencies. 
results.add( - exec_manager.cli_run("cargo deny check --exclude-dev -W vulnerability -W unmaintained", name="Supply Chain Issues Check") + exec_manager.cli_run( + "cargo deny check --exclude-dev -W vulnerability -W unmaintained", + name="Supply Chain Issues Check", + ) ) results.print() diff --git a/earthly/wasm/c/scripts/std_build.py b/earthly/wasm/c/scripts/std_build.py index fdde66d9c..bbdb5ac91 100755 --- a/earthly/wasm/c/scripts/std_build.py +++ b/earthly/wasm/c/scripts/std_build.py @@ -2,11 +2,12 @@ # cspell: words lcov depgraph readelf sysroot bindgen autodrop mexec -import python.exec_manager as exec_manager import argparse -import rich import os +import python.exec_manager as exec_manager +import rich + # This script is run inside the `build` stage. BINDINGS_SRC = "bindings_src" @@ -15,12 +16,14 @@ def wit_bindgen_c(results: exec_manager.Results, wit_path: str): results.add( exec_manager.cli_run( - " ".join([ - "wit-bindgen c", - "--autodrop-borrows yes", - f"--out-dir {BINDINGS_SRC}", - wit_path - ]), + " ".join( + [ + "wit-bindgen c", + "--autodrop-borrows yes", + f"--out-dir {BINDINGS_SRC}", + wit_path, + ] + ), name="Generate bindings C code.", verbose=True, ) @@ -38,16 +41,18 @@ def clang_wasm_compile(results: exec_manager.Results, c_files: str): ) results.add( exec_manager.cli_run( - " ".join([ - "/opt/wasi-sdk/bin/clang", - "--sysroot=/opt/wasi-sdk/share/wasi-sysroot", - bindings_src, - c_files, - "-Oz", - "-o out.wasm", - "-mexec-model=reactor", - "--target=wasm32-wasi" - ]), + " ".join( + [ + "/opt/wasi-sdk/bin/clang", + "--sysroot=/opt/wasi-sdk/share/wasi-sysroot", + bindings_src, + c_files, + "-Oz", + "-o out.wasm", + "-mexec-model=reactor", + "--target=wasm32-wasi", + ] + ), name="Compile C code to wasm module", verbose=True, ) diff --git a/examples/python/src/sum.py b/examples/python/src/sum.py index a4a5d8094..914032759 100644 --- a/examples/python/src/sum.py +++ b/examples/python/src/sum.py @@ -1,2 +1,2 @@ def sum(a, b): - return a + b \ No newline at end of file + return a + b diff --git a/examples/python/src/test_sum.py b/examples/python/src/test_sum.py index 799fbd21d..66be7c2d9 100644 --- a/examples/python/src/test_sum.py +++ b/examples/python/src/test_sum.py @@ -1,5 +1,6 @@ from src.sum import sum + def test_sum(): assert sum(2, 4) == 6 assert sum(2, 4) != 5 diff --git a/utilities/earthly-cache-watcher/helper.py b/utilities/earthly-cache-watcher/helper.py index c62188a56..14ac26a29 100644 --- a/utilities/earthly-cache-watcher/helper.py +++ b/utilities/earthly-cache-watcher/helper.py @@ -27,19 +27,19 @@ def get_subdirectory_name(working_dir_path: str, path: str): working_dir_path = os.path.abspath(working_dir_path) path = os.path.abspath(path) - if ( - os.path.commonpath([working_dir_path]) - != os.path.commonpath([working_dir_path, path]) + if os.path.commonpath([working_dir_path]) != os.path.commonpath( + [working_dir_path, path] ): return None - + relative_path = os.path.relpath(path, working_dir_path) parts = relative_path.split(os.sep) - + if parts: return parts[0] return None + def add_or_init(obj: dict[str, int], key: str, value: int): obj.setdefault(key, 0) - obj[key] += value \ No newline at end of file + obj[key] += value diff --git a/utilities/earthly-cache-watcher/main.py b/utilities/earthly-cache-watcher/main.py index 5174b26e2..564e30c26 100644 --- a/utilities/earthly-cache-watcher/main.py +++ b/utilities/earthly-cache-watcher/main.py @@ -17,6 +17,7 @@ logger.remove() # Remove the default handler logger.add(sys.stdout, level="INFO", serialize=True, 
format="{message}") + class Interval: """ A class that repeatedly executes a function @@ -170,11 +171,15 @@ def handle_modified(self, file_path: str): # checks self.check_sizes(layer_name) - logger.debug(" ".join([ - f"file modified: {file_path}", - f"(size changed from {prev_size:,} bytes", - f"to {size:,} bytes)" - ])) + logger.debug( + " ".join( + [ + f"file modified: {file_path}", + f"(size changed from {prev_size:,} bytes", + f"to {size:,} bytes)", + ] + ) + ) else: logger.debug(f"file modified: {file_path} (size unchanged)") except OSError as e: @@ -216,8 +221,7 @@ def handle_deleted(self, file_path: str): def check_sizes(self, layer_name: str, skip_sum_check=False): if ( layer_name in self.layer_index - and self.layer_index[layer_name] - >= large_layer_size + and self.layer_index[layer_name] >= large_layer_size ): self.trigger_layer_size_exceeded(layer_name) @@ -241,17 +245,19 @@ def trigger_layer_size_exceeded(self, layer_name: str): self.triggered_layers.add(layer_name) logger.error( - " ".join([ - f"layer '{layer_name}' exceeds large layer size criteria", - f"(size: {self.layer_index[layer_name]:,} bytes", - f"- limit: {large_layer_size:,} bytes)" - ]), + " ".join( + [ + f"layer '{layer_name}' exceeds large layer size criteria", + f"(size: {self.layer_index[layer_name]:,} bytes", + f"- limit: {large_layer_size:,} bytes)", + ] + ), extra={ "err_type": "layer_size_exceeded", "layer": layer_name, "size": self.layer_index[layer_name], - "limit": large_layer_size - } + "limit": large_layer_size, + }, ) def trigger_interval_growth_exceeded(self): @@ -265,33 +271,37 @@ def trigger_interval_growth_exceeded(self): self.triggered_growth_layers.add(layer_name) logger.error( - " ".join([ - f"layer '{layer_name}'", - f"- {size:,} bytes within the interval" - ]), + " ".join( + [ + f"layer '{layer_name}'", + f"- {size:,} bytes within the interval", + ] + ), extra={ "err_type": "layer_list_growth_exceeded", "layer": layer_name, - "size": size - } + "size": size, + }, ) if has_triggered_layer: size = sum(self.layer_growth_index.values()) logger.error( - " ".join([ - "the total amount of cache growth", - f"within {time_window:,} secs exceeds the limit", - f"(size: {size:,} bytes", - f"- limit: {max_time_window_growth_size:,} bytes)" - ]), + " ".join( + [ + "the total amount of cache growth", + f"within {time_window:,} secs exceeds the limit", + f"(size: {size:,} bytes", + f"- limit: {max_time_window_growth_size:,} bytes)", + ] + ), extra={ "err_type": "interval_growth_exceeded", "size": size, "limit": max_time_window_growth_size, - "within": time_window - } + "within": time_window, + }, ) except RuntimeError as e: logger.error(f"an error occurred: {e}") @@ -300,21 +310,24 @@ def trigger_max_cache_size(self): size = sum(self.layer_index.values()) logger.error( - " ".join([ - "the total amount of cache exceeds the limit", - f"(size: {size:,} bytes", - f"- limit: {max_cache_size:,} bytes)" - ]), + " ".join( + [ + "the total amount of cache exceeds the limit", + f"(size: {size:,} bytes", + f"- limit: {max_cache_size:,} bytes)", + ] + ), extra={ "err_type": "max_cache_size_exceeded", "size": size, - "limit": max_cache_size - } + "limit": max_cache_size, + }, ) def drop(self): self.interval.drop() + def main(): global \ watch_dir, \ @@ -354,14 +367,19 @@ def main(): logger.info(f"with `large_layer_size` set to {large_layer_size:,} bytes") logger.info(f"with `max_cache_size` set to {max_cache_size:,} bytes") logger.info(f"with `time_window` set to {time_window:,} secs") - logger.info(" ".join([ - "with 
`max_time_window_growth_size` set to",
-        f"{max_time_window_growth_size:,} bytes"
-    ]))
-    logger.info(" ".join([
-        "with `log_file_accessing_err` set to",
-        log_file_accessing_err
-    ]))
+    logger.info(
+        " ".join(
+            [
+                "with `max_time_window_growth_size` set to",
+                f"{max_time_window_growth_size:,} bytes",
+            ]
+        )
+    )
+    logger.info(
+        " ".join(
+            ["with `log_file_accessing_err` set to", log_file_accessing_err]
+        )
+    )
 
     # init watcher
     handler = ChangeEventHandler(time_window)
diff --git a/utilities/project-fields-validator/Earthfile b/utilities/project-fields-validator/Earthfile
index 200ee3c47..07d1daf36 100644
--- a/utilities/project-fields-validator/Earthfile
+++ b/utilities/project-fields-validator/Earthfile
@@ -1,17 +1,20 @@
 VERSION 0.8
 
-IMPORT github.com/input-output-hk/catalyst-ci/earthly/python:v3.1.7 AS python-ci
+IMPORT ../../earthly/python AS python-ci
 
 test:
     FROM python-ci+python-base
+    WORKDIR /stand-alone
 
     COPY . .
 
-    DO python-ci+CHECK
+    # Standalone python command doesn't use any third party libraries
+    DO python-ci+CHECK --options="--stand-alone"
 
 validate-project-fields:
     FROM python-ci+python-base
-    COPY . .
-    RUN pip install requests
+    RUN --no-cache echo No Cache
+
+    COPY . .
 
     ARG PROJECT_NUMBER=102
     ARG --required GITHUB_REPOSITORY
@@ -21,4 +24,4 @@ validate-project-fields:
     ENV GITHUB_REPOSITORY=${GITHUB_REPOSITORY}
     ENV GITHUB_EVENT_NUMBER=${GITHUB_EVENT_NUMBER}
 
-    RUN --no-cache --secret GITHUB_TOKEN python3 main.py
\ No newline at end of file
+    RUN --no-cache --secret GITHUB_PROJECTS_PAT python3 main.py
\ No newline at end of file
diff --git a/utilities/project-fields-validator/Justfile b/utilities/project-fields-validator/Justfile
new file mode 100644
index 000000000..0a736ca11
--- /dev/null
+++ b/utilities/project-fields-validator/Justfile
@@ -0,0 +1,19 @@
+# use with https://github.com/casey/just
+#
+
+# cspell: words prereqs, commitlog
+
+default:
+    @just --list --unsorted
+
+# Fix and Check Python files
+format-code:
+    ruff check --select I --fix .
+    ruff format .
+
+# Lint Python files
+lint:
+    ruff check .
+
+# Pre Push Checks - intended to be run by a git pre-push hook.
+pre-push: format-code lint
diff --git a/utilities/project-fields-validator/main.py b/utilities/project-fields-validator/main.py
old mode 100644
new mode 100755
index 8ac93d194..976cb150b
--- a/utilities/project-fields-validator/main.py
+++ b/utilities/project-fields-validator/main.py
@@ -1,12 +1,161 @@
+#!/usr/bin/env python3
+import json as jsonlib
 import os
 import sys
+import traceback
+import urllib.error
+import urllib.parse
+import urllib.request
 from dataclasses import dataclass
-from typing import Optional, List, Dict, Any, Set
+from email.message import Message
 from enum import Enum
-import requests
-from requests.exceptions import RequestException
-import json
-import traceback
+from typing import Any, Dict, Iterable, List, NamedTuple, Optional, Set
+
+
+class SafeOpener(urllib.request.OpenerDirector):
+    """An opener with a configurable set of handlers."""
+
+    opener = None
+
+    def __init__(self, handlers: Iterable = None):
+        """
+        Instantiate an OpenerDirector with selected handlers.
+ + Args: + handlers: an Iterable of handler classes + """ + super().__init__() + handlers = handlers or ( + urllib.request.UnknownHandler, + urllib.request.HTTPDefaultErrorHandler, + urllib.request.HTTPRedirectHandler, + urllib.request.HTTPSHandler, + urllib.request.HTTPErrorProcessor, + ) + + for handler_class in handlers: + handler = handler_class() + self.add_handler(handler) + + +class RequestException(Exception): + """There was an ambiguous exception that occurred while handling your + request. + """ + + def __init__(self, *args, **kwargs): + """Initialize RequestException with `request` and `response` objects.""" + response = kwargs.pop("response", None) + self.response = response + self.request = kwargs.pop("request", None) + if response is not None and not self.request and hasattr(response, "request"): + self.request = self.response.request + super().__init__(*args, **kwargs) + + +class Response(NamedTuple): + """Container for HTTP response.""" + + body: str + headers: Message + status: int + url: str + request: urllib.request.Request + + def json(self) -> Any: + """ + Decode body's JSON. + + Returns: + Pythonic representation of the JSON object + """ + try: + output = jsonlib.loads(self.body) + except jsonlib.JSONDecodeError as e: + raise RequestException(e, response=self) + return output + + def raise_for_status(self): + """Raise an exception if the response is not successful.""" + if self.status >= 400: + raise RequestException(Exception("Status Error"), response=self) + + +# only used by `request` +opener = SafeOpener() + + +def request( + method: str, + url: str, + json: dict = None, + params: dict = None, + headers: dict = None, + data_as_json: bool = True, +) -> Response: + """ + Perform HTTP request. + + Args: + url: url to fetch + json: dict of keys/values to be encoded and submitted + params: dict of keys/values to be encoded in URL query string + headers: optional dict of request headers + method: HTTP method , such as GET or POST + data_as_json: if True, data will be JSON-encoded + + Returns: + A dict with headers, body, status code, and, if applicable, object + rendered from JSON + """ + try: + method = method.upper() + request_data = None + headers = headers or {} + json = json or {} + params = params or {} + headers = {"Accept": "application/json", **headers} + httprequest = None + response = None + + if method == "GET": + params = {**params, **json} + json = None + + if params: + url += "?" 
+ urllib.parse.urlencode(params, doseq=True, safe="/") + + if json: + if data_as_json: + request_data = jsonlib.dumps(json).encode() + headers["Content-Type"] = "application/json; charset=UTF-8" + else: + request_data = urllib.parse.urlencode(json).encode() + + httprequest = urllib.request.Request( + url, + data=request_data, + headers=headers, + method=method, + ) + + with opener.open( + httprequest, + ) as httpresponse: + response = Response( + body=httpresponse.read().decode( + httpresponse.headers.get_content_charset("utf-8") + ), + headers=httpresponse.headers, + status=httpresponse.status, + url=httpresponse.url, + request=httprequest, + ) + except Exception as e: + raise RequestException(e, request=httprequest, response=response) + + return response + class FieldType(Enum): TEXT = "text" @@ -15,29 +164,33 @@ class FieldType(Enum): NUMBER = "number" ITERATION = "title" + @dataclass class ProjectField: name: str value: Optional[str] = None field_type: Optional[FieldType] = None + class GitHubAPIError(Exception): """Exception for GitHub API errors""" + def __init__(self, message: str, response_data: Optional[Dict] = None): super().__init__(message) self.response_data = response_data + class ProjectFieldsValidator: BASE_URL = "https://api.github.com" GRAPHQL_URL = f"{BASE_URL}/graphql" - def __init__(self, github_token: str): - if not github_token: + def __init__(self, GITHUB_PROJECTS_PAT: str): + if not GITHUB_PROJECTS_PAT: raise ValueError("GitHub token is required but was empty") self.headers = { - "Authorization": f"Bearer {github_token}", - "Accept": "application/vnd.github.v3+json" + "Authorization": f"Bearer {GITHUB_PROJECTS_PAT}", + "Accept": "application/vnd.github.v3+json", } self.required_fields = [ ProjectField("Status"), @@ -46,43 +199,50 @@ def __init__(self, github_token: str): ProjectField("Estimate"), ProjectField("Iteration"), ProjectField("Start"), - ProjectField("End") + ProjectField("End"), ] def _make_request(self, method: str, url: str, **kwargs) -> Dict[str, Any]: """Generic method to make HTTP requests with error handling""" try: - response = requests.request(method, url, headers=self.headers, **kwargs) + response = request(method, url, headers=self.headers, **kwargs) response.raise_for_status() - print(f"\nAPI Response Status: {response.status_code}") + print(f"\nAPI Response Status: {response.status}") try: data = response.json() - if 'errors' in data: - error_messages = '; '.join(error.get('message', 'Unknown error') for error in data['errors']) + if "errors" in data: + error_messages = "; ".join( + error.get("message", "Unknown error") + for error in data["errors"] + ) raise GitHubAPIError(f"GraphQL API errors: {error_messages}", data) - if 'data' in data and data['data'] is None: + if "data" in data and data["data"] is None: raise GitHubAPIError("API returned null data", data) return data - except json.JSONDecodeError as e: - raise GitHubAPIError(f"Failed to parse API response: {str(e)}") + except jsonlib.JSONDecodeError as e: + raise GitHubAPIError( + f"Failed to parse API response: {str(e)} METHOD={method} URL={url} JSON={kwargs.get('json')}" + ) except RequestException as e: - raise GitHubAPIError(f"GitHub API request failed: {str(e)}") + raise GitHubAPIError( + f"GitHub API request failed: {str(e)} METHOD={method} URL={url} ARGS={kwargs}" + ) def run_query(self, query: str, variables: Dict[str, Any]) -> Dict[str, Any]: """Execute a GraphQL query against GitHub's API.""" return self._make_request( - "POST", - self.GRAPHQL_URL, - json={'query': query, 
'variables': variables} + "POST", self.GRAPHQL_URL, json={"query": query, "variables": variables} ) - def get_pr_details(self, org_name: str, repo_name: str, pr_number: int) -> Dict[str, Any]: + def get_pr_details( + self, org_name: str, repo_name: str, pr_number: int + ) -> Dict[str, Any]: """Get PR details including assignees.""" query = """ query($org: String!, $repo: String!, $number: Int!) { @@ -104,31 +264,35 @@ def get_pr_details(self, org_name: str, repo_name: str, pr_number: int) -> Dict[ print(f"\nFetching PR details for {org_name}/{repo_name}#{pr_number}") - result = self.run_query(query, { - "org": org_name, - "repo": repo_name, - "number": pr_number - }) + result = self.run_query( + query, {"org": org_name, "repo": repo_name, "number": pr_number} + ) - if not result.get('data'): + if not result.get("data"): raise GitHubAPIError("No data returned from API", result) - if not result['data'].get('repository'): + if not result["data"].get("repository"): raise GitHubAPIError("Repository not found", result) - if not result['data']['repository'].get('pullRequest'): + if not result["data"]["repository"].get("pullRequest"): raise GitHubAPIError(f"PR #{pr_number} not found", result) - return result['data']['repository']['pullRequest'] + return result["data"]["repository"]["pullRequest"] - def assign_pr(self, org_name: str, repo_name: str, pr_number: int, assignee: str) -> None: + def assign_pr( + self, org_name: str, repo_name: str, pr_number: int, assignee: str + ) -> None: """Assign PR to a user using REST API.""" - url = f"{self.BASE_URL}/repos/{org_name}/{repo_name}/issues/{pr_number}/assignees" + url = ( + f"{self.BASE_URL}/repos/{org_name}/{repo_name}/issues/{pr_number}/assignees" + ) try: self._make_request("POST", url, json={"assignees": [assignee]}) print(f"✅ PR assigned to @{assignee}") except GitHubAPIError as e: print(f"❌ Failed to assign PR to @{assignee}: {str(e)}") - def get_project_items(self, org_name: str, project_number: int) -> List[Dict[str, Any]]: + def get_project_items( + self, org_name: str, project_number: int + ) -> List[Dict[str, Any]]: """Fetch all items from the project with pagination.""" query = """ query($org: String!, $projectNumber: Int!, $cursor: String) { @@ -208,7 +372,9 @@ def get_project_items(self, org_name: str, project_number: int) -> List[Dict[str """ return self._paginate_items(query, org_name, project_number) - def _paginate_items(self, query: str, org_name: str, project_number: int) -> List[Dict[str, Any]]: + def _paginate_items( + self, query: str, org_name: str, project_number: int + ) -> List[Dict[str, Any]]: """Handle pagination for project items.""" all_items = [] cursor = None @@ -218,18 +384,19 @@ def _paginate_items(self, query: str, org_name: str, project_number: int) -> Lis variables = { "org": org_name, "projectNumber": project_number, - "cursor": cursor + "cursor": cursor, } try: result = self.run_query(query, variables) - if not result.get('data', {}).get('organization', {}).get('projectV2'): + if not result.get("data", {}).get("organization", {}).get("projectV2"): raise GitHubAPIError("Could not access project data", result) - project_data = result['data']['organization']['projectV2']['items'] + project_data = result["data"]["organization"]["projectV2"]["items"] valid_items = [ - item for item in project_data['nodes'] - if item.get('content') and isinstance(item['content'], dict) + item + for item in project_data["nodes"] + if item.get("content") and isinstance(item["content"], dict) ] all_items.extend(valid_items) @@ -238,16 
+405,16 @@ def _paginate_items(self, query: str, org_name: str, project_number: int) -> Lis sys.stdout.write(f"\rFetching project items... {total_items} found") sys.stdout.flush() - if not project_data['pageInfo']['hasNextPage']: + if not project_data["pageInfo"]["hasNextPage"]: break - cursor = project_data['pageInfo']['endCursor'] + cursor = project_data["pageInfo"]["endCursor"] except GitHubAPIError as e: print(f"\nError fetching project items: {str(e)}") if e.response_data: print("\nAPI Response data:") - print(json.dumps(e.response_data, indent=2)) + print(jsonlib.dumps(e.response_data, indent=2)) raise print("\n") @@ -258,23 +425,27 @@ def validate_item(self, item: Dict[str, Any]) -> Set[str]: field_values = self._extract_field_values(item) print("\nCurrent field values:") - print("="*50) + print("=" * 50) for field in self.required_fields: - value = field_values.get(field.name, '❌ empty') + value = field_values.get(field.name, "❌ empty") print(f" • {field.name}: {value}") - return {field.name for field in self.required_fields if field.name not in field_values} + return { + field.name + for field in self.required_fields + if field.name not in field_values + } def _extract_field_values(self, item: Dict[str, Any]) -> Dict[str, str]: """Extract field values from item data.""" field_values = {} - for field_value in item['fieldValues']['nodes']: - if not isinstance(field_value, dict) or 'field' not in field_value: + for field_value in item["fieldValues"]["nodes"]: + if not isinstance(field_value, dict) or "field" not in field_value: continue try: - field_name = field_value['field']['name'] + field_name = field_value["field"]["name"] for field_type in FieldType: if field_type.value in field_value: value = field_value[field_type.value] @@ -290,9 +461,9 @@ def _extract_field_values(self, item: Dict[str, Any]) -> Dict[str, str]: @staticmethod def print_validation_results(empty_fields: Set[str]) -> None: """Print validation results in a formatted way.""" - print("\n" + "="*50) + print("\n" + "=" * 50) print("Validation Results:") - print("="*50) + print("=" * 50) if not empty_fields: print("✅ All required fields are filled. 
Validation passed!") @@ -302,55 +473,65 @@ def print_validation_results(empty_fields: Set[str]) -> None: print(f" • {field}") print("\nPlease fill in these fields in the project board.") - print("="*50) + print("=" * 50) + def clean_env_var(var: str) -> str: """Clean environment variable by removing quotes and extra whitespace""" if var is None: return None - return var.strip().strip('"\'') + return var.strip().strip("\"'") + def main(): try: env_vars = { - 'GITHUB_TOKEN': clean_env_var(os.environ.get('GITHUB_TOKEN')), - 'GITHUB_REPOSITORY': clean_env_var(os.environ.get('GITHUB_REPOSITORY')), - 'GITHUB_EVENT_NUMBER': clean_env_var(os.environ.get('GITHUB_EVENT_NUMBER')), - 'PROJECT_NUMBER': clean_env_var(os.environ.get('PROJECT_NUMBER')) + "GITHUB_PROJECTS_PAT": clean_env_var(os.environ.get("GITHUB_PROJECTS_PAT")), + "GITHUB_REPOSITORY": clean_env_var(os.environ.get("GITHUB_REPOSITORY")), + "GITHUB_EVENT_NUMBER": clean_env_var(os.environ.get("GITHUB_EVENT_NUMBER")), + "PROJECT_NUMBER": clean_env_var(os.environ.get("PROJECT_NUMBER")), } debug_vars = env_vars.copy() - debug_vars['GITHUB_TOKEN'] = '[REDACTED]' if env_vars['GITHUB_TOKEN'] else None + debug_vars["GITHUB_PROJECTS_PAT"] = "[REDACTED]" if env_vars["GITHUB_PROJECTS_PAT"] else None print("\nEnvironment variables:") for key, value in debug_vars.items(): print(f"{key}: {value}") missing_vars = [k for k, v in env_vars.items() if not v] if missing_vars: - raise ValueError(f"Missing required environment variables: {', '.join(missing_vars)}") + raise ValueError( + f"Missing required environment variables: {', '.join(missing_vars)}" + ) try: - pr_number = int(env_vars['GITHUB_EVENT_NUMBER']) - project_number = int(env_vars.get('PROJECT_NUMBER', '102')) # Default to 102 if not set + pr_number = int(env_vars["GITHUB_EVENT_NUMBER"]) + project_number = int( + env_vars.get("PROJECT_NUMBER", "102") + ) # Default to 102 if not set except ValueError as e: - raise ValueError(f"Invalid numeric value in environment variables: {str(e)}") + raise ValueError( + f"Invalid numeric value in environment variables: {str(e)}" + ) - github_repository = env_vars['GITHUB_REPOSITORY'] + github_repository = env_vars["GITHUB_REPOSITORY"] try: - org_name, repo_name = github_repository.split('/') + org_name, repo_name = github_repository.split("/") except ValueError: - raise ValueError(f"Invalid repository format: {github_repository}. Expected format: owner/repo") + raise ValueError( + f"Invalid repository format: {github_repository}. 
Expected format: owner/repo" + ) print(f"\nValidating PR #{pr_number} in {github_repository}") print(f"Project number: {project_number}") - print("="*50) + print("=" * 50) - validator = ProjectFieldsValidator(env_vars['GITHUB_TOKEN']) + validator = ProjectFieldsValidator(env_vars["GITHUB_PROJECTS_PAT"]) try: pr_details = validator.get_pr_details(org_name, repo_name, pr_number) - author = pr_details['author']['login'] - assignees = [node['login'] for node in pr_details['assignees']['nodes']] + author = pr_details["author"]["login"] + assignees = [node["login"] for node in pr_details["assignees"]["nodes"]] if not assignees: print(f"\nAssigning PR to author @{author}") @@ -358,13 +539,18 @@ def main(): project_items = validator.get_project_items(org_name, project_number) pr_items = [ - item for item in project_items - if (item['content'].get('number') == pr_number and - item['content'].get('repository', {}).get('name') == repo_name) + item + for item in project_items + if ( + item["content"].get("number") == pr_number + and item["content"].get("repository", {}).get("name") == repo_name + ) ] if not pr_items: - print(f"\nWarning: PR #{pr_number} is not linked to project #{project_number}") + print( + f"\nWarning: PR #{pr_number} is not linked to project #{project_number}" + ) print("Please add it to the project using the following steps:") print("1. Go to the project board") print("2. Click '+ Add items'") @@ -386,7 +572,7 @@ def main(): print(f"\nError accessing GitHub API: {str(e)}") if e.response_data: print("\nAPI Response data:") - print(json.dumps(e.response_data, indent=2)) + print(jsonlib.dumps(e.response_data, indent=2)) sys.exit(1) except ValueError as e: @@ -397,5 +583,6 @@ def main(): traceback.print_exc() sys.exit(1) + if __name__ == "__main__": - main() \ No newline at end of file + main() diff --git a/utilities/project-fields-validator/poetry.lock b/utilities/project-fields-validator/poetry.lock deleted file mode 100644 index 428a70ee1..000000000 --- a/utilities/project-fields-validator/poetry.lock +++ /dev/null @@ -1,183 +0,0 @@ -# This file is automatically @generated by Poetry 1.8.4 and should not be changed by hand. - -[[package]] -name = "certifi" -version = "2024.8.30" -description = "Python package for providing Mozilla's CA Bundle." -optional = false -python-versions = ">=3.6" -files = [ - {file = "certifi-2024.8.30-py3-none-any.whl", hash = "sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8"}, - {file = "certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9"}, -] - -[[package]] -name = "charset-normalizer" -version = "3.4.0" -description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
-optional = false -python-versions = ">=3.7.0" -files = [ - {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:4f9fc98dad6c2eaa32fc3af1417d95b5e3d08aff968df0cd320066def971f9a6"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0de7b687289d3c1b3e8660d0741874abe7888100efe14bd0f9fd7141bcbda92b"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5ed2e36c3e9b4f21dd9422f6893dec0abf2cca553af509b10cd630f878d3eb99"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40d3ff7fc90b98c637bda91c89d51264a3dcf210cade3a2c6f838c7268d7a4ca"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1110e22af8ca26b90bd6364fe4c763329b0ebf1ee213ba32b68c73de5752323d"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:86f4e8cca779080f66ff4f191a685ced73d2f72d50216f7112185dc02b90b9b7"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f683ddc7eedd742e2889d2bfb96d69573fde1d92fcb811979cdb7165bb9c7d3"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:27623ba66c183eca01bf9ff833875b459cad267aeeb044477fedac35e19ba907"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f606a1881d2663630ea5b8ce2efe2111740df4b687bd78b34a8131baa007f79b"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:0b309d1747110feb25d7ed6b01afdec269c647d382c857ef4663bbe6ad95a912"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:136815f06a3ae311fae551c3df1f998a1ebd01ddd424aa5603a4336997629e95"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:14215b71a762336254351b00ec720a8e85cada43b987da5a042e4ce3e82bd68e"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:79983512b108e4a164b9c8d34de3992f76d48cadc9554c9e60b43f308988aabe"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-win32.whl", hash = "sha256:c94057af19bc953643a33581844649a7fdab902624d2eb739738a30e2b3e60fc"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:55f56e2ebd4e3bc50442fbc0888c9d8c94e4e06a933804e2af3e89e2f9c1c749"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0d99dd8ff461990f12d6e42c7347fd9ab2532fb70e9621ba520f9e8637161d7c"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c57516e58fd17d03ebe67e181a4e4e2ccab1168f8c2976c6a334d4f819fe5944"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6dba5d19c4dfab08e58d5b36304b3f92f3bd5d42c1a3fa37b5ba5cdf6dfcbcee"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf4475b82be41b07cc5e5ff94810e6a01f276e37c2d55571e3fe175e467a1a1c"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce031db0408e487fd2775d745ce30a7cd2923667cf3b69d48d219f1d8f5ddeb6"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ff4e7cdfdb1ab5698e675ca622e72d58a6fa2a8aa58195de0c0061288e6e3ea"}, 
- {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3710a9751938947e6327ea9f3ea6332a09bf0ba0c09cae9cb1f250bd1f1549bc"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82357d85de703176b5587dbe6ade8ff67f9f69a41c0733cf2425378b49954de5"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:47334db71978b23ebcf3c0f9f5ee98b8d65992b65c9c4f2d34c2eaf5bcaf0594"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8ce7fd6767a1cc5a92a639b391891bf1c268b03ec7e021c7d6d902285259685c"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f1a2f519ae173b5b6a2c9d5fa3116ce16e48b3462c8b96dfdded11055e3d6365"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:63bc5c4ae26e4bc6be6469943b8253c0fd4e4186c43ad46e713ea61a0ba49129"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bcb4f8ea87d03bc51ad04add8ceaf9b0f085ac045ab4d74e73bbc2dc033f0236"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-win32.whl", hash = "sha256:9ae4ef0b3f6b41bad6366fb0ea4fc1d7ed051528e113a60fa2a65a9abb5b1d99"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:cee4373f4d3ad28f1ab6290684d8e2ebdb9e7a1b74fdc39e4c211995f77bec27"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0713f3adb9d03d49d365b70b84775d0a0d18e4ab08d12bc46baa6132ba78aaf6"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:de7376c29d95d6719048c194a9cf1a1b0393fbe8488a22008610b0361d834ecf"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4a51b48f42d9358460b78725283f04bddaf44a9358197b889657deba38f329db"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b295729485b06c1a0683af02a9e42d2caa9db04a373dc38a6a58cdd1e8abddf1"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ee803480535c44e7f5ad00788526da7d85525cfefaf8acf8ab9a310000be4b03"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d59d125ffbd6d552765510e3f31ed75ebac2c7470c7274195b9161a32350284"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cda06946eac330cbe6598f77bb54e690b4ca93f593dee1568ad22b04f347c15"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07afec21bbbbf8a5cc3651aa96b980afe2526e7f048fdfb7f1014d84acc8b6d8"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6b40e8d38afe634559e398cc32b1472f376a4099c75fe6299ae607e404c033b2"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b8dcd239c743aa2f9c22ce674a145e0a25cb1566c495928440a181ca1ccf6719"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:84450ba661fb96e9fd67629b93d2941c871ca86fc38d835d19d4225ff946a631"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:44aeb140295a2f0659e113b31cfe92c9061622cadbc9e2a2f7b8ef6b1e29ef4b"}, - {file = 
"charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1db4e7fefefd0f548d73e2e2e041f9df5c59e178b4c72fbac4cc6f535cfb1565"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-win32.whl", hash = "sha256:5726cf76c982532c1863fb64d8c6dd0e4c90b6ece9feb06c9f202417a31f7dd7"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:b197e7094f232959f8f20541ead1d9862ac5ebea1d58e9849c1bf979255dfac9"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:dd4eda173a9fcccb5f2e2bd2a9f423d180194b1bf17cf59e3269899235b2a114"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e9e3c4c9e1ed40ea53acf11e2a386383c3304212c965773704e4603d589343ed"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:92a7e36b000bf022ef3dbb9c46bfe2d52c047d5e3f3343f43204263c5addc250"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:54b6a92d009cbe2fb11054ba694bc9e284dad30a26757b1e372a1fdddaf21920"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ffd9493de4c922f2a38c2bf62b831dcec90ac673ed1ca182fe11b4d8e9f2a64"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:35c404d74c2926d0287fbd63ed5d27eb911eb9e4a3bb2c6d294f3cfd4a9e0c23"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4796efc4faf6b53a18e3d46343535caed491776a22af773f366534056c4e1fbc"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7fdd52961feb4c96507aa649550ec2a0d527c086d284749b2f582f2d40a2e0d"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:92db3c28b5b2a273346bebb24857fda45601aef6ae1c011c0a997106581e8a88"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ab973df98fc99ab39080bfb0eb3a925181454d7c3ac8a1e695fddfae696d9e90"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4b67fdab07fdd3c10bb21edab3cbfe8cf5696f453afce75d815d9d7223fbe88b"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:aa41e526a5d4a9dfcfbab0716c7e8a1b215abd3f3df5a45cf18a12721d31cb5d"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ffc519621dce0c767e96b9c53f09c5d215578e10b02c285809f76509a3931482"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-win32.whl", hash = "sha256:f19c1585933c82098c2a520f8ec1227f20e339e33aca8fa6f956f6691b784e67"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:707b82d19e65c9bd28b81dde95249b07bf9f5b90ebe1ef17d9b57473f8a64b7b"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:dbe03226baf438ac4fda9e2d0715022fd579cb641c4cf639fa40d53b2fe6f3e2"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd9a8bd8900e65504a305bf8ae6fa9fbc66de94178c420791d0293702fce2df7"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8831399554b92b72af5932cdbbd4ddc55c55f631bb13ff8fe4e6536a06c5c51"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:a14969b8691f7998e74663b77b4c36c0337cb1df552da83d5c9004a93afdb574"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dcaf7c1524c0542ee2fc82cc8ec337f7a9f7edee2532421ab200d2b920fc97cf"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:425c5f215d0eecee9a56cdb703203dda90423247421bf0d67125add85d0c4455"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:d5b054862739d276e09928de37c79ddeec42a6e1bfc55863be96a36ba22926f6"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:f3e73a4255342d4eb26ef6df01e3962e73aa29baa3124a8e824c5d3364a65748"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:2f6c34da58ea9c1a9515621f4d9ac379871a8f21168ba1b5e09d74250de5ad62"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_s390x.whl", hash = "sha256:f09cb5a7bbe1ecae6e87901a2eb23e0256bb524a79ccc53eb0b7629fbe7677c4"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:0099d79bdfcf5c1f0c2c72f91516702ebf8b0b8ddd8905f97a8aecf49712c621"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-win32.whl", hash = "sha256:9c98230f5042f4945f957d006edccc2af1e03ed5e37ce7c373f00a5a4daa6149"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-win_amd64.whl", hash = "sha256:62f60aebecfc7f4b82e3f639a7d1433a20ec32824db2199a11ad4f5e146ef5ee"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:af73657b7a68211996527dbfeffbb0864e043d270580c5aef06dc4b659a4b578"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cab5d0b79d987c67f3b9e9c53f54a61360422a5a0bc075f43cab5621d530c3b6"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9289fd5dddcf57bab41d044f1756550f9e7cf0c8e373b8cdf0ce8773dc4bd417"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b493a043635eb376e50eedf7818f2f322eabbaa974e948bd8bdd29eb7ef2a51"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fa2566ca27d67c86569e8c85297aaf413ffab85a8960500f12ea34ff98e4c41"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8e538f46104c815be19c975572d74afb53f29650ea2025bbfaef359d2de2f7f"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fd30dc99682dc2c603c2b315bded2799019cea829f8bf57dc6b61efde6611c8"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2006769bd1640bdf4d5641c69a3d63b71b81445473cac5ded39740a226fa88ab"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:dc15e99b2d8a656f8e666854404f1ba54765871104e50c8e9813af8a7db07f12"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:ab2e5bef076f5a235c3774b4f4028a680432cded7cad37bba0fd90d64b187d19"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:4ec9dd88a5b71abfc74e9df5ebe7921c35cbb3b641181a531ca65cdb5e8e4dea"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:43193c5cda5d612f247172016c4bb71251c784d7a4d9314677186a838ad34858"}, - 
{file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:aa693779a8b50cd97570e5a0f343538a8dbd3e496fa5dcb87e29406ad0299654"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-win32.whl", hash = "sha256:7706f5850360ac01d80c89bcef1640683cc12ed87f42579dab6c5d3ed6888613"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:c3e446d253bd88f6377260d07c895816ebf33ffffd56c1c792b13bff9c3e1ade"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:980b4f289d1d90ca5efcf07958d3eb38ed9c0b7676bf2831a54d4f66f9c27dfa"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f28f891ccd15c514a0981f3b9db9aa23d62fe1a99997512b0491d2ed323d229a"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8aacce6e2e1edcb6ac625fb0f8c3a9570ccc7bfba1f63419b3769ccf6a00ed0"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd7af3717683bea4c87acd8c0d3d5b44d56120b26fd3f8a692bdd2d5260c620a"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5ff2ed8194587faf56555927b3aa10e6fb69d931e33953943bc4f837dfee2242"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e91f541a85298cf35433bf66f3fab2a4a2cff05c127eeca4af174f6d497f0d4b"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:309a7de0a0ff3040acaebb35ec45d18db4b28232f21998851cfa709eeff49d62"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:285e96d9d53422efc0d7a17c60e59f37fbf3dfa942073f666db4ac71e8d726d0"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:5d447056e2ca60382d460a604b6302d8db69476fd2015c81e7c35417cfabe4cd"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:20587d20f557fe189b7947d8e7ec5afa110ccf72a3128d61a2a387c3313f46be"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:130272c698667a982a5d0e626851ceff662565379baf0ff2cc58067b81d4f11d"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:ab22fbd9765e6954bc0bcff24c25ff71dcbfdb185fcdaca49e81bac68fe724d3"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7782afc9b6b42200f7362858f9e73b1f8316afb276d316336c0ec3bd73312742"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-win32.whl", hash = "sha256:2de62e8801ddfff069cd5c504ce3bc9672b23266597d4e4f50eda28846c322f2"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:95c3c157765b031331dd4db3c775e58deaee050a3042fcad72cbc4189d7c8dca"}, - {file = "charset_normalizer-3.4.0-py3-none-any.whl", hash = "sha256:fe9f97feb71aa9896b81973a7bbada8c49501dc73e58a10fcef6663af95e5079"}, - {file = "charset_normalizer-3.4.0.tar.gz", hash = "sha256:223217c3d4f82c3ac5e29032b3f1c2eb0fb591b72161f86d93f5719079dae93e"}, -] - -[[package]] -name = "idna" -version = "3.10" -description = "Internationalized Domain Names in Applications (IDNA)" -optional = false -python-versions = ">=3.6" -files = [ - {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, - {file = "idna-3.10.tar.gz", hash = 
"sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, -] - -[package.extras] -all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"] - -[[package]] -name = "requests" -version = "2.32.3" -description = "Python HTTP for Humans." -optional = false -python-versions = ">=3.8" -files = [ - {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, - {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, -] - -[package.dependencies] -certifi = ">=2017.4.17" -charset-normalizer = ">=2,<4" -idna = ">=2.5,<4" -urllib3 = ">=1.21.1,<3" - -[package.extras] -socks = ["PySocks (>=1.5.6,!=1.5.7)"] -use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] - -[[package]] -name = "urllib3" -version = "2.2.3" -description = "HTTP library with thread-safe connection pooling, file post, and more." -optional = false -python-versions = ">=3.8" -files = [ - {file = "urllib3-2.2.3-py3-none-any.whl", hash = "sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac"}, - {file = "urllib3-2.2.3.tar.gz", hash = "sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9"}, -] - -[package.extras] -brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] -h2 = ["h2 (>=4,<5)"] -socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] -zstd = ["zstandard (>=0.18.0)"] - -[metadata] -lock-version = "2.0" -python-versions = "^3.11" -content-hash = "8f6b6c24f619f4b3c83feba88e6f3e8b31d33569993e9e520ccb70cf6c4799c5" diff --git a/utilities/project-fields-validator/pyproject.toml b/utilities/project-fields-validator/pyproject.toml deleted file mode 100644 index 77de0c760..000000000 --- a/utilities/project-fields-validator/pyproject.toml +++ /dev/null @@ -1,14 +0,0 @@ -[tool.poetry] -name = "project-fields-validator" -version = "0.1.0" -description = "" -authors = ["Catalyst Team"] -readme = "README.md" - -[tool.poetry.dependencies] -python = "^3.11" -requests = "^2.32.3" - -[build-system] -requires = ["poetry-core"] -build-backend = "poetry.core.masonry.api" diff --git a/utilities/scripts/python/db_ops.py b/utilities/scripts/python/db_ops.py index cea17277b..c1441a65a 100644 --- a/utilities/scripts/python/db_ops.py +++ b/utilities/scripts/python/db_ops.py @@ -7,11 +7,12 @@ import argparse import os +import tempfile +import threading import time from typing import Optional + import python.exec_manager as exec_manager -import tempfile -import threading DB_ARGUMENTS = [ ["dbhost", "DB_HOST", "localhost"], @@ -109,7 +110,7 @@ def init_database(self) -> exec_manager.Result: if res.ok(): with open(f"{self.args.dbpath}/pg_hba.conf", "a") as file: file.write(f"include_if_exists {self.args.dbpath}/pg_hba.extra.conf\n") - file.write(f"include_if_exists /sql/pg_hba.extra.conf\n") + file.write("include_if_exists /sql/pg_hba.extra.conf\n") return res @@ -203,7 +204,7 @@ def setup(self) -> exec_manager.Result: # WARNING: Will destroy all data in the DB return exec_manager.cli_run( - f"psql -v ON_ERROR_STOP=on" + "psql -v ON_ERROR_STOP=on" + f" -d {self.superuser_connection()} " + f" -f {self.args.setupdbsql}" + f' -v dbName="{self.args.dbname}"' @@ -224,7 +225,7 @@ def migrate_schema(self) -> exec_manager.Result: # Run schema migrations return exec_manager.cli_run( f"DATABASE_URL={self.user_connection()}" - + f" refinery migrate -e DATABASE_URL" + + " refinery migrate -e DATABASE_URL" + f" -c {self.args.dbrefinerytoml} " + f" -p {self.args.dbmigrations}", 
name="Migrate Schema", diff --git a/utilities/scripts/python/diff.py b/utilities/scripts/python/diff.py index 488df1599..3ddfab609 100644 --- a/utilities/scripts/python/diff.py +++ b/utilities/scripts/python/diff.py @@ -1,5 +1,4 @@ from dataclasses import dataclass -from typing import Dict @dataclass diff --git a/utilities/scripts/python/exec_manager.py b/utilities/scripts/python/exec_manager.py index 06cdb9660..05a4e78b0 100755 --- a/utilities/scripts/python/exec_manager.py +++ b/utilities/scripts/python/exec_manager.py @@ -1,15 +1,16 @@ # cspell: words rtype +import concurrent.futures +import multiprocessing import subprocess +import textwrap +import time +from dataclasses import dataclass from typing import Optional + from rich import print -from rich.text import Text from rich.table import Table -from dataclasses import dataclass -import textwrap -import time -import multiprocessing -import concurrent.futures +from rich.text import Text def status_for_rc(rc: int) -> str: @@ -150,7 +151,7 @@ def cli_run( log: bool = True, timeout=None, verbose=False, - env=None + env=None, ) -> Result: def procedure() -> ProcedureResult: result = subprocess.run( @@ -160,7 +161,7 @@ def procedure() -> ProcedureResult: stderr=subprocess.STDOUT, text=True, timeout=timeout, - env=env + env=env, ) return ProcedureResult(result.returncode, command, result.stdout) diff --git a/utilities/scripts/python/utils.py b/utilities/scripts/python/utils.py index 8bec32acb..a770d3a9e 100755 --- a/utilities/scripts/python/utils.py +++ b/utilities/scripts/python/utils.py @@ -32,7 +32,6 @@ def fix_quoted_earthly_args(): class TestProcessListWithQuotes(unittest.TestCase): - def test_process_list_with_quotes(self): sys.argv = [sys.argv[0]] + [ "this", @@ -42,7 +41,7 @@ def test_process_list_with_quotes(self): "this", "doesn't", ] - expected_result = ["this", 'has quoted strings in it', "this", "doesn't"] + expected_result = ["this", "has quoted strings in it", "this", "doesn't"] fix_quoted_earthly_args() self.assertEqual(sys.argv[1:], expected_result) diff --git a/utilities/scripts/python/vendor_files_check.py b/utilities/scripts/python/vendor_files_check.py index 155086574..bdd28054f 100644 --- a/utilities/scripts/python/vendor_files_check.py +++ b/utilities/scripts/python/vendor_files_check.py @@ -25,11 +25,12 @@ def toml_diff_check( f"{'' if strict else 'Non '}Strict Checking" + f" if Provided File {provided_file_path} == Vendored File {vendor_file_path}" ) - + try: - with open(vendor_file_path, "rb") as vendor_file, open( - provided_file_path, "rb" - ) as provided_file: + with ( + open(vendor_file_path, "rb") as vendor_file, + open(provided_file_path, "rb") as provided_file, + ): def procedure() -> exec_manager.ProcedureResult: vendor_obj = tomllib.load(vendor_file) @@ -52,8 +53,8 @@ def procedure() -> exec_manager.ProcedureResult: res = exec_manager.Result( 1, command_name, f"Exception caught: {exc}", 0.0, command_name ) - + if log: res.print(verbose_errors=True, verbose=False) - return res + return res