diff --git a/.ci/build-kit/scripts/build_docs.sh b/.ci/build-kit/scripts/build_docs.sh new file mode 100755 index 0000000000..896ed4e7d7 --- /dev/null +++ b/.ci/build-kit/scripts/build_docs.sh @@ -0,0 +1,35 @@ +#!/bin/sh + +TRAILBOOK_everest_IS_RELEASE=${TRAILBOOK_everest_IS_RELEASE:-"OFF"} +TRAILBOOK_everest_INSTANCE_NAME=${TRAILBOOK_everest_INSTANCE_NAME:-"nightly"} +TRAILBOOK_everest_OVERWRITE_EXISTING_INSTANCE=${TRAILBOOK_everest_OVERWRITE_EXISTING_INSTANCE:-"OFF"} + +mkdir -p ~/.ssh +ssh-keyscan github.com >> ~/.ssh/known_hosts +chmod 600 ~/.ssh/known_hosts + +cmake \ + -B "$EXT_MOUNT/build" \ + -S "$EXT_MOUNT/source" \ + -G Ninja \ + -D EVC_ENABLE_CCACHE=ON \ + -D EVEREST_ENABLE_COMPILE_WARNINGS=ON \ + -D EVEREST_ENABLE_RS_SUPPORT=ON \ + -D EVEREST_BUILD_DOCS=ON \ + -D TRAILBOOK_everest_DOWNLOAD_ALL_VERSIONS=ON \ + -D TRAILBOOK_everest_IS_RELEASE="$TRAILBOOK_everest_IS_RELEASE" \ + -D TRAILBOOK_everest_INSTANCE_NAME="$TRAILBOOK_everest_INSTANCE_NAME" \ + -D TRAILBOOK_everest_OVERWRITE_EXISTING_INSTANCE="$TRAILBOOK_everest_OVERWRITE_EXISTING_INSTANCE" \ + -D EVEREST_DOCS_REPO_URL="$EVEREST_DOCS_REPO_URL" +retVal=$? +if [ $retVal -ne 0 ]; then + echo "Configuring failed with return code $retVal" + exit $retVal +fi + +ninja -C "$EXT_MOUNT/build" trailbook_everest +retVal=$? +if [ $retVal -ne 0 ]; then + echo "Compiling failed with return code $retVal" + exit $retVal +fi diff --git a/.ci/build-kit/scripts/deploy_docs.sh b/.ci/build-kit/scripts/deploy_docs.sh new file mode 100755 index 0000000000..edd1d47cd5 --- /dev/null +++ b/.ci/build-kit/scripts/deploy_docs.sh @@ -0,0 +1,31 @@ +#!/bin/sh + +DEPLOYED_DOCS_REPO="$EXT_MOUNT/build/docs/deployed_docs_repo" + +mkdir -p ~/.ssh +ssh-keyscan github.com >> ~/.ssh/known_hosts +chmod 600 ~/.ssh/known_hosts + +git -C "$DEPLOYED_DOCS_REPO" config user.email "compiler@pionix.de" +git -C "$DEPLOYED_DOCS_REPO" config user.name "Pionix Github Service Account" + +git -C "$DEPLOYED_DOCS_REPO" add . +retVal=$? 
+if [ $retVal -ne 0 ]; then + echo "Staging changes failed with return code $retVal" + exit $retVal +fi + +git -C "$DEPLOYED_DOCS_REPO" commit -m "Update nightly documentation from commit $GITHUB_SHA" +retVal=$? +if [ $retVal -ne 0 ]; then + echo "Committing changes failed with return code $retVal" + exit $retVal +fi + +git -C "$DEPLOYED_DOCS_REPO" push +retVal=$? +if [ $retVal -ne 0 ]; then + echo "Pushing changes failed with return code $retVal" + exit $retVal +fi diff --git a/.github/workflows/bazel_build_and_test.yaml b/.github/workflows/job_bazel-build-test.yaml similarity index 74% rename from .github/workflows/bazel_build_and_test.yaml rename to .github/workflows/job_bazel-build-test.yaml index 60cbf6ff68..dc704dee3c 100644 --- a/.github/workflows/bazel_build_and_test.yaml +++ b/.github/workflows/job_bazel-build-test.yaml @@ -1,11 +1,16 @@ -name: Bazel Build +name: Bazel Build And Test run-name: ${{ github.actor }} is building with bazel on: - pull_request: {} - merge_group: {} + workflow_call: + inputs: + runner: + description: 'Which runner to use' + required: false + default: 'ubuntu-24.04' + type: string jobs: bazel-build-and-test: - runs-on: ubuntu-22.04 + runs-on: ${{ inputs.runner }} steps: - run: echo branch name is ${{ github.ref }} - name: Checkout diff --git a/.github/workflows/job_build-build-kit.yml b/.github/workflows/job_build-build-kit.yml new file mode 100644 index 0000000000..421563b7a6 --- /dev/null +++ b/.github/workflows/job_build-build-kit.yml @@ -0,0 +1,85 @@ +name: Build the build-kit + +on: + workflow_call: + inputs: + runner: + description: 'Which runner to use' + required: false + default: 'ubuntu-24.04' + type: string + build_kit_docker_directory: + description: 'Directory in the repository where the build kit Dockerfile is located' + required: false + default: '.ci/build-kit/docker' + type: string + base_image_tag_everest_ci: + description: 'The tag of the everest-ci base image to use for building the build-kit' + required: true 
+ type: string + outputs: + build_kit_artifact_name: + description: 'The name of the build-kit artifact' + value: ${{ jobs.build-build-kit.outputs.build_kit_artifact_name }} + build_kit_image_tag: + description: 'The tag of the built build-kit image' + value: ${{ jobs.build-build-kit.outputs.build_kit_image_tag }} + +jobs: + build-build-kit: + name: Build the build-kit + runs-on: ${{ inputs.runner }} + env: + BUILD_KIT_ARTIFACT_NAME: build-kit + BUILD_KIT_IMAGE_NAME: local/build-kit-${{ github.event.repository.name }} + BUILD_ARGS: | + BASE_IMAGE_TAG=${{ inputs.base_image_tag_everest_ci }} + outputs: + build_kit_image_tag: ${{ steps.set-outputs.outputs.tag }} + build_kit_artifact_name: ${{ env.BUILD_KIT_ARTIFACT_NAME }} + steps: + - name: Checkout Dockerfile + uses: actions/checkout@v4 + with: + repository: ${{ github.repository }} + path: source + ref: ${{ github.ref }} + token: ${{ github.token}} + fetch-depth: 0 + - name: Docker Meta + id: meta + uses: docker/metadata-action@v5 + with: + images: ${{ env.BUILD_KIT_IMAGE_NAME }} + sep-tags: "," + - name: Setup Docker buildx + uses: docker/setup-buildx-action@v3 + - name: Build + uses: docker/build-push-action@v6 + with: + context: source/${{ inputs.build_kit_docker_directory }} + push: false + cache-from: type=gha + cache-to: type=gha,mode=max + build-args: ${{ env.BUILD_ARGS }} + tags: ${{ steps.meta.outputs.tags }} + labels: ${{ steps.meta.outputs.labels }} + outputs: type=docker,dest=build-kit.tar + - name: Upload build-kit image + uses: actions/upload-artifact@v4 + with: + name: ${{ env.BUILD_KIT_ARTIFACT_NAME }} + path: build-kit.tar + - name: Set output tag + id: set-outputs + shell: python3 {0} + run: | + import os + tags = "${{ steps.meta.outputs.tags }}".split(",") + if len(tags) == 0: + print("No tags found!❌") + exit(1) + tag = tags[0] + with open(os.environ["GITHUB_OUTPUT"], "a") as f: + f.write(f"tag={tag}\n") + print(f"Set tag={tag}") diff --git a/.github/workflows/job_build-cmake-gcc.yml 
b/.github/workflows/job_build-cmake-gcc.yml new file mode 100644 index 0000000000..af3095fd79 --- /dev/null +++ b/.github/workflows/job_build-cmake-gcc.yml @@ -0,0 +1,245 @@ +name: Build with CMake and GCC + +on: + workflow_call: + inputs: + runner: + description: 'Which runner to use' + required: false + default: 'ubuntu-24.04' + type: string + ref_everest_ci: + description: 'The reference of the everest-ci repository to checkout' + required: true + type: string + is_fork: + description: 'Whether the current repository is a fork' + required: true + type: string + build-kit-artifact-name: + description: 'The name of the build-kit artifact to download' + required: true + type: string + build_kit_image_tag: + description: 'The tag of the build-kit image to use for building the project' + required: true + type: string + build_kit_scripts_directory: + description: 'Directory in the repository where the build kit scripts are located' + required: false + default: '.ci/build-kit/scripts' + type: string + ctest_report_path: + description: 'The path to the ctest report, relative to the github workspace' + required: false + default: 'build/Testing/Temporary/LastTest.log' + type: string + coverage_report_path: + description: 'The path to the coverage report, relative to the github workspace' + required: false + default: 'build/gcovr-coverage' + type: string + coverage_xml_path: + description: 'The path to the coverage xml, relative to the github workspace' + required: false + default: 'build/coverage.xml' + type: string + artifact_deploy_target_repo: + description: 'Repository to deploy artifacts to' + required: true + type: string + wheels_path: + description: 'The path to the wheels directory, relative to the github workspace' + required: false + default: 'wheels' + type: string + secrets: + coverage_deploy_token: + description: 'The token to use to deploy the coverage report' + required: true + SA_GITHUB_SSH_KEY: + description: 'The ssh key to use for git operations' + 
required: false + outputs: + ctest_report_artifact_name: + description: 'The name of the ctest report artifact uploaded' + value: ${{ jobs.build-cmake-gcc.outputs.ctest_report_artifact_name }} + coverage_report_artifact_name: + description: 'The name of the coverage report artifact uploaded' + value: ${{ jobs.build-cmake-gcc.outputs.coverage_report_artifact_name }} + coverage_xml_artifact_name: + description: 'The name of the coverage xml artifact uploaded' + value: ${{ jobs.build-cmake-gcc.outputs.coverage_xml_artifact_name }} + dist_artifact_name: + description: 'The name of the dist artifact uploaded' + value: ${{ jobs.build-cmake-gcc.outputs.dist_artifact_name }} + wheels_artifact_name: + description: 'The name of the wheels artifact uploaded' + value: ${{ jobs.build-cmake-gcc.outputs.wheels_artifact_name }} + +jobs: + build-cmake-gcc: + name: Build with CMake and GCC, Unit Tests and Install + runs-on: ${{ inputs.runner }} + env: + CTEST_REPORT_ARTIFACT_NAME: ctest-report + COVERAGE_REPORT_ARTIFACT_NAME: coverage-report + COVERAGE_XML_ARTIFACT_NAME: coverage-xml + DIST_ARTIFACT_NAME: dist + WHEELS_ARTIFACT_NAME: wheels + BUILD_KIT_IMAGE: ${{ inputs.build_kit_image_tag }} + SSH_AUTH_SOCK: /tmp/ssh_agent.sock + outputs: + ctest_report_artifact_name: ${{ env.CTEST_REPORT_ARTIFACT_NAME }} + coverage_report_artifact_name: ${{ env.COVERAGE_REPORT_ARTIFACT_NAME }} + coverage_xml_artifact_name: ${{ env.COVERAGE_XML_ARTIFACT_NAME }} + dist_artifact_name: ${{ env.DIST_ARTIFACT_NAME }} + wheels_artifact_name: ${{ env.WHEELS_ARTIFACT_NAME }} + steps: + - name: Setup SSH Agent, optional with SSH key + env: + SSH_KEY: ${{ secrets.SA_GITHUB_SSH_KEY }} + run: | + ssh-agent -a $SSH_AUTH_SOCK > /dev/null + if [ -z "${{ env.SSH_KEY }}" ]; then + echo "No SSH key provided, skipping SSH key setup" + exit 0 + fi + mkdir -p ~/.ssh + echo "${{ env.SSH_KEY }}" > ~/.ssh/id_ed25519 + chmod 600 ~/.ssh/id_ed25519 + # Check if github.com is already in known_hosts, if not, add it + if ! 
grep -q "^github.com " ~/.ssh/known_hosts; then + ssh-keyscan github.com >> ~/.ssh/known_hosts + fi + ssh-add ~/.ssh/id_ed25519 + - name: Checkout local github actions + uses: actions/checkout@v4 + with: + repository: ${{ github.repository_owner }}/everest-ci + ref: ${{ inputs.ref_everest_ci }} + path: everest-ci + - name: Format branch name for cache key + run: | + BRANCH_NAME_FOR_CACHE="${GITHUB_REF_NAME//-/_}" + echo "branch_name_for_cache=${BRANCH_NAME_FOR_CACHE}" >> "$GITHUB_ENV" + - name: Setup cache + uses: actions/cache@v4 + with: + path: cache + key: compile-${{ env.branch_name_for_cache }}-${{ github.sha }} + restore-keys: | + compile-${{ env.branch_name_for_cache }}- + compile- + - name: Checkout repository + uses: actions/checkout@v4 + with: + path: source + - name: Setup run scripts + run: | + mkdir scripts + rsync -a source/${{ inputs.build_kit_scripts_directory }}/ scripts + - name: Download build-kit image + uses: actions/download-artifact@v5 + with: + name: ${{ inputs.build-kit-artifact-name }} + - name: Load build-kit image + run: | + docker load -i build-kit.tar + docker image tag ${{ env.BUILD_KIT_IMAGE }} build-kit + - name: Compile + run: | + docker run \ + --mount type=bind,source=$SSH_AUTH_SOCK,target=/ssh-agent \ + --env SSH_AUTH_SOCK=/ssh-agent \ + --volume "${{ github.workspace }}:/ext" \ + --name compile-container \ + build-kit run-script compile + - name: Run unit tests + id: run_unit_tests + run: | + docker run \ + --mount type=bind,source=$SSH_AUTH_SOCK,target=/ssh-agent \ + --env SSH_AUTH_SOCK=/ssh-agent \ + --volume "${{ github.workspace }}:/ext" \ + --name unit-test-container \ + build-kit run-script run_unit_tests + - name: Archive test results + if: ${{ always() && (steps.run_unit_tests.outcome == 'success' || steps.run_unit_tests.outcome == 'failure') }} + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: ${{ env.CTEST_REPORT_ARTIFACT_NAME }} + path: ${{ inputs.ctest_report_path }} + # - name: Run 
coverage + # id: run_coverage + # run: | + # docker run \ + # --mount type=bind,source=$SSH_AUTH_SOCK,target=/ssh-agent \ + # --env SSH_AUTH_SOCK=/ssh-agent \ + # --volume "${{ github.workspace }}:/ext" \ + # --name coverage-container \ + # build-kit run-script run_coverage + # - name: Archive coverage report + # if: ${{ always() && (steps.run_coverage.outcome == 'success' || steps.run_coverage.outcome == 'failure') }} + # uses: actions/upload-artifact@v4 + # with: + # if-no-files-found: error + # name: ${{ env.COVERAGE_REPORT_ARTIFACT_NAME }} + # path: ${{ inputs.coverage_report_path }} + # - name: Archive coverage xml + # if: ${{ always() && (steps.run_coverage.outcome == 'success' || steps.run_coverage.outcome == 'failure') }} + # uses: actions/upload-artifact@v4 + # with: + # if-no-files-found: error + # name: ${{ env.COVERAGE_XML_ARTIFACT_NAME }} + # path: ${{ inputs.coverage_xml_path}} + # - name: Deploy html coverage report + # id: deploy_coverage_report + # if: ${{ always() && ( steps.run_coverage.outcome == 'success' || steps.run_coverage.outcome == 'failure' ) && inputs.is_fork == 'false' }} + # uses: ./everest-ci/github-actions/deploy-ci-artifact + # with: + # target_repo: ${{ inputs.artifact_deploy_target_repo }} + # github_token: ${{ secrets.coverage_deploy_token }} + # artifact_name: coverage-report + # artifact_directory: ${{ inputs.coverage_report_path }} + # - name: Write summary coverage + # if: ${{ always() && (steps.run_coverage.outcome == 'success' || steps.run_coverage.outcome == 'failure') }} + # run: | + # echo "Coverage report deployed to: [everest.github.io](https://everest.github.io/${{ steps.deploy_coverage_report.outputs.deployed_path }})" >> $GITHUB_STEP_SUMMARY + - name: Create dist + id: create_dist + run: | + docker run \ + --mount type=bind,source=$SSH_AUTH_SOCK,target=/ssh-agent \ + --env SSH_AUTH_SOCK=/ssh-agent \ + --volume "${{ github.workspace }}:/ext" \ + --name dist-container \ + build-kit run-script install + - name: Tar 
dist dir and keep permissions + if: ${{ always() && (steps.create_dist.outcome == 'success' || steps.create_dist.outcome == 'failure') }} + run: | + tar -czf dist.tar.gz dist + - name: Upload dist artifact + if: ${{ always() && (steps.create_dist.outcome == 'success' || steps.create_dist.outcome == 'failure') }} + uses: actions/upload-artifact@v4.6.2 + with: + if-no-files-found: error + path: dist.tar.gz + name: ${{ env.DIST_ARTIFACT_NAME }} + - name: Create wheels + id: create_wheels + run: | + docker run \ + --mount type=bind,source=$SSH_AUTH_SOCK,target=/ssh-agent \ + --env SSH_AUTH_SOCK=/ssh-agent \ + --volume "${{ github.workspace }}:/ext" \ + --name wheels-container \ + build-kit run-script install_wheels + - name: Upload wheels artifact + if: ${{ always() && (steps.create_wheels.outcome == 'success' || steps.create_wheels.outcome == 'failure') }} + uses: actions/upload-artifact@v4.6.2 + with: + if-no-files-found: error + path: ${{ inputs.wheels_path }} + name: ${{ env.WHEELS_ARTIFACT_NAME }} diff --git a/.github/workflows/job_build-docs.yaml b/.github/workflows/job_build-docs.yaml new file mode 100644 index 0000000000..a949076f37 --- /dev/null +++ b/.github/workflows/job_build-docs.yaml @@ -0,0 +1,126 @@ +name: Build Documentation + +on: + workflow_call: + inputs: + runner: + description: 'Which runner to use' + required: false + default: 'ubuntu-24.04' + type: string + build_kit_artifact_name: + description: 'The name of the build kit artifact to download' + required: true + type: string + build_kit_image_tag: + description: 'The tag of the build-kit image to use for building the project' + required: true + type: string + build_kit_scripts_directory: + description: 'Directory in the repository where the build kit scripts are located' + required: false + default: '.ci/build-kit/scripts' + type: string + deploy_docs: + description: 'Whether to deploy the built documentation to the target repository' + required: false + default: false + type: boolean + 
secrets: + SA_GITHUB_SSH_KEY: + description: 'SSH key for the service account used to deploy the documentation' + required: false + outputs: + docs_html_artifact_name: + description: 'The name of the docs-html artifact' + value: ${{ jobs.build-docs.outputs.docs_html_artifact_name }} + + +jobs: + build-docs: + name: Build EVerest Documentation + runs-on: ${{ inputs.runner }} + env: + SSH_AUTH_SOCK: /tmp/ssh_agent.sock + DOCS_HTML_ARTIFACT_NAME: docs-html + outputs: + docs_html_artifact_name: ${{ env.DOCS_HTML_ARTIFACT_NAME }} + steps: + - name: Setup SSH Agent, optional with SSH key + env: + SSH_KEY: ${{ secrets.SA_GITHUB_SSH_KEY }} + run: | + ssh-agent -a $SSH_AUTH_SOCK > /dev/null + if [ -z "${{ env.SSH_KEY }}" ]; then + echo "No SSH key provided, skipping SSH key setup" + exit 0 + fi + mkdir -p ~/.ssh + echo "${{ env.SSH_KEY }}" > ~/.ssh/id_ed25519 + chmod 600 ~/.ssh/id_ed25519 + # Check if github.com is already in known_hosts, if not, add it + if ! grep -q "^github.com " ~/.ssh/known_hosts; then + ssh-keyscan github.com >> ~/.ssh/known_hosts + fi + ssh-add ~/.ssh/id_ed25519 + - name: Configure Git user + run: | + git config --global user.name "Pionix Github Service Account" + git config --global user.email "compiler@pionix.de" + - name: Download build-kit image + uses: actions/download-artifact@v4 + with: + name: ${{ inputs.build_kit_artifact_name }} + - name: Load build-kit image + run: | + docker load -i build-kit.tar + docker image tag ${{ inputs.build_kit_image_tag }} build-kit + - name: Format branch name for cache key + run: | + BRANCH_NAME_FOR_CACHE="${GITHUB_REF_NAME//-/_}" + echo "branch_name_for_cache=${BRANCH_NAME_FOR_CACHE}" >> "$GITHUB_ENV" + - name: Setup cache + uses: actions/cache@v4 + with: + path: cache + key: compile-${{ env.branch_name_for_cache }}-${{ github.sha }} + restore-keys: | + compile-${{ env.branch_name_for_cache }}- + compile- + - name: Checkout repository + uses: actions/checkout@v4 + with: + path: source + - name: Setup run 
scripts + run: | + mkdir scripts + rsync -a source/${{ inputs.build_kit_scripts_directory }}/ scripts + - name: Build Documentation + run: | + docker run \ + --mount type=bind,source=$SSH_AUTH_SOCK,target=/ssh-agent \ + --env SSH_AUTH_SOCK=/ssh-agent \ + --env TRAILBOOK_everest_IS_RELEASE=OFF \ + --env TRAILBOOK_everest_INSTANCE_NAME=nightly \ + --env TRAILBOOK_everest_OVERWRITE_EXISTING_INSTANCE=ON \ + --env EVEREST_DOCS_REPO_URL=git@github.com:${{ github.repository_owner }}/everest.github.io \ + --volume "${{ github.workspace }}:/ext" \ + --name build-docs-container \ + build-kit run-script build_docs + docker commit build-docs-container build-docs-image + - name: Upload Documentation Artifact + uses: actions/upload-artifact@v4.6.2 + with: + if-no-files-found: error + path: ${{ github.workspace }}/build/docs/deployed_docs_repo/docs/ + name: ${{ env.DOCS_HTML_ARTIFACT_NAME }} + - name: Deploy Documentation + if: ${{ inputs.deploy_docs == true }} + run: | + docker run \ + --mount type=bind,source=$SSH_AUTH_SOCK,target=/ssh-agent \ + --env SSH_AUTH_SOCK=/ssh-agent \ + --env GITHUB_SHA=${{ github.sha }} \ + --volume "${{ github.workspace }}:/ext" \ + --name deploy-docs-container \ + build-kit run-script deploy_docs diff --git a/.github/workflows/job_create-coverage-badge.yml b/.github/workflows/job_create-coverage-badge.yml new file mode 100644 index 0000000000..097d055472 --- /dev/null +++ b/.github/workflows/job_create-coverage-badge.yml @@ -0,0 +1,92 @@ +name: Create Coverage Badge + +on: + workflow_call: + inputs: + runner: + description: 'Which runner to use' + required: false + default: 'ubuntu-24.04' + type: string + ref_everest_ci: + description: 'The ref of the everest-ci repository to checkout' + required: true + type: string + is_fork: + description: 'Whether the current repository is a fork' + required: true + type: string + artifact_deploy_target_repo: + description: 'Repository to deploy artifacts to' + required: true + type: string + 
coverage_report_artifact_name: + description: 'The name of the coverage report artifact to download' + required: true + type: string + coverage_xml_artifact_name: + description: 'The name of the coverage xml artifact to download' + required: true + type: string + secrets: + coverage_deploy_token: + description: 'The token to use to deploy the coverage report' + required: true +jobs: + create-coverage-badge: + name: Create Coverage Badge + runs-on: ${{ inputs.runner }} + steps: + - name: Checkout local github actions + uses: actions/checkout@v4 + with: + repository: ${{ github.repository_owner }}/everest-ci + ref: ${{ inputs.ref_everest_ci }} + path: everest-ci + - name: Download xml coverage report + uses: actions/download-artifact@v5.0.0 + with: + if-no-files-found: error + name: ${{ inputs.coverage_xml_artifact_name }} + path: coverage-xml + - name: Parse coverage report + id: parse_coverage_report + shell: python3 {0} + run: | + import xml.etree.ElementTree + import os + tree = xml.etree.ElementTree.parse("${{ github.workspace }}/coverage-xml/gcovr-coverage-xml.xml") + line_coverage = tree.getroot().get("line-rate") + + with open(os.environ["GITHUB_OUTPUT"], "a") as f: + f.write(f"line_coverage={line_coverage}\n") + f.write(f"line_coverage_percentage={float(line_coverage) * 100}\n") + - name: Generate coverage badge + run: | + pip install anybadge + mkdir -p ${{ github.workspace }}/coverage-badge/ + anybadge -o --label Coverage --value ${{ steps.parse_coverage_report.outputs.line_coverage_percentage }} -s "%" --file ${{ github.workspace }}/coverage-badge/coverage-badge.svg 20=red 40=orange 60=yellow 80=yellowgreen 100=green + - name: Deploy coverage badge + uses: ./everest-ci/github-actions/deploy-ci-artifact + if: ${{ inputs.is_fork == 'false' }} + with: + target_repo: ${{ inputs.artifact_deploy_target_repo }} + github_token: ${{ secrets.coverage_deploy_token }} + artifact_name: coverage-badge + artifact_directory: ${{ github.workspace }}/coverage-badge/ + 
deploy_global_artifact: true + - name: Download html coverage report + uses: actions/download-artifact@v5.0.0 + with: + if-no-files-found: error + name: ${{ inputs.coverage_report_artifact_name }} + path: coverage-report + - name: Deploy html coverage report + uses: ./everest-ci/github-actions/deploy-ci-artifact + if: ${{ inputs.is_fork == 'false' }} + with: + target_repo: ${{ inputs.artifact_deploy_target_repo }} + github_token: ${{ secrets.coverage_deploy_token }} + artifact_name: ${{ inputs.coverage_report_artifact_name }} + artifact_directory: ${{ github.workspace }}/coverage-report/ + deploy_global_artifact: true diff --git a/.github/workflows/dco-check.yaml b/.github/workflows/job_dco-check.yaml similarity index 80% rename from .github/workflows/dco-check.yaml rename to .github/workflows/job_dco-check.yaml index 55f31fad11..ec16d52052 100644 --- a/.github/workflows/dco-check.yaml +++ b/.github/workflows/job_dco-check.yaml @@ -1,11 +1,17 @@ name: DCO Check -on: - pull_request: {} - merge_group: {} +on: + workflow_call: + inputs: + runner: + description: 'Which runner to use' + required: false + default: 'ubuntu-24.04' + type: string + jobs: dco_check: name: DCO Check - runs-on: ubuntu-22.04 + runs-on: ${{ inputs.runner }} steps: - uses: actions/checkout@v3 if: github.event_name == 'pull_request' diff --git a/.github/workflows/job_integrations-tests.yml b/.github/workflows/job_integrations-tests.yml new file mode 100644 index 0000000000..d283771774 --- /dev/null +++ b/.github/workflows/job_integrations-tests.yml @@ -0,0 +1,128 @@ +name: Integration Tests + +on: + workflow_call: + inputs: + runner: + description: 'Which runner to use' + required: false + default: 'ubuntu-24.04' + type: string + build_kit_artifact_name: + description: 'The name of the build-kit artifact to download' + required: true + type: string + build_kit_image_tag: + description: 'The tag of the build-kit image to use for building the project' + required: true + type: string + 
build_kit_scripts_directory: + description: 'Directory in the repository where the build kit scripts are located' + required: false + default: '.ci/build-kit/scripts' + type: string + docker_compose_file_path: + description: 'The path to the docker-compose file, relative to the repository root' + required: false + default: '.ci/e2e/docker-compose.yaml' + type: string + test_service_name: + description: 'The name of the service to run integration tests on' + required: false + default: 'e2e-test-server' + type: string + result_xml_path: + description: 'The path to the result xml file, relative to the github workspace' + required: false + default: 'result.xml' + type: string + report_html_path: + description: 'The path to the report html file, relative to the github workspace' + required: false + default: 'report.html' + type: string + dist_artifact_name: + description: 'The name of the dist artifact to download' + required: true + type: string + wheels_artifact_name: + description: 'The name of the wheels artifact to download' + required: true + type: string + outputs: + integration_tests_artifact_name: + description: 'The name of the integration tests artifact' + value: ${{ jobs.integration-tests.outputs.integration_tests_artifact_name }} + +jobs: + integration-tests: + name: Run Integration Tests + runs-on: ${{ inputs.runner }} + env: + INTEGRATION_IMAGE_NAME: integration-image + BUILD_KIT_IMAGE: ${{ inputs.build_kit_image_tag }} + INTEGRATION_TESTS_ARTIFACT_NAME: integration-tests-artifacts + outputs: + integration_tests_artifact_name: ${{ env.INTEGRATION_TESTS_ARTIFACT_NAME }} + steps: + - name: Download dist dir + uses: actions/download-artifact@v5.0.0 + with: + name: ${{ inputs.dist_artifact_name }} + - name: Extract dist.tar.gz + run: | + tar -xzf ${{ github.workspace }}/dist.tar.gz -C ${{ github.workspace }} + - name: Download wheels + uses: actions/download-artifact@v5.0.0 + with: + name: ${{ inputs.wheels_artifact_name }} + path: wheels + - name: Checkout 
repository + uses: actions/checkout@v4.2.2 + with: + path: source + - name: Setup run scripts + run: | + mkdir scripts + rsync -a source/${{ inputs.build_kit_scripts_directory }}/ scripts + - name: Download build-kit image + uses: actions/download-artifact@v5 + with: + name: ${{ inputs.build_kit_artifact_name }} + - name: Load build-kit image + run: | + docker load -i build-kit.tar + docker image tag ${{ env.BUILD_KIT_IMAGE }} build-kit + - name: Create integration-image + run: | + docker run \ + --volume "${{ github.workspace }}:/ext" \ + --name integration-container \ + build-kit run-script create_integration_image + docker commit integration-container ${{ env.INTEGRATION_IMAGE_NAME }} + - name: Run integration tests + id: run_integration_tests + run: | + docker compose \ + -f source/${{ inputs.docker_compose_file_path }} \ + run \ + ${{ inputs.test_service_name }} \ + run-script run_integration_tests + - name: Upload result and report as artifact + if: ${{ always() && (steps.run_integration_tests.outcome == 'success' || steps.run_integration_tests.outcome == 'failure') }} + uses: actions/upload-artifact@v4.6.2 + with: + if-no-files-found: error + name: ${{ env.INTEGRATION_TESTS_ARTIFACT_NAME }} + path: | + ${{ inputs.result_xml_path }} + ${{ inputs.report_html_path }} + - name: Render result + if: ${{ always() && (steps.run_integration_tests.outcome == 'success' || steps.run_integration_tests.outcome == 'failure') }} + uses: pmeier/pytest-results-action@v0.7.2 + with: + path: ${{ inputs.result_xml_path }} + summary: True + display-options: fEX + fail-on-empty: True + title: Test results diff --git a/.github/workflows/job_lint.yml b/.github/workflows/job_lint.yml new file mode 100644 index 0000000000..9c6c1d7548 --- /dev/null +++ b/.github/workflows/job_lint.yml @@ -0,0 +1,36 @@ +name: Lint Repository + +on: + workflow_call: + inputs: + runner: + description: 'Which runner to use' + required: false + default: 'ubuntu-24.04' + type: string + ref_everest_ci: + 
description: 'The ref of the everest-ci repository to checkout' + required: true + type: string + +jobs: + lint: + name: Lint Repository + runs-on: ${{ inputs.runner }} + steps: + - name: Checkout local github actions + uses: actions/checkout@v4 + with: + repository: ${{ github.repository_owner }}/everest-ci + ref: ${{ inputs.ref_everest_ci }} + path: everest-ci + - name: Checkout repository + uses: actions/checkout@v4 + with: + path: source + - name: Run clang-format + uses: ./everest-ci/github-actions/run-clang-format + with: + source-dir: source/ + extensions: hpp,cpp + exclude: cache diff --git a/.github/workflows/job_manifest-tests.yml b/.github/workflows/job_manifest-tests.yml new file mode 100644 index 0000000000..0d26bb047c --- /dev/null +++ b/.github/workflows/job_manifest-tests.yml @@ -0,0 +1,25 @@ + +name: Manifests Test +on: + workflow_call: + inputs: + runner: + description: 'Which runner to use' + required: false + default: 'ubuntu-24.04' + type: string + + +jobs: + manifests_test: + name: Manifests Test + runs-on: ${{ inputs.runner }} + steps: + - uses: actions/checkout@v3 + - name: Install deps + run: pip3 install pytest + - name: Test manifests + run: | + cd ${{ github.workspace }} + python3 -m pytest tests/manifest_tests/*.py + diff --git a/.github/workflows/build_and_test.yaml b/.github/workflows/job_ocpp-tests.yml similarity index 54% rename from .github/workflows/build_and_test.yaml rename to .github/workflows/job_ocpp-tests.yml index 03fe268b83..4e01a8cd1b 100644 --- a/.github/workflows/build_and_test.yaml +++ b/.github/workflows/job_ocpp-tests.yml @@ -1,53 +1,60 @@ -name: Build, Lint and Test +name: OCPP Tests + on: - pull_request: {} - merge_group: {} - workflow_dispatch: + workflow_call: inputs: runner: - description: Which runner to use - type: choice - default: 'ubuntu-22.04' + description: 'Which runner to use' + required: false + default: 'ubuntu-24.04' + type: string + dist_artifact_name: + description: 'The name of the dist artifact to 
download' + required: true + type: string + wheels_artifact_name: + description: 'The name of the wheels artifact to download' + required: true + type: string + build_kit_artifact_name: + description: 'The name of the build-kit artifact to download' required: true - options: - - 'ubuntu-22.04' - - 'large-ubuntu-22.04-xxl' - schedule: - - cron: '37 13,1 * * *' + type: string + build_kit_image_tag: + description: 'The tag of the build-kit image to use for building the project' + required: true + type: string + build_kit_scripts_directory: + description: 'Directory in the repository where the build kit scripts are located' + required: false + default: '.ci/build-kit/scripts' + type: string + outputs: + ocpp_tests_artifact_name: + description: 'The name of the OCPP tests artifact' + value: ${{ jobs.ocpp-tests.outputs.ocpp_tests_artifact_name }} + jobs: - ci: - name: Build, Lint and Test - uses: everest/everest-ci/.github/workflows/continuous_integration.yml@v1.4.6 - permissions: - contents: read - secrets: - coverage_deploy_token: ${{ secrets.SA_GITHUB_PAT }} - with: - runner: ${{ inputs.runner || 'ubuntu-22.04' }} - artifact_deploy_target_repo: EVerest/everest.github.io - run_coverage: false - do_not_run_coverage_badge_creation: true - run_install_wheels: true - run_integration_tests: true ocpp-tests: - name: OCPP Tests - needs: - - ci - runs-on: ${{ inputs.runner || 'ubuntu-22.04' }} + name: Run OCPP Tests + runs-on: ${{ inputs.runner }} + env: + OCPP_TESTS_ARTIFACT_NAME: ocpp-tests-artifacts + outputs: + ocpp_tests_artifact_name: ${{ env.OCPP_TESTS_ARTIFACT_NAME }} steps: - name: Download dist dir uses: actions/download-artifact@v4.1.8 with: - name: dist + name: ${{ inputs.dist_artifact_name }} - name: Extract dist.tar.gz run: | tar -xzf ${{ github.workspace }}/dist.tar.gz -C ${{ github.workspace }} - name: Download wheels - # if: ${{ inputs.run_install_wheels == 'true' }} uses: actions/download-artifact@v4.1.8 with: - name: wheels + name: ${{ 
inputs.wheels_artifact_name }} path: wheels - name: Checkout repository uses: actions/checkout@v4.2.2 @@ -56,34 +63,15 @@ jobs: - name: Setup run scripts run: | mkdir scripts - rsync -a source/.ci/build-kit/scripts/ scripts - - name: Docker Meta - id: meta - uses: docker/metadata-action@v5 - with: - images: ${{ env.BUILD_KIT_IMAGE_NAME }} - sep-tags: "," - - name: Set output tag - id: buildkit_tag - shell: python3 {0} - run: | - import os - tags = "${{ steps.meta.outputs.tags }}".split(",") - if len(tags) == 0: - print("No tags found!❌") - exit(1) - tag = f"local/build-kit-everest-core:{tags[0]}" - with open(os.environ["GITHUB_OUTPUT"], "a") as f: - f.write(f"tag={tag}\n") - print(f"Set tag={tag}") + rsync -a source/${{ inputs.build_kit_scripts_directory }}/ scripts - name: Download build-kit image uses: actions/download-artifact@v4 with: - name: build-kit + name: ${{ inputs.build_kit_artifact_name }} - name: Load build-kit image run: | docker load -i build-kit.tar - docker image tag ${{ steps.buildkit_tag.outputs.tag }} build-kit + docker image tag ${{ inputs.build_kit_image_tag }} build-kit - name: Create integration-image run: | docker run \ @@ -106,7 +94,7 @@ jobs: uses: actions/upload-artifact@v4.4.3 with: if-no-files-found: error - name: ocpp-tests-report + name: ${{ env.OCPP_TESTS_ARTIFACT_NAME }} path: | ocpp-tests-result.xml ocpp-tests-report.html diff --git a/.github/workflows/job_setup-env.yml b/.github/workflows/job_setup-env.yml new file mode 100644 index 0000000000..8857ce35ab --- /dev/null +++ b/.github/workflows/job_setup-env.yml @@ -0,0 +1,81 @@ +name: Setup Environment + +on: + workflow_call: + inputs: + runner: + description: 'Which runner to use' + required: false + default: 'ubuntu-24.04' + type: string + ref_everest_ci: + description: 'The reference of the everest-ci repository to checkout' + required: true + type: string + outputs: + ref_everest_ci: + description: 'The reference of the everest-ci repository to checkout' + value: ${{ 
inputs.ref_everest_ci }} + closest_tag_everest_ci: + description: 'The closest tag of the everest-ci repository to use' + value: ${{ jobs.setup-env.outputs.closest_tag_everest_ci }} + is_fork: + description: 'Whether the current repository is a fork' + value: ${{ jobs.setup-env.outputs.is_fork }} + + +jobs: + setup-env: + name: Setup Environment + runs-on: ${{ inputs.runner }} + outputs: + is_fork: ${{ steps.is_fork.outputs.is_fork }} + closest_tag_everest_ci: ${{ steps.set_tag_everest_ci.outputs.closest_tag }} + steps: + - name: Determine closest tag of everest-ci + id: set_tag_everest_ci + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: | + TARGET_SHA="${{ steps.set_sha_everest_ci.outputs.sha }}" + OWNER="everest" + REPO="everest-ci" + + COMMITS=$(gh api repos/$OWNER/$REPO/commits?sha=$TARGET_SHA\&per_page=100 --jq '.[].sha') + for COMMIT in $COMMITS; do + TAG=$(gh api repos/$OWNER/$REPO/tags --jq '.[] | select(.commit.sha == "'$COMMIT'") | .name') + if [ -n "$TAG" ]; then + break + fi + done + + if [ -z "$TAG" ]; then + echo "No tag found for commit $TARGET_SHA (only last 100 commits were checked)" + exit 1 + fi + + # if inputs.build_kit_base_image_tag is != "", use it as the tag + if [ -n "${{ inputs.build_kit_base_image_tag }}" ]; then + echo "Using inputs.build_kit_base_image_tag as tag" + TAG="${{ inputs.build_kit_base_image_tag }}" + fi + + echo "closest_tag=$TAG" >> $GITHUB_OUTPUT + - name: Determine whether the PR comes from fork + id: is_fork + run: | + if [ "${{ github.event_name }}" == "pull_request" ]; then + if [ "${{ github.event.pull_request.head.repo.full_name }}" != "${{ github.repository }}" ]; then + is_fork=true + else + is_fork=false + fi + else + is_fork=false + fi + echo "is_fork=${is_fork}" >> $GITHUB_OUTPUT + if [ "${is_fork}" == "true" ]; then + echo "This is a forked PR" + else + echo "This is not a forked PR" + fi diff --git a/.github/workflows/on_main.yaml b/.github/workflows/on_main.yaml new file mode 100644 index 
0000000000..57134c381a --- /dev/null +++ b/.github/workflows/on_main.yaml @@ -0,0 +1,73 @@ +name: Build, Lint and Test +on: + push: + branches: + - main + +jobs: + + + setup-env: + name: Setup Environment + uses: ./.github/workflows/job_setup-env.yml + with: + runner: ${{ inputs.runner || 'ubuntu-24.04' }} + ref_everest_ci: v1.5.4 + + + build-build-kit: + name: Build the build-kit + uses: ./.github/workflows/job_build-build-kit.yml + needs: + - setup-env + with: + runner: ${{ inputs.runner || 'ubuntu-24.04' }} + base_image_tag_everest_ci: ${{ needs.setup-env.outputs.closest_tag_everest_ci }} + + + build-cmake-gcc: + name: Build with CMake and GCC + uses: ./.github/workflows/job_build-cmake-gcc.yml + needs: + - setup-env + - build-build-kit + secrets: + coverage_deploy_token: "abcd" + with: + runner: ${{ inputs.runner || 'ubuntu-24.04' }} + ref_everest_ci: ${{ needs.setup-env.outputs.ref_everest_ci }} + is_fork: ${{ needs.setup-env.outputs.is_fork }} + build-kit-artifact-name: ${{ needs.build-build-kit.outputs.build_kit_artifact_name }} + build_kit_image_tag: ${{ needs.build-build-kit.outputs.build_kit_image_tag }} + artifact_deploy_target_repo: ${{ github.repository_owner }}/everest.github.io + + + # create-coverage-badge: + # uses: ./.github/workflows/job_create-coverage-badge.yml + # needs: + # - setup-env + # - build-cmake-gcc + # secrets: + # coverage_deploy_token: ${{ secrets.coverage_deploy_token }} + # with: + # runner: ${{ inputs.runner || 'ubuntu-24.04' }} + # ref_everest_ci: ${{ needs.setup-env.outputs.ref_everest_ci }} + # is_fork: ${{ needs.setup-env.outputs.is_fork }} + # artifact_deploy_target_repo: ${{ github.repository_owner }}/everest.github.io + # coverage_report_artifact_name: ${{ needs.build-cmake-gcc.outputs.coverage_report_artifact_name }} + # coverage_xml_artifact_name: ${{ needs.build-cmake-gcc.outputs.coverage_xml_artifact_name }} + + + build-docs: + name: Build and Deploy Documentation + needs: + - setup-env + - build-build-kit + uses: 
./.github/workflows/job_build-docs.yaml + secrets: + SA_GITHUB_SSH_KEY: ${{ secrets.SA_GITHUB_SSH_KEY }} + with: + runner: ${{ inputs.runner || 'ubuntu-24.04' }} + build_kit_artifact_name: ${{ needs.build-build-kit.outputs.build_kit_artifact_name }} + build_kit_image_tag: ${{ needs.build-build-kit.outputs.build_kit_image_tag }} + deploy_docs: true diff --git a/.github/workflows/on_pr.yaml b/.github/workflows/on_pr.yaml new file mode 100644 index 0000000000..1be43867a2 --- /dev/null +++ b/.github/workflows/on_pr.yaml @@ -0,0 +1,129 @@ +name: Build, Lint and Test +on: + pull_request: {} + merge_group: {} + workflow_dispatch: + inputs: + runner: + description: Which runner to use + type: choice + default: 'ubuntu-24.04' + required: true + options: + - 'ubuntu-24.04' + - 'large-ubuntu-22.04-xxl' + schedule: + - cron: '37 13,1 * * *' + +jobs: + + + setup-env: + name: Setup Environment + uses: ./.github/workflows/job_setup-env.yml + with: + runner: ${{ inputs.runner || 'ubuntu-24.04' }} + ref_everest_ci: v1.5.4 + + + build-build-kit: + name: Build the build-kit + uses: ./.github/workflows/job_build-build-kit.yml + needs: + - setup-env + with: + runner: ${{ inputs.runner || 'ubuntu-24.04' }} + base_image_tag_everest_ci: ${{ needs.setup-env.outputs.closest_tag_everest_ci }} + + + build-cmake-gcc: + name: Build with CMake and GCC + uses: ./.github/workflows/job_build-cmake-gcc.yml + needs: + - setup-env + - build-build-kit + secrets: + coverage_deploy_token: "" + with: + runner: ${{ inputs.runner || 'ubuntu-24.04' }} + ref_everest_ci: ${{ needs.setup-env.outputs.ref_everest_ci }} + is_fork: ${{ needs.setup-env.outputs.is_fork }} + build-kit-artifact-name: ${{ needs.build-build-kit.outputs.build_kit_artifact_name }} + build_kit_image_tag: ${{ needs.build-build-kit.outputs.build_kit_image_tag }} + artifact_deploy_target_repo: ${{ github.repository_owner }}/everest.github.io + + + integration-tests: + name: Run Integration Tests + uses: 
./.github/workflows/job_integrations-tests.yml + needs: + - setup-env + - build-cmake-gcc + - build-build-kit + with: + runner: ${{ inputs.runner || 'ubuntu-24.04' }} + build_kit_artifact_name: ${{ needs.build-build-kit.outputs.build_kit_artifact_name }} + build_kit_image_tag: ${{ needs.build-build-kit.outputs.build_kit_image_tag }} + dist_artifact_name: ${{ needs.build-cmake-gcc.outputs.dist_artifact_name }} + wheels_artifact_name: ${{ needs.build-cmake-gcc.outputs.wheels_artifact_name }} + + + ocpp-tests: + name: Run OCPP Tests + uses: ./.github/workflows/job_ocpp-tests.yml + needs: + - setup-env + - build-cmake-gcc + - build-build-kit + with: + runner: ${{ inputs.runner || 'ubuntu-24.04' }} + build_kit_artifact_name: ${{ needs.build-build-kit.outputs.build_kit_artifact_name }} + build_kit_image_tag: ${{ needs.build-build-kit.outputs.build_kit_image_tag }} + dist_artifact_name: ${{ needs.build-cmake-gcc.outputs.dist_artifact_name }} + wheels_artifact_name: ${{ needs.build-cmake-gcc.outputs.wheels_artifact_name }} + + + lint: + name: Lint Repository + uses: ./.github/workflows/job_lint.yml + needs: + - setup-env + with: + runner: ${{ inputs.runner || 'ubuntu-24.04' }} + ref_everest_ci: ${{ needs.setup-env.outputs.ref_everest_ci }} + + + build-docs: + name: Call Build Documentation + needs: + - setup-env + - build-build-kit + uses: ./.github/workflows/job_build-docs.yaml + secrets: + SA_GITHUB_SSH_KEY: ${{ secrets.SA_GITHUB_SSH_KEY }} + with: + runner: ${{ inputs.runner || 'ubuntu-24.04' }} + build_kit_artifact_name: ${{ needs.build-build-kit.outputs.build_kit_artifact_name }} + build_kit_image_tag: ${{ needs.build-build-kit.outputs.build_kit_image_tag }} + deploy_docs: false + + + dco-check: + name: DCO Check + uses: ./.github/workflows/job_dco-check.yaml + with: + runner: ${{ inputs.runner || 'ubuntu-24.04' }} + + + manifest-tests: + name: Manifests Test + uses: ./.github/workflows/job_manifest-tests.yml + with: + runner: ${{ inputs.runner || 'ubuntu-24.04' }} + 
+ + bazel-build-and-test: + name: Bazel Build And Test + uses: ./.github/workflows/job_bazel-build-test.yaml + with: + runner: ${{ inputs.runner || 'ubuntu-24.04' }} diff --git a/.github/workflows/unit_test.yml b/.github/workflows/unit_test.yml deleted file mode 100644 index 7a6e755358..0000000000 --- a/.github/workflows/unit_test.yml +++ /dev/null @@ -1,29 +0,0 @@ - -name: Unit tests -on: - pull_request: {} - merge_group: {} - workflow_dispatch: - inputs: - runner: - description: Which runner to use - type: choice - default: 'ubuntu-22.04' - required: true - options: - - 'ubuntu-22.04' - - 'large-ubuntu-22.04-xxl' - -jobs: - manifests_tes: - name: Manifests test - runs-on: ${{ inputs.runner || 'ubuntu-22.04' }} - steps: - - uses: actions/checkout@v3 - - name: Install deps - run: pip3 install pytest - - name: Test mainfests - run: | - cd ${{ github.workspace }} - python3 -m pytest tests/manifest_tests/*.py - diff --git a/.gitignore b/.gitignore index dcf2072716..61a3e1475e 100644 --- a/.gitignore +++ b/.gitignore @@ -13,3 +13,4 @@ __pycache__/ *.diff *.orig *.rej +docs/source/conf.py diff --git a/CMakeLists.txt b/CMakeLists.txt index 31442c8fde..486994a088 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -63,9 +63,10 @@ endif() option(CREATE_SYMLINKS "Create symlinks to javascript modules and auxillary files - for development purposes" OFF) option(CMAKE_RUN_CLANG_TIDY "Run clang-tidy" OFF) -option(EVEREST_BUILD_API_DOCS "Build EVerest API documentation" OFF) option(ISO15118_2_GENERATE_AND_INSTALL_CERTIFICATES "Automatically generate and install certificates for development purposes" ON) option(EVEREST_ENABLE_RUN_SCRIPT_GENERATION "Enables the generation of run scripts (convenience scripts for starting available configurations)" ON) +option(EVEREST_BUILD_DOCS "Build EVerest documentation" OFF) +option(EVEREST_SKIP_BUILD_API_DOC "Skip building the async API html doc for the EVerest API" OFF) option(${PROJECT_NAME}_BUILD_TESTING "Build unit tests, used if included 
as dependency" OFF) option(BUILD_TESTING "Build unit tests, used if standalone project" OFF) option(EVEREST_ENABLE_COMPILE_WARNINGS "Enable compile warnings set in the EVEREST_COMPILE_OPTIONS flag" OFF) @@ -176,6 +177,9 @@ else() find_package(OpenSSL 3 REQUIRED) endif() +if(EVEREST_BUILD_DOCS) + add_subdirectory(docs) +endif() include(ev-project-bootstrap) @@ -192,8 +196,6 @@ add_subdirectory(lib) # FIXME (aw): this should be optional add_subdirectory(config) -add_subdirectory(doc) - if(EVEREST_BUILD_APPLICATIONS) add_subdirectory(applications) endif() diff --git a/THIRD_PARTY.md b/THIRD_PARTY.md index a72831426b..19b784c768 100644 --- a/THIRD_PARTY.md +++ b/THIRD_PARTY.md @@ -2,3 +2,4 @@ - [CodeCoverage.cmake](https://github.com/bilke/cmake-modules/blob/master/CodeCoverage.cmake) licensed under [The 3-Clause BSD License](https://opensource.org/licenses/BSD-3-Clause) - [Nanopb - Protocol Buffers for Embedded Systems](https://github.com/nanopb/nanopb) licensed under [The zlib License](https://opensource.org/licenses/Zlib) +- [sphinx-notfound-page](https://github.com/readthedocs/sphinx-notfound-page) licensed under [The MIT License](https://opensource.org/licenses/MIT) diff --git a/applications/utils/ev-dev-tools/src/ev_cli/templates/index.rst.j2 b/applications/utils/ev-dev-tools/src/ev_cli/templates/index.rst.j2 index d7af654c4c..2a56b81f9b 100644 --- a/applications/utils/ev-dev-tools/src/ev_cli/templates/index.rst.j2 +++ b/applications/utils/ev-dev-tools/src/ev_cli/templates/index.rst.j2 @@ -1,5 +1,3 @@ -:orphan: - .. _everest_modules_handwritten_{{ info.name }}: .. This file is a placeholder for optional multiple files @@ -12,9 +10,15 @@ and will be converted to HTML and PDF by Sphinx. This index.rst file is the entry point for the module documentation. -******************************************* -{{ info.name }} -******************************************* +.. 
Use underlined-only headlines inside this document (highest-level + sub-section headline should use "=" characters) + +.. The content of this file will be included in the auto-generated HTML + page for the module. You can link to it using the following + reference: everest_modules_{{ info.name }}. + +.. ******************************************* +.. {{ info.name }} +.. ******************************************* -:ref:`Link ` to the module's reference. {{ info.desc }} diff --git a/applications/utils/scripts/create_snapshot.py b/applications/utils/scripts/create_snapshot.py index c22381deeb..c2fdef1487 100755 --- a/applications/utils/scripts/create_snapshot.py +++ b/applications/utils/scripts/create_snapshot.py @@ -44,6 +44,7 @@ def main(): parser.add_argument('--allow-relative-to-working-dir', action='store_true', help='Allow temporary directory to be relative to working dir (dangerous!)') parser.add_argument('--post-process', action='store_true', help='Postprocess existing snapshot') parser.add_argument('--include-external-deps', action='store_true', help='Include external dependencies in snapshot') + parser.add_argument('--exclude-dir', action='append', dest='excluded_dirs', type=str, help='Exclude specified directory from snapshot (can be used multiple times)', default=[]) args = parser.parse_args() @@ -62,6 +63,12 @@ def main(): print(f'Temporary directory cannot be relative to working directory: {tmp_dir}') return 1 + excluded_paths = [] + for excluded_dir in args.excluded_dirs: + excluded_path = working_dir / excluded_dir + excluded_path = excluded_path.expanduser().resolve() + excluded_paths.append(excluded_path) + if not args.post_process and tmp_dir.exists(): print(f'Temporary directory dir already exists, deleting it: {tmp_dir}') shutil.rmtree(tmp_dir, ignore_errors=True) @@ -77,6 +84,9 @@ def main(): if subdir_path == tmp_dir: print(f'{subdir_path} is tmp dir, ignoring') continue + if subdir_path in excluded_paths: + print(f'{subdir_path} is excluded, 
ignoring') + continue print(f'Copying {subdir_path} to {tmp_dir}') destdir = tmp_dir / subdir_path.name @@ -113,7 +123,7 @@ def main(): for dep_versions in versions: dependency, version = dep_versions.split(':') if dependency in snapshot: - print(f'Overriding {dependency} version {snapshot[dependency]['git_tag']} to {version}') + print(f'Overriding {dependency} version {snapshot[dependency]["git_tag"]} to {version}') snapshot[dependency]['git_tag'] = version for dependency, entry in snapshot.items(): git_tag = '' diff --git a/cmake/everest-generate.cmake b/cmake/everest-generate.cmake index 9c75c7b13e..ee136cdfc3 100644 --- a/cmake/everest-generate.cmake +++ b/cmake/everest-generate.cmake @@ -27,22 +27,33 @@ set_target_properties(generate_cpp_files # out-of-tree interfaces/types/modules support # function(_ev_add_project) - # FIXME (aw): resort to proper argument handling! - if (ARGC EQUAL 2) - set (EVEREST_PROJECT_DIR ${ARGV0}) - set (EVEREST_PROJECT_NAME ${ARGV1}) - endif () + set(options SKIP_DOC_GENERATION) + set(oneValueArgs EV_PROJECT_DIRECTORY EV_PROJECT_NAME) + set(multiValueArgs "") + cmake_parse_arguments(args "${options}" "${oneValueArgs}" "${multiValueArgs}" ${ARGN}) + + if (args_UNPARSED_ARGUMENTS) + message(FATAL_ERROR "ev_add_project macro got unknown argument(s): ${args_UNPARSED_ARGUMENTS}") + endif() - if (NOT EVEREST_PROJECT_DIR) + if(args_KEYWORDS_MISSING_VALUES) + message(FATAL_ERROR "ev_add_project() keyword(s) missing values: ${args_KEYWORDS_MISSING_VALUES}") + endif() + + if(args_EV_PROJECT_DIRECTORY AND args_EV_PROJECT_NAME) + set (EVEREST_PROJECT_DIR ${args_EV_PROJECT_DIRECTORY}) + set (EVEREST_PROJECT_NAME ${args_EV_PROJECT_NAME}) + elseif(NOT args_EV_PROJECT_NAME AND NOT args_EV_PROJECT_DIRECTORY) # if we don't get a directory, we're assuming project directory set (EVEREST_PROJECT_DIR ${PROJECT_SOURCE_DIR}) set (CALLED_FROM_WITHIN_PROJECT TRUE) - elseif (NOT EXISTS ${EVEREST_PROJECT_DIR}) - message(FATAL_ERROR "${CMAKE_CURRENT_FUNCTION} got
non-existing project path: ${EVEREST_PROJECT_DIR}") - endif () - - if (NOT EVEREST_PROJECT_NAME) set (EVEREST_PROJECT_NAME ${PROJECT_NAME}) + else() + message(FATAL_ERROR "ev_add_project() can only be called with ALL or NONE of: 'EV_PROJECT_DIRECTORY', 'EV_PROJECT_NAME'") + endif() + + if (NOT EXISTS ${EVEREST_PROJECT_DIR}) + message(FATAL_ERROR "${CMAKE_CURRENT_FUNCTION} got non-existing project path: ${EVEREST_PROJECT_DIR}") endif () message(STATUS "APPENDING ${EVEREST_PROJECT_DIR} to EVEREST_PROJECT_DIRS") @@ -59,6 +70,21 @@ function(_ev_add_project) ${TYPES_DIR}/*.yaml ) + if(EVEREST_BUILD_DOCS AND NOT args_SKIP_DOC_GENERATION) + find_package( + trailbook-ext-everest + 0.1.0 + REQUIRED + PATHS "${CMAKE_SOURCE_DIR}/cmake" + ) + foreach(TYPES_FILE ${TYPES_FILES}) + trailbook_ev_generate_rst_from_types( + TRAILBOOK_NAME "everest" + TYPES_FILE "${TYPES_FILE}" + ) + endforeach() + endif() + _ev_add_types(${TYPES_FILES}) if (CALLED_FROM_WITHIN_PROJECT) @@ -70,6 +96,40 @@ function(_ev_add_project) endif () endif () + # check for API + set(API_DIR "${EVEREST_PROJECT_DIR}/docs/source/reference/EVerest_API") + if (EXISTS ${API_DIR}) + if (${EVEREST_SKIP_BUILD_API_DOC}) + message(WARNING "Skipping the generation of the EVerest API AsyncAPI html documentation") + else() + message(STATUS "Adding API definitions from ${API_DIR}") + file(GLOB API_FILES + ${API_DIR}/*.yaml + ) + + if(EVEREST_BUILD_DOCS AND NOT args_SKIP_DOC_GENERATION) + find_package( + trailbook-ext-everest + 0.1.0 + REQUIRED + PATHS "${CMAKE_SOURCE_DIR}/cmake" + ) + trailbook_ev_generate_api_doc( + TRAILBOOK_NAME "everest" + API_FILES ${API_FILES} + ) + endif() + + if (CALLED_FROM_WITHIN_PROJECT) + install( + DIRECTORY ${API_DIR} + DESTINATION "${CMAKE_INSTALL_DATADIR}/everest" + FILES_MATCHING PATTERN "*.yaml" + ) + endif () + endif () + endif () + # check for errors set(ERRORS_DIR "${EVEREST_PROJECT_DIR}/errors") if (EXISTS ${ERRORS_DIR}) @@ -91,6 +151,21 @@ function(_ev_add_project) 
${INTERFACES_DIR}/*.yaml ) + if(EVEREST_BUILD_DOCS AND NOT args_SKIP_DOC_GENERATION) + find_package( + trailbook-ext-everest + 0.1.0 + REQUIRED + PATHS "${CMAKE_SOURCE_DIR}/cmake" + ) + foreach(INTERFACE_FILE ${INTERFACE_FILES}) + trailbook_ev_generate_rst_from_interface( + TRAILBOOK_NAME "everest" + INTERFACE_FILE "${INTERFACE_FILE}" + ) + endforeach() + endif() + _ev_add_interfaces(${INTERFACE_FILES}) if (CALLED_FROM_WITHIN_PROJECT) @@ -126,6 +201,25 @@ function(_ev_add_project) endfunction() macro(ev_add_project) + set(options SKIP_DOC_GENERATION) + set(oneValueArgs EV_PROJECT_DIRECTORY EV_PROJECT_NAME) + set(multiValueArgs "") + cmake_parse_arguments(args "${options}" "${oneValueArgs}" "${multiValueArgs}" ${ARGN}) + + if (args_UNPARSED_ARGUMENTS) + message(FATAL_ERROR "ev_add_project macro got unknown argument(s): ${args_UNPARSED_ARGUMENTS}") + endif() + + if(args_KEYWORDS_MISSING_VALUES) + message(FATAL_ERROR "ev_add_project() keyword(s) missing values: ${args_KEYWORDS_MISSING_VALUES}") + endif() + + if(args_EV_PROJECT_DIRECTORY AND NOT args_EV_PROJECT_NAME) + message(FATAL_ERROR "ev_add_project() was called with EV_PROJECT_DIRECTORY but is missing EV_PROJECT_NAME.") + elseif(NOT args_EV_PROJECT_DIRECTORY AND args_EV_PROJECT_NAME) + message(FATAL_ERROR "ev_add_project() was called with EV_PROJECT_NAME but is missing EV_PROJECT_DIRECTORY.") + endif() + ev_setup_cmake_variables_python_wheel() set(${PROJECT_NAME}_PYTHON_VENV_PATH "${CMAKE_BINARY_DIR}/venv" CACHE PATH "Path to python venv") @@ -136,11 +230,14 @@ macro(ev_add_project) setup_ev_cli() - # FIXME (aw): resort to proper argument handling! 
- if (${ARGC} EQUAL 2) - _ev_add_project(${ARGV0} ${ARGV1}) + if (args_SKIP_DOC_GENERATION) + set (fwd_OPTION "SKIP_DOC_GENERATION") + endif() + + if (args_EV_PROJECT_DIRECTORY AND args_EV_PROJECT_NAME) + _ev_add_project(${fwd_OPTION} EV_PROJECT_DIRECTORY ${args_EV_PROJECT_DIRECTORY} EV_PROJECT_NAME ${args_EV_PROJECT_NAME}) + else() - _ev_add_project() + _ev_add_project(${fwd_OPTION}) endif () endmacro() @@ -392,7 +489,7 @@ function (ev_add_module) # # handle passed arguments # - set(options "") + set(options SKIP_DOC_GENERATION) set(one_value_args "") set(multi_value_args DEPENDENCIES @@ -420,6 +517,36 @@ function (ev_add_module) endforeach() endif() + if (EVEREST_BUILD_DOCS AND NOT OPTNS_SKIP_DOC_GENERATION) + find_package( + trailbook-ext-everest + 0.1.0 + REQUIRED + PATHS "${CMAKE_SOURCE_DIR}/cmake" + ) + if (EXISTS "${CMAKE_CURRENT_SOURCE_DIR}/${MODULE_NAME}/docs/") + trailbook_ev_add_module_handwritten_doc( + TRAILBOOK_NAME "everest" + MODULE_NAME "${MODULE_NAME}" + HANDWRITTEN_DIR "${CMAKE_CURRENT_SOURCE_DIR}/${MODULE_NAME}/docs" + ) + endif() + if (EXISTS "${CMAKE_CURRENT_SOURCE_DIR}/${MODULE_NAME}/doc.rst") + message( + FATAL_ERROR + "Module ${MODULE_NAME} contains a doc.rst file" + " this is not supported anymore, please move to" + " docs/index.rst.inc, then it will be picked up automatically." + " For now this file will be ignored."
+ ) + endif() + trailbook_ev_generate_rst_from_manifest( + TRAILBOOK_NAME "everest" + MANIFEST_FILE "${CMAKE_CURRENT_SOURCE_DIR}/${MODULE_NAME}/manifest.yaml" + ) + endif() + + # check if python module string(FIND ${MODULE_NAME} "Py" MODULE_PREFIX_POS) if (MODULE_PREFIX_POS EQUAL 0) diff --git a/cmake/fetch_async_api.cmake b/cmake/fetch_async_api.cmake deleted file mode 100644 index b24fd3431e..0000000000 --- a/cmake/fetch_async_api.cmake +++ /dev/null @@ -1,28 +0,0 @@ -if(asyncapi-cli_DIR) - message(STATUS "Using asyncapi-cli at this location: ${asyncapi-cli_DIR}") -else() - message(STATUS "Retrieving asyncapi-cli using FetchContent") - include(FetchContent) - FetchContent_Declare( - asyncapi-cli - GIT_REPOSITORY https://github.com/asyncapi/cli.git - GIT_TAG v2.7.1 - ) - FetchContent_MakeAvailable(asyncapi-cli) - set(asyncapi-cli_DIR "${asyncapi-cli_SOURCE_DIR}") - set(asyncapi-cli_FIND_COMPONENTS "bundling") -endif() - -add_custom_command( - OUTPUT ${CMAKE_CURRENT_BINARY_DIR}/generated/asyncapi_cli_install_done - COMMAND cd ${asyncapi-cli_DIR} && npm install && npm run build - COMMAND ${CMAKE_COMMAND} -E make_directory ${CMAKE_CURRENT_BINARY_DIR}/generated - COMMAND ${CMAKE_COMMAND} -E touch ${CMAKE_CURRENT_BINARY_DIR}/generated/asyncapi_cli_install_done - COMMENT "AsyncApi/cli Install once only" -) - -add_custom_target(asyncapi_cli_install_target - DEPENDS ${CMAKE_CURRENT_BINARY_DIR}/generated/asyncapi_cli_install_done -) - -set(ASYNCAPI_CMD ${asyncapi-cli_DIR}/bin/run) diff --git a/cmake/fetch_async_api_html_template.cmake b/cmake/fetch_async_api_html_template.cmake deleted file mode 100644 index 24928c9cec..0000000000 --- a/cmake/fetch_async_api_html_template.cmake +++ /dev/null @@ -1,29 +0,0 @@ -find_package(asyncapi-html-template - COMPONENTS bundling - PATHS ../asyncapi-html-template -) - -if(NOT asyncapi-html-template_FOUND) - message(STATUS "Retrieving asyncapi-html-template using FetchContent") - include(FetchContent) - FetchContent_Declare( - 
asyncapi-html-template - GIT_REPOSITORY https://github.com/asyncapi/html-template.git - GIT_TAG v3.0.0 - ) - FetchContent_MakeAvailable(asyncapi-html-template) - set(asyncapi-html-template_DIR "${asyncapi-html-template_SOURCE_DIR}") - set(asyncapi-html-template_FIND_COMPONENTS "bundling") -endif() - -add_custom_command( - OUTPUT ${CMAKE_CURRENT_BINARY_DIR}/generated/asyncapi_html_template_install_done - COMMAND cd ${asyncapi-html-template_SOURCE_DIR} && npm install - COMMAND ${CMAKE_COMMAND} -E make_directory ${CMAKE_CURRENT_BINARY_DIR}/generated - COMMAND ${CMAKE_COMMAND} -E touch ${CMAKE_CURRENT_BINARY_DIR}/generated/asyncapi_html_template_install_done - COMMENT "AsyncApi/html-template Install once only" -) - -add_custom_target(asyncapi_html_template_install_target - DEPENDS ${CMAKE_CURRENT_BINARY_DIR}/generated/asyncapi_html_template_install_done -) \ No newline at end of file diff --git a/cmake/generate-api-docs.cmake b/cmake/generate-api-docs.cmake deleted file mode 100644 index c840f38ec8..0000000000 --- a/cmake/generate-api-docs.cmake +++ /dev/null @@ -1,60 +0,0 @@ -function(generate_async_api_docs) - set(oneValueArgs - API_PATH - API_NAME - ) - cmake_parse_arguments(FN - "" - "${oneValueArgs}" - "" - ${ARGN} - ) - if ("${FN_API_PATH}" STREQUAL "") - message(FATAL_ERROR "API_PATH is required") - endif() - if ("${FN_API_NAME}" STREQUAL "") - message(FATAL_ERROR "API_NAME is required") - endif() - - set(OUTPUT_DIR ${CMAKE_BINARY_DIR}/everest_api_docs/${FN_API_NAME}) - set(ASYNC_TARGET_NAME ${FN_API_NAME}_AsyncApi) - set(API_DOC_INSTALL_DIR ${CMAKE_INSTALL_DOCDIR}/everest_api) - - add_custom_command( - OUTPUT ${OUTPUT_DIR}/yaml/asyncapi.yaml - COMMAND ${CMAKE_COMMAND} -E make_directory ./html - COMMAND ${CMAKE_COMMAND} -E make_directory ./yaml - COMMAND ${CMAKE_COMMAND} -E copy ${FN_API_PATH} ${OUTPUT_DIR}/yaml/asyncapi.yaml - DEPENDS ${FN_API_PATH} - COMMENT "${FN_API_NAME}: Prepare AsyncApi documentation generation and copy asyncapi.yaml" - ) - - 
add_custom_command( - OUTPUT ${OUTPUT_DIR}/html/index.html - COMMAND ${ASYNCAPI_CMD} generate fromTemplate ${FN_API_PATH} ${asyncapi-html-template_SOURCE_DIR} --force-write --use-new-generator --output=${OUTPUT_DIR}/html - DEPENDS ${FN_API_PATH} - COMMENT "${FN_API_NAME}: Generate AsyncApi HTML documentation" - ) - - add_custom_target(${FN_API_NAME}_AsyncApi - DEPENDS - ${OUTPUT_DIR}/yaml/asyncapi.yaml - ${OUTPUT_DIR}/html/index.html - asyncapi_cli_install_target - asyncapi_html_template_install_target - ) - - install( - DIRECTORY ${OUTPUT_DIR}/html/ - DESTINATION ${API_DOC_INSTALL_DIR}/html/${FN_API_NAME}/ - OPTIONAL - ) - - install( - FILES ${OUTPUT_DIR}/yaml/asyncapi.yaml - DESTINATION ${API_DOC_INSTALL_DIR}/yaml/${FN_API_NAME}/ - OPTIONAL - ) - - add_dependencies(everest_api_docs ${ASYNC_TARGET_NAME}) -endfunction() diff --git a/cmake/trailbook-ext-everest/README.md b/cmake/trailbook-ext-everest/README.md new file mode 100644 index 0000000000..b0465b56b4 --- /dev/null +++ b/cmake/trailbook-ext-everest/README.md @@ -0,0 +1,18 @@ +# Trailbook Extension for Everest + +This CMake package is an extension for the Trailbook CMake package +that provides additional functionality specifically for building +documentation for the Everest project. + +The following additional features are provided: + +* cmake function: `trailbook_ev_add_module_explanation()` +* cmake function: `trailbook_ev_create_snapshot()` +* cmake function: `trailbook_ev_generate_api_doc()` +* cmake function: `trailbook_ev_generate_rst_from_manifest()` +* cmake function: `trailbook_ev_generate_rst_from_interface()` +* cmake function: `trailbook_ev_generate_rst_from_types()` + +Check out the inline documentation of the functions for more details +on how to use them. Each function is defined in its own CMake file +located in this package directory. 
diff --git a/cmake/trailbook-ext-everest/add-module-handwritten-doc.cmake b/cmake/trailbook-ext-everest/add-module-handwritten-doc.cmake new file mode 100644 index 0000000000..d864458a84 --- /dev/null +++ b/cmake/trailbook-ext-everest/add-module-handwritten-doc.cmake @@ -0,0 +1,214 @@ +# This macro is for internal use only +# +# It is used in the function trailbook_ev_add_module_handwritten_doc(). +# It adds a custom command to copy the handwritten module files to the reference modules directory. +macro(_trailbook_ev_add_module_reference_copy_handwritten_command) + file( + GLOB_RECURSE + MODULE_HANDWRITTEN_SOURCE_FILES + RELATIVE "${args_HANDWRITTEN_DIR}" + CONFIGURE_DEPENDS + "${args_HANDWRITTEN_DIR}/*" + ) + + set(EXPECTED_DEST_FILES "") + set(COPY_DEPENDENCIES "") + + foreach(SOURCE_FILE IN LISTS MODULE_HANDWRITTEN_SOURCE_FILES) + set(SRC_FILE_PATH "${args_HANDWRITTEN_DIR}/${SOURCE_FILE}") + + if(IS_DIRECTORY "${SRC_FILE_PATH}") + continue() + endif() + + get_filename_component(RELATIVE_SUBDIR "${SOURCE_FILE}" DIRECTORY) + get_filename_component(FILE_NAME "${SOURCE_FILE}" NAME) + + # when copying 'index.rst' then rename it + if("${FILE_NAME}" STREQUAL "index.rst") + set(DEST_FILENAME "index.inc") + else() + set(DEST_FILENAME "${FILE_NAME}") + endif() + + if("${RELATIVE_SUBDIR}" STREQUAL "") + set(DEST_FILE_PATH "${TRAILBOOK_EV_HANDWRITTEN_MODULE_DOC_DIRECTORY}/${DEST_FILENAME}") + set(DEST_DIR "${TRAILBOOK_EV_HANDWRITTEN_MODULE_DOC_DIRECTORY}") + else() + set(DEST_FILE_PATH "${TRAILBOOK_EV_HANDWRITTEN_MODULE_DOC_DIRECTORY}/${RELATIVE_SUBDIR}/${DEST_FILENAME}") + set(DEST_DIR "${TRAILBOOK_EV_HANDWRITTEN_MODULE_DOC_DIRECTORY}/${RELATIVE_SUBDIR}") + endif() + + list(APPEND EXPECTED_DEST_FILES "${DEST_FILE_PATH}") + + # One command per file + add_custom_command( + OUTPUT "${DEST_FILE_PATH}" + COMMAND ${CMAKE_COMMAND} -E make_directory "${DEST_DIR}" + COMMAND ${CMAKE_COMMAND} -E copy "${SRC_FILE_PATH}" "${DEST_FILE_PATH}" + DEPENDS "${SRC_FILE_PATH}" + COMMENT 
"Processing doc file: ${SOURCE_FILE} -> ${RELATIVE_SUBDIR}/${DEST_FILENAME}" + VERBATIM + ) + + list(APPEND COPY_DEPENDENCIES "${DEST_FILE_PATH}") + endforeach() + + # Remove files if they were deleted in the source tree + if(EXISTS "${TRAILBOOK_EV_HANDWRITTEN_MODULE_DOC_DIRECTORY}") + file(GLOB_RECURSE EXISTING_DEST_FILES "${TRAILBOOK_EV_HANDWRITTEN_MODULE_DOC_DIRECTORY}/*") + + foreach(EXISTING_FILE IN LISTS EXISTING_DEST_FILES) + if(IS_DIRECTORY "${EXISTING_FILE}") + continue() + endif() + + list(FIND EXPECTED_DEST_FILES "${EXISTING_FILE}" FILE_INDEX) + if(FILE_INDEX EQUAL -1) + message(STATUS " Removing orphaned doc file: ${EXISTING_FILE}") + file(REMOVE "${EXISTING_FILE}") + endif() + endforeach() + endif() + + if(COPY_DEPENDENCIES) + set(ASSET_TARGET "${TARGET_NAME_PREFIX}_assets") + + # Guard against multiple definitions + if(NOT TARGET trailbook_${args_TRAILBOOK_NAME}_handwritten_doc_module_${args_MODULE_NAME}) + add_custom_target( + trailbook_${args_TRAILBOOK_NAME}_handwritten_doc_module_${args_MODULE_NAME} + DEPENDS + ${COPY_DEPENDENCIES} + COMMENT + "Handwritten documentation of module ${args_MODULE_NAME} for trailbook ${args_TRAILBOOK_NAME} is available." 
+ ) + + set_property( + TARGET + trailbook_${args_TRAILBOOK_NAME} + APPEND + PROPERTY + ADDITIONAL_DEPS_STAGE_BUILD_SPHINX_BEFORE + ${COPY_DEPENDENCIES} + trailbook_${args_TRAILBOOK_NAME}_handwritten_doc_module_${args_MODULE_NAME} + ) + endif() + endif() + + # set(MODULE_HANDWRITTEN_TARGET_FILES "") + # foreach(source_file IN LISTS MODULE_HANDWRITTEN_SOURCE_FILES) + # file(RELATIVE_PATH rel_path "${args_HANDWRITTEN_DIR}" "${source_file}") + # set(target_file "${TRAILBOOK_EV_HANDWRITTEN_MODULE_DOC_DIRECTORY}/${rel_path}") + # list(APPEND MODULE_HANDWRITTEN_TARGET_FILES "${target_file}") + # endforeach() + + # add_custom_command( + # OUTPUT + # ${MODULE_HANDWRITTEN_TARGET_FILES} + # DEPENDS + # ${MODULE_HANDWRITTEN_SOURCE_FILES} + # ${DEPS_STAGE_PREPARE_SPHINX_SOURCE_AFTER} + # COMMENT + # "Copying handwritten documentation files of module ${args_MODULE_NAME} to: ${TRAILBOOK_EV_HANDWRITTEN_MODULE_DOC_DIRECTORY}/" + # COMMAND + # ${CMAKE_COMMAND} -E rm -rf + # ${MODULE_HANDWRITTEN_TARGET_FILES} + # COMMAND + # ${CMAKE_COMMAND} -E make_directory + # ${TRAILBOOK_EV_HANDWRITTEN_MODULE_DOC_DIRECTORY}/ + # COMMAND + # ${CMAKE_COMMAND} -E copy_directory + # ${args_HANDWRITTEN_DIR} + # ${TRAILBOOK_EV_HANDWRITTEN_MODULE_DOC_DIRECTORY}/ + # ) +endmacro() + +# This function adds a handwritten module documentation to a trailbook. +# It takes the following parameters: +# TRAILBOOK_NAME (required): The name of the trailbook to add the +# documentation to. +# MODULE_NAME (required): The name of the module. +# HANDWRITTEN_DIR (required): The absolute path to the directory +# containing the module's handwritten files. 
+# +# Usage: +# trailbook_ev_add_module_handwritten_doc( +# TRAILBOOK_NAME +# MODULE_NAME +# HANDWRITTEN_DIR +# ) +function(trailbook_ev_add_module_handwritten_doc) + set(options) + set(one_value_args + TRAILBOOK_NAME + MODULE_NAME + HANDWRITTEN_DIR + ) + set(multi_value_args) + cmake_parse_arguments( + "args" + "${options}" + "${one_value_args}" + "${multi_value_args}" + ${ARGN} + ) + + # Parameter TRAILBOOK_NAME + # - is required + # - there should be a target named trailbook_ + if(NOT args_TRAILBOOK_NAME) + message(FATAL_ERROR "trailbook_ev_add_module_handwritten_doc: TRAILBOOK_NAME argument is required") + endif() + if(NOT TARGET trailbook_${args_TRAILBOOK_NAME}) + message( + FATAL_ERROR + "trailbook_ev_add_module_handwritten_doc: No target named trailbook_${args_TRAILBOOK_NAME} found." + " Did you forget to call add_trailbook() first?" + ) + endif() + + # Parameter MODULE_NAME + # - is required + if(NOT args_MODULE_NAME) + message(FATAL_ERROR "trailbook_ev_add_module_handwritten_doc: MODULE_NAME argument is required") + endif() + + # Parameter HANDWRITTEN_DIR + # - is required + # - must be a absolute path + # - must exist + if(NOT args_HANDWRITTEN_DIR) + message(FATAL_ERROR "trailbook_ev_add_module_handwritten_doc: HANDWRITTEN_DIR argument is required") + endif() + if(NOT IS_ABSOLUTE "${args_HANDWRITTEN_DIR}") + message(FATAL_ERROR "trailbook_ev_add_module_handwritten_doc: HANDWRITTEN_DIR must be an absolute path") + endif() + if(NOT EXISTS "${args_HANDWRITTEN_DIR}") + message(FATAL_ERROR "trailbook_ev_add_module_handwritten_doc: HANDWRITTEN_DIR does not exist") + endif() + + + get_target_property( + TRAILBOOK_INSTANCE_SOURCE_DIRECTORY + trailbook_${args_TRAILBOOK_NAME} + TRAILBOOK_INSTANCE_SOURCE_DIRECTORY + ) + get_target_property( + DEPS_STAGE_PREPARE_SPHINX_SOURCE_AFTER + trailbook_${args_TRAILBOOK_NAME} + DEPS_STAGE_PREPARE_SPHINX_SOURCE_AFTER + ) + + file(RELATIVE_PATH RELATIVE_PATH_HANDWRITTEN_DIR + "${CMAKE_SOURCE_DIR}/modules" + 
"${args_HANDWRITTEN_DIR}" + ) + + get_filename_component(RELATIVE_PATH_MODULE "${RELATIVE_PATH_HANDWRITTEN_DIR}" DIRECTORY) + + set(TRAILBOOK_EV_REFERENCE_DIRECTORY "${TRAILBOOK_INSTANCE_SOURCE_DIRECTORY}/reference") + set(TRAILBOOK_EV_HANDWRITTEN_MODULE_DOC_DIRECTORY "${TRAILBOOK_EV_REFERENCE_DIRECTORY}/modules/${RELATIVE_PATH_MODULE}") + + _trailbook_ev_add_module_reference_copy_handwritten_command() +endfunction() diff --git a/cmake/trailbook-ext-everest/create-snapshot.cmake b/cmake/trailbook-ext-everest/create-snapshot.cmake new file mode 100644 index 0000000000..5ed1970031 --- /dev/null +++ b/cmake/trailbook-ext-everest/create-snapshot.cmake @@ -0,0 +1,140 @@ +if(NOT DEFINED _TRAILBOOK_EXT_EVEREST_CREATE_SNAPSHOT_SETUP) + if(NOT DEFINED everest-utils_SOURCE_DIR) + message(FATAL_ERROR "everest-utils not found. Did you forget to add it to your dependencies.yaml?") + endif() + set(_TRAILBOOK_EXT_EVEREST_CREATE_SNAPSHOT_SCRIPT + "${everest-utils_SOURCE_DIR}/scripts/create_snapshot.py" + ) + if(NOT EXISTS "${_TRAILBOOK_EXT_EVEREST_CREATE_SNAPSHOT_SCRIPT}") + message(FATAL_ERROR "everest-utils found, but create_snapshot.py script is missing at ${_TRAILBOOK_EXT_EVEREST_CREATE_SNAPSHOT_SCRIPT}") + endif() + set(_TRAILBOOK_EXT_EVEREST_CREATE_SNAPSHOT_SETUP TRUE) +endif() + + +# This function creates a snapshot file and adds it +# to the given trailbook +# Parameters: +# EVEREST_WORKSPACE_DIRECTORY (required): Absolute path to the EVerest workspace +# directory +# TRAILBOOK_NAME (required): Name of the trailbook (the +# target must exist) +# OUTPUT_FILE (required): Absolute path to the output +# snapshot file +# Usage: +# trailbook_ev_create_snapshot( +# EVEREST_WORKSPACE_DIRECTORY +# TRAILBOOK_NAME +# OUTPUT_FILE +# ) +function(trailbook_ev_create_snapshot) + set(options) + set(one_value_args + EVEREST_WORKSPACE_DIRECTORY + TRAILBOOK_NAME + OUTPUT_FILE + ) + set(multi_value_args) + cmake_parse_arguments( + "args" + "${options}" + "${one_value_args}" + 
"${multi_value_args}"
+        ${ARGN}
+    )
+
+    # Parameter EVEREST_WORKSPACE_DIRECTORY
+    # - is required
+    # - must be an absolute path
+    # - must exist
+    if(NOT args_EVEREST_WORKSPACE_DIRECTORY)
+        message(FATAL_ERROR "trailbook_ev_create_snapshot: EVEREST_WORKSPACE_DIRECTORY argument is required")
+    endif()
+    if(NOT IS_ABSOLUTE "${args_EVEREST_WORKSPACE_DIRECTORY}")
+        message(FATAL_ERROR "trailbook_ev_create_snapshot: EVEREST_WORKSPACE_DIRECTORY must be an absolute path")
+    endif()
+    if(NOT EXISTS "${args_EVEREST_WORKSPACE_DIRECTORY}")
+        message(FATAL_ERROR "trailbook_ev_create_snapshot: EVEREST_WORKSPACE_DIRECTORY must exist")
+    endif()
+
+    # Parameter TRAILBOOK_NAME
+    # - is required
+    # - there should be a target named trailbook_
+    if(NOT args_TRAILBOOK_NAME)
+        message(FATAL_ERROR "trailbook_ev_create_snapshot: TRAILBOOK_NAME argument is required")
+    endif()
+    if(NOT TARGET trailbook_${args_TRAILBOOK_NAME})
+        message(
+            FATAL_ERROR
+            "trailbook_ev_create_snapshot: No target named trailbook_${args_TRAILBOOK_NAME} found."
+            " Did you forget to call add_trailbook() first?"
+ ) + endif() + + # Parameter OUTPUT_FILE + # - is required + # - must be a absolute path + if(NOT args_OUTPUT_FILE) + message(FATAL_ERROR "trailbook_ev_create_snapshot: OUTPUT_FILE argument is required") + endif() + if(NOT IS_ABSOLUTE "${args_OUTPUT_FILE}") + message(FATAL_ERROR "trailbook_ev_create_snapshot: OUTPUT_FILE must be an absolute path") + endif() + + get_target_property( + TRAILBOOK_CURRENT_BINARY_DIR + trailbook_${args_TRAILBOOK_NAME} + TRAILBOOK_CURRENT_BINARY_DIR + ) + get_target_property( + DEPS_STAGE_PREPARE_SPHINX_SOURCE_AFTER + trailbook_${args_TRAILBOOK_NAME} + DEPS_STAGE_PREPARE_SPHINX_SOURCE_AFTER + ) + set(CREATE_SNAPSHOT_TEMP_DIR "${TRAILBOOK_CURRENT_BINARY_DIR}/create_snapshot_temp") + add_custom_command( + OUTPUT + ${args_OUTPUT_FILE} + DEPENDS + ${_TRAILBOOK_EXT_EVEREST_CREATE_SNAPSHOT_SCRIPT} + trailbook_${args_TRAILBOOK_NAME}_stage_prepare_sphinx_source_after + ${DEPS_STAGE_PREPARE_SPHINX_SOURCE_AFTER} + USES_TERMINAL + COMMAND + ${CMAKE_COMMAND} -E rm -rf + ${CREATE_SNAPSHOT_TEMP_DIR} + COMMAND + ${Python3_EXECUTABLE} + ${_TRAILBOOK_EXT_EVEREST_CREATE_SNAPSHOT_SCRIPT} + --working-dir ${args_EVEREST_WORKSPACE_DIRECTORY} + --temp-dir ${CREATE_SNAPSHOT_TEMP_DIR} + --allow-relative-to-working-dir + --exclude-dir build/ + --exclude-dir .vscode/ + --exclude-dir dist/ + --exclude-dir cache/ + --exclude-dir scripts/ + COMMAND + ${CMAKE_COMMAND} -E copy + ${CREATE_SNAPSHOT_TEMP_DIR}/snapshot.yaml + ${args_OUTPUT_FILE} + ) + add_custom_target( + trailbook_${args_TRAILBOOK_NAME}_create_snapshot + DEPENDS + trailbook_${args_TRAILBOOK_NAME}_stage_prepare_sphinx_source_after + ${DEPS_STAGE_PREPARE_SPHINX_SOURCE_AFTER} + ${args_OUTPUT_FILE} + COMMENT + "Target to create snapshot file ${args_OUTPUT_FILE} for trailbook ${args_TRAILBOOK_NAME}" + ) + set_property( + TARGET + trailbook_${args_TRAILBOOK_NAME} + APPEND + PROPERTY + ADDITIONAL_DEPS_STAGE_BUILD_SPHINX_BEFORE + ${args_OUTPUT_FILE} + trailbook_${args_TRAILBOOK_NAME}_create_snapshot + ) 
+endfunction() diff --git a/cmake/trailbook-ext-everest/fetch_async_api.cmake b/cmake/trailbook-ext-everest/fetch_async_api.cmake new file mode 100644 index 0000000000..b74b92ee7e --- /dev/null +++ b/cmake/trailbook-ext-everest/fetch_async_api.cmake @@ -0,0 +1,41 @@ +if(asyncapi-cli_DIR) + message(STATUS "Using asyncapi-cli at this location: ${asyncapi-cli_DIR}") +else() + message(STATUS "Retrieving asyncapi-cli using FetchContent") + include(FetchContent) + FetchContent_Declare( + asyncapi-cli + GIT_REPOSITORY https://github.com/asyncapi/cli.git + GIT_TAG v2.7.1 + ) + FetchContent_MakeAvailable(asyncapi-cli) + set(asyncapi-cli_DIR "${asyncapi-cli_SOURCE_DIR}") + set(asyncapi-cli_FIND_COMPONENTS "bundling") +endif() + +set(ASYNCAPI_CLI_INSTALL_SENTINEL_PATH "${CMAKE_CURRENT_BINARY_DIR}/generated") +set(ASYNCAPI_CLI_INSTALL_SENTINEL "${ASYNCAPI_CLI_INSTALL_SENTINEL_PATH}/asyncapi_cli_install_done") + +if(NOT TARGET asyncapi_cli_install_target) + add_custom_command( + OUTPUT ${ASYNCAPI_CLI_INSTALL_SENTINEL} + + # Do installation + COMMAND ${CMAKE_COMMAND} -E chdir ${asyncapi-cli_DIR} npm install + COMMAND ${CMAKE_COMMAND} -E chdir ${asyncapi-cli_DIR} npm run build + + # Create sentinel file + COMMAND ${CMAKE_COMMAND} -E make_directory ${ASYNCAPI_CLI_INSTALL_SENTINEL_PATH} + COMMAND ${CMAKE_COMMAND} -E touch ${ASYNCAPI_CLI_INSTALL_SENTINEL} + + COMMENT "AsyncApi/cli Install once only" + ) + + add_custom_target(asyncapi_cli_install_target + DEPENDS ${ASYNCAPI_CLI_INSTALL_SENTINEL} + ) +else() + message(STATUS "Skipping definition of 'asyncapi_cli_install_target'; already exists.") +endif() + +set(ASYNCAPI_CMD ${asyncapi-cli_DIR}/bin/run) \ No newline at end of file diff --git a/cmake/trailbook-ext-everest/fetch_async_api_html_template.cmake b/cmake/trailbook-ext-everest/fetch_async_api_html_template.cmake new file mode 100644 index 0000000000..92601bb537 --- /dev/null +++ b/cmake/trailbook-ext-everest/fetch_async_api_html_template.cmake @@ -0,0 +1,45 @@ 
+if(asyncapi-html-template_DIR) + message(STATUS "Using existing asyncapi-html-template location: ${asyncapi-html-template_DIR}") +else() + find_package(asyncapi-html-template + COMPONENTS bundling + PATHS ../asyncapi-html-template + ) + + if(NOT asyncapi-html-template_FOUND) + message(STATUS "Retrieving asyncapi-html-template using FetchContent") + include(FetchContent) + FetchContent_Declare( + asyncapi-html-template + GIT_REPOSITORY https://github.com/asyncapi/html-template.git + GIT_TAG v3.0.0 + ) + FetchContent_MakeAvailable(asyncapi-html-template) + set(asyncapi-html-template_DIR "${asyncapi-html-template_SOURCE_DIR}") + set(asyncapi-html-template_FIND_COMPONENTS "bundling") + endif() +endif() + +set(ASYNCAPI_HTML_TEMPLATE_INSTALL_SENTINEL_PATH "${CMAKE_CURRENT_BINARY_DIR}/generate") +set(ASYNCAPI_HTML_TEMPLATE_INSTALL_SENTINEL "${ASYNCAPI_HTML_TEMPLATE_INSTALL_SENTINEL_PATH}/asyncapi_html_template_install_done") + +if(NOT TARGET asyncapi_html_template_install_target) + add_custom_command( + OUTPUT ${ASYNCAPI_HTML_TEMPLATE_INSTALL_SENTINEL} + + # Do installation + COMMAND ${CMAKE_COMMAND} -E chdir ${asyncapi-html-template_DIR} npm install + + # Create sentinel file + COMMAND ${CMAKE_COMMAND} -E make_directory ${ASYNCAPI_HTML_TEMPLATE_INSTALL_SENTINEL_PATH} + COMMAND ${CMAKE_COMMAND} -E touch ${ASYNCAPI_HTML_TEMPLATE_INSTALL_SENTINEL} + + COMMENT "AsyncApi/html-template Install once only" + ) + + add_custom_target(asyncapi_html_template_install_target + DEPENDS ${ASYNCAPI_HTML_TEMPLATE_INSTALL_SENTINEL} + ) +else() + message(STATUS "Skipping definition of 'asyncapi_html_template_install_target'; already exists.") +endif() \ No newline at end of file diff --git a/cmake/trailbook-ext-everest/generate-api-docs.cmake b/cmake/trailbook-ext-everest/generate-api-docs.cmake new file mode 100644 index 0000000000..4f10926269 --- /dev/null +++ b/cmake/trailbook-ext-everest/generate-api-docs.cmake @@ -0,0 +1,231 @@ 
+include(${CMAKE_CURRENT_LIST_DIR}/fetch_async_api_html_template.cmake)
+include(${CMAKE_CURRENT_LIST_DIR}/fetch_async_api.cmake)
+
+function(_trailbook_ev_generate_html_from_api)
+    set(options)
+    set(one_value_args
+        TRAILBOOK_NAME
+        API_FILE
+        HTML_PATH
+    )
+    set(multi_value_args)
+    cmake_parse_arguments(
+        "args"
+        "${options}"
+        "${one_value_args}"
+        "${multi_value_args}"
+        ${ARGN}
+    )
+    # Parameter TRAILBOOK_NAME
+    # - is required
+    # - there should be a target named trailbook_
+    if(NOT args_TRAILBOOK_NAME)
+        message(FATAL_ERROR "trailbook_ev_generate_html_from_api: TRAILBOOK_NAME argument is required")
+    endif()
+    if(NOT TARGET trailbook_${args_TRAILBOOK_NAME})
+        message(
+            FATAL_ERROR
+            "trailbook_ev_generate_html_from_api: No target named trailbook_${args_TRAILBOOK_NAME} found."
+            " Did you forget to call add_trailbook() first?"
+        )
+    endif()
+
+    # Parameter API_FILE
+    # - is required
+    # - must be an absolute path
+    # - must exist
+    if(NOT args_API_FILE)
+        message(FATAL_ERROR "trailbook_ev_generate_html_from_api: API_FILE argument is required")
+    endif()
+    if(NOT IS_ABSOLUTE "${args_API_FILE}")
+        message(FATAL_ERROR "trailbook_ev_generate_html_from_api: API_FILE must be an absolute path")
+    endif()
+    if(NOT EXISTS "${args_API_FILE}")
+        message(FATAL_ERROR "trailbook_ev_generate_html_from_api: API_FILE must exist")
+    endif()
+
+    # Parameter HTML_PATH
+    # - is required
+    # - must be a relative path
+    if(NOT args_HTML_PATH)
+        message(FATAL_ERROR "trailbook_ev_generate_html_from_api: HTML_PATH argument is required")
+    endif()
+    if(IS_ABSOLUTE "${args_HTML_PATH}")
+        message(FATAL_ERROR "trailbook_ev_generate_html_from_api: HTML_PATH must be a relative path: ${args_HTML_PATH}")
+    endif()
+
+    get_target_property(
+        TRAILBOOK_INSTANCE_BUILD_DIRECTORY
+        trailbook_everest
+        TRAILBOOK_INSTANCE_BUILD_DIRECTORY
+    )
+
+    get_target_property(
+        TRAILBOOK_BUILD_DIRECTORY
+        trailbook_everest
+        TRAILBOOK_BUILD_DIRECTORY
+    )
+
+    get_filename_component(API_NAME ${args_API_FILE} 
NAME_WE) + + set(ASYNC_TARGET_NAME ${API_NAME}_AsyncApi) + set(GENERATED_HTML_PATH ${TRAILBOOK_INSTANCE_BUILD_DIRECTORY}/${args_HTML_PATH}/${API_NAME}) + set(PREVIEW_HTML_PATH ${TRAILBOOK_BUILD_DIRECTORY}/latest/reference/api/${API_NAME}) + set(COPY_FOR_PREVIEW_SENTINEL ${GENERATED_HTML_PATH}/copy_for_preview_sentinel) + + add_custom_command( + OUTPUT + ${GENERATED_HTML_PATH}/index.html + COMMAND + ${ASYNCAPI_CMD} generate fromTemplate ${args_API_FILE} ${asyncapi-html-template_SOURCE_DIR} --force-write --use-new-generator --output=${GENERATED_HTML_PATH} + DEPENDS + ${args_API_FILE} + COMMENT + "${API_NAME}: Generate AsyncApi HTML documentation" + ) + + add_custom_target( + trailbook_${args_TRAILBOOK_NAME}_generate_html_from_api_${ASYNC_TARGET_NAME} + DEPENDS + ${GENERATED_HTML_PATH}/index.html + trailbook_${args_TRAILBOOK_NAME}_stage_prepare_sphinx_source_after + ${DEPS_STAGE_PREPARE_SPHINX_SOURCE_AFTER} + asyncapi_cli_install_target + asyncapi_html_template_install_target + COMMENT + "Target to generate HTML files in ${GENERATED_HTML_PATH} from api definition ${args_API_FILE}" + ) + + set_property( + TARGET + trailbook_${args_TRAILBOOK_NAME} + APPEND + PROPERTY + ADDITIONAL_DEPS_STAGE_BUILD_SPHINX_BEFORE + ${GENERATED_HTML_PATH}/index.html + trailbook_${args_TRAILBOOK_NAME}_generate_html_from_api_${ASYNC_TARGET_NAME} + ) + + add_custom_command( + OUTPUT ${COPY_FOR_PREVIEW_SENTINEL} + + COMMAND ${CMAKE_COMMAND} -E remove_directory "${PREVIEW_HTML_PATH}" + COMMAND ${CMAKE_COMMAND} -E make_directory "${PREVIEW_HTML_PATH}" + COMMAND ${CMAKE_COMMAND} -E copy_directory "${GENERATED_HTML_PATH}" "${PREVIEW_HTML_PATH}" + COMMAND ${CMAKE_COMMAND} -E touch ${COPY_FOR_PREVIEW_SENTINEL} + + DEPENDS + ${GENERATED_HTML_PATH}/index.html + + COMMENT "Copy autogenerated AsyncAPI HTML files to a path where it is available for preview" + ) + add_custom_target( + trailbook_${args_TRAILBOOK_NAME}_copy_asyncapi_html_to_preview_${ASYNC_TARGET_NAME} + DEPENDS ${COPY_FOR_PREVIEW_SENTINEL} 
+    )
+
+    add_dependencies(trailbook_${args_TRAILBOOK_NAME}_copy_asyncapi_html_to_preview_${ASYNC_TARGET_NAME} trailbook_${args_TRAILBOOK_NAME}_generate_html_from_api_${ASYNC_TARGET_NAME})
+    add_dependencies(trailbook_${args_TRAILBOOK_NAME}_preview trailbook_${args_TRAILBOOK_NAME}_copy_asyncapi_html_to_preview_${ASYNC_TARGET_NAME})
+endfunction()
+
+
+function(trailbook_ev_generate_api_doc)
+    set(options)
+    set(one_value_args
+        TRAILBOOK_NAME
+    )
+    set(multi_value_args
+        API_FILES)
+    cmake_parse_arguments(
+        "args"
+        "${options}"
+        "${one_value_args}"
+        "${multi_value_args}"
+        ${ARGN}
+    )
+
+    # Parameter TRAILBOOK_NAME
+    # - is required
+    # - there should be a target named trailbook_
+    if(NOT args_TRAILBOOK_NAME)
+        message(FATAL_ERROR "trailbook_ev_generate_api_doc: TRAILBOOK_NAME argument is required")
+    endif()
+    if(NOT TARGET trailbook_${args_TRAILBOOK_NAME})
+        message(
+            FATAL_ERROR
+            "trailbook_ev_generate_api_doc: No target named trailbook_${args_TRAILBOOK_NAME} found."
+            " Did you forget to call add_trailbook() first?"
+    )
+    endif()
+
+    # Parameter API_FILES
+    # - is required
+    if(NOT args_API_FILES)
+        message(FATAL_ERROR "trailbook_ev_generate_api_doc: API_FILES argument is required")
+    endif()
+
+    get_target_property(
+        TRAILBOOK_INSTANCE_SOURCE_DIRECTORY
+        trailbook_everest
+        TRAILBOOK_INSTANCE_SOURCE_DIRECTORY
+    )
+
+    # Make a list of all API names
+    set(API_NAMES "")
+
+    foreach(API_FILE ${args_API_FILES})
+        get_filename_component(API_NAME ${API_FILE} NAME_WE)
+        set(DESTINATION_SUBFOLDER "reference/api/")
+        list(APPEND API_NAMES "${API_NAME}")
+
+        _trailbook_ev_generate_html_from_api(
+            TRAILBOOK_NAME "everest"
+            API_FILE "${API_FILE}"
+            HTML_PATH "${DESTINATION_SUBFOLDER}"
+        )
+    endforeach()
+
+    list(JOIN API_NAMES "," CSV_API_NAMES)
+
+    set(INDEX_RST_FILE "${TRAILBOOK_INSTANCE_SOURCE_DIRECTORY}/${DESTINATION_SUBFOLDER}/autogenerated_api_index.rst")
+    set(PYTHON_SCRIPT "${CMAKE_CURRENT_FUNCTION_LIST_DIR}/process_api_template.py")
+    set(TEMPLATES_DIRECTORY "${CMAKE_CURRENT_FUNCTION_LIST_DIR}/templates")
+    set(TEMPLATES_FILE "${TEMPLATES_DIRECTORY}/api_index.rst.jinja")
+
+    add_custom_command(
+        OUTPUT ${INDEX_RST_FILE}
+        COMMAND ${Python3_EXECUTABLE}
+            ${PYTHON_SCRIPT}
+            --template-dir "${TEMPLATES_DIRECTORY}"
+            --template-file "${TEMPLATES_FILE}"
+            --apis "${CSV_API_NAMES}"
+            --target-file "${INDEX_RST_FILE}"
+        DEPENDS
+            ${PYTHON_SCRIPT}
+            ${TEMPLATES_FILE}
+            ${TEMPLATES_DIRECTORY}/macros.jinja
+            ${args_API_FILES}
+        COMMENT
+            "Generating RST index for API doc"
+    )
+
+    add_custom_target(
+        trailbook_${args_TRAILBOOK_NAME}_generate_rst_api_index
+        DEPENDS
+            ${INDEX_RST_FILE}
+            trailbook_${args_TRAILBOOK_NAME}_stage_prepare_sphinx_source_after
+            ${DEPS_STAGE_PREPARE_SPHINX_SOURCE_AFTER}
+        COMMENT
+            "Target to generate RST file ${INDEX_RST_FILE}"
+    )
+
+    set_property(
+        TARGET
+            trailbook_${args_TRAILBOOK_NAME}
+        APPEND
+        PROPERTY
+            ADDITIONAL_DEPS_STAGE_BUILD_SPHINX_BEFORE
+            ${INDEX_RST_FILE}
+            trailbook_${args_TRAILBOOK_NAME}_generate_rst_api_index
+    )
+endfunction()
\ No newline at end
of file diff --git a/cmake/trailbook-ext-everest/generate-rst-from-interface.cmake b/cmake/trailbook-ext-everest/generate-rst-from-interface.cmake new file mode 100644 index 0000000000..80f8a0fac4 --- /dev/null +++ b/cmake/trailbook-ext-everest/generate-rst-from-interface.cmake @@ -0,0 +1,128 @@ +# This macro is for internal use only +# +# It is used in the function trailbook_ev_generate_rst_from_interface(). +# It adds an custom command to generate the RST file from the interface definition file +macro(_trailbook_ev_generate_rst_from_interface_generate_command) + get_filename_component(INTERFACE_NAME ${args_INTERFACE_FILE} NAME_WE) + set(GENERATED_FILE "${TRAILBOOK_EV_REFERENCE_INTERFACES_DIRECTORY}/${INTERFACE_NAME}.rst") + set(TEMPLATES_DIRECTORY "${CMAKE_CURRENT_FUNCTION_LIST_DIR}/templates") + add_custom_command( + OUTPUT + ${GENERATED_FILE} + DEPENDS + trailbook_${args_TRAILBOOK_NAME}_stage_prepare_sphinx_source_after + ${DEPS_STAGE_PREPARE_SPHINX_SOURCE_AFTER} + ${CMAKE_CURRENT_FUNCTION_LIST_DIR}/process_template.py + ${args_INTERFACE_FILE} + ${TEMPLATES_DIRECTORY}/interface.rst.jinja + ${TEMPLATES_DIRECTORY}/macros.jinja + COMMENT + "Generating RST file ${GENERATED_FILE} from interface definition ${args_INTERFACE_FILE}" + COMMAND + ${Python3_EXECUTABLE} + ${CMAKE_CURRENT_FUNCTION_LIST_DIR}/process_template.py + --template-dir "${TEMPLATES_DIRECTORY}" + --template-file "${TEMPLATES_DIRECTORY}/interface.rst.jinja" + --name "${INTERFACE_NAME}" + --data-file "${args_INTERFACE_FILE}" + --errors-yaml-path "${CMAKE_SOURCE_DIR}/errors/" + --target-file "${GENERATED_FILE}" + ) +endmacro() + + +# This function generates an RST file from an interface definition file. +# +# Arguments: +# TRAILBOOK_NAME (required): Name of the trailbook instance. 
+# INTERFACE_FILE (required): Path to the interface definition file +# Usage: +# trailbook_ev_generate_rst_from_interface( +# TRAILBOOK_NAME +# INTERFACE_FILE +# ) +function(trailbook_ev_generate_rst_from_interface) + set(options) + set(one_value_args + TRAILBOOK_NAME + INTERFACE_FILE + ) + set(multi_value_args) + cmake_parse_arguments( + "args" + "${options}" + "${one_value_args}" + "${multi_value_args}" + ${ARGN} + ) + + # Parameter TRAILBOOK_NAME + # - is required + # - there should be a target named trailbook_ + if(NOT args_TRAILBOOK_NAME) + message(FATAL_ERROR "trailbook_ext_ev_generate_rst_from_interface: TRAILBOOK_NAME argument is required") + endif() + if(NOT TARGET trailbook_${args_TRAILBOOK_NAME}) + message( + FATAL_ERROR + "trailbook_ext_ev_generate_rst_from_interface: No target named trailbook_${args_TRAILBOOK_NAME} found." + " Did you forget to call add_trailbook() first?" + ) + endif() + + # Parameter INTERFACE_FILE + # - is required + # - must be a absolute path + # - must exist + if(NOT args_INTERFACE_FILE) + message(FATAL_ERROR "trailbook_ext_ev_generate_rst_from_interface: INTERFACE_FILE argument is required") + endif() + if(NOT IS_ABSOLUTE "${args_INTERFACE_FILE}") + message(FATAL_ERROR "trailbook_ext_ev_generate_rst_from_interface: INTERFACE_FILE must be an absolute path") + endif() + if(NOT EXISTS "${args_INTERFACE_FILE}") + message(FATAL_ERROR "trailbook_ext_ev_generate_rst_from_interface: INTERFACE_FILE must exist") + endif() + + get_target_property( + TRAILBOOK_INSTANCE_SOURCE_DIRECTORY + trailbook_${args_TRAILBOOK_NAME} + TRAILBOOK_INSTANCE_SOURCE_DIRECTORY + ) + get_target_property( + TRAILBOOK_CURRENT_BINARY_DIR + trailbook_${args_TRAILBOOK_NAME} + TRAILBOOK_CURRENT_BINARY_DIR + ) + get_target_property( + DEPS_STAGE_PREPARE_SPHINX_SOURCE_AFTER + trailbook_${args_TRAILBOOK_NAME} + DEPS_STAGE_PREPARE_SPHINX_SOURCE_AFTER + ) + + + set(TRAILBOOK_EV_REFERENCE_DIRECTORY "${TRAILBOOK_INSTANCE_SOURCE_DIRECTORY}/reference") + 
set(TRAILBOOK_EV_REFERENCE_INTERFACES_DIRECTORY "${TRAILBOOK_EV_REFERENCE_DIRECTORY}/interfaces") + + + _trailbook_ev_generate_rst_from_interface_generate_command() + + add_custom_target( + trailbook_${args_TRAILBOOK_NAME}_generate_rst_from_interface_${INTERFACE_NAME} + DEPENDS + ${GENERATED_FILE} + trailbook_${args_TRAILBOOK_NAME}_stage_prepare_sphinx_source_after + ${DEPS_STAGE_PREPARE_SPHINX_SOURCE_AFTER} + COMMENT + "Target to generate RST file ${GENERATED_FILE} from interface definition ${args_INTERFACE_FILE}" + ) + set_property( + TARGET + trailbook_${args_TRAILBOOK_NAME} + APPEND + PROPERTY + ADDITIONAL_DEPS_STAGE_BUILD_SPHINX_BEFORE + ${GENERATED_FILE} + trailbook_${args_TRAILBOOK_NAME}_generate_rst_from_interface_${INTERFACE_NAME} + ) +endfunction() diff --git a/cmake/trailbook-ext-everest/generate-rst-from-manifest.cmake b/cmake/trailbook-ext-everest/generate-rst-from-manifest.cmake new file mode 100644 index 0000000000..d057c8aed5 --- /dev/null +++ b/cmake/trailbook-ext-everest/generate-rst-from-manifest.cmake @@ -0,0 +1,214 @@ +# This macro is for internal use only +# +# It is used in the function trailbook_ev_generate_rst_from_manifest(). 
+# It adds an custom command to generate the RST file from the manifest file +macro(_trailbook_ev_generate_rst_from_manifest_generate_command) + string(REPLACE "/manifest.yaml" ".rst" GENERATED_FILE "${TRAILBOOK_EV_REFERENCE_MODULES_DIRECTORY}/${RELATIVE_PATH_MANIFEST}") + + get_filename_component(RELATIVE_PATH ${RELATIVE_PATH_MANIFEST} DIRECTORY) + + set(GENERATED_FILE "${TRAILBOOK_EV_REFERENCE_MODULES_DIRECTORY}/${RELATIVE_PATH}/autogenerated.rst") + get_filename_component(GENERATED_PATH ${GENERATED_FILE} DIRECTORY) + set(TEMPLATES_DIRECTORY "${CMAKE_CURRENT_FUNCTION_LIST_DIR}/templates") + if(EXISTS "${MODULE_DIR}/docs/") + set(HANDWRITTEN_MODULE_DOC "--module-handwritten-doc" "index.inc") + endif() + + add_custom_command( + OUTPUT + ${GENERATED_FILE} + DEPENDS + ${CMAKE_CURRENT_FUNCTION_LIST_DIR}/process_template.py + ${args_MANIFEST_FILE} + ${TEMPLATES_DIRECTORY}/module.rst.jinja + ${TEMPLATES_DIRECTORY}/macros.jinja + trailbook_${args_TRAILBOOK_NAME}_stage_prepare_sphinx_source_after + ${DEPS_STAGE_PREPARE_SPHINX_SOURCE_AFTER} + COMMENT + "Generating RST file ${GENERATED_FILE} from manifest ${args_MANIFEST_FILE}" + COMMAND + ${CMAKE_COMMAND} -E make_directory "${GENERATED_PATH}" + COMMAND + ${Python3_EXECUTABLE} + ${CMAKE_CURRENT_FUNCTION_LIST_DIR}/process_template.py + --template-dir "${TEMPLATES_DIRECTORY}" + --template-file "${TEMPLATES_DIRECTORY}/module.rst.jinja" + --name "${MODULE_NAME}" + --data-file "${args_MANIFEST_FILE}" + --target-file "${GENERATED_FILE}" + ${HANDWRITTEN_MODULE_DOC} + ) + + set(TOP_LEVEL_MODULE_DIRECTORY "${CMAKE_SOURCE_DIR}") + get_filename_component(CURRENT_RELATIVE_FOLDER_TMP "${RELATIVE_PATH_MANIFEST}" DIRECTORY) + get_filename_component(CURRENT_RELATIVE_FOLDER "modules/${CURRENT_RELATIVE_FOLDER_TMP}" DIRECTORY) + + set(INDEX_TOC_GLOB_EXPRESSION " */autogenerated") + + while(NOT CURRENT_RELATIVE_FOLDER STREQUAL "modules") + + set(CURRENT_INDEX_DIRECTORY_CONCAT "${CMAKE_SOURCE_DIR}/${CURRENT_RELATIVE_FOLDER}") + 
get_filename_component(CURRENT_INDEX_DIRECTORY "${CURRENT_INDEX_DIRECTORY_CONCAT}" ABSOLUTE) + set(CURRENT_DEST_DIRECTORY_CONCAT "${TRAILBOOK_EV_REFERENCE_DIRECTORY}/${CURRENT_RELATIVE_FOLDER}") + get_filename_component(CURRENT_DEST_DIRECTORY "${CURRENT_DEST_DIRECTORY_CONCAT}" ABSOLUTE) + + set(INDEX_FILE "${CURRENT_DEST_DIRECTORY}/index.rst") + get_filename_component(CURRENT_FOLDER_NAME "${CURRENT_INDEX_DIRECTORY}" NAME) + string(REPLACE "/" "_" INDEX_TARGET_SUFFIX "${CURRENT_RELATIVE_FOLDER}") + get_property(is_create_index_cmd_added DIRECTORY "${CURRENT_INDEX_DIRECTORY}" PROPERTY did_add_create_index_cmd SET) + + if (NOT ${is_create_index_cmd_added}) + message(VERBOSE "ADDING DEFINITION OF add_custom_command FOR THE INDEX FILE OF ${CURRENT_DEST_DIRECTORY} FOR ${INDEX_FILE}") + + set(INDEX_TEMPLATE_FILE "${TEMPLATES_DIRECTORY}/module_ref_index.rst.jinja") + + set(CUSTOM_TEMPLATE_SUBSTITUTION_SCRIPT "${CMAKE_CURRENT_BINARY_DIR}/DOCS_process_index_template_${INDEX_TARGET_SUFFIX}.py") + + configure_file( + "${CMAKE_CURRENT_FUNCTION_LIST_DIR}/process_index_template.py.in" + "${CUSTOM_TEMPLATE_SUBSTITUTION_SCRIPT}" + @ONLY + ) + + add_custom_command( + OUTPUT + "${INDEX_FILE}" + COMMAND ${Python3_EXECUTABLE} "${CUSTOM_TEMPLATE_SUBSTITUTION_SCRIPT}" + DEPENDS + "${INDEX_TEMPLATE_FILE}" + "${CUSTOM_TEMPLATE_SUBSTITUTION_SCRIPT}" + trailbook_${args_TRAILBOOK_NAME}_stage_prepare_sphinx_source_after + ${DEPS_STAGE_PREPARE_SPHINX_SOURCE_AFTER} + COMMENT "Rendering Jinja2 template for ${INDEX_TARGET_SUFFIX}" + VERBATIM + ) + + set(INDEX_TOC_GLOB_EXPRESSION " */index") + set_property( + DIRECTORY "${CURRENT_INDEX_DIRECTORY}" + PROPERTY did_add_create_index_cmd + "TRUE" + ) + add_custom_target( + trailbook_${args_TRAILBOOK_NAME}_generate_reference_index_for_module_${INDEX_TARGET_SUFFIX} + DEPENDS + "${INDEX_FILE}" + trailbook_${args_TRAILBOOK_NAME}_stage_prepare_sphinx_source_after + ${DEPS_STAGE_PREPARE_SPHINX_SOURCE_AFTER} + COMMENT + "Target to generate RST file 
${INDEX_FILE}" + ) + set_property( + TARGET + trailbook_${args_TRAILBOOK_NAME} + APPEND + PROPERTY + ADDITIONAL_DEPS_STAGE_BUILD_SPHINX_BEFORE + "${INDEX_FILE}" + trailbook_${args_TRAILBOOK_NAME}_generate_reference_index_for_module_${INDEX_TARGET_SUFFIX} + ) + else() + message(VERBOSE "SKIPPING DEFINITION OF add_custom_command FOR THE INDEX FILE OF ${CURRENT_FOLDER_NAME}") + endif() + + # go up one level + get_filename_component(CURRENT_RELATIVE_FOLDER "${CURRENT_RELATIVE_FOLDER}" DIRECTORY) + endwhile() +endmacro() + + +# This function generates an RST file from a manifest definition file. +# It takes the following arguments: +# TRAILBOOK_NAME (required): The name of the trailbook. +# MANIFEST_FILE (required): The absolute path to the manifest +# definition file. +# Usage: +# trailbook_ev_generate_rst_from_manifest( +# TRAILBOOK_NAME +# MANIFEST_FILE +# ) +function(trailbook_ev_generate_rst_from_manifest) + set(options) + set(one_value_args + TRAILBOOK_NAME + MANIFEST_FILE + ) + set(multi_value_args) + cmake_parse_arguments( + "args" + "${options}" + "${one_value_args}" + "${multi_value_args}" + ${ARGN} + ) + + # Parameter TRAILBOOK_NAME + # - is required + # - there should be a target named trailbook_ + if(NOT args_TRAILBOOK_NAME) + message(FATAL_ERROR "trailbook_ext_ev_generate_rst_from_manifest: TRAILBOOK_NAME argument is required") + endif() + if(NOT TARGET trailbook_${args_TRAILBOOK_NAME}) + message( + FATAL_ERROR + "trailbook_ext_ev_generate_rst_from_manifest: No target named trailbook_${args_TRAILBOOK_NAME} found." + " Did you forget to call add_trailbook() first?" 
+ ) + endif() + + # Parameter MANIFEST_FILE + # - is required + # - must be a absolute path + # - must exist + if(NOT args_MANIFEST_FILE) + message(FATAL_ERROR "trailbook_ext_ev_generate_rst_from_manifest: MANIFEST_FILE argument is required") + endif() + if(NOT IS_ABSOLUTE "${args_MANIFEST_FILE}") + message(FATAL_ERROR "trailbook_ext_ev_generate_rst_from_manifest: MANIFEST_FILE must be an absolute path") + endif() + if(NOT EXISTS "${args_MANIFEST_FILE}") + message(FATAL_ERROR "trailbook_ext_ev_generate_rst_from_manifest: MANIFEST_FILE must exist") + endif() + + get_target_property( + TRAILBOOK_INSTANCE_SOURCE_DIRECTORY + trailbook_${args_TRAILBOOK_NAME} + TRAILBOOK_INSTANCE_SOURCE_DIRECTORY + ) + get_target_property( + DEPS_STAGE_PREPARE_SPHINX_SOURCE_AFTER + trailbook_${args_TRAILBOOK_NAME} + DEPS_STAGE_PREPARE_SPHINX_SOURCE_AFTER + ) + + set(TRAILBOOK_EV_REFERENCE_DIRECTORY "${TRAILBOOK_INSTANCE_SOURCE_DIRECTORY}/reference") + set(TRAILBOOK_EV_REFERENCE_MODULES_DIRECTORY "${TRAILBOOK_EV_REFERENCE_DIRECTORY}/modules") + get_filename_component(MODULE_DIR ${args_MANIFEST_FILE} DIRECTORY) + get_filename_component(MODULE_NAME ${MODULE_DIR} NAME_WE) + + file(RELATIVE_PATH RELATIVE_PATH_MANIFEST + "${CMAKE_SOURCE_DIR}/modules" + "${args_MANIFEST_FILE}" + ) + + _trailbook_ev_generate_rst_from_manifest_generate_command() + + add_custom_target( + trailbook_${args_TRAILBOOK_NAME}_generate_rst_from_manifest_${MODULE_NAME} + DEPENDS + ${GENERATED_FILE} + trailbook_${args_TRAILBOOK_NAME}_stage_prepare_sphinx_source_after + ${DEPS_STAGE_PREPARE_SPHINX_SOURCE_AFTER} + COMMENT + "Target to generate RST file ${GENERATED_FILE} from manifest definition ${args_MANIFEST_FILE}" + ) + set_property( + TARGET + trailbook_${args_TRAILBOOK_NAME} + APPEND + PROPERTY + ADDITIONAL_DEPS_STAGE_BUILD_SPHINX_BEFORE + ${GENERATED_FILE} + trailbook_${args_TRAILBOOK_NAME}_generate_rst_from_manifest_${MODULE_NAME} + ) +endfunction() diff --git 
a/cmake/trailbook-ext-everest/generate-rst-from-types.cmake b/cmake/trailbook-ext-everest/generate-rst-from-types.cmake new file mode 100644 index 0000000000..b78182173a --- /dev/null +++ b/cmake/trailbook-ext-everest/generate-rst-from-types.cmake @@ -0,0 +1,117 @@ +# This macro is for internal use only +# +# It is used in the function trailbook_ev_generate_rst_from_types(). +# It adds an custom command to generate the RST file from the types definition file +macro(_trailbook_ev_generate_rst_from_types_generate_command) + get_filename_component(TYPES_NAME ${args_TYPES_FILE} NAME_WE) + set(GENERATED_FILE "${TRAILBOOK_EV_REFERENCE_TYPES_DIRECTORY}/${TYPES_NAME}.rst") + set(TEMPLATES_DIRECTORY "${CMAKE_CURRENT_FUNCTION_LIST_DIR}/templates") + add_custom_command( + OUTPUT + ${GENERATED_FILE} + DEPENDS + ${CMAKE_CURRENT_FUNCTION_LIST_DIR}/process_template.py + ${args_TYPES_FILE} + ${TEMPLATES_DIRECTORY}/types.rst.jinja + ${TEMPLATES_DIRECTORY}/macros.jinja + COMMENT + "Generating RST file ${GENERATED_FILE} from types definition ${args_TYPES_FILE}" + COMMAND + ${Python3_EXECUTABLE} + ${CMAKE_CURRENT_FUNCTION_LIST_DIR}/process_template.py + --template-dir "${TEMPLATES_DIRECTORY}" + --template-file "${TEMPLATES_DIRECTORY}/types.rst.jinja" + --name "${TYPES_NAME}" + --data-file "${args_TYPES_FILE}" + --target-file "${GENERATED_FILE}" + ) +endmacro() + + +# This function generates an RST file from a types definition file. +# It takes the following arguments: +# TRAILBOOK_NAME (required): The name of the trailbook. +# TYPES_FILE (required): The absolute path to the types definition file. 
+# Usage: +# trailbook_ev_generate_rst_from_types( +# TRAILBOOK_NAME +# TYPES_FILE +# ) +function(trailbook_ev_generate_rst_from_types) + set(options) + set(one_value_args + TRAILBOOK_NAME + TYPES_FILE + ) + set(multi_value_args) + cmake_parse_arguments( + "args" + "${options}" + "${one_value_args}" + "${multi_value_args}" + ${ARGN} + ) + + # Parameter TRAILBOOK_NAME + # - is required + # - there should be a target named trailbook_ + if(NOT args_TRAILBOOK_NAME) + message(FATAL_ERROR "trailbook_ext_ev_generate_rst_from_types: TRAILBOOK_NAME argument is required") + endif() + if(NOT TARGET trailbook_${args_TRAILBOOK_NAME}) + message( + FATAL_ERROR + "trailbook_ext_ev_generate_rst_from_types: No target named trailbook_${args_TRAILBOOK_NAME} found." + " Did you forget to call add_trailbook() first?" + ) + endif() + + # Parameter TYPES_FILE + # - is required + # - must be a absolute path + # - must exist + if(NOT args_TYPES_FILE) + message(FATAL_ERROR "trailbook_ext_ev_generate_rst_from_types: TYPES_FILE argument is required") + endif() + if(NOT IS_ABSOLUTE "${args_TYPES_FILE}") + message(FATAL_ERROR "trailbook_ext_ev_generate_rst_from_types: TYPES_FILE must be an absolute path") + endif() + if(NOT EXISTS "${args_TYPES_FILE}") + message(FATAL_ERROR "trailbook_ext_ev_generate_rst_from_types: TYPES_FILE must exist") + endif() + + get_target_property( + TRAILBOOK_INSTANCE_SOURCE_DIRECTORY + trailbook_${args_TRAILBOOK_NAME} + TRAILBOOK_INSTANCE_SOURCE_DIRECTORY + ) + get_target_property( + DEPS_STAGE_PREPARE_SPHINX_SOURCE_AFTER + trailbook_${args_TRAILBOOK_NAME} + DEPS_STAGE_PREPARE_SPHINX_SOURCE_AFTER + ) + + set(TRAILBOOK_EV_REFERENCE_DIRECTORY "${TRAILBOOK_INSTANCE_SOURCE_DIRECTORY}/reference") + set(TRAILBOOK_EV_REFERENCE_TYPES_DIRECTORY "${TRAILBOOK_EV_REFERENCE_DIRECTORY}/types") + + _trailbook_ev_generate_rst_from_types_generate_command() + + add_custom_target( + trailbook_${args_TRAILBOOK_NAME}_generate_rst_from_types_${TYPES_NAME} + DEPENDS + ${GENERATED_FILE} + 
trailbook_${args_TRAILBOOK_NAME}_stage_prepare_sphinx_source_after + ${DEPS_STAGE_PREPARE_SPHINX_SOURCE_AFTER} + COMMENT + "Target to generate RST file ${GENERATED_FILE} from types definition ${args_TYPES_FILE}" + ) + set_property( + TARGET + trailbook_${args_TRAILBOOK_NAME} + APPEND + PROPERTY + ADDITIONAL_DEPS_STAGE_BUILD_SPHINX_BEFORE + ${GENERATED_FILE} + trailbook_${args_TRAILBOOK_NAME}_generate_rst_from_types_${TYPES_NAME} + ) +endfunction() diff --git a/cmake/trailbook-ext-everest/process_api_template.py b/cmake/trailbook-ext-everest/process_api_template.py new file mode 100755 index 0000000000..534f90d24e --- /dev/null +++ b/cmake/trailbook-ext-everest/process_api_template.py @@ -0,0 +1,112 @@ +import argparse +import jinja2 +from pathlib import Path + + +def rst_indent(input): + lines = input.splitlines() + lines = [f"| {line}\r\n" for line in lines] + return "".join(lines) + + +def make_rst_ref(input): + output = input.replace("/", "") + output = output.replace("#", "-") + return output + + +def main(): + parser = argparse.ArgumentParser(description="Generate RST index from file list") + parser.add_argument( + '--template-dir', + type=Path, + dest='template_dir', + action='store', + required=True, + help='Directory containing the Jinja2 template files' + ) + parser.add_argument( + '--template-file', + type=Path, + dest='template_file', + action='store', + required=True, + help='Jinja2 template file to process' + ) + parser.add_argument( + '--apis', + type=str, + dest='apis', + action='store', + required=True, + help='Comma separated list of api names' + ) + parser.add_argument( + '--target-file', + type=Path, + dest='target_file', + action='store', + required=True, + help='Output file for the processed template' + ) + + args = parser.parse_args() + + if not args.template_dir.is_absolute(): + raise ValueError("Template directory path must be absolute") + if not args.template_dir.exists(): + raise ValueError("Template directory does not exist") + if not 
args.template_dir.is_dir(): + raise ValueError("Template directory path is not a directory") + + if not args.template_file.is_absolute(): + raise ValueError("Template file path must be absolute") + if not args.template_file.exists(): + raise ValueError("Template file does not exist") + if not args.template_file.is_file(): + raise ValueError("Template file path is not a file") + if not args.template_file.is_relative_to(args.template_dir): + raise ValueError("Template file path is not relative to template directory") + + if not args.target_file.is_absolute(): + raise ValueError("Target file path must be absolute") + if args.target_file.suffix != '.rst': + raise ValueError("Target file must have a .rst extension") + + if not args.target_file.parent.exists(): + args.target_file.parent.mkdir(parents=True, exist_ok=True) + + # Split comma-separated string back into a list + api_list = args.apis.split(',') + + # turn list into dict + apis = [] + + for api_name in api_list: + if not api_name: + continue # Skip empty strings + + apis.append({"name": api_name, "path": api_name}) + + env = jinja2.Environment( + loader=jinja2.FileSystemLoader(args.template_dir), + trim_blocks=True, + lstrip_blocks=True + ) + env.filters['rst_indent'] = rst_indent + env.filters['make_rst_ref'] = make_rst_ref + + template_file_name = args.template_file.relative_to(args.template_dir) + template = env.get_template(str(template_file_name)) + output = template.render( + apis=apis + ) + args.target_file.write_text(output) + + +if __name__ == "__main__": + try: + main() + except Exception as e: + print(f"Error: {e}") + exit(1) diff --git a/cmake/trailbook-ext-everest/process_index_template.py.in b/cmake/trailbook-ext-everest/process_index_template.py.in new file mode 100644 index 0000000000..442bebe626 --- /dev/null +++ b/cmake/trailbook-ext-everest/process_index_template.py.in @@ -0,0 +1,15 @@ +import jinja2 + +template_file = r"@INDEX_TEMPLATE_FILE@" +output_file = r"@INDEX_FILE@" + +substitutions = 
{ + "HEADLINE": "@CURRENT_FOLDER_NAME@", + "TOC_GLOB_EXPRESSION": "@INDEX_TOC_GLOB_EXPRESSION@" +} + +with open(template_file, 'r') as f: + template = jinja2.Template(f.read()) + +with open(output_file, 'w') as f: + f.write(template.render(**substitutions)) diff --git a/cmake/trailbook-ext-everest/process_template.py b/cmake/trailbook-ext-everest/process_template.py new file mode 100755 index 0000000000..cf6cff4c8e --- /dev/null +++ b/cmake/trailbook-ext-everest/process_template.py @@ -0,0 +1,192 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- +# +# SPDX-License-Identifier: Apache-2.0 +# Copyright Pionix GmbH and Contributors to EVerest +# +""" +author: andreas.heinrich@pionix.de +This script processes a template file with Jinja2 and YAML data. +""" + + +import argparse +import jinja2 +import yaml +from pathlib import Path + + +def rst_indent(input): + lines = input.splitlines() + lines = [f"| {line}\r\n" for line in lines] + return "".join(lines) + + +def make_rst_ref(input): + output = input.replace("/", "") + output = output.replace("#", "-") + return output + + +def literal_rst_filter(value): + """Wraps a string in double backticks to treat it as a literal in RST.""" + str_value = str(value) + has_trailing_whitespace = (str_value != str_value.rstrip()) + cleaned_value = str_value.rstrip() + parts = cleaned_value.split(':', 1) + if len(parts) == 2 and parts[0] == "pattern": + formatted_value = f"{parts[0]}:``{parts[1].lstrip()}``" + else: + formatted_value = cleaned_value + if has_trailing_whitespace: + return formatted_value + '\r\n' + else: + return formatted_value + + +def main(): + parser = argparse.ArgumentParser(description='Processes a template file with Jinja2 and YAML data.') + parser.add_argument( + '--template-dir', + type=Path, + dest='template_dir', + action='store', + required=True, + help='Directory containing the Jinja2 template files' + ) + parser.add_argument( + '--template-file', + type=Path, + dest='template_file', + action='store', + 
required=True, + help='Jinja2 template file to process' + ) + parser.add_argument( + '--name', + type=str, + dest='name', + action='store', + required=True, + help='Name to be used in the template rendering' + ) + parser.add_argument( + '--data-file', + type=Path, + dest='data_file', + action='store', + required=True, + help='YAML file containing data for the template' + ) + parser.add_argument( + '--module-handwritten-doc', + type=Path, + dest='module_handwritten_doc', + action='store', + help='Path to the handwritten module documentation if it exists' + ) + parser.add_argument( + '--errors-yaml-path', + type=Path, + dest='errors_path', + action='store', + help='Path to the error definition yaml files' + ) + parser.add_argument( + '--target-file', + type=Path, + dest='target_file', + action='store', + required=True, + help='Output file for the processed template' + ) + args = parser.parse_args() + + if not args.template_dir.is_absolute(): + raise ValueError("Template directory path must be absolute") + if not args.template_dir.exists(): + raise ValueError("Template directory does not exist") + if not args.template_dir.is_dir(): + raise ValueError("Template directory path is not a directory") + + if not args.template_file.is_absolute(): + raise ValueError("Template file path must be absolute") + if not args.template_file.exists(): + raise ValueError("Template file does not exist") + if not args.template_file.is_file(): + raise ValueError("Template file path is not a file") + if not args.template_file.is_relative_to(args.template_dir): + raise ValueError("Template file path is not relative to template directory") + + if not args.data_file.is_absolute(): + raise ValueError("Data file path must be absolute") + if not args.data_file.exists(): + raise ValueError("Data file does not exist") + if not args.data_file.is_file(): + raise ValueError("Data file path is not a file") + if args.data_file.suffix not in ['.yml', '.yaml']: + raise ValueError("Data file must have a 
.yml or .yaml extension") + + if not args.target_file.is_absolute(): + raise ValueError("Target file path must be absolute") + if args.target_file.suffix != '.rst': + raise ValueError("Target file must have a .rst extension") + + if args.errors_path: + if not args.errors_path.is_absolute(): + raise ValueError("Errors yaml directory path must be absolute") + if not args.errors_path.exists(): + raise ValueError(f"Errors yaml directory '{args.errors_path}' does not exist") + if not args.errors_path.is_dir(): + raise ValueError("Errors yaml directory path is not a directory") + + if not args.target_file.parent.exists(): + args.target_file.parent.mkdir(parents=True, exist_ok=True) + + env = jinja2.Environment( + loader=jinja2.FileSystemLoader(args.template_dir), + trim_blocks=True, + lstrip_blocks=True + ) + env.filters['rst_indent'] = rst_indent + env.filters['make_rst_ref'] = make_rst_ref + env.filters['literal_rst'] = literal_rst_filter + + template_file_name = args.template_file.relative_to(args.template_dir) + template = env.get_template(str(template_file_name)) + data=yaml.safe_load(args.data_file.read_text()) + data["errors_sanitized"] = {} + data['error_definitions'] = {} + if args.errors_path and "errors" in data.keys(): + for err in data["errors"]: + error_path = err['reference'].split('#')[0] + + filename = Path(args.errors_path, error_path.split('/')[-1]) + with open(filename.with_suffix(".yaml")) as f: + text = f.read() + yaml_content = yaml.safe_load(text) + data['error_definitions'][error_path] = {} + for err_def in yaml_content['errors']: + data['error_definitions'][error_path][err_def['name']] = err_def['description'] + + if not error_path in data["errors_sanitized"]: + data["errors_sanitized"][error_path] = [] + if len(err['reference'].split('#')) > 1: + data["errors_sanitized"][error_path].append(err['reference'].split('#')[1][1:]) + else: + for error in data['error_definitions'][error_path].keys(): + data["errors_sanitized"][error_path].append(error) 
+    output = template.render(
+        name=args.name,
+        handwritten_module_doc=args.module_handwritten_doc,
+        data=data,
+    )
+    args.target_file.write_text(output)
+
+
+if __name__ == "__main__":
+    try:
+        main()
+    except Exception as e:
+        print(f"Error: {e}")
+        exit(1)
diff --git a/cmake/trailbook-ext-everest/templates/api_index.rst.jinja b/cmake/trailbook-ext-everest/templates/api_index.rst.jinja
new file mode 100644
index 0000000000..35a1055040
--- /dev/null
+++ b/cmake/trailbook-ext-everest/templates/api_index.rst.jinja
@@ -0,0 +1,12 @@
+{% import 'macros.jinja' as funcs %}
+
+.. toctree::
+   :maxdepth: 1
+   :glob:
+
+{{ funcs.explicit_target('everest_api') }}
+{{ funcs.h1("EVerest API Specification") }}
+
+{% for item in apis %}
+* `{{ item.name }} <{{ item.path }}/index.html>`_
+{% endfor %}
diff --git a/cmake/trailbook-ext-everest/templates/interface.rst.jinja b/cmake/trailbook-ext-everest/templates/interface.rst.jinja
new file mode 100644
index 0000000000..be3c16d6ac
--- /dev/null
+++ b/cmake/trailbook-ext-everest/templates/interface.rst.jinja
@@ -0,0 +1,24 @@
+{% import 'macros.jinja' as funcs %}
+:orphan:
+
+{{ funcs.explicit_target('everest_interfaces_' + name) }}
+{{ funcs.h1(name) }}
+{{ data.description | rst_indent() }}
+{% if 'documentation' in data %}
+{{ funcs.documentation(data.documentation) | rst_indent() -}}
+{% endif %}
+
+{% if data.vars %}
+{{ funcs.h2('Variables') -}}
+{{ funcs.vars(data.vars.items(), False) | rst_indent() -}}
+{% endif %}
+
+{% if data.cmds %}
+{{ funcs.h2('Commands') -}}
+{{ funcs.cmds(data.cmds.items()) | rst_indent() -}}
+{% endif %}
+
+{% if data.errors %}
+{{ funcs.h2('Errors') -}}
+{{ funcs.error_categories(data.errors_sanitized, data.error_definitions) | rst_indent() -}}
+{% endif %}
diff --git a/cmake/trailbook-ext-everest/templates/macros.jinja b/cmake/trailbook-ext-everest/templates/macros.jinja
new file mode 100644
index 0000000000..6ed1dcc45e
--- /dev/null
+++ b/cmake/trailbook-ext-everest/templates/macros.jinja
@@ 
-0,0 +1,359 @@ +{#################################} +{### General json macros ###} +{#################################} + +{### Renders a key-value-pair ###} +{% macro keyvalue(key, value) %} +{{ key }}:{{ value }} +{% endmacro %} + +{### Renders a sequence ###} +{% macro sequence(key, data) %} +{{ key }}: +{% for entry in data %} + - {{ entry }} +{% endfor %} +{% endmacro %} + +{### Renders a mapping ###} +{% macro mapping(key, data, ignore_keys, render_key) %} +{% set indent_width = 0 %} +{% if render_key %} +{% set indent_width = 1 %} +{{ key }}: +{% endif %} +{% for sub_key, sub_data in data %} +{% if not sub_key in ignore_keys %} +{% if sub_data is mapping %} +{{ mapping(sub_key, sub_data.items(), [], True) | indent(indent_width, True) -}} +{% elif sub_data is string %} +{{ keyvalue(sub_key, sub_data) | indent(indent_width, True) -}} +{% elif sub_data is sequence %} +{{ sequence(sub_key, sub_data) | indent(indent_width, True) -}} +{% endif %} +{% endif %} +{% endfor %} +{% endmacro %} + +{#################################} +{### General RST macros ###} +{#################################} + +{### Make H1 headline ###} +{% macro h1(title) %} +{% set title_length = title|length %} +{{ '#' * title_length }} +{{ title }} +{{ '#' * title_length }} +{% endmacro %} + +{### Make H2 headline ###} +{% macro h2(title) %} +{% set title_length = title|length %} +{{ '*' * title_length }} +{{ title }} +{{ '*' * title_length }} +{% endmacro %} + +{### Make H3 headline ###} +{% macro h3(title) %} +{% set title_length = title|length %} +{{ title }} +{{ '=' * title_length }} +{% endmacro %} + +{### Make H4 headline ###} +{% macro h4(title) %} +{% set title_length = title|length %} +{{ title }} +{{ '-' * title_length }} +{% endmacro %} + +{### Make H5 headline ###} +{% macro h5(title) %} +{% set title_length = title|length %} +{{ title }} +{{ '^' * title_length }} +{% endmacro %} + +{### Make H6 headline ###} +{% macro h6(title) %} +{% set title_length = title|length %} +{{ title 
}} +{{ '"' * title_length }} +{% endmacro %} + +{### Make explicit target ###} +{% macro explicit_target(target_name) %} + +.. _{{ target_name | make_rst_ref() }}: + +{% endmacro %} + +{### References an explicit target ###} +{% macro ref(target_name, text) %} +:ref:`{{ text }} <{{ target_name | make_rst_ref() }}>` +{%- endmacro %} + +{#################################} +{### Interface.json macros ###} +{#################################} + +{### Renders a multiline documentation ###} +{% macro documentation(lines) %} +{% for line in lines %} +{{ line }} +{% endfor %} +{% endmacro %} + +{### Renders a single var ###} +{% macro var(var_name, var_data, show_required=true, required=None) %} +{% if required == None %} +{% set required = 'default' not in var_data %} +{% endif %} +{% set optional = '' %} +{% if show_required == true %} +{% if required == true %} +{% set optional = '' %} +{% elif required == false %} +{% set optional = '' %} +{% else %} +{% include "required needs to be set" %} +{% endif %} +{% endif %} +{% set var_type = var_data.type %} +{% if not var_data.type %} +{% set var_type = "string/object" %} +{% endif %} +**{{ var_name }}**: *{{ var_type }}* {{ optional }} +{%- if '$ref' in var_data %}{{ ' (' + ref(var_data['$ref'], var_data['$ref'] | make_rst_ref()) + ')' }}{% endif +%} +{# Add default value for config entries #} +{% if 'default' in var_data %} +{% if var_data.default is string %} +{% set var_default = "\"" + var_data.default + "\"" %} +{% else %} +{% set var_default = var_data.default %} +{% endif %} +*default: {{ var_default }}* +{% endif %} +{% if var_data.description %} +{% for line in var_data.description.split('\n') %} +{% if line != '' %} + {{ line }} +{% endif %} +{% endfor %} +{% endif %} +{% if 'documentation' in var_data %} +{{ documentation(var_data['documentation']) | indent(1, True) -}} +{% endif %} +{% set ignore_keys = ['default', 'description', 'type', 'properties', 'documentation', '$ref', 'required', 'items'] %} +{% set 
mapping_result = mapping( var_name, var_data.items(), ignore_keys, False) %} +{% if mapping_result != '' %} +{{ mapping_result | literal_rst | indent(1, True) -}} +{% endif %} +{% if var_data.type == 'object' and 'properties' in var_data %} + properties: +{% if not 'required' in var_data %} +{% set all_required = True %} +{% set required_array = [] %} +{% else %} +{% set all_required = False %} +{% set required_array = var_data.required %} +{% endif %} +{{ vars(var_data.properties.items(), True, required_array, all_required) | indent(2, True) -}} +{% endif %} +{% if var_data.type == 'array' and 'items' in var_data %} +{{ var('array_item', var_data['items'], False, True) | indent(1, True) -}} +{% endif %} +{% endmacro %} + +{### Renders a list of vars ###} +{% macro vars(vars, show_required=true, required=[], all_required=False) %} +{% for var_name, var_data in vars %} +{% if show_required == true %} +{% if all_required %} +{% set is_required = True %} +{% else %} +{% set is_required = var_name in required %} +{% endif %} +{% else %} +{% set is_required = None %} +{% endif %} +{{ var(var_name, var_data, show_required, is_required) -}} +{% endfor %} +{% endmacro %} + + +{### Renders cmd result ###} +{% macro cmd_result(result_data) %} +{{ var('Result', result_data, False) -}} +{% endmacro %} + +{### Renders single cmd argument ###} +{% macro cmd_argument(arg_name, arg_data) %} +{{ var(arg_name, arg_data, True) -}} +{% endmacro %} + +{### Renders cmd arguments ###} +{% macro cmd_arguments(args) %} +{% for arg_name, arg_data in args %} +{{ cmd_argument(arg_name, arg_data) -}} +{% endfor %} +{% endmacro %} + +{### Renders a single cmd ###} +{% macro cmd(cmd_name, cmd_data) %} +{% if 'result' in cmd_data %} +{% set type_string = cmd_data.result.type %} +{% else %} +{% set type_string = 'void' %} +{% endif %} +**{{ cmd_name }}**:*{{ type_string }}* + {{ cmd_data.description }} +{% if 'arguments' in cmd_data %} +{{ cmd_arguments(cmd_data.arguments.items()) | indent(1, 
True) -}} +{% endif %} +{% if 'result' in cmd_data %} +{{ cmd_result(cmd_data['result']) | indent(1, True) -}} +{% endif %} +{% if 'documentation' in cmd_data %} +{{ documentation(cmd_data['documentation']) | indent(1, True) -}} +{% endif %} +{% endmacro %} + +{### Renders a list of cmds ###} +{% macro cmds(cmds) %} +{% for cmd_name, cmd_data in cmds %} +{{ cmd(cmd_name, cmd_data) -}} +{% endfor %} +{% endmacro %} + + +{### Renders a list of errors ###} +{% macro errors(errs, definitions) %} +{% for err in errs %} +**{{ err }}** : {{ definitions[err] }} +{% endfor %} +{% endmacro %} + +{### Renders a list of error_categories ###} +{% macro error_categories(categories, error_definitions) %} +{% for cat in categories %} +**{{ cat }}** : +{{ errors(categories[cat], error_definitions[cat]) | indent(1, True) -}} +{% endfor %} +{% endmacro %} + +{#################################} +{### types.json macros ###} +{#################################} + +{### Renders a single type ###} +{% macro type(type_name, type_data, file_name) %} +{% set target_name = '/' + file_name + '#/' + type_name %} +{{ explicit_target(target_name) -}} +{{ var(type_name, type_data, False) | rst_indent() -}} +{% endmacro %} + +{### Renders a list of types ###} +{% macro types(types, file_name) %} +{% for type_name, type_data in types %} +{{ type(type_name, type_data, file_name) -}} +{% endfor %} +{% endmacro %} + +{#################################} +{### manifest.json macros ###} +{#################################} + +{### Renders a single config entry ###} +{% macro config_entry(name, data) %} +{{ var(name, data, True) -}} +{% endmacro %} + +{### Renders a list of config entries ###} +{% macro config(config_data) %} +{% for entry_name, entry_data in config_data %} +{{ config_entry(entry_name, entry_data) -}} +{% endfor %} +{% endmacro %} + +{### Renders a single impl ###} +{% macro impl(name, data) %} +{% set interface_target = 'everest_interfaces_' + data.interface %} +**{{ name }}**: {{ 
ref(interface_target, data.interface) }}
+ {{ data.description }}
+{% if 'documentation' in data %}
+{{ documentation(data['documentation']) | indent(1, True) -}}
+{% endif %}
+{% set ignore_keys = ['description', 'interface', 'documentation', 'config'] %}
+{% set mapping_result = mapping( name, data.items(), ignore_keys, False) %}
+{% if mapping_result != '' %}
+{#- debug leftover removed: a bare "X" line was emitted into the generated RST -#}
+{{ mapping_result | indent(1, True) -}}
+{#- debug leftover removed: a bare "Y" line was emitted into the generated RST -#}
+{% endif %}
+{% if 'config' in data %}
+ **config:**
+{{ config(data.config.items()) | indent(2, True) -}}
+{% endif %}
+{% endmacro %}
+
+{### Renders a list of impls ###}
+{% macro impls(impls) %}
+{% for impl_name, impl_data in impls %}
+{{ impl(impl_name, impl_data) -}}
+{% endfor %}
+{% endmacro %}
+
+{### Renders a single requirement ###}
+{% macro req(name, data) %}
+{% if not 'min_connections' in data %}
+{% set min_conns = 1 %}
+{% else %}
+{% set min_conns = data.min_connections %}
+{% endif %}
+{% if not 'max_connections' in data %}
+{% set max_conns = 1 %}
+{% else %}
+{% set max_conns = data.max_connections %}
+{% endif %}
+{% set conns = min_conns|string + ".." 
+ max_conns|string %}
+{% if min_conns == max_conns %}
+{% set conns = min_conns|string %}
+{% endif %}
+{% set interface_target = 'everest_interfaces_' + data.interface %}
+**{{ name }}**: {{ ref(interface_target, data.interface) }} {{conns}}
+{% set ignore_keys = ['interface'] %}
+{% set mapping_result = mapping( name, data.items(), ignore_keys, False) %}
+{% if mapping_result != '' %}
+{{ mapping_result | indent(1, True) -}}
+{% endif %}
+{% endmacro %}
+
+{### Renders a list of requirements ###}
+{% macro reqs(reqs) %}
+{% for req_name, req_data in reqs %}
+{{ req(req_name, req_data) -}}
+{% endfor %}
+{% endmacro %}
+
+{### Renders metadata ###}
+{% macro metadata(data) %}
+{{ h2('Metadata') -}}
+{{ h3('Authors') -}}
+{% for author in data['authors'] %}
+| {{ author }}
+{% endfor %}
+
+{{ h3('License') -}}
+| {{ data['license'] }}
+
+{% set ignore_keys = ['authors', 'license'] %}
+{% set mapping_result = mapping('metadata', data.items(), ignore_keys, False) %}
+{% if mapping_result != '' %}
+{{ h3('Misc') -}}
+{{ mapping_result | indent(1, True) | rst_indent() -}}
+
+{% endif %}
+{% endmacro %}
diff --git a/cmake/trailbook-ext-everest/templates/module.rst.jinja b/cmake/trailbook-ext-everest/templates/module.rst.jinja
new file mode 100644
index 0000000000..1a541859d4
--- /dev/null
+++ b/cmake/trailbook-ext-everest/templates/module.rst.jinja
@@ -0,0 +1,32 @@
+{% import 'macros.jinja' as funcs %}
+
+{{ funcs.explicit_target('everest_modules_' + name) -}}
+{{ funcs.h1(name) -}}
+{{ data.description | rst_indent() }}
+{% if 'documentation' in data %}
+{{ funcs.documentation(data.documentation) | rst_indent() -}}
+{% endif %}
+
+{% if handwritten_module_doc %}
+{{ funcs.h2("Handwritten Documentation") }}
+.. 
include:: {{ handwritten_module_doc }}
+{% endif %}
+
+{{ funcs.h2("Auto-Generated Reference") }}
+
+{% if data.config %}
+{{ funcs.h3('Module Configuration') -}}
+{{ funcs.config(data.config.items()) | rst_indent() -}}
+{% endif %}
+
+{% if data.provides %}
+{{ funcs.h3('Provides') -}}
+{{ funcs.impls(data.provides.items()) | rst_indent() -}}
+{% endif %}
+
+{% if data.requires %}
+{{ funcs.h3('Requirements') -}}
+{{ funcs.reqs(data.requires.items()) | rst_indent() -}}
+{% endif %}
+
+{{ funcs.metadata(data.metadata) -}}
diff --git a/cmake/trailbook-ext-everest/templates/module_ref_index.rst.jinja b/cmake/trailbook-ext-everest/templates/module_ref_index.rst.jinja
new file mode 100644
index 0000000000..1c933cde56
--- /dev/null
+++ b/cmake/trailbook-ext-everest/templates/module_ref_index.rst.jinja
@@ -0,0 +1,9 @@
+=========================================================
+{{ HEADLINE }}
+=========================================================
+
+.. toctree::
+   :maxdepth: 1
+   :glob:
+
+{{ TOC_GLOB_EXPRESSION }}
diff --git a/cmake/trailbook-ext-everest/templates/types.rst.jinja b/cmake/trailbook-ext-everest/templates/types.rst.jinja
new file mode 100644
index 0000000000..aa0c4b72f2
--- /dev/null
+++ b/cmake/trailbook-ext-everest/templates/types.rst.jinja
@@ -0,0 +1,10 @@
+{% import 'macros.jinja' as funcs %}
+:orphan:
+
+{{ funcs.explicit_target('everest_types_' + name) }}
+{{ funcs.h1(name) }}
+{{ data.description }}
+{% if 'documentation' in data %}
+{{ funcs.documentation(data.documentation) -}}
+{% endif %}
+{{ funcs.types(data.types.items(), name) }}
diff --git a/cmake/trailbook-ext-everest/trailbook-ext-everest-config-version.cmake b/cmake/trailbook-ext-everest/trailbook-ext-everest-config-version.cmake
new file mode 100644
index 0000000000..cf62352471
--- /dev/null
+++ b/cmake/trailbook-ext-everest/trailbook-ext-everest-config-version.cmake
@@ -0,0 +1,13 @@
+set(PACKAGE_VERSION 0.1.0)
+
+if(PACKAGE_FIND_VERSION STREQUAL PACKAGE_VERSION)
+    
set(PACKAGE_VERSION_EXACT TRUE) +elseif(PACKAGE_FIND_VERSION_MAJOR STREQUAL "0") + if(PACKAGE_FIND_VERSION_MINOR GREATER "1") + set(PACKAGE_VERSION_UNSUITABLE TRUE) + else() + set(PACKAGE_VERSION_COMPATIBLE TRUE) + endif() +else() + set(PACKAGE_VERSION_UNSUITABLE TRUE) +endif() diff --git a/cmake/trailbook-ext-everest/trailbook-ext-everest-config.cmake b/cmake/trailbook-ext-everest/trailbook-ext-everest-config.cmake new file mode 100644 index 0000000000..69b95af17e --- /dev/null +++ b/cmake/trailbook-ext-everest/trailbook-ext-everest-config.cmake @@ -0,0 +1,13 @@ +find_package( + trailbook + 0.1.0 + REQUIRED + PATHS "${CMAKE_SOURCE_DIR}/cmake" +) + +include("${CMAKE_CURRENT_LIST_DIR}/add-module-handwritten-doc.cmake") +include("${CMAKE_CURRENT_LIST_DIR}/generate-api-docs.cmake") +include("${CMAKE_CURRENT_LIST_DIR}/generate-rst-from-interface.cmake") +include("${CMAKE_CURRENT_LIST_DIR}/generate-rst-from-types.cmake") +include("${CMAKE_CURRENT_LIST_DIR}/generate-rst-from-manifest.cmake") +include("${CMAKE_CURRENT_LIST_DIR}/create-snapshot.cmake") diff --git a/cmake/trailbook/EXTENDING.md b/cmake/trailbook/EXTENDING.md new file mode 100644 index 0000000000..7b37b0273b --- /dev/null +++ b/cmake/trailbook/EXTENDING.md @@ -0,0 +1,181 @@ +# Extending the trailbook package + +The trailbook package provides a set of targets and target properties +that can be used to hook into the build process of the trailbook documentation +and extend it with custom functionality. + +## Important Note + +Since the trailbook packages work a lot with custom CMake targets and +custom CMake commands, it is important to set dependencies correctly +when extending the trailbook package. + +This means that it is not sufficient to just depend on the targets +and extend target dependencies with `add_dependencies()`. Instead, +you should also make sure to extend the file-level dependencies. 
For this +a set of custom target properties is provided that can be used +to add additional dependencies to the custom commands used in the +trailbook build process. + +## Available Stages to Hook Into + +To hook into the build process custom commands can be placed in between +stages + +### Hook in before stage: Prepare Sphinx Source + +If you want to hook into the build process before the Sphinx source +is prepared, you can define a custom command that doesn't need to +depend on any files, but the created files and targets should be appended to +the target list property `ADDITIONAL_DEPS_STAGE_PREPARE_SPHINX_SOURCE_BEFORE`. + +This can be done by using the following code snippet: + +```cmake +# Your custom cmake code here +add_custom_command( + OUTPUT + + + COMMAND + + DEPENDS + +) + +add_custom_target( + + DEPENDS + + +) + +# Hook into the trailbook build process +set_property( + TARGET trailbook_ + APPEND + PROPERTY + ADDITIONAL_DEPS_STAGE_PREPARE_SPHINX_SOURCE_BEFORE + + + +) +``` + +* ``, `` can be any custom files created + by your command. +* `` is a custom target that + wraps your command for example. +* `` should be replaced with the name of your trailbook + provided in the `add_trailbook()` function call. + +With this target-level dependencies and file-level dependencies can be added. +If there is a target that depends on the output files, the file-level +dependencies should be added as well. + +### Hook in before stage: Build Sphinx + +If you want to hook in before the Sphinx build process starts, +you can use the target list property `ADDITIONAL_DEPS_STAGE_BUILD_SPHINX_BEFORE`. +and `DEPS_STAGE_PREPARE_SPHINX_SOURCE_AFTER` to add file-level dependencies +to the stage before. 
+ +This can be done by using the following code snippet: + +```cmake +# Hook into the trailbook build process after the prepare stage +get_target_property( + DEPS_STAGE_PREPARE_SPHINX_SOURCE_AFTER + trailbook_ + DEPS_STAGE_PREPARE_SPHINX_SOURCE_AFTER +) + +# Your custom cmake code here +add_custom_command( + OUTPUT + + + DEPENDS + + ${DEPS_STAGE_PREPARE_SPHINX_SOURCE_AFTER} + COMMAND + +) +add_custom_target( + + DEPENDS + + + ${DEPS_STAGE_PREPARE_SPHINX_SOURCE_AFTER} +) + +# Hook into the trailbook build process before the build stage +set_property( + TARGET trailbook_ + APPEND + PROPERTY + ADDITIONAL_DEPS_STAGE_BUILD_SPHINX_BEFORE + + + +) +``` + +* ``, `` can be any custom files created + by your command. +* `` is a custom target that + wraps your command for example. +* `` should be replaced with the name of your trailbook + provided in the `add_trailbook()` function call. + +With the `get_target_property()` call the file-level dependencies +from the previous stage are retrieved and added to the custom command +and the custom target. This ensures that the custom command is executed +after the previous stage is completed. + +With the `set_property()` call the custom target and the output files +are added to the target-level dependencies of the build stage. +This ensures that the build stage waits for the custom command +to complete before starting the Sphinx build process. + +### Hook in before stage: Post Process Sphinx + +This can be done analogously to the previous stage, but using the target list property +`ADDITIONAL_DEPS_STAGE_POSTPROCESS_SPHINX_BEFORE` and `DEPS_STAGE_BUILD_SPHINX_AFTER`. 
+ +```cmake +# Hook into the trailbook build process after the build stage +get_target_property( + DEPS_STAGE_BUILD_SPHINX_AFTER + trailbook_ + DEPS_STAGE_BUILD_SPHINX_AFTER +) +# Your custom cmake code here +add_custom_command( + OUTPUT + + + DEPENDS + + ${DEPS_STAGE_BUILD_SPHINX_AFTER} + COMMAND + +) +add_custom_target( + + DEPENDS + + + ${DEPS_STAGE_BUILD_SPHINX_AFTER} +) +# Hook into the trailbook build process before the post process stage +set_property( + TARGET trailbook_ + APPEND + PROPERTY + ADDITIONAL_DEPS_STAGE_POSTPROCESS_SPHINX_BEFORE + + + +) +``` diff --git a/cmake/trailbook/README.md b/cmake/trailbook/README.md new file mode 100644 index 0000000000..a699afb312 --- /dev/null +++ b/cmake/trailbook/README.md @@ -0,0 +1,137 @@ +# CMake Package trailbook + +This package provides CMake functions and macros to include +the build of a trailbook documentation in a CMake-based project. + +## Usage in CMake + +To use this package in your CMake project, include the following line in your `CMakeLists.txt` file: + +```cmake +find_package( + trailbook + 0.1.0 + REQUIRED + PATHS "${CMAKE_SOURCE_DIR}/" +) +``` + +* Specify the version to make sure you are using +a compatible version of the package. +* If the package is not found, CMake will +stop with an error due to the `REQUIRED` keyword. +* If the package is not installed in a standard +location, you can specify the path to the package using the `PATHS` option. + +After finding the package, you can use the provided functions. +At the moment, the package provides the following functions: + +### `add_trailbook()` + +This function is the initial call for your trailbook documentation. +It can be called as follows: + +```cmake +add_trailbook( + NAME + [STEM_DIRECTORY ] + [REQUIREMENTS_TXT ] + INSTANCE_NAME + [DEPLOYED_DOCS_REPO_URL ] + [DEPLOYED_DOCS_REPO_BRANCH ] +) +``` + +* This function needs to be called once per trailbook. +* The `NAME` argument specifies the name of the trailbook. 
+  This name will be used to create unique target names.
+* The optional `STEM_DIRECTORY` argument specifies the
+  directory containing the Sphinx source files.
+  If not provided, it defaults to `${CMAKE_CURRENT_SOURCE_DIR}`.
+* The optional `REQUIREMENTS_TXT` argument specifies the path to a
+  `requirements.txt` file for Python dependencies.
+  If not provided, it defaults to `${STEM_DIRECTORY}/requirements.txt`,
+  if this file exists.
+  This requirements file will be used to check whether the required Python packages are installed and, if they are not, to install them, provided a
+  Python virtual environment is active.
+* The `INSTANCE_NAME` argument specifies the name that is used for
+  the version in the multiversion structure.
+* The optional `DEPLOYED_DOCS_REPO_URL` argument specifies the URL of the
+  repository where the already deployed documentation is located.
+  It is required if `TRAILBOOK_<name>_DOWNLOAD_ALL_VERSIONS` is set to `ON`.
+* The optional `DEPLOYED_DOCS_REPO_BRANCH` argument
+  specifies the branch of the deployed documentation repository.
+  It defaults to `main` if not provided.
+
+## Configuring
+
+There are several options that can be configured
+for each trailbook by setting CMake variables.
+
+### `TRAILBOOK_<name>_DOWNLOAD_ALL_VERSIONS`
+
+* `<name>` should be replaced with the trailbook name provided
+  in the `add_trailbook()` function call.
+
+If `TRAILBOOK_<name>_DOWNLOAD_ALL_VERSIONS` is set to `ON`,
+the build process will attempt to download all previously deployed versions
+of the trailbook from the specified repository, and then embed the
+new version into the multiversion structure.
+
+If `TRAILBOOK_<name>_DOWNLOAD_ALL_VERSIONS` is set to `OFF` (default),
+only the current version of the trailbook will be built. For this,
+an empty multiversion skeleton will be created.
+
+This configuration shouldn't be changed after the first build.
+
+### `TRAILBOOK_<name>_IS_RELEASE`
+
+* `<name>` should be replaced with the trailbook name provided
+  in the `add_trailbook()` function call.
+
+If `TRAILBOOK_<name>_IS_RELEASE` is set to `ON` (default),
+the trailbook will be built as a release version. This means
+that the `latest` version is updated, and the `index.html` and
+`404.html` files are updated.
+
+If `TRAILBOOK_<name>_IS_RELEASE` is set to `OFF`,
+the mentioned files are not updated, and the `latest` version
+is not changed. This can be used, for example, to build
+nightly versions without affecting the released version.
+
+## Building
+
+To build the trailbook documentation, simply run the following command, after configuring the project with CMake:
+
+```bash
+cmake --build <build_dir> --target trailbook_<name>
+```
+
+* Replace `<build_dir>` with the path to your CMake build directory.
+* Replace `<name>` with the name of your trailbook
+  provided in the `add_trailbook()` function call.
+
+This target will trigger the full build of the trailbook documentation.
+
+Furthermore, you can use the following additional targets:
+
+```bash
+cmake --build <build_dir> --target trailbook_<name>_preview
+```
+
+This target will start a local server to preview the built documentation.
+
+```bash
+cmake --build <build_dir> --target trailbook_<name>_live_preview
+```
+This target will start a local server that watches for changes
+in the source files and automatically rebuilds the documentation
+and refreshes the preview in the browser.
+
+## How to build an extension for the trailbook package
+
+The trailbook package provides a set of targets and target properties
+that can be used to hook into the build process of the trailbook documentation
+and extend it with custom functionality.
+
+See the full explanation in the [EXTENDING.md](EXTENDING.md) file.
diff --git a/cmake/trailbook/add-trailbook.cmake b/cmake/trailbook/add-trailbook.cmake
new file mode 100644
index 0000000000..0aaf559e93
--- /dev/null
+++ b/cmake/trailbook/add-trailbook.cmake
@@ -0,0 +1,721 @@
+
+# This macro is for internal use only
+#
+# It is used in the function add_trailbook.
+# It checks the requirements defined by the requirements.txt file +# and installs any missing packages into the current Python virtual environment. +# It checks during the configuration phase. +macro(_add_trailbook_check_requirements_txt) + if(EXISTS ${args_REQUIREMENTS_TXT}) + execute_process( + COMMAND + ${Python3_EXECUTABLE} + ${CMAKE_CURRENT_FUNCTION_LIST_DIR}/check_requirements_txt.py + ${args_REQUIREMENTS_TXT} + --fix-in-venv + RESULT_VARIABLE _CHECK_REQUIREMENTS_TXT_RESULT + ) + + if(NOT _CHECK_REQUIREMENTS_TXT_RESULT EQUAL 0) + message(FATAL_ERROR "Trailbook: ${args_NAME} - ${args_REQUIREMENTS_TXT} not satisfied.") + else() + message(STATUS "Trailbook: ${args_NAME} - ${args_REQUIREMENTS_TXT} satisfied.") + endif() + else() + message(STATUS "Trailbook: ${args_NAME} - No requirements.txt found.") + endif() +endmacro() + +# This macro is for internal use only +# +# It is used in the function add_trailbook. +# It sets up the trailbook build directory where the multiversion HTML docs will be located. +# If TRAILBOOK_INSTANCE_DOWNLOAD_ALL_VERSIONS is ON, it clones the deployed docs repo. +# Otherwise, it creates an empty skeleton directory. 
+# This configuration is checked during the configuration phase and should not be switched +macro(_add_trailbook_setup_build_directory) + set(CHECK_DONE_FILE_SETUP_BUILD_DIRECTORY "${CMAKE_CURRENT_BINARY_DIR}/setup_build_directory.check_done") + set(SETUP_BUILD_DIRECTORY_FILE_LIST "${CMAKE_CURRENT_BINARY_DIR}/setup_build_directory_filelist.yaml") + set(DEPLOYED_DOCS_REPO_DIR "${CMAKE_CURRENT_BINARY_DIR}/deployed_docs_repo/") + + if(TRAILBOOK_INSTANCE_DOWNLOAD_ALL_VERSIONS) + if(_SETUP_BUILD_DIRECTORY_LAST_CONFIGURATION STREQUAL "EMPTY_SKELETON") + message(FATAL_ERROR "add_trailbook: Cannot switch between DOWNLOAD_ALL_VERSIONS and EMPTY_SKELETON configurations for trailbook ${args_NAME} without cleaning build directory") + endif() + else() + if(_SETUP_BUILD_DIRECTORY_LAST_CONFIGURATION STREQUAL "DOWNLOAD_ALL_VERSIONS") + message(FATAL_ERROR "add_trailbook: Cannot switch between DOWNLOAD_ALL_VERSIONS and EMPTY_SKELETON configurations for trailbook ${args_NAME} without cleaning build directory") + endif() + endif() + + if(TRAILBOOK_INSTANCE_DOWNLOAD_ALL_VERSIONS) + find_program( + GIT_EXECUTABLE + NAMES git + REQUIRED + ) + + set(CONDITIONAL_DELETE_LATEST_DIR_COMMAND "") + if(TRAILBOOK_INSTANCE_IS_RELEASE) + set(CONDITIONAL_DELETE_LATEST_DIR_COMMAND + COMMAND + ${CMAKE_COMMAND} -E rm -rf + ${CMAKE_CURRENT_BINARY_DIR}/tmp_repo_download/docs/latest + ) + endif() + + set(CONDITIONAL_DELETE_INSTANCE_DIR_OR_FAIL_COMMAND "") + if(TRAILBOOK_${args_NAME}_OVERWRITE_EXISTING_INSTANCE) + set(CONDITIONAL_DELETE_INSTANCE_DIR_OR_FAIL_COMMAND + COMMAND + ${CMAKE_COMMAND} -E rm -rf + ${CMAKE_CURRENT_BINARY_DIR}/tmp_repo_download/docs/${TRAILBOOK_${args_NAME}_INSTANCE_NAME} + ) + else() + # check if instance directory already exists and fail if it does + set(CONDITIONAL_DELETE_INSTANCE_DIR_OR_FAIL_COMMAND + COMMAND + ${Python3_EXECUTABLE} ${CMAKE_CURRENT_FUNCTION_LIST_DIR}/check_path_exists.py + --directory 
${CMAKE_CURRENT_BINARY_DIR}/tmp_repo_download/docs/${TRAILBOOK_${args_NAME}_INSTANCE_NAME} + --return-zero-if-not-exists + ) + endif() + + add_custom_command( + OUTPUT + ${CHECK_DONE_FILE_SETUP_BUILD_DIRECTORY} + DEPENDS + trailbook_${args_NAME}_stage_prepare_sphinx_source_before + $ + ${CMAKE_CURRENT_FUNCTION_LIST_DIR}/filelist_manager.py + ${Python3_EXECUTABLE} ${CMAKE_CURRENT_FUNCTION_LIST_DIR}/check_path_exists.py + COMMENT + "Trailbook: ${args_NAME} - Downloading all versions repo" + COMMAND # Remove existing files in deployed docs repo directory from previous builds + ${Python3_EXECUTABLE} + ${CMAKE_CURRENT_FUNCTION_LIST_DIR}/filelist_manager.py + remove + --data-file ${SETUP_BUILD_DIRECTORY_FILE_LIST} + --root-directory ${DEPLOYED_DOCS_REPO_DIR} + COMMAND # Clone deployed docs repo + ${GIT_EXECUTABLE} clone + -b ${args_DEPLOYED_DOCS_REPO_BRANCH} + --depth 1 + ${args_DEPLOYED_DOCS_REPO_URL} + ${CMAKE_CURRENT_BINARY_DIR}/tmp_repo_download/ + # Remove latest directory if this is a release instance + ${CONDITIONAL_DELETE_LATEST_DIR_COMMAND} + # Remove existing instance directory if overwrite is enabled or fail if it exists + ${CONDITIONAL_DELETE_INSTANCE_DIR_OR_FAIL_COMMAND} + COMMAND # Create file list of existing files in deployed docs repo directory after clone + ${Python3_EXECUTABLE} + ${CMAKE_CURRENT_FUNCTION_LIST_DIR}/filelist_manager.py + create + --data-file ${SETUP_BUILD_DIRECTORY_FILE_LIST} + --root-directory ${CMAKE_CURRENT_BINARY_DIR}/tmp_repo_download + COMMAND # Move cloned files to deployed docs repo directory + ${Python3_EXECUTABLE} + ${CMAKE_CURRENT_FUNCTION_LIST_DIR}/filelist_manager.py + move + --data-file ${SETUP_BUILD_DIRECTORY_FILE_LIST} + --root-directory ${CMAKE_CURRENT_BINARY_DIR}/tmp_repo_download + --target-root-directory ${DEPLOYED_DOCS_REPO_DIR}/ + COMMAND # Delete temporary clone directory + ${CMAKE_COMMAND} -E rm -rf + ${CMAKE_CURRENT_BINARY_DIR}/tmp_repo_download/ + COMMAND # Create convenience symlink to docs/ in build directory 
+ ${CMAKE_COMMAND} -E create_symlink + ${DEPLOYED_DOCS_REPO_DIR}/docs/ + ${TRAILBOOK_BUILD_DIRECTORY} + COMMAND # Create done file + ${CMAKE_COMMAND} -E touch ${CHECK_DONE_FILE_SETUP_BUILD_DIRECTORY} + ) + set(_SETUP_BUILD_DIRECTORY_LAST_CONFIGURATION "DOWNLOAD_ALL_VERSIONS") + else() + set(CONDITIONAL_CLEANUP_COMMAND "") + add_custom_command( + OUTPUT + ${CHECK_DONE_FILE_SETUP_BUILD_DIRECTORY} + DEPENDS + trailbook_${args_NAME}_stage_prepare_sphinx_source_before + $ + COMMENT + "Trailbook: ${args_NAME} - Creating empty skeleton multiversion root directory" + COMMAND + ${CMAKE_COMMAND} -E make_directory + ${TRAILBOOK_BUILD_DIRECTORY}/ + COMMAND + ${CMAKE_COMMAND} -E touch ${CHECK_DONE_FILE_SETUP_BUILD_DIRECTORY} + ) + set(_SETUP_BUILD_DIRECTORY_LAST_CONFIGURATION "EMPTY_SKELETON") + endif() +endmacro() + +# This macro is for internal use only +# +# It is used in the function add_trailbook. +# It adds a custom command to copy the trailbook stem files to the build directory. +# To be used a base for the tailbook instance source directory. 
+macro(_add_trailbook_copy_stem_command) + file( + GLOB_RECURSE + STEM_FILES_SOURCE_DIR + CONFIGURE_DEPENDS + "${args_STEM_DIRECTORY}/*" + ) + + set(STEM_FILES_BUILD_DIR "") + foreach(file_path IN LISTS STEM_FILES_SOURCE_DIR) + file(RELATIVE_PATH rel_path "${args_STEM_DIRECTORY}" "${file_path}") + list(APPEND STEM_FILES_BUILD_DIR "${TRAILBOOK_INSTANCE_SOURCE_DIRECTORY}/${rel_path}") + endforeach() + + add_custom_command( + OUTPUT + ${STEM_FILES_BUILD_DIR} + DEPENDS + ${STEM_FILES_SOURCE_DIR} + trailbook_${args_NAME}_stage_prepare_sphinx_source_before + $ + ${CMAKE_CURRENT_FUNCTION_LIST_DIR}/filelist_manager.py + COMMENT + "Trailbook: ${args_NAME} - Copying stem files to build directory" + COMMAND + ${Python3_EXECUTABLE} + ${CMAKE_CURRENT_FUNCTION_LIST_DIR}/filelist_manager.py + remove + --data-file ${CMAKE_CURRENT_BINARY_DIR}/copy_stem_filelist.yaml + --root-directory ${TRAILBOOK_INSTANCE_SOURCE_DIRECTORY} + COMMAND + ${Python3_EXECUTABLE} + ${CMAKE_CURRENT_FUNCTION_LIST_DIR}/filelist_manager.py + create + --data-file ${CMAKE_CURRENT_BINARY_DIR}/copy_stem_filelist.yaml + --root-directory ${args_STEM_DIRECTORY} + COMMAND + ${CMAKE_COMMAND} -E copy_directory + ${args_STEM_DIRECTORY} + ${TRAILBOOK_INSTANCE_SOURCE_DIRECTORY} + ) +endmacro() + +# This macro is for internal use only +# +# It is used in the function add_trailbook. +# It adds a custom command to create the metadata YAML file for the trailbook instance. +# The metadata YAML file is used by Sphinx during the build process. +# It contains a list of all versions available in the multiversion root directory. 
+macro(_add_trailbook_create_metadata_yaml_command) + set(METADATA_YAML_FILE "${CMAKE_CURRENT_BINARY_DIR}/metadata_${args_NAME}.yaml") + + add_custom_command( + OUTPUT + ${METADATA_YAML_FILE} + DEPENDS + ${CMAKE_CURRENT_FUNCTION_LIST_DIR}/create_metadata_yaml.py + ${STEM_FILES_BUILD_DIR} + ${CHECK_DONE_FILE_SETUP_BUILD_DIRECTORY} + trailbook_${args_NAME}_stage_prepare_sphinx_source_before + $ + COMMENT + "Trailbook: ${args_NAME} - Creating metadata YAML file" + COMMAND + ${CMAKE_COMMAND} -E rm -f ${METADATA_YAML_FILE} + COMMAND + ${Python3_EXECUTABLE} + ${CMAKE_CURRENT_FUNCTION_LIST_DIR}/create_metadata_yaml.py + --multiversion-root-directory "${TRAILBOOK_BUILD_DIRECTORY}" + "--output-path" "${METADATA_YAML_FILE}" + --additional-version "${TRAILBOOK_${args_NAME}_INSTANCE_NAME}" + ) +endmacro() + +# This macro is for internal use only +# +# It is used in the function add_trailbook. +# It adds a custom command to build the Sphinx HTML documentation for the trailbook instance. +# It builds from the trailbook instance source directory to the trailbook instance build directory. 
+macro(_add_trailbook_sphinx_build_command) + set(CHECK_DONE_FILE_SPHINX_BUILD_COMMAND "${CMAKE_CURRENT_BINARY_DIR}/build_html.check_done") + + add_custom_command( + OUTPUT + ${CHECK_DONE_FILE_SPHINX_BUILD_COMMAND} + DEPENDS + trailbook_${args_NAME}_stage_build_sphinx_before + $ + ${STEM_FILES_BUILD_DIR} + ${METADATA_YAML_FILE} + ${CMAKE_CURRENT_FUNCTION_LIST_DIR}/filelist_manager.py + COMMENT + "Trailbook: ${args_NAME} - Building HTML documentation with Sphinx" + USES_TERMINAL + COMMAND + ${Python3_EXECUTABLE} + ${CMAKE_CURRENT_FUNCTION_LIST_DIR}/filelist_manager.py + remove + --data-file ${CMAKE_CURRENT_BINARY_DIR}/sphinx_build_filelist.yaml + --root-directory ${TRAILBOOK_INSTANCE_BUILD_DIRECTORY}/ + COMMAND + EVEREST_METADATA_YAML_PATH=${METADATA_YAML_FILE} + ${_SPHINX_BUILD_EXECUTABLE} + -b html + ${TRAILBOOK_INSTANCE_SOURCE_DIRECTORY} + ${CMAKE_CURRENT_BINARY_DIR}/sphinx_build_temp/ + COMMAND + ${Python3_EXECUTABLE} + ${CMAKE_CURRENT_FUNCTION_LIST_DIR}/filelist_manager.py + create + --data-file ${CMAKE_CURRENT_BINARY_DIR}/sphinx_build_filelist.yaml + --root-directory ${CMAKE_CURRENT_BINARY_DIR}/sphinx_build_temp/ + COMMAND + ${Python3_EXECUTABLE} + ${CMAKE_CURRENT_FUNCTION_LIST_DIR}/filelist_manager.py + move + --data-file ${CMAKE_CURRENT_BINARY_DIR}/sphinx_build_filelist.yaml + --root-directory ${CMAKE_CURRENT_BINARY_DIR}/sphinx_build_temp/ + --target-root-directory ${TRAILBOOK_INSTANCE_BUILD_DIRECTORY}/ + COMMAND + ${CMAKE_COMMAND} -E echo + "Trailbook: ${args_NAME} - HTML documentation built at ${TRAILBOOK_INSTANCE_BUILD_DIRECTORY}" + COMMAND + ${CMAKE_COMMAND} -E touch ${CHECK_DONE_FILE_SPHINX_BUILD_COMMAND} + ) +endmacro() + +# This macro is for internal use only +# +# It is used in the function add_trailbook. +# It adds a custom command to replace the 'latest' copy in the multiversion root directory +# It should be only called if TRAILBOOK_INSTANCE_IS_RELEASE is ON. 
+macro(_add_trailbook_replace_latest_command) + set(CHECK_DONE_FILE_REPLACE_LATEST "${CMAKE_CURRENT_BINARY_DIR}/replace_latest.check_done") + add_custom_command( + OUTPUT + ${CHECK_DONE_FILE_REPLACE_LATEST} + DEPENDS + trailbook_${args_NAME}_stage_postprocess_sphinx_before + $ + ${CHECK_DONE_FILE_SPHINX_BUILD_COMMAND} + COMMENT + "Trailbook: ${args_NAME} - Replacing 'latest' copy with copy of current instance" + COMMAND + ${CMAKE_COMMAND} -E rm -rf ${TRAILBOOK_BUILD_DIRECTORY}/latest + COMMAND + ${CMAKE_COMMAND} -E copy_directory + ${TRAILBOOK_INSTANCE_BUILD_DIRECTORY} + ${TRAILBOOK_BUILD_DIRECTORY}/latest + COMMAND + ${CMAKE_COMMAND} -E touch ${CHECK_DONE_FILE_REPLACE_LATEST} + ) +endmacro() + +# This macro is for internal use only +# +# It is used in the function add_trailbook. +# It copies the 404.html file from the trailbook instance build directory +# to the multiversion root directory. +# It should only be called if TRAILBOOK_INSTANCE_IS_RELEASE is ON. +macro(_add_trailbook_copy_404_command) + set(CHECK_DONE_FILE_COPY_404 "${CMAKE_CURRENT_BINARY_DIR}/copy_404.check_done") + set(TRAILBOOK_404_FILE "${TRAILBOOK_BUILD_DIRECTORY}/404.html") + set(TRAILBOOK_INSTANCE_404_FILE "${TRAILBOOK_INSTANCE_BUILD_DIRECTORY}/404.html") + add_custom_command( + OUTPUT + ${TRAILBOOK_INSTANCE_404_FILE} + DEPENDS + trailbook_${args_NAME}_stage_postprocess_sphinx_before + $ + ${CHECK_DONE_FILE_SPHINX_BUILD_COMMAND} + ${CMAKE_CURRENT_FUNCTION_LIST_DIR}/check_path_exists.py + COMMENT + "Trailbook: ${args_NAME} - Checking for 404.html in built documentation" + COMMAND + ${Python3_EXECUTABLE} + ${CMAKE_CURRENT_FUNCTION_LIST_DIR}/check_path_exists.py + --file "${TRAILBOOK_INSTANCE_404_FILE}" + --return-zero-if-exists + ) + add_custom_command( + OUTPUT + ${CHECK_DONE_FILE_COPY_404} + DEPENDS + trailbook_${args_NAME}_stage_postprocess_sphinx_before + $ + ${CHECK_DONE_FILE_SPHINX_BUILD_COMMAND} + ${TRAILBOOK_INSTANCE_404_FILE} + COMMENT + "Trailbook: ${args_NAME} - Copying 404.html to 
multiversion root directory" + COMMAND + ${CMAKE_COMMAND} -E rm -f ${TRAILBOOK_404_FILE} + COMMAND + ${CMAKE_COMMAND} -E copy + ${TRAILBOOK_INSTANCE_404_FILE} + ${TRAILBOOK_404_FILE} + COMMAND + ${CMAKE_COMMAND} -E touch ${CHECK_DONE_FILE_COPY_404} + ) +endmacro() + +# This macro is for internal use only +# +# It is used in the function add_tailbook. +# It adds a custom command to render the redirect template. The rendered file +# will be used as the index.html in the multiversion root directory. +# This macro should only be called if TRAILBOOK_INSTANCE_IS_RELEASE is ON. +macro(_add_trailbook_render_redirect_template_command) + set(CHECK_DONE_FILE_RENDER_REDIRECT_TEMPLATE "${CMAKE_CURRENT_BINARY_DIR}/render_redirect_template.check_done") + set(REDIRECT_TEMPLATE_FILE "${CMAKE_CURRENT_FUNCTION_LIST_DIR}/templates/redirect.html.jinja") + set(TRAILBOOK_REDIRECT_FILE "${TRAILBOOK_BUILD_DIRECTORY}/index.html") + add_custom_command( + OUTPUT + ${CHECK_DONE_FILE_RENDER_REDIRECT_TEMPLATE} + DEPENDS + trailbook_${args_NAME}_stage_postprocess_sphinx_before + $ + ${CHECK_DONE_FILE_SPHINX_BUILD_COMMAND} + ${CMAKE_CURRENT_FUNCTION_LIST_DIR}/render_redirect_template.py + COMMENT + "Trailbook: ${args_NAME} - Rendering redirect.html from template" + COMMAND + ${CMAKE_COMMAND} -E rm -f ${TRAILBOOK_REDIRECT_FILE} + COMMAND + ${Python3_EXECUTABLE} + ${CMAKE_CURRENT_FUNCTION_LIST_DIR}/render_redirect_template.py + --redirect-template "${REDIRECT_TEMPLATE_FILE}" + "--target-path" "${TRAILBOOK_REDIRECT_FILE}" + COMMAND + ${CMAKE_COMMAND} -E touch ${CHECK_DONE_FILE_RENDER_REDIRECT_TEMPLATE} + ) +endmacro() + +# This macro is for internal use only +# +# It is used in the function add_trailbook. 
+# It adds a custom command to copy the versions_index.html file to the multiversion root directory +macro(_add_trailbook_copy_versions_index_command) + set(CHECK_DONE_FILE_COPY_VERSIONS_INDEX "${CMAKE_CURRENT_BINARY_DIR}/copy_versions_index.check_done") + set(TRAILBOOK_VERSIONS_INDEX_FILE "${TRAILBOOK_BUILD_DIRECTORY}/versions_index.html") + set(TRAILBOOK_INSTANCE_VERSIONS_INDEX_FILE "${TRAILBOOK_INSTANCE_BUILD_DIRECTORY}/versions_index.html") + set(CHECK_DONE_FILE_CHECK_LATEST_INSTANCE "${CMAKE_CURRENT_BINARY_DIR}/check_latest_instance.check_done") + add_custom_command( + OUTPUT + ${TRAILBOOK_INSTANCE_VERSIONS_INDEX_FILE} + DEPENDS + trailbook_${args_NAME}_stage_postprocess_sphinx_before + $ + ${CHECK_DONE_FILE_SPHINX_BUILD_COMMAND} + ${CMAKE_CURRENT_FUNCTION_LIST_DIR}/check_path_exists.py + COMMENT + "Trailbook: ${args_NAME} - Checking for versions_index.html in built documentation" + COMMAND + ${Python3_EXECUTABLE} + ${CMAKE_CURRENT_FUNCTION_LIST_DIR}/check_path_exists.py + --file "${TRAILBOOK_INSTANCE_VERSIONS_INDEX_FILE}" + --return-zero-if-exists + ) + add_custom_command( + OUTPUT + ${CHECK_DONE_FILE_CHECK_LATEST_INSTANCE} + DEPENDS + trailbook_${args_NAME}_stage_postprocess_sphinx_before + $ + ${CHECK_DONE_FILE_SPHINX_BUILD_COMMAND} + ${CHECK_DONE_FILE_REPLACE_LATEST} + ${CMAKE_CURRENT_FUNCTION_LIST_DIR}/check_path_exists.py + COMMENT + "Trailbook: ${args_NAME} - Checking for latest/ in multiversion root directory" + COMMAND + ${Python3_EXECUTABLE} + ${CMAKE_CURRENT_FUNCTION_LIST_DIR}/check_path_exists.py + --directory ${TRAILBOOK_BUILD_DIRECTORY}/latest + --return-zero-if-exists + COMMAND + ${CMAKE_COMMAND} -E touch ${CHECK_DONE_FILE_CHECK_LATEST_INSTANCE} + ) + add_custom_command( + OUTPUT + ${CHECK_DONE_FILE_COPY_VERSIONS_INDEX} + DEPENDS + trailbook_${args_NAME}_stage_postprocess_sphinx_before + $ + ${CHECK_DONE_FILE_SPHINX_BUILD_COMMAND} + ${TRAILBOOK_INSTANCE_VERSIONS_INDEX_FILE} + ${CHECK_DONE_FILE_CHECK_LATEST_INSTANCE} + COMMENT + "Trailbook: 
${args_NAME} - Copying versions_index.html to multiversion root directory" + COMMAND + ${CMAKE_COMMAND} -E rm -f ${TRAILBOOK_VERSIONS_INDEX_FILE} + COMMAND + ${CMAKE_COMMAND} -E copy + ${TRAILBOOK_INSTANCE_VERSIONS_INDEX_FILE} + ${TRAILBOOK_VERSIONS_INDEX_FILE} + COMMAND + ${CMAKE_COMMAND} -E touch ${CHECK_DONE_FILE_COPY_VERSIONS_INDEX} + ) +endmacro() + +# This macro is for internal use only +# +# It is used in the function add_tailbook. +# It adds a custom target to serve the built HTML documentation via a simple HTTP server. +macro(_add_trailbook_preview_target) + add_custom_target( + trailbook_${args_NAME}_preview + DEPENDS + trailbook_${args_NAME} + COMMENT + "Trailbook: ${args_NAME} - Serve HTML documentation" + USES_TERMINAL + COMMAND + ${CMAKE_COMMAND} -E echo + "Trailbook: ${args_NAME} - Serving HTML output at http://localhost:8000/" + COMMAND + ${Python3_EXECUTABLE} -m http.server --directory ${TRAILBOOK_BUILD_DIRECTORY} 8000 + ) +endmacro() + +# This macro is for internal use only +# +# It is used in the function add_tailbook. +# It adds a custom target to watch the trailbook instance target for changes +# and automatically rebuild the HTML documentation with Sphinx and serve it. +macro(_add_trailbook_live_preview_target) + add_custom_target( + trailbook_${args_NAME}_live_preview + DEPENDS + trailbook_${args_NAME} + ${CMAKE_CURRENT_FUNCTION_LIST_DIR}/target_observer.py + COMMENT + "Trailbook: ${args_NAME} - Auto-build HTML documentation with Sphinx and serve" + USES_TERMINAL + COMMAND + ${CMAKE_COMMAND} -E echo + "Trailbook: ${args_NAME} - Auto-building HTML output and serving at http://localhost:8000/" + COMMAND + ${Python3_EXECUTABLE} + ${CMAKE_CURRENT_FUNCTION_LIST_DIR}/target_observer.py + "trailbook_${args_NAME}" + "trailbook_${args_NAME}_preview" + --build-dir ${CMAKE_BINARY_DIR} + --interval-ms 2000 + ) +endmacro() + +# This is the main function to add a trailbook to the build system. 
+# It sets up the necessary build commands and targets +# to build the trailbook documentation. +# It takes the following parameters: +# NAME (required): The name of the trailbook. +# STEM_DIRECTORY (optional): The directory containing the trailbook stem files. +# Defaults to CMAKE_CURRENT_SOURCE_DIR. +# REQUIREMENTS_TXT (optional): The path to the requirements.txt file. +# Defaults to CMAKE_CURRENT_SOURCE_DIR/requirements.txt if exists. +# INSTANCE_NAME (required): The instance name for the trailbook. +# Needs to be lowercase alphanumeric and underscores only. +# DEPLOYED_DOCS_REPO_URL (optional): The URL of the deployed docs repository. +# Required if TRAILBOOK__DOWNLOAD_ALL_VERSIONS is ON. +# DEPLOYED_DOCS_REPO_BRANCH (optional): The branch of the deployed docs repository. +# Defaults to 'main'. +# Usage: +# add_trailbook( +# NAME +# [STEM_DIRECTORY ] +# [REQUIREMENTS_TXT ] +# INSTANCE_NAME +# [DEPLOYED_DOCS_REPO_URL ] +# [DEPLOYED_DOCS_REPO_BRANCH ] +# ) +function(add_trailbook) + set(options) + set(one_value_args + NAME + STEM_DIRECTORY + REQUIREMENTS_TXT + DEPLOYED_DOCS_REPO_URL + DEPLOYED_DOCS_REPO_BRANCH + ) + set(multi_value_args) + cmake_parse_arguments( + "args" + "${options}" + "${one_value_args}" + "${multi_value_args}" + ${ARGN} + ) + + option(TRAILBOOK_${args_NAME}_DOWNLOAD_ALL_VERSIONS "Download all versions for trailbook ${args_NAME} and build complete trailbook" OFF) + option(TRAILBOOK_${args_NAME}_IS_RELEASE "If enabled, the trailbook ${args_NAME} will be marked as release version in versions index" ON) + set(TRAILBOOK_${args_NAME}_INSTANCE_NAME "local" CACHE STRING "Instance name for trailbook ${args_NAME}") + option(TRAILBOOK_${args_NAME}_OVERWRITE_EXISTING_INSTANCE "Overwrite existing instance with name ${TRAILBOOK_${args_NAME}_INSTANCE_NAME} if it exists" OFF) + # Check that at least one of DOWNLOAD_ALL_VERSIONS or IS_RELEASE is ON + if(NOT TRAILBOOK_${args_NAME}_DOWNLOAD_ALL_VERSIONS AND NOT TRAILBOOK_${args_NAME}_IS_RELEASE) + 
message(FATAL_ERROR "add_trailbook: TRAILBOOK_${args_NAME}_DOWNLOAD_ALL_VERSIONS and TRAILBOOK_${args_NAME}_IS_RELEASE cannot both be OFF") + endif() + # Check that instance name is lowercase alphanumeric and underscores only + string(REGEX MATCH "^[a-z0-9_]+$" _valid_instance_name "${TRAILBOOK_${args_NAME}_INSTANCE_NAME}") + if("${_valid_instance_name}" STREQUAL "") + message(FATAL_ERROR "add_trailbook: TRAILBOOK_${args_NAME}_INSTANCE_NAME needs to be lowercase alphanumeric and underscores only") + endif() + + # Parameter NAME + # is required + if("${args_NAME}" STREQUAL "") + message(FATAL_ERROR "add_trailbook: NAME argument is required") + endif() + + # Parameter STEM_DIRECTORY + # - defaults to CMAKE_CURRENT_SOURCE_DIR + # - needs to be absolute path + if("${args_STEM_DIRECTORY}" STREQUAL "") + set(args_STEM_DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}") + endif() + if(NOT IS_ABSOLUTE "${args_STEM_DIRECTORY}") + message(FATAL_ERROR "add_trailbook: STEM_DIRECTORY needs to be an absolute path") + endif() + cmake_path(SET args_STEM_DIRECTORY NORMALIZE ${args_STEM_DIRECTORY}) + + # Parameter REQUIREMENTS_TXT + # - defaults to ${CMAKE_CURRENT_SOURCE_DIR}/requirements.txt if exists + # - needs to be absolute path if set + if("${args_REQUIREMENTS_TXT}" STREQUAL "") + if(EXISTS "${CMAKE_CURRENT_SOURCE_DIR}/requirements.txt") + set(args_REQUIREMENTS_TXT "${CMAKE_CURRENT_SOURCE_DIR}/requirements.txt") + endif() + endif() + if(NOT "${args_REQUIREMENTS_TXT}" STREQUAL "") + if(NOT IS_ABSOLUTE "${args_REQUIREMENTS_TXT}") + message(FATAL_ERROR "add_trailbook: REQUIREMENTS_TXT needs to be an absolute path") + endif() + if(NOT EXISTS "${args_REQUIREMENTS_TXT}") + message(FATAL_ERROR "add_trailbook: REQUIREMENTS_TXT file does not exist: ${args_REQUIREMENTS_TXT}") + endif() + endif() + + # Parameter DEPLOYED_DOCS_REPO_URL + # - required if TRAILBOOK__DOWNLOAD_ALL_VERSIONS is ON + if(TRAILBOOK_${args_NAME}_DOWNLOAD_ALL_VERSIONS AND "${args_DEPLOYED_DOCS_REPO_URL}" STREQUAL "") + 
message(FATAL_ERROR "add_trailbook: DEPLOYED_DOCS_REPO_URL argument is required if TRAILBOOK_${args_NAME}_DOWNLOAD_ALL_VERSIONS is ON") + endif() + + # Parameter DEPLOYED_DOCS_REPO_BRANCH + # - defaults to 'main' + if("${args_DEPLOYED_DOCS_REPO_BRANCH}" STREQUAL "") + set(args_DEPLOYED_DOCS_REPO_BRANCH "main") + endif() + + set(TRAILBOOK_INSTANCE_SOURCE_DIRECTORY "${CMAKE_CURRENT_BINARY_DIR}/trailbook_${args_NAME}_source") + set(TRAILBOOK_BUILD_DIRECTORY "${CMAKE_CURRENT_BINARY_DIR}/trailbook_${args_NAME}_build") + set(TRAILBOOK_INSTANCE_BUILD_DIRECTORY "${TRAILBOOK_BUILD_DIRECTORY}/${TRAILBOOK_${args_NAME}_INSTANCE_NAME}") + set(TRAILBOOK_INSTANCE_IS_RELEASE "${TRAILBOOK_${args_NAME}_IS_RELEASE}") + set(TRAILBOOK_INSTANCE_DOWNLOAD_ALL_VERSIONS "${TRAILBOOK_${args_NAME}_DOWNLOAD_ALL_VERSIONS}") + + message(STATUS "Adding trailbook: ${args_NAME}") + message(STATUS " Stem directory: ${args_STEM_DIRECTORY}") + message(STATUS " Build directory: ${TRAILBOOK_BUILD_DIRECTORY}") + message(STATUS " Instance source directory: ${TRAILBOOK_INSTANCE_SOURCE_DIRECTORY}") + message(STATUS " Instance build directory: ${TRAILBOOK_INSTANCE_BUILD_DIRECTORY}") + if(NOT "${args_REQUIREMENTS_TXT}" STREQUAL "") + message(STATUS " Requirements.txt: ${args_REQUIREMENTS_TXT}") + else() + message(STATUS " Requirements.txt: ") + endif() + message(STATUS " Deployed docs repo url: ${args_DEPLOYED_DOCS_REPO_URL}") + message(STATUS " Deployed docs repo branch: ${args_DEPLOYED_DOCS_REPO_BRANCH}") + + _add_trailbook_check_requirements_txt() + + add_custom_target( + trailbook_${args_NAME}_stage_prepare_sphinx_source_before + DEPENDS + $ + ) + + _add_trailbook_setup_build_directory() + _add_trailbook_copy_stem_command() + _add_trailbook_create_metadata_yaml_command() + set(DEPS_STAGE_PREPARE_SPHINX_SOURCE_AFTER + trailbook_${args_NAME}_stage_prepare_sphinx_source_before + ${CHECK_DONE_FILE_SETUP_BUILD_DIRECTORY} + ${STEM_FILES_BUILD_DIR} + ${METADATA_YAML_FILE} + ) + add_custom_target( + 
trailbook_${args_NAME}_stage_prepare_sphinx_source_after + DEPENDS + ${DEPS_STAGE_PREPARE_SPHINX_SOURCE_AFTER} + COMMENT + "Prepare Sphinx source for trailbook: ${args_NAME}" + ) + add_custom_target( + trailbook_${args_NAME}_stage_build_sphinx_before + DEPENDS + $ + trailbook_${args_NAME}_stage_prepare_sphinx_source_after + ) + _add_trailbook_sphinx_build_command() + set(DEPS_STAGE_BUILD_SPHINX_AFTER + trailbook_${args_NAME}_stage_build_sphinx_before + ${CHECK_DONE_FILE_SPHINX_BUILD_COMMAND} + ) + add_custom_target( + trailbook_${args_NAME}_stage_build_sphinx_after + DEPENDS + ${DEPS_STAGE_BUILD_SPHINX_AFTER} + COMMENT + "Build Sphinx documentation for trailbook: ${args_NAME}" + ) + add_custom_target( + trailbook_${args_NAME}_stage_postprocess_sphinx_before + DEPENDS + $ + trailbook_${args_NAME}_stage_build_sphinx_after + ) + if(TRAILBOOK_INSTANCE_IS_RELEASE) + _add_trailbook_replace_latest_command() + _add_trailbook_copy_404_command() + _add_trailbook_render_redirect_template_command() + endif() + _add_trailbook_copy_versions_index_command() + + set(DEPS_STAGE_POSTPROCESS_SPHINX_AFTER + trailbook_${args_NAME}_stage_postprocess_sphinx_before + ${CHECK_DONE_FILE_REPLACE_LATEST} + ${CHECK_DONE_FILE_COPY_404} + ${CHECK_DONE_FILE_COPY_VERSIONS_INDEX} + ${CHECK_DONE_FILE_RENDER_REDIRECT_TEMPLATE} + ) + add_custom_target( + trailbook_${args_NAME}_stage_postprocess_sphinx_after + DEPENDS + ${DEPS_STAGE_POSTPROCESS_SPHINX_AFTER} + COMMENT + "Post-process Sphinx documentation for trailbook: ${args_NAME}" + ) + add_custom_target( + trailbook_${args_NAME} ALL + DEPENDS + trailbook_${args_NAME}_stage_postprocess_sphinx_after + COMMENT + "Build trailbook: ${args_NAME}" + ) + + _add_trailbook_preview_target() + _add_trailbook_live_preview_target() + + set_target_properties( + trailbook_${args_NAME} + PROPERTIES + TRAILBOOK_INSTANCE_BUILD_DIRECTORY "${TRAILBOOK_INSTANCE_BUILD_DIRECTORY}" + TRAILBOOK_BUILD_DIRECTORY "${TRAILBOOK_BUILD_DIRECTORY}" + TRAILBOOK_INSTANCE_NAME 
"${TRAILBOOK_${args_NAME}_INSTANCE_NAME}" + TRAILBOOK_INSTANCE_SOURCE_DIRECTORY "${TRAILBOOK_INSTANCE_SOURCE_DIRECTORY}" + TRAILBOOK_CURRENT_BINARY_DIR "${CMAKE_CURRENT_BINARY_DIR}" + ADDITIONAL_DEPS_STAGE_PREPARE_SPHINX_SOURCE_BEFORE "" + ADDITIONAL_DEPS_STAGE_BUILD_SPHINX_BEFORE "" + ADDITIONAL_DEPS_STAGE_POSTPROCESS_SPHINX_BEFORE "" + DEPS_STAGE_PREPARE_SPHINX_SOURCE_AFTER "${DEPS_STAGE_PREPARE_SPHINX_SOURCE_AFTER}" + DEPS_STAGE_BUILD_SPHINX_AFTER "${DEPS_STAGE_BUILD_SPHINX_AFTER}" + DEPS_STAGE_POSTPROCESS_SPHINX_AFTER "${DEPS_STAGE_POSTPROCESS_SPHINX_AFTER}" + ) +endfunction() diff --git a/cmake/trailbook/check_path_exists.py b/cmake/trailbook/check_path_exists.py new file mode 100755 index 0000000000..0bb7190848 --- /dev/null +++ b/cmake/trailbook/check_path_exists.py @@ -0,0 +1,107 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- +# +# SPDX-License-Identifier: Apache-2.0 +# Copyright Pionix GmbH and Contributors to EVerest +# +""" +author: andreas.heinrich@pionix.de +This script checks whether a directory exists or not and returns zero based on the flags provided. 
+""" + + +import argparse +from pathlib import Path + + +def main(): + parser = argparse.ArgumentParser(description='Checks whether a directory exists or not and returns zero based on the flags provided') + parser.add_argument( + '--directory', + type=Path, + dest='directory', + action='store', + required=False, + help='Directory to check for existence' + ) + parser.add_argument( + '--file', + type=Path, + dest='file', + action='store', + required=False, + help='Path to a file to check for existence' + ) + parser.add_argument( + '--return-zero-if-exists', + action='store_true', + help='Return zero if the file/directory exists', + dest='return_zero_if_exists', + ) + parser.add_argument( + '--return-zero-if-not-exists', + action='store_true', + help='Return zero if the file/directory does not exist', + dest='return_zero_if_not_exists', + ) + args = parser.parse_args() + + if not args.directory and not args.file: + raise ValueError("Either --directory or --file must be specified") + if args.return_zero_if_exists and args.return_zero_if_not_exists: + raise ValueError("Cannot use both --return-zero-if-exists and --return-zero-if-not-exists at the same time") + + if args.file: + if not args.file.is_absolute(): + raise ValueError("File path must be absolute") + if args.return_zero_if_exists: + if not args.file.exists(): + print(f"❌ File does not exist at {args.file}") + exit(1) + if not args.file.is_file(): + print(f"❌ Path exists but is not a file at {args.file}") + exit(2) + print(f"✅ File exists at {args.file}") + exit(0) + elif args.return_zero_if_not_exists: + if args.file.is_file(): + print(f"❌ File exists at {args.file}") + exit(1) + if args.file.exists(): + print(f"❌ Path exists but is not a file at {args.file}") + exit(2) + print(f"✅ File does not exist at {args.file}") + exit(0) + else: + raise ValueError("Either --return-zero-if-exists or --return-zero-if-not-exists must be specified") + else: + if not args.directory.is_absolute(): + raise ValueError("Directory 
path must be absolute") + if args.return_zero_if_exists: + if not args.directory.exists(): + print(f"❌ Directory does not exist at {args.directory}") + exit(1) + if not args.directory.is_dir(): + print(f"❌ Path exists but is not a directory at {args.directory}") + exit(2) + print(f"✅ Directory exists at {args.directory}") + exit(0) + elif args.return_zero_if_not_exists: + if args.directory.is_dir(): + print(f"❌ Directory exists at {args.directory}") + exit(1) + if args.directory.exists(): + print(f"❌ Path exists but is not a directory at {args.directory}") + exit(2) + print(f"✅ Directory does not exist at {args.directory}") + exit(0) + else: + raise ValueError("Either --return-zero-if-exists or --return-zero-if-not-exists must be specified") + + +if __name__ == "__main__": + try: + main() + except Exception as e: + print(f"Error: {e}") + exit(1) diff --git a/cmake/trailbook/check_requirements_txt.py b/cmake/trailbook/check_requirements_txt.py new file mode 100755 index 0000000000..74afdf9057 --- /dev/null +++ b/cmake/trailbook/check_requirements_txt.py @@ -0,0 +1,76 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- +# +# SPDX-License-Identifier: Apache-2.0 +# Copyright Pionix GmbH and Contributors to EVerest +# +""" +author: andreas.heinrich@pionix.de +This script checks whether the packages in a requirements.txt are satisfied. +If run inside a virtual environment, it can optionally fix unmet requirements by running pip install -r. 
+""" + + +import argparse +import sys +from importlib.metadata import version, PackageNotFoundError +import re +import subprocess + + +def parse_requirement(req_line: str): + req_line = req_line.strip() + if not req_line or req_line.startswith("#"): + return None + match = re.match(r"([a-zA-Z0-9_\-]+)==([0-9\.]+)", req_line) + if match: + return match.groups() + return (req_line, None) + + +def check_requirements(file_path: str, fix_in_venv: bool = False): + errors = [] + with open(file_path, "r") as f: + for line in f: + parsed = parse_requirement(line) + if not parsed: + continue + pkg, req_version = parsed + try: + installed_version = version(pkg) + if req_version and installed_version != req_version: + errors.append(f"{pkg}=={req_version} (installed: {installed_version})") + except PackageNotFoundError: + errors.append(f"{pkg}=={req_version or 'any version'} (not installed)") + + if fix_in_venv and errors: + if sys.prefix != sys.base_prefix: + print(f"Attempting to fix requirements in the current venv: {sys.prefix}") + subprocess.run([sys.executable, "-m", "pip", "install", "-r", file_path], check=True) + return check_requirements(file_path, fix_in_venv=False) + else: + print("Not in a virtual environment. 
Cannot fix requirements automatically.") + + if not errors: + print("✅ All requirements are met.") + else: + print("❌ There are unmet requirements:") + for e in errors: + print(" ", e) + sys.exit(1) + + +def main(): + parser = argparse.ArgumentParser(description="Checks if the packages in a requirements.txt are satisfied.") + parser.add_argument("requirements_file", type=str, help="Path to the requirements.txt") + parser.add_argument("--fix-in-venv", action="store_true", help="Run pip install -r in the current venv if there are unmet requirements") + args = parser.parse_args() + check_requirements(args.requirements_file, args.fix_in_venv) + + +if __name__ == "__main__": + try: + main() + except Exception as e: + print(f"Error: {e}") + exit(1) diff --git a/cmake/trailbook/create_metadata_yaml.py b/cmake/trailbook/create_metadata_yaml.py new file mode 100755 index 0000000000..1230c1dd8b --- /dev/null +++ b/cmake/trailbook/create_metadata_yaml.py @@ -0,0 +1,85 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- +# +# SPDX-License-Identifier: Apache-2.0 +# Copyright Pionix GmbH and Contributors to EVerest +# +""" +author: andreas.heinrich@pionix.de +This script creates a trailbook_metadata.yaml file +based on the versions found in the multiversion root directory. 
+""" + + +import argparse +from pathlib import Path +import yaml + + +def main(): + parser = argparse.ArgumentParser(description='Creates a trailbook_metadata.yaml file') + + parser.add_argument( + '--multiversion-root-directory', + type=Path, + dest='multiversion_root_dir', + action='store', + required=True, + help='Path to the root directory of the multiversion documentation' + ) + parser.add_argument( + '--output-path', + type=Path, + dest='output_path', + action='store', + required=True, + help='Path where the trailbook_metadata.yaml file will be created' + ) + parser.add_argument( + '--additional-version', + type=str, + dest='additional_versions', + action='append', + default=[], + help='Additional version to include in the metadata (can be used multiple times)' + ) + args = parser.parse_args() + + if not args.multiversion_root_dir.is_absolute(): + raise ValueError("Multiversion root directory must be absolute") + if not args.multiversion_root_dir.is_dir(): + print(f"\033[33mWarning: {args.multiversion_root_dir} does not exist or is not a directory, it is treated as an empty multiversion root dir\033[0m") + if not args.output_path.is_absolute(): + raise ValueError("Output path must be absolute") + if args.output_path.exists(): + raise FileExistsError("Output path already exists") + + versions_list = [] + if args.multiversion_root_dir.is_dir(): + for instance_dir in args.multiversion_root_dir.iterdir(): + if not instance_dir.is_dir(): + continue + if not (instance_dir / 'index.html').is_file(): + continue + versions_list.append(instance_dir.name) + versions_list.extend(args.additional_versions) + versions_list = list(set(versions_list)) + if len(versions_list) == 0: + raise ValueError("No versions found in the specified multiversion root directory") + versions_list.sort() + + # create yaml content + data = { + 'versions': versions_list + } + # render yaml content + with args.output_path.open('w') as f: + yaml.dump(data, f, default_flow_style=False) + + +if 
__name__ == "__main__": + try: + main() + except Exception as e: + print(f"Error: {e}") + exit(1) diff --git a/cmake/trailbook/filelist_manager.py b/cmake/trailbook/filelist_manager.py new file mode 100755 index 0000000000..e08c05db0d --- /dev/null +++ b/cmake/trailbook/filelist_manager.py @@ -0,0 +1,223 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- +# +# SPDX-License-Identifier: Apache-2.0 +# Copyright Pionix GmbH and Contributors to EVerest +# +""" +author: andreas.heinrich@pionix.de +This script provides command to manage a list of file paths +It can be used for custom cmake commands to track created files and directories +and later remove or move them. +""" + + +import argparse +from pathlib import Path +import yaml + + +def create_filelist(args): + if not args.root_dir.exists(): + raise ValueError("Root directory does not exist") + if not args.root_dir.is_dir(): + raise ValueError("Root directory must be a directory") + + if args.data_file.exists(): + raise FileExistsError("Data file already exists") + + file_paths = [] + directory_paths = [] + for item in args.root_dir.rglob('*'): + relative_path = item.relative_to(args.root_dir) + if item.is_dir(): + directory_paths.append(str(relative_path)) + elif item.is_file(): + file_paths.append(str(relative_path)) + else: + raise ValueError(f"Unknown file type: {item}") + + data = { + 'files': file_paths, + 'directories': directory_paths + } + + args.data_file.parent.mkdir(parents=True, exist_ok=True) + with args.data_file.open('w') as f: + yaml.dump(data, f) + exit(0) + + +def remove_filelist(args): + if not args.data_file.exists(): + exit(0) + if not args.data_file.is_file(): + raise ValueError("Data file path is not a file") + + with args.data_file.open('r') as f: + data = yaml.safe_load(f) + + for file_path in data.get('files', []): + full_path = args.root_dir / file_path + if not full_path.exists(): + raise FileNotFoundError(f"File does not exist: {full_path}") + if not full_path.is_file(): + raise 
ValueError(f"Path is not a file: {full_path}") + full_path.unlink() + + for dir_path in data.get('directories', []): + full_path = args.root_dir / dir_path + if not full_path.exists(): + raise FileNotFoundError(f"Directory does not exist: {full_path}") + if not full_path.is_dir(): + raise ValueError(f"Path is not a directory: {full_path}") + + if len(list(full_path.iterdir())) > 0: + continue + + full_path.rmdir() + + args.data_file.unlink() + + exit(0) + + +def move_filelist(args): + if not args.root_dir.exists(): + raise ValueError("Root directory does not exist") + if not args.root_dir.is_dir(): + raise ValueError("Root directory must be a directory") + + if not args.data_file.exists(): + raise FileNotFoundError("Data file does not exist") + if not args.data_file.is_file(): + raise ValueError("Data file path is not a file") + + if not args.target_root_dir.is_absolute(): + raise ValueError("Target root directory must be absolute") + if args.target_root_dir.exists(): + if not args.target_root_dir.is_dir(): + raise ValueError("Target root directory must be a directory") + + with args.data_file.open('r') as f: + data = yaml.safe_load(f) + + for file_path in data.get('files', []): + source_file = args.root_dir / file_path + target_file = args.target_root_dir / file_path + target_file.parent.mkdir(parents=True, exist_ok=True) + source_file.rename(target_file) + + for dir_path in data.get('directories', []): + source_dir = args.root_dir / dir_path + target_dir = args.target_root_dir / dir_path + if not target_dir.exists(): + source_dir.rename(target_dir) + exit(0) + + +def main(): + parser = argparse.ArgumentParser(description='This script provides command to manage a list of file paths') + + subparsers = parser.add_subparsers() + + create_parser = subparsers.add_parser( + "create", + description="Creates the file with a list of all paths in it", + add_help=True, + ) + create_parser.add_argument( + '--data-file', + type=Path, + dest='data_file', + action='store', + 
required=True, + help='File to read/write from/to filelist' + ) + create_parser.add_argument( + '--root-directory', + type=Path, + dest='root_dir', + action='store', + required=True, + help='Path to the directory to list' + ) + create_parser.set_defaults( + action_handler=create_filelist + ) + + remove_parser = subparsers.add_parser( + "remove", + description="Removes all files and directories listed in the filelist", + add_help=True, + ) + remove_parser.add_argument( + '--data-file', + type=Path, + dest='data_file', + action='store', + required=True, + help='File to read/write from/to filelist' + ) + remove_parser.add_argument( + '--root-directory', + type=Path, + dest='root_dir', + action='store', + required=True, + help='Path to the directory to list' + ) + remove_parser.set_defaults( + action_handler=remove_filelist + ) + + move_parser = subparsers.add_parser( + "move", + description="Moves all files and directories listed in the filelist to a new root directory", + add_help=True, + ) + move_parser.add_argument( + '--data-file', + type=Path, + dest='data_file', + action='store', + required=True, + help='File to read/write from/to filelist' + ) + move_parser.add_argument( + '--root-directory', + type=Path, + dest='root_dir', + action='store', + required=True, + help='Path to the directory to list' + ) + move_parser.add_argument( + '--target-root-directory', + type=Path, + dest='target_root_dir', + action='store', + required=True, + help='Path to the target root directory to move files to' + ) + move_parser.set_defaults( + action_handler=move_filelist + ) + + args = parser.parse_args() + + if not args.root_dir.is_absolute(): + raise ValueError("Root directory must be absolute") + + if not args.data_file.is_absolute(): + raise ValueError("Data file path must be absolute") + + if 'action_handler' not in args: + raise ValueError("No action specified") + + args.action_handler(args) + exit(0) + + +if __name__ == "__main__": + main() diff --git 
a/cmake/trailbook/render_redirect_template.py b/cmake/trailbook/render_redirect_template.py new file mode 100755 index 0000000000..c00089b8f9 --- /dev/null +++ b/cmake/trailbook/render_redirect_template.py @@ -0,0 +1,80 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- +# +# SPDX-License-Identifier: Apache-2.0 +# Copyright Pionix GmbH and Contributors to EVerest +# +""" +author: andreas.heinrich@pionix.de +This script processes a redirect template and generates a .html file +""" + + +import argparse +import jinja2 +from pathlib import Path + + +def main(): + parser = argparse.ArgumentParser(description='Process versions_index.html.jinja and place redirect.html in the output directory') + parser.add_argument( + '--redirect-template', + type=Path, + dest='redirect_template', + action='store', + required=True, + help="Redirect jinja template file" + ) + parser.add_argument( + '--target-path', + type=Path, + dest='target_path', + action='store', + required=True, + help="Target path for the output" + ) + parser.add_argument( + '--latest-release-name', + type=str, + dest='latest_release_name', + action='store', + default="latest", + help="Name of the latest release" + ) + args = parser.parse_args() + + if not args.redirect_template.is_absolute(): + raise ValueError("Redirect template path must be absolute") + if not args.redirect_template.exists(): + raise FileNotFoundError( + "Redirect template path: '" + + str(args.redirect_template) + + "' doesn't exist" + ) + if not args.redirect_template.is_file(): + raise FileNotFoundError( + f"Redirect template path: '{args.redirect_template}' is not a file" + ) + + template_dir = args.redirect_template.parent + template_name = args.redirect_template.name + + env = jinja2.Environment( + loader=jinja2.FileSystemLoader(template_dir), + trim_blocks=True, + lstrip_blocks=True + ) + + template = env.get_template(template_name) + output = template.render( + latest_release=args.latest_release_name + ) + 
args.target_path.write_text(output) + + +if __name__ == "__main__": + try: + main() + except Exception as e: + print(f"Error: {e}") + exit(1) diff --git a/cmake/trailbook/setup-trailbook.cmake b/cmake/trailbook/setup-trailbook.cmake new file mode 100644 index 0000000000..e3671185ad --- /dev/null +++ b/cmake/trailbook/setup-trailbook.cmake @@ -0,0 +1,46 @@ +# Internal macro to find the sphinx-build executable. +macro(_find_sphinx_build) + execute_process( + COMMAND + ${Python3_EXECUTABLE} -m sphinx.cmd.build --version + RESULT_VARIABLE RESULT_SPHINX_VERSION + ) + if("${RESULT_SPHINX_VERSION}" STREQUAL "0") + set(_SPHINX_BUILD_EXECUTABLE "${Python3_EXECUTABLE}" "-m" "sphinx.cmd.build") + else() + set(_SPHINX_BUILD_EXECUTABLE "_SPHINX_BUILD_EXECUTABLE-NOTFOUND") + endif() +endmacro() + +# Internal macro to find sphinx-build, and if not found, try to install it in an active python venv. +macro(_find_and_fix_sphinx_build) + _find_sphinx_build() + + if("${_SPHINX_BUILD_EXECUTABLE}" STREQUAL "_SPHINX_BUILD_EXECUTABLE-NOTFOUND") + ev_is_python_venv_active( + RESULT_VAR IS_PYTHON_VENV_ACTIVE + ) + if(IS_PYTHON_VENV_ACTIVE) + message(STATUS "sphinx-build executable not found in system, but python venv is active. Trying to use 'python3 -m pip install sphinx'.") + execute_process( + COMMAND ${Python3_EXECUTABLE} -m pip install sphinx + ) + _find_sphinx_build() + endif() + endif() + + if("${_SPHINX_BUILD_EXECUTABLE}" STREQUAL "_SPHINX_BUILD_EXECUTABLE-NOTFOUND") + message(FATAL_ERROR "sphinx-build executable not found. Please install Sphinx. You can install it via pip: pip install sphinx") + endif() + + message(STATUS "Found sphinx-build: ${_SPHINX_BUILD_EXECUTABLE}") +endmacro() + +# Internal macro to set up the trailbook environment. 
+macro(_setup_trailbook) + if(NOT _TRAILBOOK_SETUP_DONE) + _find_and_fix_sphinx_build() + + set(_TRAILBOOK_SETUP_DONE TRUE) + endif() +endmacro() diff --git a/cmake/trailbook/target_observer.py b/cmake/trailbook/target_observer.py new file mode 100755 index 0000000000..fa407556cf --- /dev/null +++ b/cmake/trailbook/target_observer.py @@ -0,0 +1,123 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- +# +# SPDX-License-Identifier: Apache-2.0 +# Copyright Pionix GmbH and Contributors to EVerest +# +""" +author: andreas.heinrich@pionix.de +This script starts a CMake target http server and triggers +regular rebuilds of a specified CMake target upon changes. +""" + + +import subprocess +import sys +import time +import argparse +import signal +from pathlib import Path +from rich.live import Live +from rich.console import Console +from rich.panel import Panel +from rich.layout import Layout +from threading import Thread + + +console = Console() + + +def run_target(build_dir: Path, target: str, live_panel, panel_size: int) -> None: + process = subprocess.Popen( + [ + "cmake", + "--build", build_dir.as_posix(), + "--target", target + ], + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + text=True + ) + output_lines = [] + output_lines.append(f"Running target {target} at {time.strftime('%X')}\n") + for line in iter(process.stdout.readline, ""): + line = line.rstrip() + output_lines.append(line) + output_lines = output_lines[-panel_size:] + live_panel.update(Panel("\n".join(output_lines), title=f"{target} output")) + process.wait() + + +def start_server(build_dir: Path, server_target: str, server_lines: list, live_panel, panel_size: int) -> subprocess.Popen: + print(f"Starting server target {server_target}...") + process = subprocess.Popen( + [ + "cmake", + "--build", str(build_dir), + "--target", server_target + ], + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + text=True, + ) + + def read_server_output(): + for line in iter(process.stdout.readline, ""): + line 
= line.rstrip() + server_lines.append(line) + server_lines[:] = server_lines[-panel_size:] + live_panel.update(Panel("\n".join(server_lines), title=f"{server_target} output")) + + t = Thread(target=read_server_output, daemon=True) + t.start() + return process + + +def stop_server(proc: subprocess.Popen) -> None: + if proc and proc.poll() is None: + print("Stopping server...") + proc.send_signal(signal.SIGINT) + try: + proc.wait(timeout=5) + except subprocess.TimeoutExpired: + proc.kill() + + +def main(): + parser = argparse.ArgumentParser(description="Watch CMake target and manage server target") + parser.add_argument("watch_target", help="CMake target to monitor and rerun if changed") + parser.add_argument("server_target", help="CMake target that runs the server") + parser.add_argument("--build-dir", default="build", help="CMake build directory") + parser.add_argument("--interval-ms", type=int, default=2000, help="Check interval in milliseconds") + args = parser.parse_args() + + build_dir = Path(args.build_dir) + watch_target = args.watch_target + server_target = args.server_target + + panel_size = 10 + layout = Layout() + layout.split_column( + Layout(name="server", size=panel_size+2), + Layout(name="watch", size=panel_size+2) + ) + with Live(layout, console=console, refresh_per_second=1): + server_lines = [] + server_lines.append("Starting server...") + server_panel = Panel("\n".join(server_lines), title=f"{server_target} output") + layout["server"].update(server_panel) + watch_panel = Panel("\n\n\n", title=f"{watch_target} output") + layout["watch"].update(watch_panel) + + server_proc = start_server(build_dir, server_target, server_lines, layout["server"], panel_size) + try: + while True: + time.sleep(args.interval_ms / 1000) + run_target(build_dir, watch_target, layout["watch"], panel_size) + except KeyboardInterrupt: + stop_server(server_proc) + print("\n Exiting.") + + +if __name__ == "__main__": + main() diff --git 
a/cmake/trailbook/templates/redirect.html.jinja b/cmake/trailbook/templates/redirect.html.jinja new file mode 100644 index 0000000000..4a9a012881 --- /dev/null +++ b/cmake/trailbook/templates/redirect.html.jinja @@ -0,0 +1,11 @@ + + + + + Redirecting... + + + + + + diff --git a/cmake/trailbook/trailbook-config-version.cmake b/cmake/trailbook/trailbook-config-version.cmake new file mode 100644 index 0000000000..cf62352471 --- /dev/null +++ b/cmake/trailbook/trailbook-config-version.cmake @@ -0,0 +1,13 @@ +set(PACKAGE_VERSION 0.1.0) + +if(PACKAGE_FIND_VERSION STREQUAL PACKAGE_VERSION) + set(PACKAGE_VERSION_EXACT TRUE) +elseif(PACKAGE_FIND_VERSION_MAJOR STREQUAL "0") + if(PACKAGE_FIND_VERSION_MINOR GREATER "1") + set(PACKAGE_VERSION_UNSUITABLE TRUE) + else() + set(PACKAGE_VERSION_COMPATIBLE TRUE) + endif() +else() + set(PACKAGE_VERSION_UNSUITABLE TRUE) +endif() diff --git a/cmake/trailbook/trailbook-config.cmake b/cmake/trailbook/trailbook-config.cmake new file mode 100644 index 0000000000..be54a385eb --- /dev/null +++ b/cmake/trailbook/trailbook-config.cmake @@ -0,0 +1,5 @@ +include("${CMAKE_CURRENT_LIST_DIR}/setup-trailbook.cmake") +_setup_trailbook() + + +include("${CMAKE_CURRENT_LIST_DIR}/add-trailbook.cmake") diff --git a/doc/CMakeLists.txt b/doc/CMakeLists.txt deleted file mode 100644 index 8f4f2692cf..0000000000 --- a/doc/CMakeLists.txt +++ /dev/null @@ -1 +0,0 @@ -add_subdirectory(everest_api_specs) diff --git a/doc/everest_api_specs/CMakeLists.txt b/doc/everest_api_specs/CMakeLists.txt deleted file mode 100644 index ea7cd531a9..0000000000 --- a/doc/everest_api_specs/CMakeLists.txt +++ /dev/null @@ -1,47 +0,0 @@ -if(EVEREST_BUILD_API_DOCS) - include(${PROJECT_SOURCE_DIR}/cmake/fetch_async_api.cmake) - include(${PROJECT_SOURCE_DIR}/cmake/fetch_async_api_html_template.cmake) -endif() - -# Create for each API a list with: folder name, api camel case, api snake case and the prefix -set(SET01 "auth_consumer_API" "AuthConsumerAPI" "auth_consumer_api" "tc") 
-set(SET02 "auth_token_provider_API" "AuthTokenProviderAPI" "auth_token_provider_api" "tp") -set(SET03 "auth_token_validator_API" "AuthTokenValidatorAPI" "auth_token_validator_api" "tv") -set(SET04 "dc_external_derate_consumer_API" "DcExternalDerateConsumerAPI" "dc_external_derate_consumer_api" "ded") -set(SET05 "display_message_API" "DisplayMessageAPI" "display_message_api" "dm") -set(SET06 "error_history_consumer_API" "ErrorHistoryConsumerAPI" "error_history_consumer_api" "eh") -set(SET07 "evse_board_support_API" "EvseBspAPI" "evse_bsp_api" "bsp") -set(SET08 "evse_manager_consumer_API" "EvseManagerConsumerAPI" "evse_manager_consumer_api" "em") -set(SET09 "external_energy_limits_consumer_API" "ExternalEnergyLimitsConsumerAPI" "external_energy_limits_consumer_api" "eel") -set(SET10 "generic_error_raiser_API" "GenericErrorRaiserAPI" "generic_error_raiser_api" "err") -set(SET11 "isolation_monitor_API" "IsolationMonitorAPI" "isolation_monitor_api" "im") -set(SET12 "ocpp_consumer_API" "OcppConsumerAPI" "ocpp_consumer_api" "oc") -set(SET13 "over_voltage_monitor_API" "OverVoltageMonitorAPI" "over_voltage_monitor_api" "ovm") -set(SET14 "power_supply_DC_API" "PowerSupplyDCAPI" "power_supply_dc_api" "ps") -set(SET15 "powermeter_API" "PowermeterAPI" "powermeter_api" "pm") -set(SET16 "session_cost_API" "SessionCostAPI" "session_cost_api" "sc") -set(SET17 "session_cost_consumer_API" "SessionCostConsumerAPI" "session_cost_api" "scc") -set(SET18 "system_API" "SystemAPI" "system_api" "sys") - - - -# create a list to iterate over (for documetation generation and client) -set(API_SPECS "SET01" "SET02" "SET03" "SET04" "SET05" "SET06" "SET07" "SET08" "SET09" "SET10" "SET11" "SET12" "SET13" "SET14" "SET15" "SET16" "SET17" "SET18") - -# Iterate over the list and create the documentation -if(EVEREST_BUILD_API_DOCS) - include(${PROJECT_SOURCE_DIR}/cmake/generate-api-docs.cmake) - - add_custom_target(everest_api_docs ALL) - - foreach(API_SPEC IN LISTS API_SPECS) - set(API_SPEC_PARAMS 
${${API_SPEC}}) - list(GET API_SPEC_PARAMS 0 API_NAME) - message(STATUS "Documentation generation for: ${API_NAME}") - generate_async_api_docs( - API_PATH ${CMAKE_CURRENT_SOURCE_DIR}/${API_NAME}/asyncapi.yaml - API_NAME ${API_NAME} - ) - endforeach() -endif() - diff --git a/doc/ocmf/powermeter_start.md b/doc/ocmf/powermeter_start.md deleted file mode 100644 index de33b065fe..0000000000 --- a/doc/ocmf/powermeter_start.md +++ /dev/null @@ -1,32 +0,0 @@ -```mermaid -sequenceDiagram -autonumber -participant Powermeter -participant EvseManager - -title Start of Powermeter or recovery after communication loss - -Note over Powermeter: Device communication (re)established - -Powermeter->>Powermeter: Request status from device -Powermeter->>Powermeter: Detects a running transaction -Powermeter->>Powermeter: Mark need_to_stop_transaction to true - -alt Next command is startTransaction - EvseManager->>Powermeter: startTransaction - Powermeter-->>Powermeter: stopTransaction - Note over Powermeter: internal triggered stopTransaction will not send
a response to EvseManager since no stopTransaction was issued - Powermeter->>Powermeter: Mark need_to_stop_transaction to false - Powermeter-->>EvseManager: startTransaction Response (OK/ID) - Powermeter->>Powermeter: Mark need_to_stop_transaction to true - - Note over EvseManager: Transaction started successfully - -else Next command is stopTransaction - EvseManager->>Powermeter: stopTransaction - Powermeter-->>EvseManager: stopTransaction Response (OK/OCMF) - Powermeter->>Powermeter: Mark need_to_stop_transaction to false -end - -Note over Powermeter: In case of CommunicationError during start/stop
transaction please check the start/stop transaction diagrams -``` \ No newline at end of file diff --git a/doc/ocmf/powermeter_start_transaction.md b/doc/ocmf/powermeter_start_transaction.md deleted file mode 100644 index efb8b7c0a9..0000000000 --- a/doc/ocmf/powermeter_start_transaction.md +++ /dev/null @@ -1,52 +0,0 @@ -```mermaid -sequenceDiagram -autonumber -participant Powermeter -participant EvseManager -participant OCPP -participant CSMS - -title Start of a Transaction - -Note over EvseManager: User plugs in EV and authorizes - -EvseManager->>OCPP: Event(SessionStarted) - -OCPP->>CSMS: StatusNotification.req(Preparing) -CSMS-->>OCPP: StatusNotification.conf - -alt successful case - EvseManager->>Powermeter: startTransaction - Powermeter-->>EvseManager: startTransaction Response (OK/ID) - - EvseManager->>OCPP: Event(TransactionStarted) - OCPP->>CSMS: StartTransaction.req - CSMS-->>OCPP: StartTransaction.conf - - Note over EvseManager: Transaction started successfully - -else startTransaction failing due to power loss - EvseManager->>Powermeter: startTransaction - Powermeter-->>EvseManager: startTransaction Response (FAIL) - - EvseManager->>OCPP: Event(Deauthorized) - - OCPP->>CSMS: StatusNotification.req(Finishing) - CSMS-->>OCPP: StatusNotification.conf - - EvseManager->>OCPP: raiseError (PowermeterTransactionStartFailed) - OCPP->>CSMS: StatusNotification.req(Finishing, PowermeterTransactionStartFailed) - CSMS-->>OCPP: StatusNotification.conf - - Note over EvseManager: Transaction did not start -end - -alt EvseManager configured to become inoperative in case of Powermeter CommunicationError - Powermeter->>EvseManager: raise_error(CommunicationError) - Note over Powermeter,EvseManager: Powermeter raises a CommunicationError
and EvseManager is registered for notification - EvseManager->>OCPP: raise_error (Inoperative) - OCPP->>CSMS: StatusNotification.req(Faulted) - CSMS-->>OCPP: StatusNotification.conf -end - -``` \ No newline at end of file diff --git a/doc/ocmf/powermeter_stop_transaction.md b/doc/ocmf/powermeter_stop_transaction.md deleted file mode 100644 index 5c36d14b13..0000000000 --- a/doc/ocmf/powermeter_stop_transaction.md +++ /dev/null @@ -1,46 +0,0 @@ -```mermaid -sequenceDiagram -autonumber -participant Powermeter -participant EvseManager -participant OCPP -participant CSMS - -title Stopping Transaction in Error - -Note over Powermeter, CSMS: Transaction is running - -Powermeter->>Powermeter: detects a
CommunicationError -Note over Powermeter,EvseManager: Powermeter raises a CommunicationError
and EvseManager is registered for notification -Powermeter->>EvseManager: raise_error (CommunicationFault) -Powermeter->>OCPP: raise_error (CommunicationFault) - -OCPP->>CSMS: StatusNotification.req(Charging, CommunicationFault) -CSMS-->>OCPP: StatusNotification.conf - -alt EvseManager configured to become inoperative in case of PowermeterCommError - EvseManager->>EvseManager: Pause charging - EvseManager->>OCPP: raiseError (Inoperative) - OCPP->>CSMS: StatusNotification.req(Faulted) - Note over EvseManager: Note that we would just continue charging otherwise -end - -Note over Powermeter, CSMS: User stops the transaction - -alt successful case (Powermeter has no CommunicationError) - EvseManager->>Powermeter: stopTransaction (ID) - Powermeter-->>EvseManager: stopTransaction Response (OK/OCMF) - EvseManager->>OCPP: Event(TransactionFinished(OCMF)) - - OCPP->>CSMS: StopTransaction.req(OCMF) - CSMS-->>OCPP: StopTransaction.conf -else stopTransaction failing due to subsequent power loss (this applies as well when Powermeter still in CommunicationError) - EvseManager->>Powermeter: stopTransaction (ID) - Powermeter->>EvseManager: stopTransaction Response (FAIL) - EvseManager->>OCPP: Event(TransactionFinished) - - Note right of OCPP: In this case we can't stop the transaction including the OCMF - OCPP->>CSMS: StopTransaction.req() - CSMS-->>OCPP: StopTransaction.conf -end -``` \ No newline at end of file diff --git a/docs/CMakeLists.txt b/docs/CMakeLists.txt new file mode 100644 index 0000000000..11cef5cb67 --- /dev/null +++ b/docs/CMakeLists.txt @@ -0,0 +1,38 @@ +configure_file( + ${CMAKE_CURRENT_SOURCE_DIR}/source/conf.py.in + ${CMAKE_CURRENT_SOURCE_DIR}/source/conf.py + @ONLY +) + +find_package( + trailbook + 0.1.0 + REQUIRED + PATHS "${CMAKE_SOURCE_DIR}/cmake" +) +find_package( + trailbook-ext-everest + 0.1.0 + REQUIRED + PATHS "${CMAKE_SOURCE_DIR}/cmake" +) + +set(EVEREST_DOCS_REPO_URL "git@github.com:everest/everest.github.io" CACHE STRING "The git URL of the 
repository where the deployed docs are stored")
+add_trailbook(
+    NAME "everest"
+    STEM_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}/source
+    DEPLOYED_DOCS_REPO_URL ${EVEREST_DOCS_REPO_URL}
+    DEPLOYED_DOCS_REPO_BRANCH "main"
+)
+
+get_target_property(
+    TRAILBOOK_INSTANCE_SOURCE_DIRECTORY
+    trailbook_everest
+    TRAILBOOK_INSTANCE_SOURCE_DIRECTORY
+)
+get_filename_component(EVEREST_WORKSPACE_DIRECTORY ${CMAKE_SOURCE_DIR} DIRECTORY)
+trailbook_ev_create_snapshot(
+    EVEREST_WORKSPACE_DIRECTORY "${EVEREST_WORKSPACE_DIRECTORY}"
+    TRAILBOOK_NAME "everest"
+    OUTPUT_FILE "${TRAILBOOK_INSTANCE_SOURCE_DIRECTORY}/reference/assets/snapshot.yaml"
+)
diff --git a/docs/README.md b/docs/README.md
new file mode 100644
index 0000000000..6b630447f4
--- /dev/null
+++ b/docs/README.md
@@ -0,0 +1,85 @@
+# trailbook: EVerest
+
+The EVerest documentation uses the CMake package `trailbook`
+to build and manage its documentation.
+Additionally, it uses the extension package `trailbook-ext-everest`.
+
+## Configure CMake
+
+There are three CMake variables you need to know about.
+
+### `EVEREST_BUILD_DOCS`
+
+The CMake variable `EVEREST_BUILD_DOCS` enables or disables the building of the
+EVerest documentation. It is disabled by default. To enable it, set the variable to `ON` when configuring CMake:
+
+```bash
+cmake -D EVEREST_BUILD_DOCS=ON
+```
+
+### `TRAILBOOK_everest_DOWNLOAD_ALL_VERSIONS`
+
+The CMake variable `TRAILBOOK_everest_DOWNLOAD_ALL_VERSIONS` controls whether
+all available versions of the EVerest documentation are downloaded during the build process.
+By default, this variable is set to `OFF`, meaning an empty multiversion skeleton is created
+during the build. 
+
+To enable the downloading of all available versions, set the variable to `ON` when configuring CMake:
+
+```bash
+cmake -D EVEREST_BUILD_DOCS=ON -D TRAILBOOK_everest_DOWNLOAD_ALL_VERSIONS=ON
+```
+
+With this, the `everest.github.io` repository is cloned and the newly built documentation is embedded into
+the existing multiversion structure.
+
+### `TRAILBOOK_everest_IS_RELEASE`
+
+If you don't want to deploy the documentation you probably don't need to care about this variable.
+
+The CMake variable `TRAILBOOK_everest_IS_RELEASE` indicates whether the current build is a release build. It defaults to `ON`, which means that files in the root of the multiversion structure
+such as `index.html` and `404.html` are updated. Additionally, the `latest` symlink is updated to point to the current version.
+
+If the documentation needs to be built as a nightly, for example, the variable can
+be set to `OFF` when configuring CMake:
+
+```bash
+cmake -D EVEREST_BUILD_DOCS=ON -D TRAILBOOK_everest_DOWNLOAD_ALL_VERSIONS=ON -D TRAILBOOK_everest_IS_RELEASE=OFF
+```
+
+## Build
+
+There are three targets available to work with the EVerest documentation:
+
+```bash
+cmake --build --target trailbook_everest
+```
+Builds the EVerest documentation.
+
+```bash
+cmake --build --target trailbook_everest_preview
+```
+Builds the EVerest documentation and serves it with a local web server
+for previewing.
+
+```bash
+cmake --build --target trailbook_everest_live_preview
+```
+Builds the EVerest documentation and serves it with a local web server
+for previewing. Additionally, it watches for changes in the source files
+and automatically rebuilds the documentation and refreshes the preview
+in the browser.
+
+## How things work
+
+The trailbook is initialized in `${CMAKE_SOURCE_DIR}/docs/CMakeLists.txt`
+with the `add_trailbook()` function call.
+
+In the same file, an edm snapshot YAML file is added to the documentation by
+calling the function `trailbook_ev_create_snapshot()`. 
+ +Module explanations, module references, type references, and interface references are added automatically in `${CMAKE_SOURCE_DIR}/cmake/everest-generate.cmake`, by calling the functions +* `trailbook_ev_generate_rst_from_manifest()`, +* `trailbook_ev_generate_rst_from_types()`, +* `trailbook_ev_generate_rst_from_interface()`, and +* `trailbook_ev_add_module_explanation()`. diff --git a/docs/requirements.txt b/docs/requirements.txt new file mode 100644 index 0000000000..c029912737 --- /dev/null +++ b/docs/requirements.txt @@ -0,0 +1,17 @@ +# Main Sphinx library +sphinx + +# The Furo theme +furo + +# Provides modern design elements like grids, cards, and tabs +sphinx-design + +# Adds a "copy" button to code blocks for a better user experience +sphinx-copybutton + +# For rendering mermaid diagrams +sphinxcontrib-mermaid + +# Required for processing templates +PyYAML diff --git a/docs/source/404.rst b/docs/source/404.rst new file mode 100644 index 0000000000..5ef56d9c22 --- /dev/null +++ b/docs/source/404.rst @@ -0,0 +1,13 @@ +:orphan: + +########################################## +Page not found +########################################## + +Your request could not be processed because the page you requested does not exist. + +.. note:: + + Since the 2023.1.0 release of the documentation, + the structure of the documentation has changed. 
+ You can find the latest build here: https://everest.github.io/latest diff --git a/docs/source/_ext/staticpages/LICENSE b/docs/source/_ext/staticpages/LICENSE new file mode 100644 index 0000000000..2407d3f070 --- /dev/null +++ b/docs/source/_ext/staticpages/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2019 Manuel Kaufmann + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/docs/source/_ext/staticpages/__init__.py b/docs/source/_ext/staticpages/__init__.py new file mode 100644 index 0000000000..d3ec452c31 --- /dev/null +++ b/docs/source/_ext/staticpages/__init__.py @@ -0,0 +1 @@ +__version__ = "0.2.0" diff --git a/docs/source/_ext/staticpages/extension.py b/docs/source/_ext/staticpages/extension.py new file mode 100644 index 0000000000..ae5e1e54a8 --- /dev/null +++ b/docs/source/_ext/staticpages/extension.py @@ -0,0 +1,329 @@ +import docutils +import os +import sphinx +import warnings + +from sphinx.environment.collectors import EnvironmentCollector +from sphinx.errors import ExtensionError + +from . import __version__ +from .utils import replace_uris + + +class BaseURIError(ExtensionError): + """Exception for malformed base URI.""" + pass + + +# https://www.sphinx-doc.org/en/stable/extdev/appapi.html#event-html-collect-pages +def html_collect_pages(app): + """ + Create for each ```` a html page. + + Uses ``