# PR #1 — feat(ontology): Complete all 9 architectural issues - DashMap, transi…
name: Ontology Federation CI/CD

# Triggers: ontology content changes on push/PR, cross-repo dispatch from
# the Logseq source repo, and manual runs (optionally forcing a full sync).
on:
  push:
    branches: [main, develop]
    paths:
      - 'ontology/**'
      - 'public/ontology/**'
      - '.github/workflows/ontology-publish.yml'
  pull_request:
    branches: [main]
    paths:
      - 'ontology/**'
      - 'public/ontology/**'
  repository_dispatch:
    types: [logseq-sync]
  workflow_dispatch:
    inputs:
      source_repo:
        description: 'Source repository for ontology (e.g., jjohare/logseq)'
        required: false
        default: 'jjohare/logseq'
      force_sync:
        description: 'Force full sync even if no changes detected'
        required: false
        # Boolean-typed inputs take a boolean default, not the string 'false'.
        default: false
        type: boolean

env:
  # Repository holding the Logseq markdown pages to convert.
  ONTOLOGY_SOURCE_REPO: ${{ github.event.inputs.source_repo || 'jjohare/logseq' }}
  # JSS (LDP server) endpoint; override via the repository variable JSS_URL.
  JSS_URL: ${{ vars.JSS_URL || 'http://jss:3030' }}
  JSS_PUBLIC_PATH: /public/ontology
  CACHE_DIR: .ontology-cache
  ARTIFACT_RETENTION_DAYS: 30
jobs:
  validate-source:
    name: Validate Ontology Source
    runs-on: ubuntu-latest
    outputs:
      has_changes: ${{ steps.detect.outputs.has_changes }}
      source_sha: ${{ steps.detect.outputs.source_sha }}
      changed_files: ${{ steps.detect.outputs.changed_files }}
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          fetch-depth: 2

      - name: Checkout ontology source
        uses: actions/checkout@v4
        with:
          repository: ${{ env.ONTOLOGY_SOURCE_REPO }}
          path: logseq-source
          token: ${{ secrets.GITHUB_TOKEN }}

      - name: Detect changes
        id: detect
        # Pass the expression through env instead of interpolating it into
        # the script body (avoids shell-injection via workflow inputs).
        env:
          FORCE_SYNC: ${{ github.event.inputs.force_sync || 'false' }}
        run: |
          cd logseq-source

          # Record the exact source commit for traceability.
          SOURCE_SHA=$(git rev-parse HEAD)
          echo "source_sha=$SOURCE_SHA" >> "$GITHUB_OUTPUT"

          # NOTE: this is a full scan, not a diff against the previous sync —
          # any markdown page counts as "changed" (capped at 100 files to
          # bound the multi-line output).
          CHANGED_FILES=$(find . -name "*.md" -type f | head -100)
          {
            echo "changed_files<<EOF"
            echo "$CHANGED_FILES"
            echo "EOF"
          } >> "$GITHUB_OUTPUT"

          if [ -n "$CHANGED_FILES" ]; then
            echo "has_changes=true" >> "$GITHUB_OUTPUT"
          else
            # No markdown found: honour the manual force_sync escape hatch.
            echo "has_changes=$FORCE_SYNC" >> "$GITHUB_OUTPUT"
          fi

      - name: Cache previous state
        uses: actions/cache@v4
        with:
          path: ${{ env.CACHE_DIR }}
          key: ontology-state-${{ runner.os }}-${{ github.sha }}
          restore-keys: |
            ontology-state-${{ runner.os }}-
convert-ontology:
name: Convert Markdown to RDF
runs-on: ubuntu-latest
needs: validate-source
if: needs.validate-source.outputs.has_changes == 'true'
outputs:
ttl_sha: ${{ steps.convert.outputs.ttl_sha }}
conversion_status: ${{ steps.convert.outputs.status }}
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Checkout ontology source
uses: actions/checkout@v4
with:
repository: ${{ env.ONTOLOGY_SOURCE_REPO }}
path: logseq-source
- name: Set up Python
uses: actions/setup-python@v5
with:
python-version: '3.11'
cache: 'pip'
- name: Install RDF tools
run: |
pip install rdflib pyld markdown pyyaml
- name: Create conversion script
run: |
mkdir -p scripts
cat > scripts/md_to_ttl.py << 'PYTHON_SCRIPT'
#!/usr/bin/env python3
"""
Markdown to Turtle (TTL) Converter for Logseq Ontology
Parses Logseq markdown pages with ontology annotations and generates
valid OWL/RDF Turtle format for federation.
"""
import os
import re
import sys
import yaml
import hashlib
from pathlib import Path
from rdflib import Graph, Namespace, Literal, URIRef, BNode
from rdflib.namespace import RDF, RDFS, OWL, XSD, FOAF
# Custom namespaces
ONTO = Namespace("https://example.org/ontology/")
LOGSEQ = Namespace("https://logseq.com/ns/")
VISION = Namespace("https://visionflow.io/ontology/")
def parse_logseq_page(filepath: Path) -> dict:
"""Parse a Logseq markdown page for ontology content."""
with open(filepath, 'r', encoding='utf-8') as f:
content = f.read()
# Extract YAML frontmatter if present
frontmatter = {}
if content.startswith('---'):
parts = content.split('---', 2)
if len(parts) >= 3:
try:
frontmatter = yaml.safe_load(parts[1]) or {}
except yaml.YAMLError:
pass
content = parts[2]
# Extract properties from Logseq format
# e.g., type:: [[Class]], subClassOf:: [[Thing]]
properties = {}
for match in re.finditer(r'^(\w+):: (.+)$', content, re.MULTILINE):
key = match.group(1)
value = match.group(2)
# Handle Logseq links [[Link]]
links = re.findall(r'\[\[([^\]]+)\]\]', value)
if links:
properties[key] = links
else:
properties[key] = [value.strip()]
# Extract title from first heading or filename
title_match = re.search(r'^#\s+(.+)$', content, re.MULTILINE)
title = title_match.group(1) if title_match else filepath.stem
# Extract description from content
desc_match = re.search(r'^(?!#|-)(.+)$', content, re.MULTILINE)
description = desc_match.group(1).strip() if desc_match else None
return {
'title': title,
'description': description,
'frontmatter': frontmatter,
'properties': properties,
'filepath': str(filepath)
}
def create_ontology_graph(pages: list) -> Graph:
"""Create RDF graph from parsed Logseq pages."""
g = Graph()
# Bind namespaces
g.bind('owl', OWL)
g.bind('rdfs', RDFS)
g.bind('xsd', XSD)
g.bind('foaf', FOAF)
g.bind('onto', ONTO)
g.bind('logseq', LOGSEQ)
g.bind('vision', VISION)
# Create ontology declaration
ontology_uri = VISION['visionflow-ontology']
g.add((ontology_uri, RDF.type, OWL.Ontology))
g.add((ontology_uri, RDFS.label, Literal("VisionFlow Ontology", lang="en")))
g.add((ontology_uri, OWL.versionInfo, Literal("1.0")))
for page in pages:
props = page.get('properties', {})
title = page['title']
# Generate IRI from title
iri = ONTO[re.sub(r'[^\w]', '_', title)]
# Determine type
page_type = props.get('type', ['Thing'])[0]
if page_type.lower() in ['class', 'concept']:
g.add((iri, RDF.type, OWL.Class))
g.add((iri, RDFS.label, Literal(title, lang="en")))
# Handle subClassOf
for parent in props.get('subClassOf', props.get('parent', [])):
parent_iri = ONTO[re.sub(r'[^\w]', '_', parent)]
g.add((iri, RDFS.subClassOf, parent_iri))
elif page_type.lower() in ['property', 'relation']:
prop_range = props.get('range', ['Thing'])[0]
if prop_range.lower() in ['string', 'integer', 'boolean', 'date']:
g.add((iri, RDF.type, OWL.DatatypeProperty))
else:
g.add((iri, RDF.type, OWL.ObjectProperty))
g.add((iri, RDFS.label, Literal(title, lang="en")))
# Domain and range
for domain in props.get('domain', []):
domain_iri = ONTO[re.sub(r'[^\w]', '_', domain)]
g.add((iri, RDFS.domain, domain_iri))
for range_val in props.get('range', []):
if range_val.lower() == 'string':
g.add((iri, RDFS.range, XSD.string))
elif range_val.lower() == 'integer':
g.add((iri, RDFS.range, XSD.integer))
else:
range_iri = ONTO[re.sub(r'[^\w]', '_', range_val)]
g.add((iri, RDFS.range, range_iri))
elif page_type.lower() in ['individual', 'instance']:
instance_of = props.get('instanceOf', props.get('a', ['Thing']))[0]
class_iri = ONTO[re.sub(r'[^\w]', '_', instance_of)]
g.add((iri, RDF.type, class_iri))
g.add((iri, RDFS.label, Literal(title, lang="en")))
# Add description if present
if page.get('description'):
g.add((iri, RDFS.comment, Literal(page['description'], lang="en")))
# Add source tracking
g.add((iri, LOGSEQ.sourcePage, Literal(page['filepath'])))
return g
def main():
if len(sys.argv) < 3:
print("Usage: md_to_ttl.py <input_dir> <output_file>")
sys.exit(1)
input_dir = Path(sys.argv[1])
output_file = Path(sys.argv[2])
# Find all markdown files
md_files = list(input_dir.rglob("*.md"))
print(f"Found {len(md_files)} markdown files")
# Parse pages
pages = []
for md_file in md_files:
try:
page = parse_logseq_page(md_file)
if page['properties']: # Only include pages with properties
pages.append(page)
except Exception as e:
print(f"Warning: Failed to parse {md_file}: {e}")
print(f"Parsed {len(pages)} pages with ontology content")
# Create graph
graph = create_ontology_graph(pages)
# Serialize to Turtle
output_file.parent.mkdir(parents=True, exist_ok=True)
graph.serialize(destination=str(output_file), format='turtle')
# Calculate SHA1
with open(output_file, 'rb') as f:
sha1 = hashlib.sha1(f.read()).hexdigest()
print(f"Generated {output_file} with {len(graph)} triples")
print(f"SHA1: {sha1}")
# Output stats
with open(output_file.with_suffix('.stats.json'), 'w') as f:
import json
json.dump({
'triples': len(graph),
'pages_processed': len(pages),
'sha1': sha1
}, f)
if __name__ == '__main__':
main()
PYTHON_SCRIPT
chmod +x scripts/md_to_ttl.py
- name: Convert Markdown to TTL
id: convert
run: |
mkdir -p output/ontology
python scripts/md_to_ttl.py logseq-source output/ontology/visionflow.ttl
if [ -f output/ontology/visionflow.ttl ]; then
TTL_SHA=$(sha1sum output/ontology/visionflow.ttl | cut -d' ' -f1)
echo "ttl_sha=$TTL_SHA" >> $GITHUB_OUTPUT
echo "status=success" >> $GITHUB_OUTPUT
else
echo "status=failed" >> $GITHUB_OUTPUT
exit 1
fi
- name: Validate TTL syntax
run: |
python -c "
from rdflib import Graph
g = Graph()
g.parse('output/ontology/visionflow.ttl', format='turtle')
print(f'Validation passed: {len(g)} triples')
# Basic OWL validation
owl_classes = list(g.subjects(predicate=None, object=None))
print(f'Found {len(set(owl_classes))} unique subjects')
"
- name: Upload TTL artifact
uses: actions/upload-artifact@v4
with:
name: ontology-ttl
path: output/ontology/
retention-days: ${{ env.ARTIFACT_RETENTION_DAYS }}
convert-jsonld:
name: Convert TTL to JSON-LD
runs-on: ubuntu-latest
needs: convert-ontology
if: needs.convert-ontology.outputs.conversion_status == 'success'
outputs:
jsonld_sha: ${{ steps.convert.outputs.jsonld_sha }}
context_sha: ${{ steps.convert.outputs.context_sha }}
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Download TTL artifact
uses: actions/download-artifact@v4
with:
name: ontology-ttl
path: output/ontology/
- name: Set up Python
uses: actions/setup-python@v5
with:
python-version: '3.11'
cache: 'pip'
- name: Install JSON-LD tools
run: |
pip install rdflib pyld
- name: Convert TTL to JSON-LD
id: convert
run: |
mkdir -p output/jsonld
python << 'PYTHON_SCRIPT'
import json
import hashlib
from datetime import datetime, timezone
from rdflib import Graph
from pyld import jsonld
# Load TTL
g = Graph()
g.parse('output/ontology/visionflow.ttl', format='turtle')
# Serialize to JSON-LD
jsonld_str = g.serialize(format='json-ld', indent=2)
jsonld_data = json.loads(jsonld_str)
# Create optimized context
context = {
"@context": {
"@vocab": "https://example.org/ontology/",
"owl": "http://www.w3.org/2002/07/owl#",
"rdfs": "http://www.w3.org/2000/01/rdf-schema#",
"rdf": "http://www.w3.org/1999/02/22-rdf-syntax-ns#",
"xsd": "http://www.w3.org/2001/XMLSchema#",
"foaf": "http://xmlns.com/foaf/0.1/",
"vision": "https://visionflow.io/ontology/",
"logseq": "https://logseq.com/ns/",
"label": {"@id": "rdfs:label", "@language": "en"},
"comment": {"@id": "rdfs:comment", "@language": "en"},
"subClassOf": {"@id": "rdfs:subClassOf", "@type": "@id"},
"domain": {"@id": "rdfs:domain", "@type": "@id"},
"range": {"@id": "rdfs:range", "@type": "@id"}
}
}
# Write context
with open('output/jsonld/context.jsonld', 'w') as f:
json.dump(context, f, indent=2)
# Compact JSON-LD with context
try:
compacted = jsonld.compact(jsonld_data, context['@context'])
except:
compacted = jsonld_data
# Write compacted ontology
with open('output/jsonld/ontology.jsonld', 'w') as f:
json.dump(compacted, f, indent=2)
# Calculate SHA1 hashes
with open('output/jsonld/ontology.jsonld', 'rb') as f:
jsonld_sha = hashlib.sha1(f.read()).hexdigest()
with open('output/jsonld/context.jsonld', 'rb') as f:
context_sha = hashlib.sha1(f.read()).hexdigest()
# Create index manifest
manifest = {
"@context": "https://www.w3.org/ns/ldp",
"@id": "/public/ontology/",
"@type": ["ldp:Container", "ldp:BasicContainer"],
"dcterms:title": "VisionFlow Ontology Federation",
"dcterms:modified": datetime.now(timezone.utc).isoformat(),
"ldp:contains": [
{
"@id": "ontology.jsonld",
"@type": "ldp:Resource",
"dcterms:format": "application/ld+json",
"digest:sha1": jsonld_sha
},
{
"@id": "context.jsonld",
"@type": "ldp:Resource",
"dcterms:format": "application/ld+json",
"digest:sha1": context_sha
},
{
"@id": "visionflow.ttl",
"@type": "ldp:Resource",
"dcterms:format": "text/turtle",
"digest:sha1": "${{ needs.convert-ontology.outputs.ttl_sha }}"
}
],
"visionflow:sourceRepository": "${{ env.ONTOLOGY_SOURCE_REPO }}",
"visionflow:buildSha": "${{ github.sha }}",
"visionflow:buildNumber": "${{ github.run_number }}"
}
with open('output/jsonld/index.jsonld', 'w') as f:
json.dump(manifest, f, indent=2)
print(f"Generated JSON-LD files")
print(f"ontology.jsonld SHA1: {jsonld_sha}")
print(f"context.jsonld SHA1: {context_sha}")
# Write hashes for GitHub Actions
with open('hashes.txt', 'w') as f:
f.write(f"jsonld_sha={jsonld_sha}\n")
f.write(f"context_sha={context_sha}\n")
PYTHON_SCRIPT
# Export hashes to outputs
source hashes.txt
echo "jsonld_sha=$jsonld_sha" >> $GITHUB_OUTPUT
echo "context_sha=$context_sha" >> $GITHUB_OUTPUT
- name: Validate JSON-LD
run: |
python -c "
import json
from pyld import jsonld
with open('output/jsonld/ontology.jsonld') as f:
data = json.load(f)
# Expand to verify structure
expanded = jsonld.expand(data)
print(f'JSON-LD validation passed: {len(expanded)} top-level items')
with open('output/jsonld/index.jsonld') as f:
manifest = json.load(f)
assert 'ldp:contains' in manifest, 'Manifest missing ldp:contains'
print(f'Manifest contains {len(manifest[\"ldp:contains\"])} resources')
"
- name: Upload JSON-LD artifact
uses: actions/upload-artifact@v4
with:
name: ontology-jsonld
path: output/jsonld/
retention-days: ${{ env.ARTIFACT_RETENTION_DAYS }}
deploy-jss:
name: Deploy to JSS
runs-on: ubuntu-latest
needs: [convert-ontology, convert-jsonld]
if: github.ref == 'refs/heads/main' && github.event_name != 'pull_request'
environment: production
outputs:
deployment_status: ${{ steps.deploy.outputs.status }}
rollback_sha: ${{ steps.backup.outputs.rollback_sha }}
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Download artifacts
uses: actions/download-artifact@v4
with:
path: artifacts/
- name: Backup current state
id: backup
continue-on-error: true
run: |
mkdir -p backup
# Fetch current index for rollback reference
ROLLBACK_SHA=""
if curl -sf "${{ env.JSS_URL }}${{ env.JSS_PUBLIC_PATH }}/index.jsonld" -o backup/index.jsonld; then
ROLLBACK_SHA=$(jq -r '.["visionflow:buildSha"] // empty' backup/index.jsonld)
fi
echo "rollback_sha=$ROLLBACK_SHA" >> $GITHUB_OUTPUT
echo "Previous deployment SHA: $ROLLBACK_SHA"
- name: Deploy to JSS
id: deploy
run: |
set -e
JSS_URL="${{ env.JSS_URL }}"
PUBLIC_PATH="${{ env.JSS_PUBLIC_PATH }}"
echo "Deploying to $JSS_URL$PUBLIC_PATH"
# Create container if not exists
curl -X PUT "$JSS_URL$PUBLIC_PATH/" \
-H "Content-Type: text/turtle" \
-H "Link: <http://www.w3.org/ns/ldp#BasicContainer>; rel=\"type\"" \
--fail-with-body || true
# Deploy TTL
echo "Uploading visionflow.ttl..."
curl -X PUT "$JSS_URL$PUBLIC_PATH/visionflow.ttl" \
-H "Content-Type: text/turtle" \
--data-binary @artifacts/ontology-ttl/visionflow.ttl \
--fail-with-body
# Deploy JSON-LD files
for file in context.jsonld ontology.jsonld index.jsonld; do
echo "Uploading $file..."
curl -X PUT "$JSS_URL$PUBLIC_PATH/$file" \
-H "Content-Type: application/ld+json" \
--data-binary @artifacts/ontology-jsonld/$file \
--fail-with-body
done
echo "status=success" >> $GITHUB_OUTPUT
echo "Deployment completed successfully"
- name: Verify deployment
run: |
JSS_URL="${{ env.JSS_URL }}"
PUBLIC_PATH="${{ env.JSS_PUBLIC_PATH }}"
echo "Verifying deployment..."
# Check each resource
for file in index.jsonld context.jsonld ontology.jsonld visionflow.ttl; do
HTTP_CODE=$(curl -s -o /dev/null -w "%{http_code}" "$JSS_URL$PUBLIC_PATH/$file")
if [ "$HTTP_CODE" != "200" ]; then
echo "ERROR: Failed to verify $file (HTTP $HTTP_CODE)"
exit 1
fi
echo "Verified: $file (HTTP $HTTP_CODE)"
done
# Verify SHA1 matches
DEPLOYED_INDEX=$(curl -sf "$JSS_URL$PUBLIC_PATH/index.jsonld")
DEPLOYED_SHA=$(echo "$DEPLOYED_INDEX" | jq -r '.["visionflow:buildSha"]')
if [ "$DEPLOYED_SHA" == "${{ github.sha }}" ]; then
echo "SHA verification passed: $DEPLOYED_SHA"
else
echo "WARNING: SHA mismatch - expected ${{ github.sha }}, got $DEPLOYED_SHA"
fi
- name: Rollback on failure
if: failure() && steps.backup.outputs.rollback_sha != ''
run: |
echo "Deployment failed, attempting rollback to ${{ steps.backup.outputs.rollback_sha }}"
# Restore from backup
if [ -f backup/index.jsonld ]; then
curl -X PUT "${{ env.JSS_URL }}${{ env.JSS_PUBLIC_PATH }}/index.jsonld" \
-H "Content-Type: application/ld+json" \
--data-binary @backup/index.jsonld || true
fi
echo "Rollback attempted - manual verification required"
notify-websocket:
name: Send WebSocket Notification
runs-on: ubuntu-latest
needs: deploy-jss
if: needs.deploy-jss.outputs.deployment_status == 'success'
steps:
- name: Send ontology update notification
run: |
# Construct notification payload
NOTIFICATION=$(cat << EOF
{
"type": "ontology_updated",
"data": {
"source": "github-actions",
"repository": "${{ env.ONTOLOGY_SOURCE_REPO }}",
"buildSha": "${{ github.sha }}",
"buildNumber": "${{ github.run_number }}",
"timestamp": "$(date -u +%Y-%m-%dT%H:%M:%SZ)",
"resources": {
"index": "${{ env.JSS_URL }}${{ env.JSS_PUBLIC_PATH }}/index.jsonld",
"ontology": "${{ env.JSS_URL }}${{ env.JSS_PUBLIC_PATH }}/ontology.jsonld",
"context": "${{ env.JSS_URL }}${{ env.JSS_PUBLIC_PATH }}/context.jsonld",
"ttl": "${{ env.JSS_URL }}${{ env.JSS_PUBLIC_PATH }}/visionflow.ttl"
},
"hashes": {
"ttl": "${{ needs.convert-ontology.outputs.ttl_sha }}",
"jsonld": "${{ needs.convert-jsonld.outputs.jsonld_sha }}",
"context": "${{ needs.convert-jsonld.outputs.context_sha }}"
}
}
}
EOF
)
echo "Notification payload:"
echo "$NOTIFICATION" | jq .
# Send to JSS notifications endpoint (triggers WebSocket broadcast)
# JSS will broadcast "pub /public/ontology/" to all subscribers
curl -X POST "${{ env.JSS_URL }}/.notifications" \
-H "Content-Type: application/json" \
-d "$NOTIFICATION" || echo "Direct notification endpoint not available"
# Alternative: Touch the index to trigger LDP notification
curl -X PATCH "${{ env.JSS_URL }}${{ env.JSS_PUBLIC_PATH }}/index.jsonld" \
-H "Content-Type: application/sparql-update" \
-d "INSERT DATA { <> <http://purl.org/dc/terms/modified> \"$(date -u +%Y-%m-%dT%H:%M:%SZ)\"^^<http://www.w3.org/2001/XMLSchema#dateTime> }" || true
echo "WebSocket notification triggered"
- name: Create deployment summary
run: |
cat << EOF >> $GITHUB_STEP_SUMMARY
## Ontology Federation Deployment
**Status**: Deployed successfully
**Build**: #${{ github.run_number }}
**Commit**: \`${{ github.sha }}\`
### Resources Deployed
| Resource | SHA1 | URL |
|----------|------|-----|
| TTL | \`${{ needs.convert-ontology.outputs.ttl_sha }}\` | [${{ env.JSS_PUBLIC_PATH }}/visionflow.ttl](${{ env.JSS_URL }}${{ env.JSS_PUBLIC_PATH }}/visionflow.ttl) |
| JSON-LD | \`${{ needs.convert-jsonld.outputs.jsonld_sha }}\` | [${{ env.JSS_PUBLIC_PATH }}/ontology.jsonld](${{ env.JSS_URL }}${{ env.JSS_PUBLIC_PATH }}/ontology.jsonld) |
| Context | \`${{ needs.convert-jsonld.outputs.context_sha }}\` | [${{ env.JSS_PUBLIC_PATH }}/context.jsonld](${{ env.JSS_URL }}${{ env.JSS_PUBLIC_PATH }}/context.jsonld) |
### Pipeline
\`\`\`
jjohare/logseq -> Markdown -> TTL -> JSON-LD -> JSS -> WebSocket -> React
\`\`\`
WebSocket subscribers notified at: $(date -u +%Y-%m-%dT%H:%M:%SZ)
EOF
pr-preview:
name: PR Preview (Dry Run)
runs-on: ubuntu-latest
needs: [convert-ontology, convert-jsonld]
if: github.event_name == 'pull_request'
steps:
- name: Download artifacts
uses: actions/download-artifact@v4
with:
path: artifacts/
- name: Generate PR comment
uses: actions/github-script@v7
with:
script: |
const fs = require('fs');
// Read manifest
const manifest = JSON.parse(fs.readFileSync('artifacts/ontology-jsonld/index.jsonld', 'utf8'));
// Read stats if available
let stats = { triples: 'N/A', pages_processed: 'N/A' };
try {
stats = JSON.parse(fs.readFileSync('artifacts/ontology-ttl/visionflow.stats.json', 'utf8'));
} catch (e) {}
const comment = `## Ontology Conversion Preview
**Conversion Status**: Success
**TTL SHA1**: \`${{ needs.convert-ontology.outputs.ttl_sha }}\`
**JSON-LD SHA1**: \`${{ needs.convert-jsonld.outputs.jsonld_sha }}\`
### Statistics
- Triples generated: ${stats.triples}
- Pages processed: ${stats.pages_processed}
- Resources in manifest: ${manifest['ldp:contains']?.length || 0}
### Preview Files
Artifacts are available for download from the workflow run.
**Note**: This is a dry run. Merge to \`main\` to deploy to JSS.`;
github.rest.issues.createComment({
issue_number: context.issue.number,
owner: context.repo.owner,
repo: context.repo.repo,
body: comment
});