Skip to content

Commit

Permalink
Implement a Persistent Job Scheduler
Browse files Browse the repository at this point in the history
Closes: #1486
  • Loading branch information
alexanderkiel committed Mar 29, 2024
1 parent 5662408 commit a98d184
Show file tree
Hide file tree
Showing 141 changed files with 3,595 additions and 830 deletions.
1 change: 1 addition & 0 deletions .clj-kondo/root/config.edn
Original file line number Diff line number Diff line change
Expand Up @@ -71,6 +71,7 @@
blaze.executors ex
blaze.fhir.spec.references fsr
blaze.fhir.structure-definition-repo sdr
blaze.job-scheduler js
blaze.middleware.fhir.db db
blaze.rest-api.header header
blaze.scheduler sched
Expand Down
4 changes: 4 additions & 0 deletions .github/distributed-test/cassandra-init.cql
Original file line number Diff line number Diff line change
@@ -1,3 +1,7 @@
-- Schema bootstrap for the distributed test setup: one keyspace for regular
-- resources and one for admin resources, each with a content-addressed
-- resource store (keyed by hash) and a clause cache.
CREATE KEYSPACE blaze WITH REPLICATION = {'class': 'SimpleStrategy', 'replication_factor' : 2};
CREATE TABLE blaze.resources (hash text PRIMARY KEY, content blob);
CREATE TABLE blaze.clauses ("token" text PRIMARY KEY, content blob);

-- NOTE(review): unquoted CQL identifiers may only contain alphanumerics and
-- underscores, so the hyphen in `blaze-admin` is likely a syntax error at
-- init time — confirm whether this should be `blaze_admin` or a double-quoted
-- "blaze-admin" (quoting would also be needed on the table names below).
CREATE KEYSPACE blaze-admin WITH REPLICATION = {'class': 'SimpleStrategy', 'replication_factor' : 2};
CREATE TABLE blaze-admin.resources (hash text PRIMARY KEY, content blob);
CREATE TABLE blaze-admin.clauses ("token" text PRIMARY KEY, content blob);
1 change: 1 addition & 0 deletions .github/distributed-test/credentials
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
password
111 changes: 66 additions & 45 deletions .github/distributed-test/docker-compose.yml
Original file line number Diff line number Diff line change
@@ -1,67 +1,91 @@
services:
zookeeper:
image: "docker.io/bitnami/zookeeper:3@sha256:8104cf1ff552cb2c0270ed71c8d2aed31ac4b69cb109695871a2d08193dea7c1"
volumes:
- "zookeeper-data:/bitnami"
environment:
ALLOW_ANONYMOUS_LOGIN: "yes"

kafka:
image: "docker.io/bitnami/kafka:2@sha256:2096ad73e2cbe3b615bcb7953b19f021cde62011435465c08707d5cfd9ecc9da"
hostname: "kafka"
image: "apache/kafka:3.7.0"
environment:
KAFKA_CFG_ZOOKEEPER_CONNECT: "zookeeper:2181"
ALLOW_PLAINTEXT_LISTENER: "yes"
KAFKA_CFG_LISTENER_SECURITY_PROTOCOL_MAP: "CLIENT:SSL"
KAFKA_CFG_LISTENERS: "CLIENT://:9092"
KAFKA_CFG_ADVERTISED_LISTENERS: "CLIENT://kafka:9092"
KAFKA_INTER_BROKER_LISTENER_NAME: "CLIENT"
KAFKA_CERTIFICATE_PASSWORD: "password"
KAFKA_CFG_TLS_TYPE: "JKS"
KAFKA_TLS_TRUSTSTORE_FILE: "/opt/bitnami/kafka/config/certs/kafka.truststore.jks"
KAFKA_NODE_ID: 1
CLUSTER_ID: '5L6g3nShT-eMCtK--X86sw'
# KRaft
KAFKA_PROCESS_ROLES: "broker,controller"
KAFKA_CONTROLLER_QUORUM_VOTERS: '1@localhost:29093'
KAFKA_CONTROLLER_LISTENER_NAMES: 'CONTROLLER'
KAFKA_INTER_BROKER_LISTENER_NAME: 'SSL-INTERNAL'
# Listeners
KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: "SSL:SSL,CONTROLLER:PLAINTEXT,SSL-INTERNAL:SSL"
KAFKA_LISTENERS: "SSL://:9092,CONTROLLER://:29093,SSL-INTERNAL://:19093"
KAFKA_ADVERTISED_LISTENERS: "SSL://kafka:9092,SSL-INTERNAL://kafka:19093"
KAFKA_SSL_KEYSTORE_FILENAME: "kafka.keystore.jks"
KAFKA_SSL_KEY_CREDENTIALS: "credentials"
KAFKA_SSL_KEYSTORE_CREDENTIALS: "credentials"
KAFKA_SSL_TRUSTSTORE_FILENAME: "kafka.truststore.jks"
KAFKA_SSL_TRUSTSTORE_CREDENTIALS: "credentials"
KAFKA_SSL_CLIENT_AUTH: 'required'
# It's important to create the tx topic ourselves, because it needs to use
# LogAppendTime timestamps
KAFKA_CFG_AUTO_CREATE_TOPICS_ENABLE: "false"
#KAFKA_AUTO_CREATE_TOPICS_ENABLE: "false"
volumes:
- "./kafka.keystore.jks:/opt/bitnami/kafka/config/certs/kafka.keystore.jks:ro"
- "./kafka.truststore.jks:/opt/bitnami/kafka/config/certs/kafka.truststore.jks:ro"
- "kafka-data:/bitnami"
depends_on:
- zookeeper
- "./kafka.keystore.jks:/etc/kafka/secrets/kafka.keystore.jks:ro"
- "./kafka.truststore.jks:/etc/kafka/secrets/kafka.truststore.jks:ro"
- "./credentials:/etc/kafka/secrets/credentials:ro"
healthcheck:
test: nc -z localhost 9092 || exit -1
start_period: 15s
interval: 5s
timeout: 10s
retries: 10

kafka-topic-creator:
image: "docker.io/bitnami/kafka:2@sha256:2096ad73e2cbe3b615bcb7953b19f021cde62011435465c08707d5cfd9ecc9da"
command: "/opt/bitnami/kafka/bin/kafka-init.sh"
image: "apache/kafka:3.7.0"
command: "/opt/kafka/bin/kafka-topics.sh --bootstrap-server kafka:9092 --command-config /etc/kafka/kafka-init.conf --create --if-not-exists --topic tx --partitions 1 --replication-factor 1 --config message.timestamp.type=LogAppendTime --config retention.ms=-1"
volumes:
- "./kafka-init.sh:/opt/bitnami/kafka/bin/kafka-init.sh:ro"
- "./kafka-init.conf:/opt/bitnami/kafka/config/kafka-init.conf:ro"
- "./kafka-topic-creator.keystore.jks:/opt/bitnami/kafka/config/certs/kafka-topic-creator.keystore.jks:ro"
- "./kafka.truststore.jks:/opt/bitnami/kafka/config/certs/kafka.truststore.jks:ro"
- "./kafka-init.conf:/etc/kafka/kafka-init.conf:ro"
- "./kafka-topic-creator.keystore.jks:/etc/kafka/secrets/kafka-topic-creator.keystore.jks:ro"
- "./kafka.truststore.jks:/etc/kafka/secrets/kafka.truststore.jks:ro"
depends_on:
- zookeeper
- kafka
kafka:
condition: service_healthy

cassandra-1:
image: "docker.io/bitnami/cassandra:3@sha256:60b13bf8f0a01ad6db72689be114c8fabf9134c7a2b5ba6d855b79ad457344e9"
hostname: "cassandra-1"
image: "cassandra:4.1.4"
volumes:
- "./cassandra-init.cql:/docker-entrypoint-initdb.d/cassandra-init.cql:ro"
- "cassandra-1-data:/bitnami"
- "cassandra-1-data:/var/lib/cassandra"
environment:
CASSANDRA_SEEDS: "cassandra-1,cassandra-2"
CASSANDRA_PASSWORD_SEEDER: "yes"
MAX_HEAP_SIZE: "512M"
HEAP_NEWSIZE: "100M"
healthcheck:
test: ["CMD", "cqlsh", "-e", "describe keyspaces"]
start_period: 15s
interval: 5s
timeout: 10s
retries: 10

cassandra-2:
image: "docker.io/bitnami/cassandra:3@sha256:60b13bf8f0a01ad6db72689be114c8fabf9134c7a2b5ba6d855b79ad457344e9"
hostname: "cassandra-2"
image: "cassandra:4.1.4"
volumes:
- "cassandra-2-data:/bitnami"
- "cassandra-2-data:/var/lib/cassandra"
environment:
CASSANDRA_SEEDS: "cassandra-1,cassandra-2"
MAX_HEAP_SIZE: "512M"
HEAP_NEWSIZE: "100M"
healthcheck:
test: ["CMD", "cqlsh", "-e", "describe keyspaces"]
start_period: 15s
interval: 5s
timeout: 10s
retries: 10

cassandra-init-data:
image: "cassandra:4.1.4"
command: "cqlsh -f /scripts/cassandra-init.cql"
environment:
CQLSH_HOST: "cassandra-1"
volumes:
- "./cassandra-init.cql:/scripts/cassandra-init.cql:ro"
depends_on:
cassandra-1:
condition: service_healthy
cassandra-2:
condition: service_healthy

blaze-1:
image: "blaze:latest"
Expand Down Expand Up @@ -89,8 +113,7 @@ services:
- "blaze-1-data:/app/data"
depends_on:
- kafka-topic-creator
- cassandra-1
- cassandra-2
- cassandra-init-data
restart: unless-stopped

blaze-2:
Expand Down Expand Up @@ -119,8 +142,7 @@ services:
- "blaze-2-data:/app/data"
depends_on:
- kafka-topic-creator
- cassandra-1
- cassandra-2
- cassandra-init-data
restart: unless-stopped

ingress:
Expand All @@ -134,7 +156,6 @@ services:
- blaze-2

volumes:
zookeeper-data:
kafka-data:
cassandra-1-data:
cassandra-2-data:
Expand Down
4 changes: 2 additions & 2 deletions .github/distributed-test/kafka-init.conf
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
security.protocol=SSL
ssl.truststore.location=/opt/bitnami/kafka/config/certs/kafka.truststore.jks
ssl.truststore.location=/etc/kafka/secrets/kafka.truststore.jks
ssl.truststore.password=password
ssl.keystore.location=/opt/bitnami/kafka/config/certs/kafka-topic-creator.keystore.jks
ssl.keystore.location=/etc/kafka/secrets/kafka-topic-creator.keystore.jks
ssl.keystore.password=password
ssl.key.password=password
14 changes: 0 additions & 14 deletions .github/distributed-test/kafka-init.sh

This file was deleted.

6 changes: 6 additions & 0 deletions .github/scripts/jobs/re-index/create-invalid-job.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
#!/bin/bash -e

# Test script: create an invalid job against a locally running Blaze server.
# Sources shared curl helpers (create/update) from the repo's util.sh.
SCRIPT_DIR="$(dirname "$(readlink -f "$0")")"
. "$SCRIPT_DIR/../../util.sh"

# FHIR base URL of the server under test.
BASE="http://localhost:8080/fhir"
# NOTE(review): the script only defines BASE and exits — the actual request
# that creates the invalid job appears to be missing (or truncated in this
# view); confirm the script body is complete.
8 changes: 8 additions & 0 deletions .github/scripts/util.sh
Original file line number Diff line number Diff line change
Expand Up @@ -25,3 +25,11 @@ create() {
update() {
curl -XPUT -s -H 'Accept: application/fhir+json' -H "Content-Type: application/fhir+json" -d @- -o /dev/null "$1"
}

# NOTE(review): create() and update() are already defined earlier in util.sh
# (an update() body is visible just above) — these appended copies silently
# shadow the earlier definitions. The duplication looks accidental; confirm
# and remove one pair.

# POST stdin as a FHIR JSON resource to the given URL, printing the response.
create() {
curl -s -H 'Accept: application/fhir+json' -H "Content-Type: application/fhir+json" -d @- "$1"
}

# PUT stdin as a FHIR JSON resource to the given URL, discarding the response.
update() {
curl -XPUT -s -H 'Accept: application/fhir+json' -H "Content-Type: application/fhir+json" -d @- -o /dev/null "$1"
}
4 changes: 0 additions & 4 deletions .github/workflows/build.yml
Original file line number Diff line number Diff line change
Expand Up @@ -327,10 +327,6 @@ jobs:
run: docker logs blaze
if: ${{ matrix.variant == 'standalone' }}

- name: Docker Logs Zookepper
run: docker-compose -f .github/distributed-test/docker-compose.yml logs zookeeper
if: ${{ matrix.variant == 'distributed' }}

- name: Docker Logs Kafka
run: docker-compose -f .github/distributed-test/docker-compose.yml logs kafka
if: ${{ matrix.variant == 'distributed' }}
Expand Down
15 changes: 14 additions & 1 deletion Dockerfile
Original file line number Diff line number Diff line change
@@ -1,4 +1,9 @@
# Update the SHA by calling crane digest eclipse-temurin:17-jre-jammy
FROM mcr.microsoft.com/dotnet/sdk:6.0 as fhir-packages

RUN dotnet tool install -g firely.terminal
RUN /root/.dotnet/tools/fhir install hl7.fhir.r4.core 4.0.1
RUN /root/.dotnet/tools/fhir install hl7.fhir.xver-extensions 0.1.0

FROM eclipse-temurin:17-jre-jammy@sha256:2da160772ec16d9d6a0c71585cf87b689dbbda531dc002de1856d8970cd0daf3

RUN apt-get update && apt-get upgrade -y && \
Expand All @@ -7,8 +12,13 @@ RUN apt-get update && apt-get upgrade -y && \
apt-get autoremove -y && apt-get clean && \
rm -rf /var/lib/apt/lists/

RUN groupadd -g 1001 blaze
RUN useradd -u 1001 -g 1001 --create-home blaze

RUN mkdir -p /app/data && chown 1001:1001 /app/data
COPY target/blaze-0.25.0-standalone.jar /app/
COPY --from=fhir-packages /root/.fhir/packages /home/blaze/.fhir/packages/
RUN chown -R 1001:1001 /home/blaze/.fhir

WORKDIR /app
USER 1001
Expand All @@ -18,5 +28,8 @@ ENV STORAGE="standalone"
ENV INDEX_DB_DIR="/app/data/index"
ENV TRANSACTION_DB_DIR="/app/data/transaction"
ENV RESOURCE_DB_DIR="/app/data/resource"
ENV ADMIN_INDEX_DB_DIR="/app/data/admin-index"
ENV ADMIN_TRANSACTION_DB_DIR="/app/data/admin-transaction"
ENV ADMIN_RESOURCE_DB_DIR="/app/data/admin-resource"

CMD ["java", "-jar", "blaze-0.25.0-standalone.jar"]
1 change: 1 addition & 0 deletions docs/implementation/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -2,3 +2,4 @@

* [Database](database.md)
* [FHIR Data Model](fhir-data-model.md)
* [Frontend](frontend.md)
17 changes: 17 additions & 0 deletions docs/implementation/frontend.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,17 @@
# Frontend Implementation

## Authentication / Authorization

The frontend uses the [Auth.js][1] [@auth/sveltekit][2] library for authentication and authorization.

* a single Keycloak provider is used
* the env vars `AUTH_CLIENT_ID`, `AUTH_CLIENT_SECRET`, `AUTH_ISSUER` and `AUTH_SECRET` are used as config
* the authorization code flow is used
* at sign-in the access token and refresh token are stored in a secure, HTTP only, encrypted JWT session cookie
* nobody can access the tokens in the session cookie, because it is encrypted and only the server-side of the frontend has the secret
* the session cookie is transferred for every request (the frontend is stateless)
* the access token will be refreshed via the refresh token if possible
* the session expires at the same time as the last successfully refreshed access token expires

[1]: <https://authjs.dev>
[2]: <https://www.npmjs.com/package/@auth/sveltekit>
5 changes: 5 additions & 0 deletions job-ig/.gitignore
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
/fsh-generated
/input-cache
/output
/temp
/template
3 changes: 3 additions & 0 deletions job-ig/ig.ini
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
[IG]
ig = fsh-generated/resources/ImplementationGuide-fhir.example.json
template = fhir.base.template#current
27 changes: 27 additions & 0 deletions job-ig/input/fsh/compact-job.fsh
Original file line number Diff line number Diff line change
@@ -0,0 +1,27 @@
// FHIR Shorthand definitions for the "compact" job: a Task-based job that
// compacts one database column family.
Alias: $JT = https://samply.github.io/blaze/fhir/CodeSystem/JobType
Alias: $CJP = https://samply.github.io/blaze/fhir/CodeSystem/CompactJobParameter

// Code system naming the input parameters of a compact job.
// NOTE(review): JobType declares a Title but this code system does not —
// confirm whether the omission is intentional.
CodeSystem: CompactJobParameter
Id: CompactJobParameter
* #column-family-name "Column Family Name"

// Profile constraining Task to a compact job: closed pattern-discriminated
// slicing on input, with exactly one slice carrying the column family name
// as a plain string.
Profile: CompactJob
Parent: Task
* code 1..1
* code = $JT#compact "Compact Database Column Families"
* input ^slicing.discriminator.type = #pattern
* input ^slicing.discriminator.path = "type"
* input ^slicing.rules = #closed
* input contains columnFamilyName 1..1
* input[columnFamilyName] ^short = "Column Family Name"
* input[columnFamilyName] ^definition = "The name of the column family to compact."
* input[columnFamilyName].type = $CJP#column-family-name
* input[columnFamilyName].value[x] only string

// Example instance; also serves to validate the profile during the IG build.
Instance: CompactJobExample
InstanceOf: CompactJob
* status = #ready
* intent = #order
* code = $JT#compact "Compact Database Column Families"
* input[columnFamilyName].type = $CJP#column-family-name
* input[columnFamilyName].valueString = "SearchParamValueIndex"
5 changes: 5 additions & 0 deletions job-ig/input/fsh/job-type.fsh
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
// Code system enumerating the job types the persistent job scheduler knows:
// re-indexing a search parameter and compacting database column families.
CodeSystem: JobType
Id: JobType
Title: "Job Type"
* #re-index "(Re)Index a Search Parameter"
* #compact "Compact Database Column Families"
27 changes: 27 additions & 0 deletions job-ig/input/fsh/re-index-job.fsh
Original file line number Diff line number Diff line change
@@ -0,0 +1,27 @@
// FHIR Shorthand definitions for the "re-index" job: a Task-based job that
// (re)indexes a single search parameter, identified by its canonical URL.
Alias: $JT = https://samply.github.io/blaze/fhir/CodeSystem/JobType
Alias: $RJP = https://samply.github.io/blaze/fhir/CodeSystem/ReIndexJobParameter

// Code system naming the input parameters of a re-index job.
// NOTE(review): JobType declares a Title but this code system does not —
// confirm whether the omission is intentional.
CodeSystem: ReIndexJobParameter
Id: ReIndexJobParameter
* #search-param-url "Search Param URL"

// Profile constraining Task to a re-index job: closed pattern-discriminated
// slicing on input, with exactly one slice carrying the search parameter's
// canonical URL.
Profile: ReIndexJob
Parent: Task
* code 1..1
* code = $JT#re-index "(Re)Index a Search Parameter"
* input ^slicing.discriminator.type = #pattern
* input ^slicing.discriminator.path = "type"
* input ^slicing.rules = #closed
* input contains searchParamUrl 1..1
* input[searchParamUrl] ^short = "Search Param URL"
* input[searchParamUrl] ^definition = "The URL of the Search Parameter to (re)index."
* input[searchParamUrl].type = $RJP#search-param-url
* input[searchParamUrl].value[x] only canonical

// Example instance; also serves to validate the profile during the IG build.
Instance: ReIndexJobExample
InstanceOf: ReIndexJob
* status = #ready
* intent = #order
* code = $JT#re-index "(Re)Index a Search Parameter"
* input[searchParamUrl].type = $RJP#search-param-url
* input[searchParamUrl].valueCanonical = "http://hl7.org/fhir/SearchParameter/Resource-profile"
5 changes: 5 additions & 0 deletions job-ig/input/ignoreWarnings.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
== Suppressed Messages ==

# Add warning and/or information messages here after you've confirmed that they aren't really a problem
# (And include comments like this justifying why)
# See https://github.com/FHIR/sample-ig/blob/master/input/ignoreWarnings.txt for examples
3 changes: 3 additions & 0 deletions job-ig/input/pagecontent/index.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
# job-ig

Feel free to modify this index page with your own awesome content!
Loading

0 comments on commit a98d184

Please sign in to comment.