diff --git a/.env.example b/.env.example index b87daf7c..79b6b6b2 100644 --- a/.env.example +++ b/.env.example @@ -26,7 +26,7 @@ DJANGO_DB_LOG_HANDLER=console #PHOTO_ROOT_PATH= # Should the database be migrated before start (entrypoint.sh - docker setup). Will be migrated anyway if $SITE_ROOT=api. Comment out for False DJANGO_MIGRATE=True -# Should the modules be searched for scheduled tasks. Comment out for false + PROJECT_NAME=dev # set up you main domain #DOMAIN=dev-openimis.org @@ -51,6 +51,7 @@ PASSWORD_LOWERCASE=1 # Minimum number of lowercase letters PASSWORD_DIGITS=1 # Minimum number of digits PASSWORD_SYMBOLS=1 # Minimum number of symbols PASSWORD_SPACES=1 # Maximum number of spaces allowed + # Define the trusted origins for CSRF protection, separated by commas CSRF_TRUSTED_ORIGINS=http://localhost:3000,http://localhost:8000 @@ -60,4 +61,4 @@ RATELIMIT_KEY=ip # Key to identify the client; 'ip' means it will use the clien RATELIMIT_RATE=150/m # Rate limit (150 requests per minute) RATELIMIT_METHOD=ALL # HTTP methods to rate limit; 'ALL' means all methods RATELIMIT_GROUP=graphql # Group name for the rate limit -RATELIMIT_SKIP_TIMEOUT=False # Whether to skip rate limiting during c +RATELIMIT_SKIP_TIMEOUT=False # Whether to skip rate limiting diff --git a/.github/workflows/ci_module.yml b/.github/workflows/ci_module.yml index b4fce97a..b13567c3 100755 --- a/.github/workflows/ci_module.yml +++ b/.github/workflows/ci_module.yml @@ -109,13 +109,13 @@ jobs: tar -czf openimis.tar.gz ./openimis ./current-module - name: Upload compressed site-packages as artifact - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v4 with: name: site-packages path: site-packages.tar.gz - name: Upload build as artifact - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v4 with: name: code-artifacts path: openimis.tar.gz @@ -145,7 +145,7 @@ jobs: python-version: '3.10' - name: Download site-packages artifact - uses: actions/download-artifact@v2 + uses: 
actions/download-artifact@v4 with: name: site-packages path: /tmp/ @@ -156,7 +156,7 @@ jobs: echo "SITE_PACKAGES=$SITE_PACKAGES" >> $GITHUB_ENV - name: Download site-packages artifact - uses: actions/download-artifact@v2 + uses: actions/download-artifact@v4 with: name: code-artifacts path: /tmp/ @@ -255,7 +255,7 @@ jobs: python-version: '3.10' - name: Download site-packages artifact - uses: actions/download-artifact@v2 + uses: actions/download-artifact@v4 with: name: site-packages path: /tmp/ @@ -266,7 +266,7 @@ jobs: echo "SITE_PACKAGES=$SITE_PACKAGES" >> $GITHUB_ENV - name: Download site-packages artifact - uses: actions/download-artifact@v2 + uses: actions/download-artifact@v4 with: name: code-artifacts path: /tmp/ @@ -359,7 +359,7 @@ jobs: python-version: '3.10' - name: Download site-packages artifact - uses: actions/download-artifact@v2 + uses: actions/download-artifact@v4 with: name: site-packages path: /tmp/ @@ -370,7 +370,7 @@ jobs: echo "SITE_PACKAGES=$SITE_PACKAGES" >> $GITHUB_ENV - name: Download site-packages artifact - uses: actions/download-artifact@v2 + uses: actions/download-artifact@v4 with: name: code-artifacts path: /tmp/ @@ -450,7 +450,7 @@ jobs: cat coverage.xml - name: Coverage results if: success() || failure() - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: coverage.xml path: ./openimis/openIMIS/coverage @@ -464,7 +464,7 @@ jobs: with: python-version: '3.10' - name: Download site-packages artifact - uses: actions/download-artifact@v2 + uses: actions/download-artifact@v4 with: name: site-packages path: /tmp/ @@ -475,7 +475,7 @@ jobs: echo "SITE_PACKAGES=$SITE_PACKAGES" >> $GITHUB_ENV - name: Download code artifact - uses: actions/download-artifact@v2 + uses: actions/download-artifact@v4 with: name: code-artifacts path: /tmp/ @@ -498,7 +498,7 @@ jobs: python -m flake8 $MOD_DIR - name: Flake8 results upload if: success() || failure() - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: 
name: flake8-report.txt path: ./openimis/flake8-report.txt @@ -530,17 +530,17 @@ jobs: echo "SONAR_EXCLUSIONS=${{ inputs.SONAR_EXCLUSIONS }}" >> $GITHUB_ENV fi - name: Download coverage artifact - uses: actions/download-artifact@v2 + uses: actions/download-artifact@v4 with: name: coverage.xml path: report - name: Download flake8 artifact - uses: actions/download-artifact@v2 + uses: actions/download-artifact@v4 with: name: flake8-report.txt path: report - name: Download code artifact - uses: actions/download-artifact@v2 + uses: actions/download-artifact@v4 with: name: code-artifacts path: /tmp/ diff --git a/.github/workflows/pgsql.yml b/.github/workflows/pgsql.yml new file mode 100644 index 00000000..1ca06a11 --- /dev/null +++ b/.github/workflows/pgsql.yml @@ -0,0 +1,92 @@ +name: Automated CI PSQL testing +# This workflow run automatically for every commit on github it checks the syntax and launch the tests. +# | grep . | uniq -c filters out empty lines and then groups consecutive lines together with the number of occurrences +on: + push: + workflow_dispatch: + inputs: + comment: + description: Just a simple comment to know the purpose of the manual build + required: false + +jobs: + build: + runs-on: ubuntu-20.04 + services: + pgsql: + image: postgres + env: + DB_HOST: localhost + DB_PORT: 5432 + POSTGRES_DB: imis + POSTGRES_USER: postgres + POSTGRES_PASSWORD: GitHub999 + ports: + - 5432:5432 + # needed because the mssql container does not provide a health check + options: >- + --health-cmd pg_isready + --health-interval 10s + --health-timeout 5s + --health-retries 5 + + steps: + - uses: actions/checkout@v2 + + - name: Set up Python 3.8 + uses: actions/setup-python@v1 + with: + python-version: 3.8 + + - name: Install Python dependencies + run: | + python -m pip install --upgrade pip + pip install -r requirements.txt + python modules-requirements.py openimis.json > modules-requirements.txt + pip install -r modules-requirements.txt + + + - name: Environment info + 
run: | + pip list + + export DBBRANCH="$([ $GITHUB_REF == 'main' ] && echo "main" || echo "develop")" + + if [ ${GITHUB_REF##*/} = "develop" ]; then export DBBRANCH="main"; else export DBBRANCH="develop"; fi + echo "Branch ${GITHUB_REF##*/}, usign ${DBBRANCH} branch for database" + + git clone --depth 1 --branch $DBBRANCH https://github.com/openimis/database_postgresql.git ./sql + + wget --quiet -O - https://www.postgresql.org/media/keys/ACCC4CF8.asc | sudo apt-key add - + echo "deb http://apt.postgresql.org/pub/repos/apt/ $(lsb_release -cs)-pgdg main" | sudo tee /etc/apt/sources.list.d/postgresql-pgdg.list > /dev/null + sudo apt-get install postgresql-client + + - name: Initialize DB + run: | + echo 'set search_path to public' >> ~/.psqlrc + PGPASSWORD=GitHub999 psql -U postgres -h localhost -d imis -U postgres -f ./sql/database\ scripts/00_dump.sql | grep . | uniq -c + PGPASSWORD=GitHub999 psql -U postgres -h localhost -d imis -U postgres -f ./sql/database\ scripts/02_aux_functions.sql | grep . | uniq -c + PGPASSWORD=GitHub999 psql -U postgres -h localhost -d imis -U postgres -f ./sql/database\ scripts/03_views.sql | grep . | uniq -c + PGPASSWORD=GitHub999 psql -U postgres -h localhost -d imis -U postgres -f ./sql/database\ scripts/04_functions.sql | grep . | uniq -c + PGPASSWORD=GitHub999 psql -U postgres -h localhost -d imis -U postgres -f ./sql/database\ scripts/05_stored_procs.sql | grep . | uniq -c + PGPASSWORD=GitHub999 psql -U postgres -h localhost -d imis -U postgres -f ./sql/database\ scripts/demo_db.sql | grep . 
| uniq -c + + - name: Django tests + run: | + python -V + ls -l + cd openIMIS + ls -l + python manage.py test --keepdb $(jq -r '(.modules[]|.name)' ../openimis.json) + env: + SECRET_KEY: secret + DEBUG: true + DB_ENGINE: django.db.backends.postgresql + #DJANGO_SETTINGS_MODULE: hat.settings + DB_HOST: localhost + DB_PORT: 5432 + DB_NAME: imis + DB_USER: postgres + DB_PASSWORD: GitHub999 + #DEV_SERVER: true + SITE_ROOT: api diff --git a/.gitignore b/.gitignore index da2798b2..662822c9 100644 --- a/.gitignore +++ b/.gitignore @@ -1,25 +1,28 @@ -**/.idea -**/venv -**/.venv -**/.env -**/__pycache__ -modules-requirements.txt -modules-tests.sh -**/.coverage -**/coverage.xml -**/coverage -**/*.log -**/*.log.* -**/*.mo -openIMIS/locale/en/LC_MESSAGES/django.mo -**/staticfiles -extracted_translations_fe -script/config.py -**/src/* -**/images/insurees -openimis-dev.json -# Except for the runConfigurations folder -!.idea/runConfigurations +**/.vscode +**/.idea +**/venv +**/.venv +**/.env +**/__pycache__ +modules-requirements.txt +modules-tests.sh +**/.coverage +**/coverage.xml +**/coverage +**/*.log +**/*.log.* +**/*.mo +openIMIS/file_storage +openIMIS/locale/en/LC_MESSAGES/django.mo +**/staticfiles +extracted_translations_fe +script/config.py +**/src/* +**/images/insurees +openimis-dev.json + +# Except for the runConfigurations folder +!.idea/runConfigurations -# Ensure all files in runConfigurations are included -!.idea/runConfigurations/* +# Ensure all files in runConfigurations are included +!.idea/runConfigurations/* diff --git a/README.md b/README.md index 3fc617db..6b4d9f27 100644 --- a/README.md +++ b/README.md @@ -91,9 +91,8 @@ install -r dev-requirements.txt` instead, for more modules. 
- start openIMIS from within `openimis-be_py/openIMIS`: `python manage.py runserver` At this stage, you may (depends on the database you connect to) need to: - -- apply django migrations, from `openimis-be_py/openIMIS`: `python manage.py migrate` -- create a superuser for django admin console, from +* apply django migrations, from `openimis-be_py/openIMIS`: `python manage.py migrate`. See [PostgresQL section](#postgresql) if you are using postgresql for dev DB. +* create a superuser for django admin console, from `openimis-be_py/openIMIS`: `python manage.py createsuperuser` (will not prompt for a password) and then `python manage.py changepassword ` @@ -223,6 +222,60 @@ The configuration for connection to the database is identical for developers and - default 'options' in openIMIS are `{'driver': 'ODBC Driver 17 for SQL Server','unicode_results': True}` If you need to provide other options, use the `DB_OPTIONS` entry in the `.env` file (be complete: the new json string will entirely replace the default one) +### PostgresQL + +**Requirement:** `postgres-json-schema`. +Follow the README at https://github.com/gavinwahl/postgres-json-schema to install. 
+ +To use postgresql as the dev database, specify `DB_ENGINE` in `.env` alongside other DB config vars: +``` +DB_ENGINE=django.db.backends.postgresql +DB_HOST=localhost +DB_PORT=5432 +DB_NAME=coremis +DB_USER=postgres +``` + +Create the database, named `coremis` using the example `DB_NAME` above: +```bash +psql postgres -c 'create database coremis' +``` + +Before applying django migrations, the database needs to be prepared using scripts from https://github.com/openimis/database_postgresql: + +```bash +git clone https://github.com/openimis/database_postgresql +cd database_postgresql + +# generate concatenated .sql for easy execution +bash concatenate_files.sh + +# prepare the database - replace fullDemoDatabase.sql with EmptyDatabase.sql if you don't need demo data +psql -d coremis -a -f output/fullDemoDatabase.sql +``` + +From here on django's `python manage.py migrate` should execute successfully. + + +## OpenSearch + +### OpenSearch - Adding Environmental Variables to Your Build +To configure environmental variables for your build, include the following: +* `OPENSEARCH_HOST` - For the non-dockerized instance in a local context, set it to 0.0.0.0:9200. +For the dockerized instance, use opensearch:9200. +* `OPENSEARCH_ADMIN` This variable is used for the admin username. (default value: admin) +* `OPENSEARCH_PASSWORD` This variable is used for the admin password. (default value: admin) + +### OpenSearch - How to initialize data after deployment +* If you have initialized the application but still have some data to be transferred, you can effortlessly +achieve this by using the commands available in the business module: `python manage.py add__data_to_opensearch`. +This command loads existing data into OpenSearch. + +### OpenSearch - more details +* For more comprehensive details on OpenSearch configuration, please consult the [resource](https://github.com/openimis/openimis-be-opensearch_reports_py/tree/develop) +provided in the README section. 
+ + ## Developer tools ### To create backend module skeleton in single command @@ -394,6 +447,71 @@ In production, additional security settings are applied to cookies used for CSRF - **JWT_COOKIE_SAMESITE**: Sets the `SameSite` attribute to 'Lax' for the JWT cookie. +### JWT Security Configuration + +To enhance JWT token security, you can configure the system to use RSA keys for signing and verifying tokens. + +1. **Generate RSA Keys**: + ```bash + # Generate a private key + openssl genpkey -algorithm RSA -out jwt_private_key.pem -aes256 + + # Generate a public key + openssl rsa -pubout -in jwt_private_key.pem -out jwt_public_key.pem + +2. **Store RSA Keys**: + Place jwt_private_key.pem and jwt_public_key.pem in a secure directory within your project, e.g., keys/. + +3. **Django Configuration**: + Ensure that the settings.py file is configured to read these keys. If RSA keys are found, the system will use RS256. Otherwise, it will fallback to HS256 using DJANGO_SECRET_KEY. + +Note: If RSA keys are not provided, the system defaults to HS256. Using RS256 with RSA keys is recommended for enhanced security. + +## CSRF Setup Guide + +CSRF (Cross-Site Request Forgery) protection ensures that unauthorized commands are not performed on behalf of authenticated users without their consent. It achieves this by including a unique token in each form submission or AJAX request, which is then validated by the server. +When using JWT (JSON Web Token) for authentication, CSRF protection is not executed because the server does not rely on cookies for authentication. Instead, the JWT is included in the request headers, making CSRF attacks less likely. + +### Development Environment + +In the development environment, CSRF protection is configured to allow requests from `localhost:3000` and `localhost:8000` by default in .env.example file. + +### Production Environment + +In the production environment, you need to specify the trusted origins in your `.env` file. + +1. 
**Trusted Origins Setup**: + - Define the trusted origins in your `.env` file to allow cross-origin requests from specific domains. + - Use a comma-separated list to specify multiple origins. + - Example of setting trusted origins in `.env`: + ```env + CSRF_TRUSTED_ORIGINS=https://example.com,https://api.example.com + ``` + + +## Security Headers + +This section describes the security headers used in the application, based on OWASP recommendations, to enhance the security of your Django application. + +### Security Headers in Production + +In the production environment, several security headers are set to protect the application from common vulnerabilities: + +- **Strict-Transport-Security**: `max-age=63072000; includeSubDomains` - Enforces secure (HTTP over SSL/TLS) connections to the server and ensures all subdomains also follow this rule. +- **Content-Security-Policy**: `default-src 'self';` - Prevents a wide range of attacks, including Cross-Site Scripting (XSS), by restricting sources of content to the same origin. +- **X-Frame-Options**: `DENY` - Protects against clickjacking attacks by preventing the page from being framed. +- **X-Content-Type-Options**: `nosniff` - Prevents the browser from MIME-sniffing the content type, ensuring that the browser uses the declared content type. +- **Referrer-Policy**: `no-referrer` - Controls how much referrer information is included with requests by not sending any referrer information with requests. +- **Permissions-Policy**: `geolocation=(), microphone=()` - Controls access to browser features by disabling access to geolocation and microphone features. + +In production, additional security settings are applied to cookies used for CSRF and JWT: + +- **CSRF_COOKIE_SECURE**: Ensures the CSRF cookie is only sent over HTTPS. +- **CSRF_COOKIE_HTTPONLY**: Prevents JavaScript from accessing the CSRF cookie. 
+- **CSRF_COOKIE_SAMESITE**: Sets the `SameSite` attribute to 'Lax', which allows the cookie to be sent with top-level navigations and gets rid of the risk of CSRF attacks. +- **JWT_COOKIE_SECURE**: Ensures the JWT cookie is only sent over HTTPS. +- **JWT_COOKIE_SAMESITE**: Sets the `SameSite` attribute to 'Lax' for the JWT cookie. + ## Custom exception handler for new modules REST-based modules If the module you want to add to the openIMIS uses its own REST exception handler you have to register diff --git a/openIMIS/developer_tools/management/commands/load_fixture_foreign_key.py b/openIMIS/developer_tools/management/commands/load_fixture_foreign_key.py new file mode 100644 index 00000000..1317b7c6 --- /dev/null +++ b/openIMIS/developer_tools/management/commands/load_fixture_foreign_key.py @@ -0,0 +1,100 @@ +from django.core.management.base import BaseCommand +from django.core.exceptions import ObjectDoesNotExist +import json +from django.apps import apps +from django.core.management import call_command +import os + + +class Command(BaseCommand): + help = 'Load a fixture and replace foreign keys using a natural key (like uuid, name, location, etc.) with corresponding model IDs' + + def add_arguments(self, parser): + # Argument for the fixture file + parser.add_argument( + 'fixture_file', + type=str, + help='Path to the fixture file (JSON format)' + ) + # Argument for the field name that will be used as the natural key (e.g., "uuid", "name", "location", etc.) 
+ parser.add_argument( + '--field', + type=str, + help="The unique field to use for resolving foreign keys (e.g., 'uuid', 'name', 'location', etc.)", + required=True + ) + + def handle(self, *args, **kwargs): + fixture_file = kwargs['fixture_file'] + field_name = kwargs['field'] + + # Load the fixture data + try: + with open(fixture_file, 'r') as f: + data = json.load(f) + except FileNotFoundError: + self.stdout.write(self.style.ERROR(f"Fixture file '{fixture_file}' not found")) + return + except json.JSONDecodeError: + self.stdout.write(self.style.ERROR(f"Fixture file '{fixture_file}' is not valid JSON")) + return + + # Process the fixture data + for obj in data: + model = obj['model'] + if model: + # Get the model class dynamically using the app label and model name + app_label, model_name = model.split('.') + try: + model_class = apps.get_model(app_label, model_name) + except LookupError: + self.stdout.write(self.style.ERROR(f"Model '{model}' not found")) + continue + + # Loop through fields in the fixture and process foreign keys + for field, field_value in obj['fields'].items(): + # If the field value is a list (e.g., ["uuid_value"]), handle it as a list + if isinstance(field_value, list): + if len(field_value) == 1: # If there is only one element (like with the 'role' field) + related_field = model_class._meta.get_field(field) + + if related_field.is_relation: # Check if it's a foreign key + # Look up the related model (e.g., Role) using the field_name (e.g., uuid, name) + related_model = related_field.related_model + try: + # We fetch the related object by the unique field (e.g., uuid, name, location, etc.) 
+ related_object = related_model.objects.get(**{field_name: field_value[0]}) + # Replace the field value with the primary key (ID) - not a list anymore + obj['fields'][field] = related_object.id + except ObjectDoesNotExist: + self.stdout.write(self.style.ERROR( + f"{related_model} with {field_name} '{field_value[0]}' not found")) + continue + # If it's not a list, process as usual (no change needed) + elif isinstance(field_value, str): # Checking if the value is a string (uuid, name, etc.) + related_field = model_class._meta.get_field(field) + if related_field.is_relation: + related_model = related_field.related_model + try: + # Fetch the related object + related_object = related_model.objects.get(**{field_name: field_value}) + # Replace the value with the ID + obj['fields'][field] = related_object.id + except ObjectDoesNotExist: + self.stdout.write( + self.style.ERROR(f"{related_model} with {field_name} '{field_value}' not found")) + continue + + # Save the modified fixture to a new file + output_file = fixture_file.replace('.json', '_modified.json') + with open(output_file, 'w') as f: + json.dump(data, f, indent=4) + + self.stdout.write(self.style.SUCCESS(f'Successfully transformed the fixture and saved it as {output_file}')) + + # Now that unique field values are replaced with IDs, load this fixture into the database + try: + call_command('loaddata', output_file) # This will load the modified fixture + self.stdout.write(self.style.SUCCESS(f'Successfully loaded the modified fixture into the database')) + except Exception as e: + self.stdout.write(self.style.ERROR(f'Error loading fixture: {e}')) diff --git a/openIMIS/openIMIS/ExceptionHandlerDispatcher.py b/openIMIS/openIMIS/ExceptionHandlerDispatcher.py index 4dde35df..fabad6e9 100644 --- a/openIMIS/openIMIS/ExceptionHandlerDispatcher.py +++ b/openIMIS/openIMIS/ExceptionHandlerDispatcher.py @@ -1,7 +1,6 @@ from .ExceptionHandlerRegistry import ExceptionHandlerRegistry from rest_framework.views import 
exception_handler -from rest_framework import exceptions, status - +from rest_framework import exceptions, status, views def dispatcher(exc, context): """ @@ -10,6 +9,10 @@ def dispatcher(exc, context): module_name = _extract_module_name(context['request']) handler = ExceptionHandlerRegistry.get_exception_handler(module_name) + response = _process_exception_handler(exc, context) + if response: + return response + if handler is None: # Fallback to default DRF exception handler if no handler is defined for the module handler = exception_handler @@ -21,9 +24,16 @@ def dispatcher(exc, context): return response +def _process_exception_handler(exc, context): + response = views.exception_handler(exc, context) + + if isinstance(exc, (exceptions.AuthenticationFailed, exceptions.NotAuthenticated)): + response.status_code = status.HTTP_401_UNAUTHORIZED + return response + + def _extract_module_name(request): """ Extracts the module name from the request URL. """ return request.path.split('/')[2] - diff --git a/openIMIS/openIMIS/settings.py b/openIMIS/openIMIS/settings.py index 0c6e1ea9..516f2c9c 100644 --- a/openIMIS/openIMIS/settings.py +++ b/openIMIS/openIMIS/settings.py @@ -4,6 +4,7 @@ import json import logging import os +import sys from dotenv import load_dotenv from .openimisapps import openimis_apps, get_locale_folders @@ -18,8 +19,6 @@ # Build paths inside the project like this: os.path.join(BASE_DIR, ...) BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) - - # SECURITY WARNING: don't run with debug turned on in production! 
DEBUG = os.environ.get("MODE", "PROD") == "DEV" DEFAULT_LOGGING_HANDLER = os.getenv("DJANGO_LOG_HANDLER", "console") @@ -28,10 +27,6 @@ if DEBUG: EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend' - - - - LOGGING = { "version": 1, "disable_existing_loggers": False, @@ -172,6 +167,7 @@ def SITE_URL(): "developer_tools", "drf_spectacular", # Swagger UI for FHIR API "axes", + "django_opensearch_dsl" ] INSTALLED_APPS += OPENIMIS_APPS INSTALLED_APPS += ["apscheduler_runner", "signal_binding"] # Signal binding should be last installed module @@ -228,6 +224,7 @@ def SITE_URL(): "django.middleware.csrf.CsrfViewMiddleware", "django.contrib.auth.middleware.AuthenticationMiddleware", "core.middleware.SecurityHeadersMiddleware", + "csp.middleware.CSPMiddleware", ] MODE = os.environ.get("MODE") @@ -338,10 +335,6 @@ def SITE_URL(): "JWT_COOKIE_SAMESITE": "Lax", }) - CSRF_COOKIE_SECURE = True - CSRF_COOKIE_HTTPONLY = True - CSRF_COOKIE_SAMESITE = 'Lax' - SECURE_BROWSER_XSS_FILTER = True SECURE_CONTENT_TYPE_NOSNIFF = True SECURE_HSTS_SECONDS = 63072000 @@ -454,13 +447,39 @@ def SITE_URL(): CELERY_RESULT_BACKEND = os.environ.get("CELERY_RESULT_BACKEND") if 'CACHE_BACKEND' in os.environ and 'CACHE_URL' in os.environ: - CACHES = { - 'default': { - 'BACKEND': os.environ.get('CACHE_BACKEND'), - 'LOCATION': os.environ.get("CACHE_URL"), - 'OPTIONS': json.loads(os.environ.get("CACHE_OPTIONS", "")) - } + CACHE_BACKEND = os.environ.get('CACHE_BACKEND') + CACHE_URL = os.environ.get("CACHE_URL") + CACHE_OPTIONS = os.environ.get("CACHE_OPTIONS", None) + if CACHE_OPTIONS: + CACHE_OPTIONS = json.loads(CACHE_OPTIONS) +else: + CACHE_BACKEND = 'django.core.cache.backends.locmem.LocMemCache' + CACHE_URL = None + CACHE_OPTIONS = None + +CACHE_PARAM = {} +CACHE_PARAM['BACKEND'] = CACHE_BACKEND +if CACHE_URL: + CACHE_PARAM['LOCATION'] = CACHE_URL + +if CACHE_OPTIONS: + CACHE_PARAM['OPTIONS'] = CACHE_OPTIONS + +CACHES = { + 'default': { + **CACHE_PARAM, + 'KEY_PREFIX': "oi" + }, + 
'location': { + **CACHE_PARAM, + 'KEY_PREFIX': "loc" + }, + 'coverage': { + **CACHE_PARAM, + 'KEY_PREFIX': "cov" + } +} # This scheduler config will: # - Store jobs in the project database @@ -479,11 +498,6 @@ def SITE_URL(): # This list will be called with scheduler.add_job() as specified: # Note that the document implies that the time is local and follows DST but that seems false and in UTC regardless SCHEDULER_JOBS = [ - { - "method": "core.tasks.openimis_test_batch", - "args": ["cron"], - "kwargs": {"id": "openimis_test_batch", "minute": 16, "replace_existing": True}, - }, { "method": "policy.tasks.get_policies_for_renewal", "args": ["cron"], @@ -552,7 +566,6 @@ def SITE_URL(): # https://docs.djangoproject.com/en/2.1/howto/static-files/ STATIC_ROOT = os.path.join(BASE_DIR, "staticfiles") -STATICFILES_STORAGE = "whitenoise.storage.CompressedManifestStaticFilesStorage" STATIC_URL = "/%sstatic/" % SITE_ROOT() @@ -610,8 +623,62 @@ def SITE_URL(): USE_X_FORWARDED_HOST = True SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https') +OPENSEARCH_DSL = { + 'default': { + 'hosts': os.environ.get("OPENSEARCH_HOST", '0.0.0.0:9200'), + 'http_auth': ( + f"{os.environ.get('OPENSEARCH_ADMIN')}", + f"{os.environ.get('OPENSEARCH_PASSWORD')}" + ), + 'timeout': 120, + } +} + +MEDIA_URL = "/file_storage/" +MEDIA_ROOT = os.path.join(BASE_DIR, "file_storage/") + +if not os.path.exists(MEDIA_ROOT): + os.makedirs(MEDIA_ROOT) + +STORAGES = { + "default": { + "BACKEND": "django.core.files.storage.FileSystemStorage", + }, + 'staticfiles': { + 'BACKEND': "whitenoise.storage.CompressedManifestStaticFilesStorage", + }, +} + PASSWORD_MIN_LENGTH = int(os.getenv('PASSWORD_MIN_LENGTH', 8)) PASSWORD_UPPERCASE = int(os.getenv('PASSWORD_UPPERCASE', 1)) PASSWORD_LOWERCASE = int(os.getenv('PASSWORD_LOWERCASE', 1)) PASSWORD_DIGITS = int(os.getenv('PASSWORD_DIGITS', 1)) PASSWORD_SYMBOLS = int(os.getenv('PASSWORD_SYMBOLS', 1)) + +IS_UNIT_TEST_ENV = 'test' in sys.argv + +# CSRF settings 
+CSRF_COOKIE_SECURE = True +SESSION_COOKIE_SECURE = True +# session cookie validity = 8 hours +SESSION_COOKIE_AGE = 28800 +SESSION_COOKIE_NAME = "openimis_session" + +# CORS settings +CORS_ALLOW_CREDENTIALS = True + +# Cookie settings +CSRF_COOKIE_NAME = 'csrftoken' +CSRF_USE_SESSIONS = True +SESSION_COOKIE_SAMESITE = 'Lax' # or 'None' if cross-site +CSRF_COOKIE_SAMESITE = 'Lax' # or 'None' if cross-site +CSRF_COOKIE_HTTPONLY = False # False if you need to access it from JavaScript + +USER_AGENT_CSRF_BYPASS = [] + +CSP_DEFAULT_SRC = ["'self'"] +CSP_SCRIPT_SRC = ["'self'"] +CSP_STYLE_SRC = ["'self'"] +CSP_IMG_SRC = ["'self'", "data:"] # Allows images from the same origin and base64 encoded images +CSP_FRAME_ANCESTORS = ["'self'"] + diff --git a/openimis.json b/openimis.json index c84e1771..ab934385 100644 --- a/openimis.json +++ b/openimis.json @@ -13,8 +13,8 @@ "pip": "git+https://github.com/openimis/openimis-be-workflow_py.git@develop#egg=openimis-be-workflow" }, { - "name": "tasks_management", - "pip": "git+https://github.com/openimis/openimis-be-tasks_management_py.git@develop#egg=openimis-be-tasks_management" + "name": "tasks_management", + "pip": "git+https://github.com/openimis/openimis-be-tasks_management_py.git@develop#egg=openimis-be-tasks_management" }, { "name": "report", @@ -68,10 +68,6 @@ "name": "tools", "pip": "git+https://github.com/openimis/openimis-be-tools_py.git@develop#egg=openimis-be-tools" }, - { - "name": "api_fhir_r4", - "pip": "git+https://github.com/openimis/openimis-be-api_fhir_r4_py.git@develop#egg=openimis-be-api_fhir_r4" - }, { "name": "calculation", "pip": "git+https://github.com/openimis/openimis-be-calculation_py.git@develop#egg=openimis-be-calculation" @@ -148,6 +144,14 @@ "name": "payroll", "pip": "git+https://github.com/openimis/openimis-be-payroll_py.git@develop#egg=openimis-be-payroll" }, + { + "name": "deduplication", + "pip": 
"git+https://github.com/openimis/openimis-be-deduplication_py.git@develop#egg=openimis-be-deduplication" + }, + { + "name": "calcrule_validations", + "pip": "git+https://github.com/openimis/openimis-be-calcrule_validations_py.git@develop#egg=openimis-be-calcrule_validations" + }, { "name": "controls", "pip": "git+https://github.com/openimis/openimis-be-controls_py.git@develop#egg=openimis-be-controls" @@ -159,6 +163,10 @@ { "name": "claim_sampling", "pip": "git+https://github.com/openimis/openimis-be-claim_sampling_py.git@develop#egg=openimis-be-claim_sampling" + }, + { + "name": "api_etl", + "pip": "git+https://github.com/openimis/openimis-be-api_etl_py.git@develop#egg=openimis-be-api_etl" } ] } diff --git a/requirements.txt b/requirements.txt index 5bc8b0b2..55111bdf 100644 --- a/requirements.txt +++ b/requirements.txt @@ -32,7 +32,7 @@ waitress wheel whitenoise django-health-check -requests~=2.32.0 +requests~=2.32.0 apscheduler==3.10.1 # As from v0.4, Django-apscheduler has a migration that is incompatible with SQL Server # (autoincrement int => bigint) so we are using our own fork with a squashed migration @@ -57,3 +57,4 @@ django-opensearch-dsl==0.5.1 zxcvbn~=4.4.28 password-validator==1.0 django-axes==6.4.0 +django-csp