34 changes: 34 additions & 0 deletions .github/workflows/lint.yml
@@ -0,0 +1,34 @@
name: Python Lint

on:
push:
branches: [main]
pull_request:
branches: [main]

jobs:
python-lint:
name: Lint Python Code
runs-on: ubuntu-latest

steps:
- name: Checkout code
uses: actions/checkout@v3

- name: Set up Python
uses: actions/setup-python@v4
with:
python-version: '3.13'

- name: Install linters
run: |
python -m pip install --upgrade pip
pip install flake8 black isort

- name: Run flake8
run: |
flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics

- name: Check import sorting with isort
run: |
isort . --check-only
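      # black is installed above but never invoked; check formatting as well
      - name: Check formatting with black
        run: |
          black --check .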
43 changes: 43 additions & 0 deletions .github/workflows/test.yml
@@ -0,0 +1,43 @@
name: Python Tests with Coverage

on:
push:
branches: [main]
pull_request:
branches: [main]

jobs:
test:
    if: false # temporarily disables the job
runs-on: ubuntu-latest

strategy:
matrix:
python-version: ['3.11', '3.12', '3.13']

steps:
- name: Checkout code
uses: actions/checkout@v3

- name: Set up Python
uses: actions/setup-python@v4
with:
python-version: ${{ matrix.python-version }}

- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install -r requirements.txt
pip install pytest pytest-cov

- name: Run tests with coverage
run: |
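          # NOTE: '--cov=your_package_name' below is a placeholder; replace it
          # with the real package path before re-enabling this job.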
pytest --cov=your_package_name --cov-report=term --cov-report=xml --cov-fail-under=80

# Optional: Upload coverage report to Codecov (for public repos or with CODECOV_TOKEN)
- name: Upload to Codecov
uses: codecov/codecov-action@v3
with:
files: coverage.xml
env:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} # Only needed for private repos
24 changes: 24 additions & 0 deletions .github/workflows/trivy.yml
@@ -0,0 +1,24 @@
name: Trivy Security Scan

on:
push:
branches: [main]
pull_request:
branches: [main]

jobs:
trivy-scan:
runs-on: ubuntu-latest
name: Trivy FS Scan

steps:
- name: Checkout repo
uses: actions/checkout@v3

- name: Run Trivy vulnerability scanner on file system
uses: aquasecurity/trivy-action@master
with:
scan-type: 'fs'
scan-ref: '.'
scanners: 'vuln,secret,config'
ignore-unfixed: true
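          # Optional: the action also accepts 'severity' (e.g. 'CRITICAL,HIGH')
          # and 'exit-code' inputs if you want the build to fail only on
          # serious findings.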
17 changes: 17 additions & 0 deletions Dockerfile
@@ -0,0 +1,17 @@
FROM python:3.13-alpine

# Set working directory
WORKDIR /app

# Install system dependencies (use Alpine package manager)
# 'apk add --no-cache' keeps the image small and matches the alpine base
# gcc/musl-dev/postgresql-dev are included because psycopg2-binary ships no
# musl wheels, so pip builds psycopg2 from source on Alpine
RUN apk add --no-cache postgresql-client gcc musl-dev postgresql-dev

# Copy requirements first for better caching
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt

# Copy the rest of the application
COPY . .

CMD ["python", "cleanup.py"]
50 changes: 50 additions & 0 deletions README.md
@@ -0,0 +1,50 @@
# OTP Cleanup Service

This service handles the automatic cleanup of expired OTPs in the JamAndFlow database.
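
Each cleanup pass is roughly equivalent to running the following SQL against the `otps` table defined in `models.py`:

```sql
-- what the ORM delete in cleanup.py boils down to
DELETE FROM otps WHERE expires_at < NOW();
```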

## Setup

1. Create a `.env` file:
```env
POSTGRES_USER=postgres
POSTGRES_PASSWORD=PassWord
POSTGRES_DB=JamAndFlow
POSTGRES_HOST=db
POSTGRES_PORT=5432
CLEANUP_INTERVAL_SECONDS=300 # 5 minutes
```

2. Build and run with Docker Compose:
```bash
docker compose up --build
```

## Configuration

- `POSTGRES_USER`: PostgreSQL username
- `POSTGRES_PASSWORD`: PostgreSQL password
- `POSTGRES_DB`: PostgreSQL database name
- `POSTGRES_HOST`: PostgreSQL host (`db` when run under Docker Compose; the code falls back to `localhost`)
- `POSTGRES_PORT`: PostgreSQL port (default: `5432`)
- `CLEANUP_INTERVAL_SECONDS`: Interval between cleanup runs (default: 300 seconds / 5 minutes)

## Docker Network

This service needs to be on the same network as your main JamAndFlow API:

```bash
# Create the network if it doesn't exist
docker network create jamandflows-network
```
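
For reference, a minimal sketch of how the main API's own `docker-compose.yml` could join that network (the `api` service name and image are assumptions, not part of this repo):

```yaml
# hypothetical compose file for the main JamAndFlow API
services:
  api:
    image: jamandflow-api:latest  # assumed image name
    networks:
      - jamandflows-network

networks:
  jamandflows-network:
    external: true
```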

## Development

1. Install dependencies:
```bash
pip install -r requirements.txt
```

2. Run locally (requires a reachable PostgreSQL instance; the `db` hostname from the sample `.env` only resolves inside the Compose network):
```bash
python cleanup.py
```
109 changes: 109 additions & 0 deletions cleanup.py
@@ -0,0 +1,109 @@
import asyncio
import logging
import time
from datetime import datetime, timezone

from sqlalchemy import create_engine, text
from sqlalchemy.orm import sessionmaker

from config import settings
from models import OTP

# Set up logging with explicit handler control
logger = logging.getLogger(__name__)
# Remove any existing handlers to prevent duplicates
for handler in logger.handlers[:]:
logger.removeHandler(handler)
# Remove root logger handlers
for handler in logging.getLogger().handlers[:]:
logging.getLogger().removeHandler(handler)

# Add single stream handler
handler = logging.StreamHandler()
handler.setFormatter(logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s'))
logger.addHandler(handler)
logger.setLevel(logging.DEBUG)

# Database setup
engine = create_engine(settings.database_url)
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)

def wait_for_db(max_retries=5, retry_interval=5):
"""Wait for database to be available."""
retry_count = 0
while retry_count < max_retries:
try:
db = SessionLocal()
try:
db.execute(text("SELECT 1"))
logger.info("Database connection successful")
return True
finally:
db.close()
except Exception as e:
retry_count += 1
if retry_count < max_retries:
logger.warning(f"Database connection attempt {retry_count} failed: {e}")
logger.info(f"Retrying in {retry_interval} seconds...")
                time.sleep(retry_interval)  # wait_for_db is synchronous; asyncio.sleep here would return an un-awaited coroutine and never actually sleep
else:
logger.error(f"Failed to connect to database after {max_retries} attempts: {e}")
return False
return False

def cleanup_expired_otps():
"""Delete expired OTPs from the database."""
try:
db = SessionLocal()
try:
# Use timezone-aware UTC for comparison to match models
now = datetime.now(timezone.utc)
logger.debug(f"Running cleanup check at {now}")

result = db.query(OTP).filter(
OTP.expires_at < now
).delete()
db.commit()

# Always log the check, even if no deletions
if result > 0:
logger.info(f"Deleted {result} expired OTPs")
else:
logger.debug("No expired OTPs found to delete")

finally:
db.close()
except Exception as e:
logger.error(f"Error cleaning up expired OTPs: {e}")

async def run_cleanup_loop():
"""Run the cleanup task periodically."""
logger.info(f"Starting cleanup loop with interval: {settings.CLEANUP_INTERVAL_SECONDS} seconds")

while True:
logger.debug("Running cleanup cycle...")
cleanup_expired_otps()
logger.debug(f"Sleeping for {settings.CLEANUP_INTERVAL_SECONDS} seconds...")
await asyncio.sleep(settings.CLEANUP_INTERVAL_SECONDS)

def main():
# Only show startup banner once
logger.info("Starting OTP cleanup service...")
logger.info(f"Database URL: {settings.database_url.replace(settings.POSTGRES_PASSWORD, '****')}")
logger.info(f"Cleanup interval: {settings.CLEANUP_INTERVAL_SECONDS} seconds")

# Wait for database with retries
if not wait_for_db():
logger.error("Failed to connect to database after retries. Exiting.")
return

try:
# Run the cleanup loop
asyncio.run(run_cleanup_loop())
except KeyboardInterrupt:
logger.info("Shutting down OTP cleanup service...")
except Exception as e:
logger.error(f"Error in cleanup service: {e}")
raise

if __name__ == "__main__":
main()
28 changes: 28 additions & 0 deletions config.py
@@ -0,0 +1,28 @@
from pydantic_settings import BaseSettings, SettingsConfigDict


class Settings(BaseSettings):
# Database settings
POSTGRES_USER: str
POSTGRES_PASSWORD: str
POSTGRES_DB: str
POSTGRES_HOST: str = "localhost"
POSTGRES_PORT: str = "5432"

# Cleanup settings
CLEANUP_INTERVAL_SECONDS: int = 300 # 5 minutes

@property
def database_url(self) -> str:
"""Construct the database URL from the settings."""
return (
f"postgresql://{self.POSTGRES_USER}:{self.POSTGRES_PASSWORD}"
f"@{self.POSTGRES_HOST}:{self.POSTGRES_PORT}/{self.POSTGRES_DB}"
)

    model_config = SettingsConfigDict(env_file=".env", case_sensitive=True)


settings = Settings()
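
For reference, with the sample `.env` values from the README, `database_url` assembles to:

```python
>>> from config import settings
>>> settings.database_url
'postgresql://postgres:PassWord@db:5432/JamAndFlow'
```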
18 changes: 18 additions & 0 deletions docker-compose.yml
@@ -0,0 +1,18 @@
services:
cleanup:
build: .
container_name: otp_cleanup_service
env_file:
- .env
networks:
- jamandflows-network
restart: "no" # Don't auto-restart on failure

networks:
jamandflows-network:
external: true
26 changes: 26 additions & 0 deletions models.py
@@ -0,0 +1,26 @@
from datetime import datetime, timezone

from sqlalchemy import Column, DateTime, Integer, String
from sqlalchemy.orm import declarative_base

Base = declarative_base()

class OTP(Base):
__tablename__ = "otps"
id = Column(Integer, primary_key=True, index=True)
email = Column(String, index=True, nullable=False)
otp_code = Column(String, nullable=False)
name = Column(String, nullable=False)
password = Column(String, nullable=True)
is_active = Column(Integer, default=1)
# Use timezone-aware DateTime columns and defaults
created_at = Column(DateTime(timezone=True), default=lambda: datetime.now(timezone.utc))
expires_at = Column(DateTime(timezone=True), nullable=False)

def is_expired(self):
# Safely compare timezone-aware datetimes. If expires_at is naive, treat it as UTC.
now = datetime.now(timezone.utc)
expires = self.expires_at
if expires is not None and expires.tzinfo is None:
expires = expires.replace(tzinfo=timezone.utc)
return now > expires
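
A minimal sketch of the naive-datetime fallback in `is_expired()` (values are illustrative only; assumes `models.py` is importable):

```python
from datetime import datetime

from models import OTP

# A naive expires_at (no tzinfo) is treated as UTC by is_expired()
otp = OTP(email="user@example.com", otp_code="123456", name="User",
          expires_at=datetime(2020, 1, 1, 12, 0))
print(otp.is_expired())  # True: 2020-01-01 12:00 UTC is in the past
```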
5 changes: 5 additions & 0 deletions requirements.txt
@@ -0,0 +1,5 @@
sqlalchemy>=2.0.0
pydantic>=2.0.0
pydantic-settings>=2.0.0
psycopg2-binary>=2.9.0
python-dotenv>=1.0.0