Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 0 additions & 3 deletions api/app.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,6 @@
from fastapi import FastAPI, HTTPException
from fastapi.middleware.cors import CORSMiddleware
from fastapi.responses import FileResponse
from fastapi.staticfiles import StaticFiles

from api import accounts, ai, image_tasks, register, system
from api.support import resolve_web_asset, start_limited_account_watcher
Expand Down Expand Up @@ -43,8 +42,6 @@ async def lifespan(_: FastAPI):
app.include_router(image_tasks.create_router())
app.include_router(register.create_router())
app.include_router(system.create_router(app_version))
if config.images_dir.exists():
app.mount("/images", StaticFiles(directory=str(config.images_dir)), name="images")

@app.get("/{full_path:path}", include_in_schema=False)
async def serve_web(full_path: str):
Expand Down
25 changes: 23 additions & 2 deletions api/system.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,8 @@
from api.support import require_admin, require_identity, resolve_image_base_url
from services.backup_service import BackupError, backup_service
from services.config import config
from services.image_service import delete_images, download_images_zip, get_image_download_response, get_thumbnail_response, list_images
from services.image_service import delete_images, download_images_zip, get_image_download_response, get_image_response, get_thumbnail_response, list_images
from services.image_storage_service import ImageStorageError, image_storage_service
from services.image_tags_service import delete_tag, get_all_tags, set_tags
from services.log_service import log_service
from services.proxy_service import test_proxy
Expand Down Expand Up @@ -69,13 +70,20 @@ async def get_settings(authorization: str | None = Header(default=None)):
@router.post("/api/settings")
async def save_settings(body: SettingsUpdateRequest, authorization: str | None = Header(default=None)):
require_admin(authorization)
return {"config": config.update(body.model_dump(mode="python"))}
try:
return {"config": config.update(body.model_dump(mode="python"))}
except ValueError as exc:
raise HTTPException(status_code=400, detail={"error": str(exc)}) from exc

@router.get("/api/images")
async def get_images(request: Request, start_date: str = "", end_date: str = "", authorization: str | None = Header(default=None)):
require_admin(authorization)
return list_images(resolve_image_base_url(request), start_date=start_date.strip(), end_date=end_date.strip())

@router.get("/images/{image_path:path}", include_in_schema=False)
async def get_image(image_path: str):
return get_image_response(image_path)

@router.get("/image-thumbnails/{image_path:path}", include_in_schema=False)
async def get_image_thumbnail(image_path: str):
return get_thumbnail_response(image_path)
Expand Down Expand Up @@ -135,6 +143,19 @@ async def test_backup_connection(authorization: str | None = Header(default=None
except BackupError as exc:
raise HTTPException(status_code=400, detail={"error": str(exc)}) from exc

@router.post("/api/image-storage/test")
async def test_image_storage_endpoint(authorization: str | None = Header(default=None)):
require_admin(authorization)
return {"result": await run_in_threadpool(image_storage_service.test_webdav)}

@router.post("/api/image-storage/sync")
async def sync_image_storage_endpoint(authorization: str | None = Header(default=None)):
require_admin(authorization)
try:
return {"result": await run_in_threadpool(image_storage_service.sync_all)}
except ImageStorageError as exc:
raise HTTPException(status_code=400, detail={"error": str(exc)}) from exc

@router.get("/api/backups")
async def get_backups(authorization: str | None = Header(default=None)):
require_admin(authorization)
Expand Down
2 changes: 2 additions & 0 deletions services/backup_service.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@
from curl_cffi import requests

from services.config import BASE_DIR, CONFIG_FILE, DATA_DIR, config, load_backup_state, save_backup_state
from services.image_storage_service import IMAGE_INDEX_FILE
from services.image_tags_service import TAGS_FILE


Expand Down Expand Up @@ -631,6 +632,7 @@ def _build_backup_archive(self, settings: dict[str, object], *, trigger: str) ->
self._add_file_to_archive(archive, DATA_DIR / "logs.jsonl", "data/logs.jsonl")
if include.get("image_tasks"):
self._add_file_to_archive(archive, DATA_DIR / "image_tasks.json", "data/image_tasks.json")
self._add_file_to_archive(archive, IMAGE_INDEX_FILE, "data/image_index.json")
if include.get("accounts_snapshot"):
self._add_bytes_to_archive(
archive,
Expand Down
46 changes: 46 additions & 0 deletions services/config.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,16 @@
"images": False,
}

# Default image-storage settings; normalized user-supplied values fall back
# to these (see _normalize_image_storage_settings).
DEFAULT_IMAGE_STORAGE = {
    "enabled": False,  # master switch for the image-storage feature
    "mode": "local",  # one of "local" / "webdav" / "both"
    "webdav_url": "",  # WebDAV server URL (required when enabled)
    "webdav_username": "",
    "webdav_password": "",  # required when enabled
    "webdav_root_path": "chatgpt2api/images",  # remote directory for uploads
    "public_base_url": "",  # presumably a public URL prefix for served images — verify against callers
}


def _normalize_bool(value: object, default: bool = False) -> bool:
if isinstance(value, str):
Expand Down Expand Up @@ -85,6 +95,35 @@ def _normalize_backup_state(value: object) -> dict[str, object]:
}


def _normalize_image_storage_settings(value: object) -> dict[str, object]:
    """Coerce an arbitrary value into a complete, sanitized image-storage dict.

    Non-dict input is treated as empty. The mode collapses to "local" when it
    is unrecognized or when the feature is disabled; URL-like fields lose
    trailing slashes and the root path loses surrounding slashes.
    """
    raw = value if isinstance(value, dict) else {}
    enabled = _normalize_bool(raw.get("enabled"), False)
    mode = str(raw.get("mode") or "local").strip().lower()
    if mode not in {"local", "webdav", "both"} or not enabled:
        mode = "local"
    default_root = str(DEFAULT_IMAGE_STORAGE["webdav_root_path"])
    root = str(raw.get("webdav_root_path") or default_root).strip().strip("/") or default_root
    return {
        "enabled": enabled,
        "mode": mode,
        "webdav_url": str(raw.get("webdav_url") or "").strip().rstrip("/"),
        "webdav_username": str(raw.get("webdav_username") or "").strip(),
        "webdav_password": str(raw.get("webdav_password") or "").strip(),
        "webdav_root_path": root,
        "public_base_url": str(raw.get("public_base_url") or "").strip().rstrip("/"),
    }


def _validate_image_storage_settings(settings: dict[str, object]) -> None:
    """Raise ValueError when image storage is enabled but required WebDAV fields are blank.

    Disabled settings are always accepted unchanged.
    """
    enabled = _normalize_bool(settings.get("enabled"), False)
    if not enabled:
        return
    url = str(settings.get("webdav_url") or "").strip()
    if not url:
        raise ValueError("启用 WebDAV 图片存储后必须填写 WebDAV URL")
    password = str(settings.get("webdav_password") or "").strip()
    if not password:
        raise ValueError("启用 WebDAV 图片存储后必须填写 WebDAV 密码")


@dataclass(frozen=True)
class LoadedSettings:
auth_key: str
Expand Down Expand Up @@ -285,6 +324,7 @@ def get(self) -> dict[str, object]:
data["ai_review"] = self.ai_review
data["global_system_prompt"] = self.global_system_prompt
data["backup"] = self.get_backup_settings()
data["image_storage"] = self.get_image_storage_settings()
data.pop("auth-key", None)
return data

Expand All @@ -296,6 +336,9 @@ def update(self, data: dict[str, object]) -> dict[str, object]:
next_data.update(dict(data or {}))
if "backup" in next_data:
next_data["backup"] = _normalize_backup_settings(next_data.get("backup"))
if "image_storage" in next_data:
next_data["image_storage"] = _normalize_image_storage_settings(next_data.get("image_storage"))
_validate_image_storage_settings(next_data["image_storage"])
next_data.pop("backup_state", None)
self.data = next_data
self._save()
Expand All @@ -304,6 +347,9 @@ def update(self, data: dict[str, object]) -> dict[str, object]:
def get_backup_settings(self) -> dict[str, object]:
    """Return the normalized backup settings derived from the raw config data."""
    raw = self.data.get("backup")
    return _normalize_backup_settings(raw)

def get_image_storage_settings(self) -> dict[str, object]:
    """Return the normalized image-storage settings derived from the raw config data."""
    raw = self.data.get("image_storage")
    return _normalize_image_storage_settings(raw)

def get_storage_backend(self) -> StorageBackend:
"""获取存储后端实例(单例)"""
if self._storage_backend is None:
Expand Down
96 changes: 46 additions & 50 deletions services/image_service.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,14 +2,14 @@

import io
import zipfile
from datetime import datetime
from pathlib import Path

from fastapi import HTTPException
from fastapi.responses import FileResponse
from fastapi.responses import FileResponse, Response
from PIL import Image, ImageOps

from services.config import config
from services.image_storage_service import image_storage_service
from services.image_tags_service import load_tags, remove_tags

THUMBNAIL_SIZE = (320, 320)
Expand Down Expand Up @@ -46,6 +46,12 @@ def _safe_image_path(relative_path: str) -> Path:
return path


def get_image_response(relative_path: str) -> FileResponse | Response:
    """Return the image at *relative_path*: the local file when present,
    otherwise the raw bytes fetched from the storage service (served as PNG)."""
    if not image_storage_service.has_local(relative_path):
        payload = image_storage_service.get_bytes(relative_path)
        return Response(content=payload, media_type="image/png")
    return FileResponse(_safe_image_path(relative_path))


def _thumbnail_path(relative_path: str) -> Path:
rel = _safe_relative_path(relative_path)
return config.image_thumbnails_dir / f"{rel}.png"
Expand All @@ -64,15 +70,19 @@ def _image_dimensions(path: Path) -> tuple[int, int] | None:


def ensure_thumbnail(relative_path: str) -> Path:
source = _safe_image_path(relative_path)
target = _thumbnail_path(relative_path)
source_mtime = source.stat().st_mtime
if target.exists() and target.stat().st_mtime >= source_mtime:
source_mtime = 0.0
source: Path | None = None
if image_storage_service.has_local(relative_path):
source = _safe_image_path(relative_path)
source_mtime = source.stat().st_mtime
if target.exists() and (not source_mtime or target.stat().st_mtime >= source_mtime):
return target

target.parent.mkdir(parents=True, exist_ok=True)
try:
with Image.open(source) as image:
image_source = source if source is not None else io.BytesIO(image_storage_service.get_bytes(relative_path))
with Image.open(image_source) as image:
image = ImageOps.exif_transpose(image)
if image.mode not in {"RGB", "RGBA"}:
image = image.convert("RGBA" if "A" in image.getbands() else "RGB")
Expand All @@ -90,64 +100,42 @@ def get_thumbnail_response(relative_path: str) -> FileResponse:


def get_image_download_response(relative_path: str) -> FileResponse | Response:
    """Build a download (attachment) response for the image at *relative_path*.

    Serves the local file when present; otherwise fetches the bytes from the
    storage service and attaches them with a Content-Disposition filename.
    The return annotation covers both branches (the fallback returns Response,
    not FileResponse); the scraped text also kept the stale pre-diff body,
    which is dropped here.
    """
    if image_storage_service.has_local(relative_path):
        path = _safe_image_path(relative_path)
        return FileResponse(path, filename=path.name)
    rel = _safe_relative_path(relative_path)
    return Response(
        content=image_storage_service.get_bytes(rel),
        media_type="image/png",
        headers={"Content-Disposition": f'attachment; filename="{Path(rel).name}"'},
    )


def cleanup_image_thumbnails() -> int:
    """Delete thumbnail files whose source image no longer exists.

    A thumbnail is kept only if it is a ``<rel>.png`` file and the storage
    service still knows the image at ``<rel>``. Returns the number of files
    removed. Empty directories left behind are pruned afterwards.
    """
    thumbnails_root = config.image_thumbnails_dir
    removed = 0
    for path in thumbnails_root.rglob("*"):
        if not path.is_file():
            continue
        rel = path.relative_to(thumbnails_root).as_posix()
        # rel[:-4] strips the ".png" suffix to recover the source image path.
        # Existence is checked via the storage service (covers local + remote);
        # the old local-only check and its now-unused images_root local are gone.
        if not rel.endswith(".png") or not image_storage_service.exists(rel[:-4]):
            path.unlink()
            removed += 1
    _cleanup_empty_dirs(thumbnails_root)
    return removed


def _image_items(start_date: str = "", end_date: str = "") -> list[dict[str, object]]:
    """Scan config.images_dir and return one metadata dict per image file.

    Files are expected under a YYYY/MM/DD/... layout; when a relative path has
    fewer than four components the file's mtime supplies the date instead.
    start_date / end_date are inclusive "YYYY-MM-DD" bounds (empty string =
    unbounded). Results are sorted newest-first by created_at.
    """
    items = []
    root = config.images_dir
    for path in root.rglob("*"):
        if not path.is_file():
            continue
        rel = path.relative_to(root).as_posix()
        parts = rel.split("/")
        # Join the first three path components as the day for dated layouts;
        # otherwise fall back to the file modification time.
        day = "-".join(parts[:3]) if len(parts) >= 4 else datetime.fromtimestamp(path.stat().st_mtime).strftime("%Y-%m-%d")
        if start_date and day < start_date:
            continue
        if end_date and day > end_date:
            continue
        # Falsy when the file cannot be read as an image — width/height omitted.
        dimensions = _image_dimensions(path)
        items.append({
            "rel": rel,
            "path": rel,
            "name": path.name,
            "date": day,
            "size": path.stat().st_size,
            "created_at": datetime.fromtimestamp(path.stat().st_mtime).strftime("%Y-%m-%d %H:%M:%S"),
            **({"width": dimensions[0], "height": dimensions[1]} if dimensions else {}),
        })
    items.sort(key=lambda item: str(item["created_at"]), reverse=True)
    return items


def list_images(base_url: str, start_date: str = "", end_date: str = "") -> dict[str, object]:
config.cleanup_old_images()
cleanup_image_thumbnails()
all_tags = load_tags()
items = [
{
**item,
"url": f"{base_url.rstrip('/')}/images/{item['path']}",
"url": str(item.get("url") or f"{base_url.rstrip('/')}/images/{item['path']}"),
"thumbnail_url": thumbnail_url(base_url, str(item["path"])),
"tags": all_tags.get(str(item["path"]), []),
}
for item in _image_items(start_date, end_date)
for item in image_storage_service.list_items(base_url, start_date, end_date)
]
groups: dict[str, list[dict[str, object]]] = {}
for item in items:
Expand All @@ -157,21 +145,23 @@ def list_images(base_url: str, start_date: str = "", end_date: str = "") -> dict

def delete_images(paths: list[str] | None = None, start_date: str = "", end_date: str = "", all_matching: bool = False) -> dict[str, int]:
    """Delete stored images plus their thumbnails and tags.

    With all_matching=True the targets come from the storage service's listing
    for the given inclusive date range; otherwise *paths* is used as-is.
    Paths that would escape the images root are skipped. Returns
    ``{"removed": n}`` where n counts images the storage service deleted.
    The scraped text kept the stale pre-diff local-unlink loop; only the
    storage-service-based version is emitted here.
    """
    root = config.images_dir.resolve()
    targets = [
        str(item["path"])
        for item in image_storage_service.list_items("", start_date=start_date, end_date=end_date)
    ] if all_matching else (paths or [])
    removed = 0
    for item in targets:
        path = (root / item).resolve()
        try:
            # Path-traversal guard: reject anything outside the images root.
            path.relative_to(root)
        except ValueError:
            continue
        if image_storage_service.delete(item):
            removed += 1
        # Remove both possible thumbnail locations, then any tags metadata.
        for thumbnail in (_thumbnail_path(item), config.image_thumbnails_dir / _safe_relative_path(item)):
            if thumbnail.is_file():
                thumbnail.unlink()
        remove_tags(item)
    _cleanup_empty_dirs(root)
    _cleanup_empty_dirs(config.image_thumbnails_dir)
    return {"removed": removed}
Expand All @@ -186,12 +176,18 @@ def download_images_zip(paths: list[str]) -> io.BytesIO:
for item in paths:
rel = _safe_relative_path(item)
path = (root / rel).resolve()
payload: bytes | None = None
try:
path.relative_to(root)
except ValueError:
continue
if not path.is_file():
continue
if path.is_file():
payload = path.read_bytes()
else:
try:
payload = image_storage_service.get_bytes(rel)
except Exception:
continue
name = path.name
if name in used_names:
stem = path.stem
Expand All @@ -201,7 +197,7 @@ def download_images_zip(paths: list[str]) -> io.BytesIO:
counter += 1
name = f"{stem}_{counter}{suffix}"
used_names.add(name)
zf.write(path, name)
zf.writestr(name, payload)
added += 1
if added == 0:
raise HTTPException(status_code=404, detail="no images found")
Expand Down
Loading