feat: support for thumbor-aws result storage
fdintino committed Jan 4, 2024
1 parent 52da121 commit ec353eb
Showing 9 changed files with 521 additions and 16 deletions.
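For orientation, a minimal thumbor.conf sketch of how this feature would be switched on. The RESULT_STORAGE module path is inferred from the new file added in this commit, and the other values are illustrative assumptions rather than settings taken from the diff:

# Hypothetical configuration sketch -- not part of this commit.
ENGINE = "thumbor_video_engine.engines.video"
# Module path inferred from src/thumbor_video_engine/result_storages/aws_storage.py:
RESULT_STORAGE = "thumbor_video_engine.result_storages.aws_storage"
# thumbor-aws setting read by the new storage class; the value here is only an example:
AWS_DEFAULT_LOCATION = "https://{bucket_name}.s3.amazonaws.com"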
1 change: 1 addition & 0 deletions .github/workflows/test.yml
@@ -35,6 +35,7 @@ jobs:
env:
FFMPEG_BUILD_VER: 20210119-553eb07737
GIFSICLE_VER: 1.92
PYTEST_ADDOPTS: ${{ matrix.python-version == '2.7' && '--ignore=tests/result_storages/test_thumbor_aws_storage.py' || '' }}

container:
image: ${{ matrix.python-version == '2.7' && 'python:2.7-buster' || null }}
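The PYTEST_ADDOPTS entry added above skips the new thumbor-aws result-storage tests on the Python 2.7 job; thumbor-aws relies on async/await and is Python 3 only, which is also why the conftest changes below fall back to stub fixtures when the mock server module cannot be imported.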
2 changes: 1 addition & 1 deletion pyproject.toml
@@ -1,3 +1,3 @@
[build-system]
requires = ["setuptools"]
requires = ["setuptools==44.0.0"]
build-backend = "setuptools.build_meta"
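Pinning setuptools to 44.0.0 presumably keeps the build working on the Python 2.7 job above, since later setuptools releases dropped Python 2 support.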
8 changes: 8 additions & 0 deletions src/thumbor_video_engine/__init__.py
@@ -288,3 +288,11 @@
'the source image is an animated gif and the request accepts it (via '
'Accept: video/* header)',
'Video')


Config.define(
'FFMPEG_PRESERVE_AUDIO',
False,
'If True, thumbor-video-engine will not strip the audio track. Defaults to '
'False',
'Video')
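As a usage sketch (the option name comes from the Config.define above; the value and placement are only an example), this would go in thumbor.conf alongside the other FFMPEG_* settings:

FFMPEG_PRESERVE_AUDIO = True  # keep the audio track instead of stripping it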
108 changes: 108 additions & 0 deletions src/thumbor_video_engine/result_storages/aws_storage.py
@@ -0,0 +1,108 @@
from datetime import datetime, timezone
from hashlib import sha1
from os.path import join
from deprecated import deprecated

from thumbor.engines import BaseEngine
from thumbor.result_storages import ResultStorageResult
from thumbor.utils import logger
import thumbor_aws.result_storage
from thumbor_aws.utils import normalize_path
from .base import BaseStorage


class Storage(BaseStorage, thumbor_aws.result_storage.Storage):
def _normalize_path(self, path):
path = normalize_path("", path).lstrip("/")
segments = [path]

root_path = getattr(
self.context.config, "TC_AWS_RESULT_STORAGE_ROOT_PATH", None
)

if root_path:
segments.insert(0, root_path)
auto_component = self.get_auto_path_component()
if auto_component:
segments.append(auto_component)

should_randomize_key = getattr(
self.context.config, "TC_AWS_RANDOMIZE_KEYS", False
)
if should_randomize_key:
segments.insert(0, self._generate_digest(segments))

normalized_path = (
join(segments[0], *segments[1:]).lstrip("/")
if len(segments) > 1
else segments[0]
)
if normalized_path.endswith("/"):
normalized_path += self.context.config.TC_AWS_ROOT_IMAGE_NAME

return normalized_path

def _generate_digest(self, segments):
return sha1(".".join(segments).encode("utf-8")).hexdigest()

async def put(self, image_bytes: bytes) -> str:
file_abspath = self._normalize_path(self.context.request.url)
logger.debug("[RESULT_STORAGE] putting at %s", file_abspath)
content_type = BaseEngine.get_mimetype(image_bytes)
response = await self.upload(
file_abspath,
image_bytes,
content_type,
self.context.config.AWS_DEFAULT_LOCATION,
)
logger.info("[RESULT_STORAGE] Image uploaded successfully to %s", file_abspath)
return response

async def get(self) -> ResultStorageResult:
path = self.context.request.url
file_abspath = self._normalize_path(path)

logger.debug("[RESULT_STORAGE] getting from %s", file_abspath)

exists = await self.object_exists(file_abspath)
if not exists:
logger.debug("[RESULT_STORAGE] image not found at %s", file_abspath)
return None

status, body, last_modified = await self.get_data(
self.bucket_name, file_abspath
)

if status != 200 or self._is_expired(last_modified):
logger.debug(
"[RESULT_STORAGE] cached image has expired (status %s)", status
)
return None

logger.info(
"[RESULT_STORAGE] Image retrieved successfully at %s.",
file_abspath,
)

return ResultStorageResult(
buffer=body,
metadata={
"LastModified": last_modified.replace(tzinfo=timezone.utc),
"ContentLength": len(body),
"ContentType": BaseEngine.get_mimetype(body),
},
)

@deprecated(version="7.0.0", reason="Use result's last_modified instead")
async def last_updated( # pylint: disable=invalid-overridden-method
self,
) -> datetime:
path = self.context.request.url
file_abspath = self._normalize_path(path)
logger.debug("[RESULT_STORAGE] getting from %s", file_abspath)

response = await self.get_object_metadata(file_abspath)
return datetime.strptime(
response["ResponseMetadata"]["HTTPHeaders"]["last-modified"],
"%a, %d %b %Y %H:%M:%S %Z",
)
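To make the key layout concrete, here is a sketch of how _normalize_path composes an S3 key from the optional tc_aws-compatible settings it reads; the URL, prefix, and digest below are illustrative, not taken from the commit:

# Assume TC_AWS_RESULT_STORAGE_ROOT_PATH = "thumbnails" and TC_AWS_RANDOMIZE_KEYS = True.
# For a request URL such as /unsafe/300x200/hotdog.mp4 the key becomes roughly:
#   <sha1 of "thumbnails.unsafe/300x200/hotdog.mp4[.<auto component>]">/thumbnails/unsafe/300x200/hotdog.mp4/<auto component>
# where the auto component comes from get_auto_path_component(); a key that ends in "/"
# gets TC_AWS_ROOT_IMAGE_NAME appended.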
44 changes: 32 additions & 12 deletions tests/conftest.py
@@ -5,60 +5,80 @@
from thumbor.context import Context, ServerParameters, RequestParameters
from thumbor.importer import Importer
from thumbor.server import configure_log, get_application

try:
from shutil import which
except ImportError:
from thumbor.utils import which

try:
from tests.mock_aio_server import s3_server, s3_client, session # noqa
except: # noqa

@pytest.fixture
def s3_server():
yield "http://does.not.exist"

@pytest.fixture
def s3_client():
return None


CURR_DIR = os.path.abspath(os.path.dirname(__file__))


@pytest.fixture
def storage_path():
return os.path.join(CURR_DIR, 'data')
return os.path.join(CURR_DIR, "data")


@pytest.fixture
def ffmpeg_path():
return os.getenv('FFMPEG_PATH') or which('ffmpeg')
return os.getenv("FFMPEG_PATH") or which("ffmpeg")


@pytest.fixture
def mp4_buffer(storage_path):
with open(os.path.join(storage_path, 'hotdog.mp4'), mode='rb') as f:
with open(os.path.join(storage_path, "hotdog.mp4"), mode="rb") as f:
return f.read()


@pytest.fixture
def config(storage_path, ffmpeg_path):
Config.allow_environment_variables()
return Config(
SECURITY_KEY='changeme',
LOADER='thumbor.loaders.file_loader',
APP_CLASS='thumbor_video_engine.app.ThumborServiceApp',
SECURITY_KEY="changeme",
LOADER="thumbor.loaders.file_loader",
APP_CLASS="thumbor_video_engine.app.ThumborServiceApp",
FILTERS=[],
FILE_LOADER_ROOT_PATH=storage_path,
FFMPEG_PATH=ffmpeg_path,
FFPROBE_PATH=(os.getenv('FFPROBE_PATH') or which('ffprobe')),
STORAGE='thumbor.storages.no_storage')
FFPROBE_PATH=(os.getenv("FFPROBE_PATH") or which("ffprobe")),
STORAGE="thumbor.storages.no_storage",
)


@pytest.fixture
def context(config):
config.ENGINE = 'thumbor_video_engine.engines.video'
config.ENGINE = "thumbor_video_engine.engines.video"

importer = Importer(config)
importer.import_modules()

server = ServerParameters(
None, 'localhost', 'thumbor.conf', None, 'info', config.APP_CLASS,
gifsicle_path=which('gifsicle'))
None,
"localhost",
"thumbor.conf",
None,
"info",
config.APP_CLASS,
gifsicle_path=which("gifsicle"),
)
server.security_key = config.SECURITY_KEY

req = RequestParameters()

configure_log(config, 'DEBUG')
configure_log(config, "DEBUG")

with Context(server=server, config=config, importer=importer) as context:
context.request = req
49 changes: 49 additions & 0 deletions tests/mock_aio_server.py
@@ -0,0 +1,49 @@
import pytest
import pytest_asyncio
import aiobotocore.session
from aiobotocore.config import AioConfig

from tests.moto_server import MotoService


@pytest_asyncio.fixture
async def s3_server(monkeypatch, event_loop):
monkeypatch.setenv("TEST_SERVER_MODE", "true")
monkeypatch.setenv("AWS_SHARED_CREDENTIALS_FILE", "")
monkeypatch.setenv("AWS_ACCESS_KEY_ID", "test-key")
monkeypatch.setenv("AWS_SECRET_ACCESS_KEY", "test-secret-key")
monkeypatch.setenv("AWS_SESSION_TOKEN", "test-session-token")
async with MotoService("s3", ssl=False) as svc:
yield svc.endpoint_url


@pytest.fixture
def session(event_loop):
return aiobotocore.session.AioSession()


@pytest_asyncio.fixture
async def s3_client(
session,
s3_server,
):
# Build an aiobotocore S3 client pointed at the moto mock server, using short
# timeouts so connection problems surface quickly in the tests.
read_timeout = connect_timeout = 5
region = "us-east-1"

async with session.create_client(
"s3",
region_name=region,
config=AioConfig(
region_name=region,
signature_version="s3",
read_timeout=read_timeout,
connect_timeout=connect_timeout,
),
verify=False,
endpoint_url=s3_server,
aws_secret_access_key="xxx",
aws_access_key_id="xxx",
) as client:
yield client
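A minimal sketch of how these fixtures might be exercised; the bucket name, key, and payload are hypothetical, and it assumes pytest-asyncio's asyncio marker is available (the real coverage lives in tests/result_storages/test_thumbor_aws_storage.py referenced by the workflow change above):

import pytest


@pytest.mark.asyncio
async def test_s3_roundtrip(s3_client):
    # s3_client is the aiobotocore client bound to the moto mock server above.
    await s3_client.create_bucket(Bucket="test-bucket")
    await s3_client.put_object(Bucket="test-bucket", Key="hotdog.mp4", Body=b"\x00\x01")
    response = await s3_client.get_object(Bucket="test-bucket", Key="hotdog.mp4")
    async with response["Body"] as stream:
        # The stored bytes should round-trip unchanged through the mock S3 server.
        assert await stream.read() == b"\x00\x01"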