Merge branch 'release-2.1.0'
kurtmckee committed Aug 9, 2021
2 parents 2143579 + 51366ff commit d619744
Showing 4 changed files with 72 additions and 40 deletions.
14 changes: 12 additions & 2 deletions CHANGES.rst
@@ -10,6 +10,15 @@ Unreleased changes



+2.1.0 - 2021-08-09
+==================
+
+* Do not scan the filesystem for files to compress
+  when all compression algorithms are disabled.
+* Add `pelican-granular-signals`_ as a dependency.
+* Guarantee that files are compressed at the right time.



2.0.0 - 2021-04-13
==================
@@ -68,7 +77,8 @@ pelican_precompress can be referenced and enabled with the name



-.. Contributor links
-.. -----------------
+.. Links
+.. -----
+.. _pelican-granular-signals: https://github.com/kurtmckee/pelican-granular-signals/
.. _Ryan Castellucci: https://github.com/ryancdotorg/
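
For context, the first changelog bullet only has an effect when every compressor is disabled in the site settings. A minimal pelicanconf.py sketch is below: PRECOMPRESS_MIN_SIZE appears later in this diff, but the per-algorithm toggle names are assumptions for illustration, not settings confirmed by this commit.

    # pelicanconf.py -- a sketch, not taken from this repository.
    # PRECOMPRESS_MIN_SIZE appears in this commit's diff; the three
    # algorithm toggles below are assumed names, shown for illustration.
    PLUGINS = ['pelican.plugins.precompress']

    PRECOMPRESS_MIN_SIZE = 100   # skip files smaller than 100 bytes
    PRECOMPRESS_GZIP = False     # assumed setting name
    PRECOMPRESS_ZOPFLI = False   # assumed setting name
    PRECOMPRESS_BROTLI = False   # assumed setting name

With all three toggles off, the early-exit path added in this commit means compress_files() returns without scanning the filesystem at all.
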
8 changes: 5 additions & 3 deletions pyproject.toml
@@ -1,6 +1,6 @@
[tool.poetry]
name = "pelican_precompress"
version = "2.0.0"
version = "2.1.0"
description = "Pre-compress your Pelican site using gzip, zopfli, and brotli!"
authors = ["Kurt McKee <[email protected]>"]
license = "MIT"
@@ -16,7 +16,8 @@ classifiers = [
]

[tool.poetry.dependencies]
python = "^3.6"
python = ">=3.6.1,<4.0"
pelican-granular-signals = "^1.0.0"

[tool.poetry.dev-dependencies]
tox = "^3.23.0"
@@ -37,14 +38,15 @@ isolated_build = True
[testenv]
deps =
+    blinker
    pytest
    pytest-cov
    pyfakefs
    brotli: brotli
    zopfli: zopfli
setenv =
    COVERAGE_FILE={toxworkdir}/.coverage.{envname}
-commands = {envpython} -m pytest --color=yes --cov=pelican.plugins.precompress --cov=test_pelican_precompress --cov-report=term test_pelican_precompress.py
+commands = {envpython} -m pytest --color=yes --cov=pelican.plugins.precompress --cov=test_pelican_precompress --cov-report=term tests/
[testenv:clean]
27 changes: 24 additions & 3 deletions src/pelican/plugins/precompress/__init__.py
@@ -10,7 +10,8 @@
from typing import Dict, Iterable, Optional, Set, Union
import zlib

-import pelican
+import blinker
+import pelican.plugins.granular_signals


log = logging.getLogger(__name__)
@@ -135,6 +136,10 @@ def compress_files(instance):
        ('zopfli', '.gz', compress_with_zopfli, decompress_with_gzip),
    )

+    # Exit quickly if no algorithms are enabled.
+    if not enabled_formats:
+        return
+
    pool = multiprocessing.Pool()

    minimum_size = settings['PRECOMPRESS_MIN_SIZE']
@@ -239,5 +244,21 @@ def compress_with_zopfli(data: bytes) -> bytes:


def register():
-    # Wait until all of the files are written.
-    pelican.signals.finalized.connect(compress_files)
+    """Register the plugin to run at the correct time.
+
+    Pelican lacks a granular signal structure, and its dependency blinker
+    is unable to order the set of receivers for a specific signal (or to
+    have an order imposed on it externally).
+
+    To ensure that compression happens only after other plugins have run
+    (for example, after a minification plugin runs), pelican-precompress
+    doesn't actually register itself with the *finalized* signal.
+    Instead, it relies on the pelican-granular-signals plugin's
+    "compress" signal.
+    """
+
+    # Guarantee that the granular-signals plugin is registered.
+    pelican.plugins.granular_signals.register()
+
+    blinker.signal("compress").connect(compress_files)
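
The rewritten register() above relies on blinker's named-signal registry: blinker.signal(name) returns the same signal object for a given name anywhere in the process, so pelican-precompress can connect to the "compress" signal without importing anything from pelican-granular-signals. A standalone sketch of that mechanic (the receiver name and sender value here are hypothetical):

    import blinker

    def report(sender):
        # blinker calls each connected receiver with the signal's sender.
        print('compress fired by', sender)

    # Both lookups return the same NamedSignal instance.
    assert blinker.signal('compress') is blinker.signal('compress')

    blinker.signal('compress').connect(report)
    blinker.signal('compress').send('a-pelican-instance')  # prints once

Note that blinker still cannot order multiple receivers on one signal; the ordering guarantee comes from pelican-granular-signals firing "compress" at the right point in Pelican's lifecycle, as the docstring explains.
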
63 changes: 31 additions & 32 deletions test_pelican_precompress.py → tests/test_pelican_precompress.py
@@ -12,6 +12,21 @@
import pelican.plugins.precompress as pp


+@pytest.fixture
+def multiprocessing():
+    def apply_async_mock(fn, args, *extra_args, **kwargs):
+        """Act as a pass-through for multiprocessing.Pool.apply_async() calls."""
+
+        return fn(*args, *extra_args, **kwargs)
+
+    multiprocessing_mock = Mock()
+    multiprocessing_mock.Pool.return_value = multiprocessing_mock
+    multiprocessing_mock.apply_async = apply_async_mock
+
+    with patch('pelican.plugins.precompress.multiprocessing', multiprocessing_mock):
+        yield multiprocessing_mock

@pytest.mark.parametrize(
'installed_modules, expected_settings',
(
@@ -135,9 +150,12 @@ def test_compress_with_gzip_exception():


def test_register():
-    with patch('pelican.plugins.precompress.pelican', Mock()) as pelican:
-        pp.register()
-        pelican.signals.finalized.connect.assert_called_once_with(pp.compress_files)
+    with patch('pelican.plugins.granular_signals.register', Mock()) as granular_signals:
+        with patch('pelican.plugins.precompress.blinker', Mock()) as blinker:
+            pp.register()
+    assert granular_signals.call_count == 1
+    assert blinker.signal.call_count == 1
+    assert blinker.signal("compress").connect.call_count == 1


copyrighted_files = [
@@ -154,19 +172,7 @@ def test_copyrights(path):
assert f'2019-{time.gmtime().tm_year}' in file.read(100), f'{path.name} has an incorrect copyright date'


-def apply_async_mock(fn, args, *extra_args, **kwargs):
-    """Act as a pass-through for multiprocessing.Pool.apply_async() calls."""
-
-    return fn(*args, *extra_args, **kwargs)
-
-
-multiprocessing_mock = Mock()
-multiprocessing_mock.Pool.return_value = multiprocessing_mock
-multiprocessing_mock.apply_async = apply_async_mock
-
-
-@patch('pelican.plugins.precompress.multiprocessing', multiprocessing_mock)
-def test_compress_files_do_nothing(fs):
+def test_compress_files_do_nothing(fs, multiprocessing):
"""If all compressors are disabled, no compressed files should be written."""
fs.create_file('/test.txt')
instance = Mock()
@@ -179,10 +185,10 @@ def test_compress_files_do_nothing(fs):
pp.compress_files(instance)
assert not pathlib.Path('/test.txt.br').exists()
assert not pathlib.Path('/test.txt.gz').exists()
+    assert multiprocessing.Pool.call_count == 0


-@patch('pelican.plugins.precompress.multiprocessing', multiprocessing_mock)
-def test_compress_files_never_overwrite(fs):
+def test_compress_files_never_overwrite(fs, multiprocessing):
with open('/test.txt', 'wb') as file:
file.write(b'a' * 100)
fs.create_file('/test.txt.gz')
@@ -200,8 +206,7 @@ def test_compress_files_never_overwrite(fs):
assert pathlib.Path('/test.txt.gz').stat().st_size == 0


-@patch('pelican.plugins.precompress.multiprocessing', multiprocessing_mock)
-def test_compress_files_skip_existing_matching_files(fs):
+def test_compress_files_skip_existing_matching_files(fs, multiprocessing):
with open('/test.txt', 'wb') as file:
file.write(b'abc' * 1000)
destination = pathlib.Path('/test.txt.gz')
@@ -224,8 +229,7 @@ def test_compress_files_skip_existing_matching_files(fs):
assert destination.stat().st_size == destination_size


-@patch('pelican.plugins.precompress.multiprocessing', multiprocessing_mock)
-def test_compress_files_overwrite_br(fs):
+def test_compress_files_overwrite_br(fs, multiprocessing):
brotli = pytest.importorskip('brotli')
with open('/test.txt', 'wb') as file:
file.write(b'a' * 100)
@@ -246,8 +250,7 @@ def test_compress_files_overwrite_br(fs):
assert brotli.decompress(file.read()) == b'a' * 100


-@patch('pelican.plugins.precompress.multiprocessing', multiprocessing_mock)
-def test_compress_files_overwrite_gz(fs):
+def test_compress_files_overwrite_gz(fs, multiprocessing):
with open('/test.txt', 'wb') as file:
file.write(b'a' * 100)
with open('/test.txt.gz', 'wb') as file:
@@ -267,8 +270,7 @@ def test_compress_files_overwrite_gz(fs):
assert gzip.decompress(file.read()) == b'a' * 100


-@patch('pelican.plugins.precompress.multiprocessing', multiprocessing_mock)
-def test_compress_files_file_size_increase(fs):
+def test_compress_files_file_size_increase(fs, multiprocessing):
with open('/test.txt', 'wb') as file:
file.write(b'a' * 2)
instance = Mock()
@@ -285,8 +287,7 @@ def test_compress_files_file_size_increase(fs):
assert not pathlib.Path('/test.txt.gz').exists()


-@patch('pelican.plugins.precompress.multiprocessing', multiprocessing_mock)
-def test_compress_files_continue_on_small_files(fs):
+def test_compress_files_continue_on_small_files(fs, multiprocessing):
"""Verify that small files do not cause an early exit.
This was incorrect behavior was reported in issue #5.
@@ -310,8 +311,7 @@ def test_compress_files_continue_on_small_files(fs):
assert pathlib.Path('/999-must-compress.txt.gz').exists()


-@patch('pelican.plugins.precompress.multiprocessing', multiprocessing_mock)
-def test_compress_files_overwrite_erase_existing_file(fs):
+def test_compress_files_overwrite_erase_existing_file(fs, multiprocessing):
"""Ensure existing files are erased if the file size would increase."""
with open('/test.txt', 'wb') as file:
file.write(b'a' * 2)
@@ -332,8 +332,7 @@ def test_compress_files_overwrite_erase_existing_file(fs):
assert not pathlib.Path('/test.txt.gz').exists()


-@patch('pelican.plugins.precompress.multiprocessing', multiprocessing_mock)
-def test_compress_files_success_all_algorithms(fs):
+def test_compress_files_success_all_algorithms(fs, multiprocessing):
pytest.importorskip('brotli')
pytest.importorskip('zopfli')
with open('/test.txt', 'wb') as file:
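
The recurring change in this test module is visible in its first hunk: the module-level multiprocessing mock became a pytest fixture, so each test now opts in by accepting a multiprocessing argument instead of stacking @patch decorators. The wiring works because Pool() returns the mock itself, which routes pool.apply_async(...) to the synchronous pass-through. A standalone sketch of that mechanic, separate from the test suite:

    from unittest.mock import Mock

    def apply_async_mock(fn, args, *extra_args, **kwargs):
        # Run the work immediately instead of in a worker process.
        return fn(*args, *extra_args, **kwargs)

    multiprocessing_mock = Mock()
    multiprocessing_mock.Pool.return_value = multiprocessing_mock
    multiprocessing_mock.apply_async = apply_async_mock

    # Pool() hands back the mock, so apply_async() executes the function
    # synchronously and returns its value (a real Pool would return an
    # AsyncResult instead).
    pool = multiprocessing_mock.Pool()
    assert pool.apply_async(sum, ([1, 2, 3],)) == 6

This also explains the new assertion in test_compress_files_do_nothing: with the early exit in place, compress_files() should return before it ever creates a pool, so multiprocessing.Pool.call_count stays 0.
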
