# Copyright 2019 Zuru Tech HK Limited. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""pytest configuration."""
import operator
import os
import shutil
from pathlib import Path

import pytest
import tensorflow  # pylint: disable=import-error

import ashpy
from ashpy.metrics import (
    ClassifierLoss,
    InceptionScore,
    SlicedWassersteinDistance,
    SSIM_Multiscale,
)
from tests.utils.fake_training_loop import (
    FakeAdversarialTraining,
    FakeClassifierTraining,
)

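# ``doctest_namespace`` is the fixture provided by pytest's doctest integration;
# populating it here makes these modules available by name in every doctest.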
@pytest.fixture(autouse=True)
def add_common_namespaces(doctest_namespace):
    """Add the common ashpy namespaces to all doctests."""
    doctest_namespace["tf"] = tensorflow
    doctest_namespace["trainers"] = ashpy.trainers
    doctest_namespace["models"] = ashpy.models
    doctest_namespace["metrics"] = ashpy.metrics
    doctest_namespace["layers"] = ashpy.layers
    doctest_namespace["losses"] = ashpy.losses
    doctest_namespace["callbacks"] = ashpy.callbacks

@pytest.fixture(scope="function")
def save_dir():
"""Add the save_dir parameter to tests."""
m_save_dir = Path("testlog/savedir")
# Clean before
if m_save_dir.exists():
shutil.rmtree(m_save_dir)
assert not m_save_dir.exists()
yield m_save_dir
# teardown
if m_save_dir.exists():
shutil.rmtree(m_save_dir)
assert not m_save_dir.exists()
# ------------------------------------------------------------------------------------
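# Each entry maps a parametrization id to
# [fake training loop class, loop kwargs, metrics tuple];
# the ``fake_training_fn`` fixture below consumes these values.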
TEST_MATRIX = {
    # NOTE: Always pass metrics as Tuple, Trainers produce side effects!
    "adversarial_trainer": [
        FakeAdversarialTraining,
        {
            "image_resolution": [256, 256],
            "layer_spec_input_res": (8, 8),
            "layer_spec_target_res": (8, 8),
            "channels": 3,
            "output_shape": 1,
            "measure_performance_freq": 1,
            "callbacks": [
                ashpy.callbacks.LogImageGANCallback(
                    event=ashpy.callbacks.Event.ON_BATCH_END, event_freq=1
                )
            ],
        },
        (
            SlicedWassersteinDistance(resolution=256),
            SSIM_Multiscale(),
            InceptionScore(
                # Fake inception model
                ashpy.models.gans.ConvDiscriminator(
                    layer_spec_input_res=(299, 299),
                    layer_spec_target_res=(7, 7),
                    kernel_size=(5, 5),
                    initial_filters=16,
                    filters_cap=32,
                    output_shape=10,
                )
            ),
        ),
    ],
    "classifier_trainer": [
        FakeClassifierTraining,
        {"measure_performance_freq": 1},
        (ClassifierLoss(model_selection_operator=operator.lt),),
    ],
}

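# The dictionary keys double as the pytest ids, while the values become the
# parameters of the ``fake_training_fn`` fixture.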
TRAINING_IDS = [k for k in TEST_MATRIX]
LOOPS = [TEST_MATRIX[k] for k in TEST_MATRIX]


@pytest.fixture(scope="function", params=LOOPS, ids=TRAINING_IDS)
def fake_training_fn(request):
    """Fixture used to generate fake training for the tests."""
    training_loop, loop_args, metrics = request.param
    assert len(metrics) in [1, 3]
    return lambda logdir, **kwargs: training_loop(
        logdir=logdir, metrics=metrics, **loop_args, **kwargs
    )
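
# Illustrative usage (hypothetical test, not part of this conftest); the exact
# call protocol of the fake loops is defined in tests.utils.fake_training_loop:
#
#     def test_fake_training(fake_training_fn, save_dir):
#         fake_training = fake_training_fn(logdir=str(save_dir))
#         fake_training()  # run the fake loop with its metrics and callbacks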