# tests_with_latest_deps.yaml
name: Tests
on:
  pull_request:
    types: [opened, synchronize]
  push:
    branches:
      - main
  workflow_dispatch:
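# Runs on pull requests (opened or synchronized), pushes to main, and manual
# workflow_dispatch triggers.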
jobs:
  tests:
    name: ${{ matrix.python_version }} tests ${{ matrix.libraries }}
    runs-on: ubuntu-latest
    strategy:
      fail-fast: false
      matrix:
        python_version: ["3.9", "3.10", "3.11"]
        libraries: ["core", "spark/dask - misc", "spark/dask - computational", "spark/dask - entityset_1", "spark/dask - entityset_2", "spark/dask - primitives"]
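    # "core" runs the full suite without the optional spark/dask dependencies.
    # The "spark/dask - *" values split the heavier spark/dask run into
    # parallel batches (misc, computational backend, two entityset batches,
    # and primitives), each handled by a dedicated step below.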
    steps:
      - uses: actions/setup-python@v4
        with:
          python-version: ${{ matrix.python_version }}
      - name: Checkout repository
        uses: actions/checkout@v3
        with:
          ref: ${{ github.event.pull_request.head.ref }}
          repository: ${{ github.event.pull_request.head.repo.full_name }}
      - name: Build featuretools package
        run: make package
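      # "make package" is assumed to build the sdist and unpack it into
      # unpacked_sdist/, which is where the install and test steps below run.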
      - name: Set up pip and graphviz
        run: |
          pip config --site set global.progress_bar off
          python -m pip install --upgrade pip
          sudo apt update && sudo apt install -y graphviz
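      # graphviz is the system library behind featuretools' EntitySet and
      # feature-graph visualizations, which the plotting tests exercise.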
      - if: ${{ !startsWith(matrix.libraries, 'spark/dask') }}
        name: Install featuretools with test requirements
        run: |
          python -m pip install -e unpacked_sdist/
          python -m pip install -e unpacked_sdist/[test]
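      # Spark needs a JVM. JAVA_HOME is written to $GITHUB_ENV in the next
      # step because a plain shell assignment would only last for that one
      # step; appending to $GITHUB_ENV makes the variable visible to the
      # pytest steps that follow.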
      - if: ${{ startsWith(matrix.libraries, 'spark/dask') }}
        name: Install spark pkg, featuretools with test requirements and spark/dask requirements
        run: |
          sudo apt install -y openjdk-11-jre-headless
          echo "JAVA_HOME=/usr/lib/jvm/java-11-openjdk-amd64" >> $GITHUB_ENV
          python -m pip install -e unpacked_sdist/[dask]
          python -m pip install -e unpacked_sdist/[spark]
          python -m pip install -e unpacked_sdist/[test]
      - if: ${{ matrix.python_version == '3.9' && startsWith(matrix.libraries, 'spark/dask') }}
        name: Generate coverage args
        run: echo "coverage_args=--cov=featuretools --cov-config=../pyproject.toml --cov-report=xml:../coverage.xml" >> $GITHUB_ENV
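      # Coverage is collected only in the 3.9 spark/dask cells. Every pytest
      # step below appends ${{ env.coverage_args }}; in those cells it expands to
      #   --cov=featuretools --cov-config=../pyproject.toml --cov-report=xml:../coverage.xml
      # and in every other cell the variable is unset, so the flags simply
      # drop out of the command line.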
      - if: ${{ env.coverage_args }}
        name: Erase coverage files
        run: |
          cd unpacked_sdist
          coverage erase
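      # -n auto comes from pytest-xdist and spreads the tests across all
      # available CPU cores.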
      - if: ${{ !startsWith(matrix.libraries, 'spark/dask') }}
        name: Run unit tests (no code coverage)
        run: |
          cd unpacked_sdist
          pytest featuretools/ -n auto
      - if: ${{ matrix.libraries == 'spark/dask - misc' }}
        name: Run unit tests (misc)
        run: |
          cd unpacked_sdist
          pytest featuretools/ -n auto --ignore=featuretools/tests/computational_backend --ignore=featuretools/tests/entityset_tests --ignore=featuretools/tests/primitive_tests ${{ env.coverage_args }}
      - if: ${{ matrix.libraries == 'spark/dask - computational' }}
        name: Run unit tests (computational backend)
        run: |
          cd unpacked_sdist
          pytest featuretools/tests/computational_backend/ -n auto ${{ env.coverage_args }}
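      # The entityset tests are split in two: batch 1 runs everything except
      # test_es.py and test_ww_es.py, and batch 2 runs exactly those two
      # modules (serially, without -n auto).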
      - if: ${{ matrix.libraries == 'spark/dask - entityset_1' }}
        name: Run unit tests (entityset batch 1)
        run: |
          cd unpacked_sdist
          pytest featuretools/tests/entityset_tests -n auto --ignore=featuretools/tests/entityset_tests/test_es.py --ignore=featuretools/tests/entityset_tests/test_ww_es.py ${{ env.coverage_args }}
      - if: ${{ matrix.libraries == 'spark/dask - entityset_2' }}
        name: Run unit tests (entityset batch 2)
        run: |
          cd unpacked_sdist
          pytest featuretools/tests/entityset_tests/test_es.py featuretools/tests/entityset_tests/test_ww_es.py ${{ env.coverage_args }}
      - if: ${{ matrix.libraries == 'spark/dask - primitives' }}
        name: Run unit tests (primitives)
        run: |
          cd unpacked_sdist
          pytest featuretools/tests/primitive_tests -n auto ${{ env.coverage_args }}
      - if: ${{ env.coverage_args }}
        name: Upload coverage to Codecov
        uses: codecov/codecov-action@v3
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          fail_ci_if_error: true
          files: ${{ github.workspace }}/coverage.xml
          verbose: true
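  # Windows runs provision Python through Miniconda rather than
  # actions/setup-python, presumably because conda-forge ships prebuilt
  # graphviz binaries for Windows.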
  win_unit_tests:
    name: ${{ matrix.python_version }} windows unit tests
    runs-on: windows-latest
    strategy:
      fail-fast: false
      matrix:
        python_version: ["3.9", "3.10", "3.11"]
    steps:
      - name: Download miniconda
        shell: pwsh
        run: |
          $File = "Miniconda3-latest-Windows-x86_64.exe"
          $Uri = "https://repo.anaconda.com/miniconda/$File"
          $ProgressPreference = "SilentlyContinue"
          Invoke-WebRequest -Uri $Uri -Outfile "$env:USERPROFILE/$File"
          $hashFromFile = Get-FileHash "$env:USERPROFILE/$File" -Algorithm SHA256
          $hashFromUrl = "c9b32faa9262828702334b16bcb5b53556e630d54e5127f5c36c7ba7ed43179a"
          if ($hashFromFile.Hash -ne "$hashFromUrl") {
              Throw "$File hashes do not match"
          }
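      # The pinned SHA256 guards against a corrupted or tampered download.
      # Because the URL points at the "latest" installer, this hash has to be
      # updated whenever Anaconda publishes a new release.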
      - name: Install miniconda
        shell: cmd
        run: start /wait "" %UserProfile%\Miniconda3-latest-Windows-x86_64.exe /InstallationType=JustMe /RegisterPython=0 /S /D=%UserProfile%\Miniconda3
      - name: Create python ${{ matrix.python_version }} environment
        shell: pwsh
        run: |
          . $env:USERPROFILE\Miniconda3\shell\condabin\conda-hook.ps1
          conda create -y -n featuretools python=${{ matrix.python_version }}
      - name: Checkout repository
        uses: actions/checkout@v3
        with:
          ref: ${{ github.event.pull_request.head.ref }}
          repository: ${{ github.event.pull_request.head.repo.full_name }}
      - name: Install featuretools with test requirements
        shell: pwsh
        run: |
          . $env:USERPROFILE\Miniconda3\shell\condabin\conda-hook.ps1
          conda activate featuretools
          conda config --add channels conda-forge
          conda install -q -y -c conda-forge python-graphviz graphviz
          python -m pip install --upgrade pip
          python -m pip install .[test]
      - name: Run unit tests
        run: |
          . $env:USERPROFILE\Miniconda3\shell\condabin\conda-hook.ps1
          conda activate featuretools
          pytest featuretools\ -n auto