
Commit 8dc1fc2

Merge pull request #619 from cmu-delphi/release/delphi-epidata-0.1.2

Release Delphi Epidata 0.1.2

2 parents: 833fe7d + 2c15d31

22 files changed: +301 −63 lines

.bumpversion.cfg

Lines changed: 1 addition & 1 deletion

@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 0.1.1
+current_version = 0.1.2
 commit = False
 tag = False

.github/release-drafter.yml

Lines changed: 31 additions & 0 deletions

@@ -0,0 +1,31 @@
+name-template: "v$RESOLVED_VERSION"
+tag-template: "v$RESOLVED_VERSION"
+categories:
+  - title: "🚀 API Changes"
+    labels:
+      - "api change"
+  - title: "🚀 Python Client Changes"
+    labels:
+      - "python client"
+  - title: "🚀 R Client Changes"
+    labels:
+      - "r client"
+  - title: "🚀 JavaScript Client Changes"
+    labels:
+      - "js client"
+      - "javascript"
+  - title: "📕 Documentation"
+    labels:
+      - "documentation"
+  - title: "🧰 Development"
+    labels:
+      - "chore"
+      - "documentation"
+      - "dependencies"
+      - "acquisition"
+change-template: "- #$NUMBER $TITLE"
+change-title-escapes: '\<*_&`#@'
+template: |
+  $CHANGES
+
+  Thanks to $CONTRIBUTORS
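With this configuration, release-drafter groups merged pull requests into the sections above by label: a PR labeled `python client` lands under "🚀 Python Client Changes" and is rendered by the change-template as a line such as `- #619 Release Delphi Epidata 0.1.2`, with contributor credits appended by the `Thanks to $CONTRIBUTORS` footer.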

.github/workflows/create-delphi-epidata-release.yml renamed to .github/workflows/create-release.yml

Lines changed: 1 addition & 1 deletion

@@ -1,4 +1,4 @@
-name: Create Delphi Epidata Release
+name: Create Release
 
 on:
   workflow_dispatch:

.github/workflows/release-helper.yml

Lines changed: 153 additions & 0 deletions

@@ -0,0 +1,153 @@
+name: Release Helper
+
+on:
+  push:
+    branches:
+      - main
+
+jobs:
+  correct_repository:
+    runs-on: ubuntu-latest
+    steps:
+      - name: fail on fork
+        if: github.repository_owner != 'cmu-delphi'
+        run: exit 1
+
+  create_release:
+    needs: correct_repository
+    runs-on: ubuntu-latest
+    steps:
+      - name: Check out code
+        uses: actions/checkout@v2
+        with:
+          ssh-key: ${{ secrets.CMU_DELPHI_DEPLOY_MACHINE_SSH }}
+      - name: Set up Python 3.8
+        uses: actions/setup-python@v2
+        with:
+          python-version: 3.8
+      - name: Extract version
+        id: extract_version
+        run: |
+          python -m pip install bump2version
+          echo -n "::set-output name=version::"
+          bump2version --dry-run --list patch | grep ^current_version | sed -r s,"^.*=",,
+      - name: Create Release
+        id: create_release
+        uses: release-drafter/release-drafter@v5
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        with:
+          version: ${{ steps.extract_version.outputs.version }}
+          publish: true
+    outputs:
+      version: ${{ steps.extract_version.outputs.version }}
+      upload_url: ${{ steps.create_release.outputs.upload_url }}
+      tag_name: ${{ steps.create_release.outputs.tag_name }}
+
+  release_python_client:
+    needs: create_release
+    runs-on: ubuntu-latest
+    steps:
+      - name: Check out code
+        uses: actions/checkout@v2
+      - name: Set up Python 3.8
+        uses: actions/setup-python@v2
+        with:
+          python-version: 3.8
+      - name: Install build dependencies
+        run: |
+          python -m pip install --upgrade pip
+          pip install wheel twine
+      - name: Prepare package
+        run: |
+          cp src/client/*.py src/client/packaging/pypi/delphi_epidata/
+      - name: Create release
+        working-directory: src/client/packaging/pypi
+        run: |
+          python setup.py sdist bdist_wheel
+      - uses: actions/upload-artifact@v2
+        with:
+          name: delphi_epidata_py
+          path: src/client/packaging/pypi/dist/*.tar.gz
+      - name: Upload Release Asset
+        uses: AButler/[email protected]
+        with:
+          files: "src/client/packaging/pypi/dist/*.tar.gz"
+          repo-token: ${{ secrets.GITHUB_TOKEN }}
+          release-tag: ${{ needs.create_release.outputs.tag_name }}
+      - name: Publish a Python distribution to PyPI
+        uses: pypa/gh-action-pypi-publish@release/v1
+        with:
+          user: __token__
+          password: ${{ secrets.DELPHI_PYPI_PROD_TOKEN }}
+          packages_dir: src/client/packaging/pypi/dist/
+          skip_existing: true
+          # repository_url: https://test.pypi.org/legacy/
+
+  release_js_client:
+    needs: create_release
+    runs-on: ubuntu-latest
+    defaults:
+      run:
+        working-directory: src/client/packaging/npm
+    steps:
+      - name: Check out code
+        uses: actions/checkout@v2
+      - uses: actions/setup-node@v2
+        with:
+          node-version: '14.x'
+      - name: Cache Node.js modules
+        uses: actions/cache@v2
+        with:
+          path: ~/.npm # npm cache files are stored in `~/.npm` on Linux/macOS
+          key: ${{ runner.OS }}-node2-${{ hashFiles('**/package-lock.json') }}
+          restore-keys: |
+            ${{ runner.OS }}-node2-
+      - run: npm ci
+      - run: npm test
+      - run: npm pack
+      - name: Rename to a different name
+        run: for f in *.tgz; do mv "$f" "$(echo "$f" | sed s/delphi_epidata-/delphi_epidata_js-/)"; done
+      - uses: actions/upload-artifact@v2
+        with:
+          name: delphi_epidata_js
+          path: src/client/packaging/npm/*.tgz
+      - name: Upload Release Asset
+        uses: AButler/[email protected]
+        with:
+          files: "src/client/packaging/npm/*.tgz"
+          repo-token: ${{ secrets.GITHUB_TOKEN }}
+          release-tag: ${{ needs.create_release.outputs.tag_name }}
+      - name: Publish to NPM
+        uses: JS-DevTools/npm-publish@v1
+        with:
+          token: ${{ secrets.DELPHI_NPM_TOKEN }}
+          package: src/client/packaging/npm/package.json
+          access: public
+          check-version: true
+
+  sync_dev:
+    needs: correct_repository
+    runs-on: ubuntu-latest
+    steps:
+      - name: Check out code
+        uses: actions/checkout@v2
+        with:
+          ref: dev
+          ssh-key: ${{ secrets.CMU_DELPHI_DEPLOY_MACHINE_SSH }}
+      - name: Reset dev branch
+        run: |
+          git fetch origin main:main
+          git reset --hard main
+      - name: Create pull request into dev
+        uses: peter-evans/create-pull-request@v3
+        with:
+          branch: bot/sync-main-dev
+          commit-message: "chore: sync main-dev"
+          base: dev
+          title: "chore: sync main->dev"
+          labels: chore
+          reviewers: krivard
+          assignees: krivard
+          body: |
+            Syncing Main->Dev.
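Two notes on this workflow. First, the `Extract version` step leans on `bump2version --dry-run --list`, which prints the bumpversion config as `key=value` lines; the `grep`/`sed` pipeline keeps only the value of `current_version`. A minimal Python sketch of the same lookup, assuming (as in this repo) that `.bumpversion.cfg` sits in the repository root:

```python
# Minimal sketch of what the "Extract version" step computes: read
# current_version straight from .bumpversion.cfg with configparser
# instead of parsing bump2version's --list output.
import configparser

config = configparser.ConfigParser()
config.read(".bumpversion.cfg")  # assumes the file is in the working directory
print(config["bumpversion"]["current_version"])  # e.g. "0.1.2"
```

Second, `sync_dev` closes the release loop: it checks out `dev`, fast-forwards a local `main` from origin (`git fetch origin main:main`), hard-resets the checkout to `main`, and opens a `bot/sync-main-dev` pull request into `dev`, so that after a release `dev` converges with `main`.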

docs/api/covidcast-signals/hhs.md

Lines changed: 8 additions & 4 deletions

@@ -30,10 +30,14 @@ the sum of all adult and pediatric COVID-19 hospital admissions. This
 sum is used as the "ground truth" for hospitalizations by the [COVID-19 Forecast Hub](https://github.com/reichlab/covid19-forecast-hub/blob/master/data-processed/README.md#hospitalizations).
 
-| Signal | Geography | Resolution | Description |
-| --- | --- | --- | --- |
-| `confirmed_admissions_covid_1d` | state | 1 day | Sum of adult and pediatric confirmed COVID-19 hospital admissions occurring each day. <br/> **Earliest date available:** 2019-12-31 |
-| `sum_confirmed_suspected_admissions_covid_1d` | state | 1 day | Sum of adult and pediatric confirmed and suspected COVID-19 hospital admissions occurring each day. <br/> **Earliest date available:** 2019-12-31 |
+| Signal | 7-day average signal | Geography | Resolution | Description |
+| --- | --- | --- | --- | --- |
+| `confirmed_admissions_covid_1d` | `confirmed_admissions_covid_1d_7dav` | state | 1 day | Sum of adult and pediatric confirmed COVID-19 hospital admissions occurring each day. <br/> **Earliest date available:** 2019-12-31 |
+| `sum_confirmed_suspected_admissions_covid_1d` | `sum_confirmed_suspected_admissions_covid_1d_7dav` | state | 1 day | Sum of adult and pediatric confirmed and suspected COVID-19 hospital admissions occurring each day. <br/> **Earliest date available:** 2019-12-31 |
+
+The 7-day average signals are computed by Delphi by calculating
+moving averages of the preceding 7 days, so e.g. the signal for June 7 is the
+average of the underlying data for June 1 through 7, inclusive.
 
 ## Table of contents
 {: .no_toc .text-delta}
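The added paragraph pins down the averaging convention. A minimal sketch of that trailing 7-day average in plain Python (the `daily` list of per-day admission counts, oldest first, is hypothetical):

```python
def trailing_7day_average(daily):
    """Average of each day and the 6 preceding days; None until a full week exists."""
    return [
        sum(daily[i - 6 : i + 1]) / 7 if i >= 6 else None
        for i in range(len(daily))
    ]

# For June 1-7 counts, the June 7 value is their plain mean:
print(trailing_7day_average([10, 12, 8, 11, 9, 13, 7])[-1])  # 10.0
```

These signals can then be queried like any other covidcast signal, e.g. `Epidata.covidcast('hhs', 'confirmed_admissions_covid_1d_7dav', 'day', 'state', 20210607, 'pa')` with the delphi_epidata Python client (date and geography here are illustrative).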

src/acquisition/covidcast/covidcast_meta_cache_updater.py

Lines changed: 3 additions & 3 deletions

@@ -48,18 +48,18 @@ def main(args, epidata_impl=Epidata, database_impl=Database):
   if len(metadata)==0:
     args = ("no results",-2)
 
-  print('covidcast_meta result: %s (code %d)' % args)
+  logger.info('covidcast_meta result: %s (code %d)' % args)
 
   if args[-1] != 1:
-    print('unable to cache epidata')
+    logger.error('unable to cache epidata')
     return False
 
   # update the cache
   try:
     metadata_update_start_time = time.time()
     database.update_covidcast_meta_cache(metadata)
     metadata_update_interval_in_seconds = time.time() - metadata_update_start_time
-    print('successfully cached epidata')
+    logger.info('successfully cached epidata')
   finally:
     # no catch block so that an exception above will cause the program to
     # fail after the following cleanup
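Note that `main` relies on a module-level `logger` whose setup falls outside this hunk's context; presumably it comes from the same `get_structured_logger` helper imported by `csv_importer.py` below (sketched after that file's diff).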

src/acquisition/covidcast/csv_importer.py

Lines changed: 13 additions & 14 deletions

@@ -14,6 +14,7 @@
 # first party
 from delphi_utils import Nans
 from delphi.utils.epiweek import delta_epiweeks
+from delphi.epidata.acquisition.covidcast.logger import get_structured_logger
 
 class CsvImporter:
   """Finds and parses covidcast CSV files."""
@@ -84,16 +85,17 @@ def is_sane_week(value):
 
   @staticmethod
   def find_issue_specific_csv_files(scan_dir, glob=glob):
+    logger = get_structured_logger('find_issue_specific_csv_files')
     for path in sorted(glob.glob(os.path.join(scan_dir, '*'))):
       issuedir_match = CsvImporter.PATTERN_ISSUE_DIR.match(path.lower())
       if issuedir_match and os.path.isdir(path):
         issue_date_value = int(issuedir_match.group(2))
         issue_date = CsvImporter.is_sane_day(issue_date_value)
         if issue_date:
-          print(' processing csv files from issue date: "' + str(issue_date) + '", directory', path)
+          logger.info('processing csv files from issue date: "' + str(issue_date) + '", directory', path)
           yield from CsvImporter.find_csv_files(path, issue=(issue_date, epi.Week.fromdate(issue_date)), glob=glob)
         else:
-          print(' invalid issue directory day', issue_date_value)
+          logger.warning(event='invalid issue directory day', detail=issue_date_value, file=path)
 
   @staticmethod
   def find_csv_files(scan_dir, issue=(date.today(), epi.Week.fromdate(date.today())), glob=glob):
@@ -105,7 +107,7 @@ def find_csv_files(scan_dir, issue=(date.today(), epi.Week.fromdate(date.today()
     valid, details is a tuple of (source, signal, time_type, geo_type,
     time_value, issue, lag) (otherwise None).
     """
-
+    logger = get_structured_logger('find_csv_files')
     issue_day,issue_epiweek=issue
     issue_day_value=int(issue_day.strftime("%Y%m%d"))
     issue_epiweek_value=int(str(issue_epiweek))
@@ -117,14 +119,11 @@ def find_csv_files(scan_dir, issue=(date.today(), epi.Week.fromdate(date.today()
       if not path.lower().endswith('.csv'):
         # safe to ignore this file
         continue
-
-      print('file:', path)
-
       # match a daily or weekly naming pattern
       daily_match = CsvImporter.PATTERN_DAILY.match(path.lower())
       weekly_match = CsvImporter.PATTERN_WEEKLY.match(path.lower())
       if not daily_match and not weekly_match:
-        print(' invalid csv path/filename', path)
+        logger.warning(event='invalid csv path/filename', detail=path, file=path)
         yield (path, None)
         continue
@@ -135,7 +134,7 @@ def find_csv_files(scan_dir, issue=(date.today(), epi.Week.fromdate(date.today()
         match = daily_match
         time_value_day = CsvImporter.is_sane_day(time_value)
         if not time_value_day:
-          print(' invalid filename day', time_value)
+          logger.warning(event='invalid filename day', detail=time_value, file=path)
           yield (path, None)
           continue
         issue_value=issue_day_value
@@ -146,7 +145,7 @@ def find_csv_files(scan_dir, issue=(date.today(), epi.Week.fromdate(date.today()
         match = weekly_match
         time_value_week=CsvImporter.is_sane_week(time_value)
         if not time_value_week:
-          print(' invalid filename week', time_value)
+          logger.warning(event='invalid filename week', detail=time_value, file=path)
          yield (path, None)
          continue
         issue_value=issue_epiweek_value
@@ -155,15 +154,15 @@ def find_csv_files(scan_dir, issue=(date.today(), epi.Week.fromdate(date.today()
       # # extract and validate geographic resolution
       geo_type = match.group(3).lower()
       if geo_type not in CsvImporter.GEOGRAPHIC_RESOLUTIONS:
-        print(' invalid geo_type', geo_type)
+        logger.warning(event='invalid geo_type', detail=geo_type, file=path)
         yield (path, None)
         continue
 
       # extract additional values, lowercased for consistency
       source = match.group(1).lower()
       signal = match.group(4).lower()
       if len(signal) > 64:
-        print(' invalid signal name (64 char limit)',signal)
+        logger.warning(event='invalid signal name (64 char limit)', detail=signal, file=path)
         yield (path, None)
         continue
@@ -344,19 +343,19 @@ def load_csv(filepath, geo_type, pandas=pandas):
     In case of a validation error, `None` is yielded for the offending row,
     including the header.
     """
-
+    logger = get_structured_logger('load_csv')
     # don't use type inference, just get strings
     table = pandas.read_csv(filepath, dtype='str')
 
     if not CsvImporter.is_header_valid(table.columns):
-      print(' invalid header')
+      logger.warning(event='invalid header', detail=table.columns, file=filepath)
       yield None
       return
 
     for row in table.itertuples(index=False):
       row_values, error = CsvImporter.extract_and_check_row(row, geo_type)
       if error:
-        print(' invalid value for %s (%s)' % (str(row), error))
+        logger.warning(event='invalid value for row', detail=(str(row), error), file=filepath)
         yield None
         continue
       yield row_values
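The new logging calls go through `get_structured_logger`, imported from `delphi.epidata.acquisition.covidcast.logger`, a module not shown in this commit. A minimal sketch of such a helper, assuming a structlog-backed implementation (suggested by the keyword-style `event=`/`detail=`/`file=` calls above); the real module may differ:

```python
# Hedged sketch of a get_structured_logger helper consistent with the calls
# in the diff above (logger.warning(event=..., detail=..., file=...)); the
# actual delphi.epidata.acquisition.covidcast.logger module is not part of
# this diff and may be implemented differently.
import logging
import sys

import structlog

def get_structured_logger(name):
    logging.basicConfig(format="%(message)s", stream=sys.stdout, level=logging.INFO)
    structlog.configure(
        processors=[
            structlog.stdlib.add_log_level,            # record the log level
            structlog.processors.TimeStamper(fmt="iso"),  # ISO-8601 timestamps
            structlog.processors.JSONRenderer(),       # one JSON object per event
        ],
        logger_factory=structlog.stdlib.LoggerFactory(),
    )
    return structlog.get_logger(name)

# Usage mirroring the diff:
logger = get_structured_logger("find_csv_files")
logger.warning(event="invalid csv path/filename", detail="some/bad.txt", file="some/bad.txt")
```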
