Commit 7a27a3a

style(flynt): convert .format and % strings to f-strings
1 parent dd1b089 commit 7a27a3a

22 files changed, +121 -126 lines changed
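
The pattern is the same across all 22 files: printf-style % interpolation (and .format() calls) becomes an f-string, and %d placeholders are rewritten as {int(...)} so the integer conversion stays explicit. A minimal sketch of the before/after, using illustrative variable names that are not taken from any particular file:

    # illustrative only -- not from any file in this commit
    count, table = 42, "fluview"

    # before: printf-style formatting
    print("rows before: %d" % count)
    query = "SELECT count(1) num FROM %s" % table

    # after: equivalent f-strings; int() mirrors the old %d conversion
    print(f"rows before: {int(count)}")
    query = f"SELECT count(1) num FROM {table}"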

src/acquisition/cdcp/cdc_dropbox_receiver.py

+1 -1

@@ -101,7 +101,7 @@ def fetch_data():
  if resp.status_code != 200:
      raise Exception(["resp.status_code", resp.status_code])
  dropbox_len = meta.size
- print(" need %d bytes..." % dropbox_len)
+ print(f" need {int(dropbox_len)} bytes...")
  content_len = int(resp.headers.get("Content-Length", -1))
  if dropbox_len != content_len:
      info = ["dropbox_len", dropbox_len, "content_len", content_len]

src/acquisition/cdcp/cdc_extract.py

+3 -3

@@ -110,7 +110,7 @@ def get_total_hits(cur, epiweek, state):
  for (total,) in cur:
      pass
  if total is None:
-     raise Exception("missing data for %d-%s" % (epiweek, state))
+     raise Exception(f"missing data for {int(epiweek)}-{state}")
  return total

@@ -166,7 +166,7 @@ def extract(first_week=None, last_week=None, test_mode=False):
  cur.execute("SELECT max(`epiweek`) FROM `cdc_meta`")
  for (last_week,) in cur:
      pass
- print("extracting %d--%d" % (first_week, last_week))
+ print(f"extracting {int(first_week)}--{int(last_week)}")

  # update each epiweek
  for epiweek in flu.range_epiweeks(first_week, last_week, inclusive=True):
@@ -180,7 +180,7 @@ def extract(first_week=None, last_week=None, test_mode=False):
      store_result(cur, epiweek, state, *nums, total)
      print(f" {epiweek}-{state}: {' '.join(str(n) for n in nums)} ({total})")
  except Exception as ex:
-     print(" %d-%s: failed" % (epiweek, state), ex)
+     print(f" {int(epiweek)}-{state}: failed", ex)
      # raise ex
  sys.stdout.flush()

src/acquisition/cdcp/cdc_upload.py

+1 -1

@@ -232,7 +232,7 @@ def parse_zip(zf, level=1):
  if handler is not None:
      with zf.open(name) as temp:
          count = handler(csv.reader(io.StringIO(str(temp.read(), "utf-8"))))
-     print(prefix, " %d rows" % count)
+     print(prefix, f" {int(count)} rows")
  else:
      print(prefix, " (ignored)")

src/acquisition/ecdc/ecdc_db_update.py

+6 -6

@@ -87,7 +87,7 @@ def safe_int(i):
  def get_rows(cnx, table="ecdc_ili"):
      # Count and return the number of rows in the `ecdc_ili` table.
      select = cnx.cursor()
-     select.execute("SELECT count(1) num FROM %s" % table)
+     select.execute(f"SELECT count(1) num FROM {table}")
      for (num,) in select:
          pass
      select.close()
@@ -100,7 +100,7 @@ def update_from_file(issue, date, dir, test_mode=False):
  u, p = secrets.db.epi
  cnx = mysql.connector.connect(user=u, password=p, database="epidata")
  rows1 = get_rows(cnx, "ecdc_ili")
- print("rows before: %d" % (rows1))
+ print(f"rows before: {int(rows1)}")
  insert = cnx.cursor()

  # load the data, ignoring empty rows
@@ -115,9 +115,9 @@ def update_from_file(issue, date, dir, test_mode=False):
      row["region"] = data[4]
      row["incidence_rate"] = data[3]
      rows.append(row)
- print(" loaded %d rows" % len(rows))
+ print(f" loaded {len(rows)} rows")
  entries = [obj for obj in rows if obj]
- print(" found %d entries" % len(entries))
+ print(f" found {len(entries)} entries")

  sql = """
      INSERT INTO
@@ -149,7 +149,7 @@ def update_from_file(issue, date, dir, test_mode=False):
  else:
      cnx.commit()
  rows2 = get_rows(cnx)
- print("rows after: %d (added %d)" % (rows2, rows2 - rows1))
+ print(f"rows after: {int(rows2)} (added {int(rows2 - rows1)})")
  cnx.close()

@@ -171,7 +171,7 @@ def main():
  raise Exception("--file and --issue must both be present or absent")

  date = datetime.datetime.now().strftime("%Y-%m-%d")
- print("assuming release date is today, %s" % date)
+ print(f"assuming release date is today, {date}")

  ensure_tables_exist()
  if args.file:

src/acquisition/flusurv/flusurv.py

+3 -3

@@ -80,7 +80,7 @@ def fetch_json(path, payload, call_count=1, requests_impl=requests):

  # it's polite to self-identify this "bot"
  delphi_url = "https://delphi.cmu.edu/index.html"
- user_agent = "Mozilla/5.0 (compatible; delphibot/1.0; +%s)" % delphi_url
+ user_agent = f"Mozilla/5.0 (compatible; delphibot/1.0; +{delphi_url})"

  # the FluSurv AMF server
  flusurv_url = "https://gis.cdc.gov/GRASP/Flu3/" + path
@@ -106,7 +106,7 @@ def fetch_json(path, payload, call_count=1, requests_impl=requests):
  if resp.status_code == 500 and call_count <= 2:
      # the server often fails with this status, so wait and retry
      delay = 10 * call_count
-     print("got status %d, will retry in %d sec..." % (resp.status_code, delay))
+     print(f"got status {int(resp.status_code)}, will retry in {int(delay)} sec...")
      time.sleep(delay)
      return fetch_json(path, payload, call_count=call_count + 1)
  elif resp.status_code != 200:
@@ -180,7 +180,7 @@ def extract_from_object(data_in):
  raise Exception("no data found")

  # print the result and return flu data
- print("found data for %d weeks" % len(data_out))
+ print(f"found data for {len(data_out)} weeks")
  return data_out

src/acquisition/flusurv/flusurv_update.py

+3 -3

@@ -108,7 +108,7 @@ def update(issue, location_name, test_mode=False):
  cnx = mysql.connector.connect(host=secrets.db.host, user=u, password=p, database="epidata")
  cur = cnx.cursor()
  rows1 = get_rows(cur)
- print("rows before: %d" % rows1)
+ print(f"rows before: {int(rows1)}")

  # SQL for insert/update
  sql = """
@@ -148,7 +148,7 @@ def update(issue, location_name, test_mode=False):

  # commit and disconnect
  rows2 = get_rows(cur)
- print("rows after: %d (+%d)" % (rows2, rows2 - rows1))
+ print(f"rows after: {int(rows2)} (+{int(rows2 - rows1)})")
  cur.close()
  if test_mode:
      print("test mode: not committing database changes")
@@ -170,7 +170,7 @@ def main():

  # scrape current issue from the main page
  issue = flusurv.get_current_issue()
- print("current issue: %d" % issue)
+ print(f"current issue: {int(issue)}")

  # fetch flusurv data
  if args.location == "all":

src/acquisition/fluview/fluview.py

+7 -7

@@ -108,23 +108,23 @@ def get_tier_ids(name):
  location_ids[Key.TierType.hhs] = sorted(set(location_ids[Key.TierType.hhs]))
  num = len(location_ids[Key.TierType.hhs])
  if num != 10:
-     raise Exception("expected 10 hhs regions, found %d" % num)
+     raise Exception(f"expected 10 hhs regions, found {int(num)}")

  # add location ids for census divisions
  for row in data[Key.TierListEntry.cen]:
      location_ids[Key.TierType.cen].append(row[Key.TierIdEntry.cen])
  location_ids[Key.TierType.cen] = sorted(set(location_ids[Key.TierType.cen]))
  num = len(location_ids[Key.TierType.cen])
  if num != 9:
-     raise Exception("expected 9 census divisions, found %d" % num)
+     raise Exception(f"expected 9 census divisions, found {int(num)}")

  # add location ids for states
  for row in data[Key.TierListEntry.sta]:
      location_ids[Key.TierType.sta].append(row[Key.TierIdEntry.sta])
  location_ids[Key.TierType.sta] = sorted(set(location_ids[Key.TierType.sta]))
  num = len(location_ids[Key.TierType.sta])
  if num != 57:
-     raise Exception("expected 57 states/territories/cities, found %d" % num)
+     raise Exception(f"expected 57 states/territories/cities, found {int(num)}")

  # return a useful subset of the metadata
  # (latest epiweek, latest season, tier ids, location ids)
@@ -181,7 +181,7 @@ def save_latest(path=None):
  data = fetch_metadata(sess)
  info = get_issue_and_locations(data)
  issue = info["epiweek"]
- print("current issue: %d" % issue)
+ print(f"current issue: {int(issue)}")

  # establish timing
  dt = datetime.datetime.now().strftime("%Y%m%d_%H%M%S")
@@ -200,7 +200,7 @@
      ("cen", Key.TierType.cen),
      ("sta", Key.TierType.sta),
  ):
-     name = "ilinet_%s_%d_%s.zip" % (delphi_name, issue, dt)
+     name = f"ilinet_{delphi_name}_{int(issue)}_{dt}.zip"
      if path is None:
          filename = name
      else:
@@ -209,12 +209,12 @@
      locations = info["location_ids"][cdc_name]

      # download and show timing information
-     print("downloading %s" % delphi_name)
+     print(f"downloading {delphi_name}")
      t0 = time.time()
      size = download_data(tier_id, locations, seasons, filename)
      t1 = time.time()

-     print(" saved %s (%d bytes in %.1f seconds)" % (filename, size, t1 - t0))
+     print(f" saved {filename} ({int(size)} bytes in {t1 - t0:.1f} seconds)")
      files.append(filename)

  # return the current issue and the list of downloaded files

src/acquisition/fluview/fluview_update.py

+17 -17

@@ -297,7 +297,7 @@ def get_rows(cnx, table="fluview"):
  Looking at the fluview table by default, but may pass parameter
  to look at public health or clinical lab data instead."""
  select = cnx.cursor()
- select.execute("SELECT count(1) num FROM %s" % table)
+ select.execute(f"SELECT count(1) num FROM {table}")
  for (num,) in select:
      pass
  select.close()
@@ -313,16 +313,16 @@ def update_from_file_clinical(issue, date, filename, test_mode=False):
  u, p = secrets.db.epi
  cnx = mysql.connector.connect(user=u, password=p, database="epidata")
  rows1 = get_rows(cnx, CL_TABLE)
- print("rows before: %d" % (rows1))
+ print(f"rows before: {int(rows1)}")
  insert = cnx.cursor()

  # load the data, ignoring empty rows
- print("loading data from %s as issued on %d" % (filename, issue))
+ print(f"loading data from {filename} as issued on {int(issue)}")
  rows = load_zipped_csv(filename, CL_SHEET)
- print(" loaded %d rows" % len(rows))
+ print(f" loaded {len(rows)} rows")
  data = [get_clinical_data(row) for row in rows]
  entries = [obj for obj in data if obj]
- print(" found %d entries" % len(entries))
+ print(f" found {len(entries)} entries")

  sql = """
      INSERT INTO
@@ -365,7 +365,7 @@ def update_from_file_clinical(issue, date, filename, test_mode=False):
  else:
      cnx.commit()
  rows2 = get_rows(cnx)
- print("rows after: %d (added %d)" % (rows2, rows2 - rows1))
+ print(f"rows after: {int(rows2)} (added {int(rows2 - rows1)})")
  cnx.close()

@@ -378,16 +378,16 @@ def update_from_file_public(issue, date, filename, test_mode=False):
  u, p = secrets.db.epi
  cnx = mysql.connector.connect(user=u, password=p, database="epidata")
  rows1 = get_rows(cnx, PHL_TABLE)
- print("rows before: %d" % (rows1))
+ print(f"rows before: {int(rows1)}")
  insert = cnx.cursor()

  # load the data, ignoring empty rows
- print("loading data from %s as issued on %d" % (filename, issue))
+ print(f"loading data from {filename} as issued on {int(issue)}")
  rows = load_zipped_csv(filename, PHL_SHEET)
- print(" loaded %d rows" % len(rows))
+ print(f" loaded {len(rows)} rows")
  data = [get_public_data(row) for row in rows]
  entries = [obj for obj in data if obj]
- print(" found %d entries" % len(entries))
+ print(f" found {len(entries)} entries")

  sql = """
      INSERT INTO
@@ -434,7 +434,7 @@ def update_from_file_public(issue, date, filename, test_mode=False):
  else:
      cnx.commit()
  rows2 = get_rows(cnx)
- print("rows after: %d (added %d)" % (rows2, rows2 - rows1))
+ print(f"rows after: {int(rows2)} (added {int(rows2 - rows1)})")
  cnx.close()

@@ -447,16 +447,16 @@ def update_from_file(issue, date, filename, test_mode=False):
  u, p = secrets.db.epi
  cnx = mysql.connector.connect(user=u, password=p, database="epidata")
  rows1 = get_rows(cnx)
- print("rows before: %d" % (rows1))
+ print(f"rows before: {int(rows1)}")
  insert = cnx.cursor()

  # load the data, ignoring empty rows
- print("loading data from %s as issued on %d" % (filename, issue))
+ print(f"loading data from {filename} as issued on {int(issue)}")
  rows = load_zipped_csv(filename)
- print(" loaded %d rows" % len(rows))
+ print(f" loaded {len(rows)} rows")
  data = [get_ilinet_data(row) for row in rows]
  entries = [obj for obj in data if obj]
- print(" found %d entries" % len(entries))
+ print(f" found {len(entries)} entries")

  sql = """
      INSERT INTO
@@ -509,7 +509,7 @@ def update_from_file(issue, date, filename, test_mode=False):
  else:
      cnx.commit()
  rows2 = get_rows(cnx)
- print("rows after: %d (added %d)" % (rows2, rows2 - rows1))
+ print(f"rows after: {int(rows2)} (added {int(rows2 - rows1)})")
  cnx.close()

@@ -531,7 +531,7 @@ def main():
  raise Exception("--file and --issue must both be present or absent")

  date = datetime.datetime.now().strftime("%Y-%m-%d")
- print("assuming release date is today, %s" % date)
+ print(f"assuming release date is today, {date}")

  if args.file:
      update_from_file(args.issue, date, args.file, test_mode=args.test)

src/acquisition/fluview/impute_missing_values.py

+4 -4

@@ -270,13 +270,13 @@ def impute_missing_values(database, test_mode=False):
  # database connection
  database.connect()
  rows1 = database.count_rows()
- print("rows before: %d" % (rows1))
+ print(f"rows before: {int(rows1)}")

  # iterate over missing epiweeks
  missing_rows = database.find_missing_rows()
- print("missing data for %d epiweeks" % len(missing_rows))
+ print(f"missing data for {len(missing_rows)} epiweeks")
  for issue, epiweek in missing_rows:
-     print("i=%d e=%d" % (issue, epiweek))
+     print(f"i={int(issue)} e={int(epiweek)}")

      # get known values from table `fluview`
      known_values = database.get_known_values(issue, epiweek)
@@ -317,7 +317,7 @@ def impute_missing_values(database, test_mode=False):

  # database cleanup
  rows2 = database.count_rows()
- print("rows after: %d (added %d)" % (rows2, rows2 - rows1))
+ print(f"rows after: {int(rows2)} (added {int(rows2 - rows1)})")
  commit = not test_mode
  database.close(commit)

src/acquisition/ght/ght_update.py

+5 -7

@@ -266,7 +266,7 @@ def get_num_rows():
  ew0 = 200401 if ew0 is None else flu.add_epiweeks(ew0, -4)
  ew0 = ew0 if first is None else first
  ew1 = ew1 if last is None else last
- print("Checking epiweeks between %d and %d..." % (ew0, ew1))
+ print(f"Checking epiweeks between {int(ew0)} and {int(ew1)}...")

  # keep track of how many rows were added
  rows_before = get_num_rows()
@@ -283,7 +283,7 @@ def get_num_rows():
  total_rows = 0
  ght = GHT(API_KEY)
  for term in terms:
-     print(" [%s] using term" % term)
+     print(f" [{term}] using term")
      ll, cl = len(locations), len(countries)
      for i in range(max(ll, cl)):
          location = locations[i] if i < ll else locations[0]
@@ -303,8 +303,7 @@ def get_num_rows():
  else:
      delay = 2**attempt
      print(
-         " [%s|%s] caught exception (will retry in %ds):"
-         % (term, location, delay),
+         f" [{term}|{location}] caught exception (will retry in {int(delay)}s):",
          ex,
      )
      time.sleep(delay)
@@ -332,15 +331,14 @@ def get_num_rows():
          ew = flu.add_epiweeks(ew, 1)
      if num_missing > 0:
          print(
-             " [%s|%s] missing %d/%d value(s)"
-             % (term, location, num_missing, len(values))
+             f" [{term}|{location}] missing {int(num_missing)}/{len(values)} value(s)"
          )
  except Exception as ex:
      print(f" [{term}|{location}] caught exception (will NOT retry):", ex)

  # keep track of how many rows were added
  rows_after = get_num_rows()
- print("Inserted %d/%d row(s)" % (rows_after - rows_before, total_rows))
+ print(f"Inserted {int(rows_after - rows_before)}/{int(total_rows)} row(s)")

  # cleanup
  cur.close()
