@@ -244,8 +244,8 @@ def handle_trendseries():
    def gen(rows):
        for key, group in groupby((parse_row(row, fields_string, fields_int, fields_float) for row in rows), lambda row: (row["geo_type"], row["geo_value"], row["source"], row["signal"])):
            trends = compute_trends(key[0], key[1], key[2], key[3], shifter, ((row["time_value"], row["value"]) for row in group))
-            for t in trends:
-                yield t
+            for trend in trends:
+                yield trend.asdict()

    # execute first query
    try:
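Context for the hunk above: the trend objects are now converted to plain dicts before being yielded, so the row-streaming layer can serialize each trend without knowing its class. A minimal sketch of the pattern, assuming `compute_trends` yields dataclass-style objects exposing an `asdict()` method (the real `Trend` type is not shown in this diff):

```python
# Sketch only: Trend here is a stand-in for whatever compute_trends
# actually yields; the asdict() method mirrors the call in the diff.
from dataclasses import dataclass, asdict as to_dict
from typing import Iterator


@dataclass
class Trend:
    geo_type: str
    geo_value: str
    time_value: int
    value: float

    def asdict(self) -> dict:
        # delegate to dataclasses.asdict for a plain-dict view
        return to_dict(self)


def gen(trends: Iterator[Trend]) -> Iterator[dict]:
    # yielding dicts (not Trend objects) lets a generic JSON printer
    # emit each row directly
    for trend in trends:
        yield trend.asdict()
```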
@@ -501,7 +501,7 @@ def handle_coverage():
    similar to /signal_dashboard_coverage for a specific signal returns the coverage (number of locations for a given geo_type)
    """

-    signal = parse_source_signal_arg("signal")
+    signal = parse_source_signal_pairs()
    geo_type = request.args.get("geo_type", "county")
    if "window" in request.values:
        time_window = parse_day_range_arg("window")
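This hunk swaps the single-argument signal parser for `parse_source_signal_pairs()`, which reads source/signal pairs from the request. As a rough, hypothetical sketch of pair parsing, assuming a `source:signal1,signal2` list syntax separated by semicolons (the real helper may accept a richer grammar, e.g. wildcards):

```python
# Hypothetical parser sketch; parse_pairs and its syntax are
# illustrative, not the repo's actual parse_source_signal_pairs.
from typing import List, Tuple


def parse_pairs(raw: str) -> List[Tuple[str, List[str]]]:
    pairs = []
    for entry in raw.split(";"):
        source, _, signals = entry.partition(":")
        # no signals listed -> treat as a wildcard for that source
        pairs.append((source, signals.split(",") if signals else ["*"]))
    return pairs


# parse_pairs("fb-survey:smoothed_cli,smoothed_ili;jhu-csse:confirmed")
# -> [("fb-survey", ["smoothed_cli", "smoothed_ili"]),
#     ("jhu-csse", ["confirmed"])]
```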
@@ -531,6 +531,7 @@ def handle_coverage():
    q.where_source_signal_pairs("source", "signal", signal)
    q.where_time_pairs("time_type", "time_value", [TimePair("day", [time_window])])
    q.group_by = "c.source, c.signal, c.time_value"
+    q.set_order("source", "signal", "time_value")

    _handle_lag_issues_as_of(q, None, None, None)
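The added `set_order` gives the coverage rows a deterministic order on the grouping key. One practical reason this matters: if the rows are later grouped with `itertools.groupby` (as the first hunk above does for trends), groupby only merges adjacent rows, so unsorted input would split one (source, signal) pair into several groups. A small self-contained illustration:

```python
# itertools.groupby only groups adjacent rows, so rows must arrive
# already sorted by the grouping key (here: source, signal).
from itertools import groupby

rows = [
    {"source": "src", "signal": "sig", "time_value": 20200401, "count": 5},
    {"source": "src", "signal": "sig", "time_value": 20200402, "count": 7},
]

for key, group in groupby(rows, lambda r: (r["source"], r["signal"])):
    print(key, [r["time_value"] for r in group])
# ('src', 'sig') [20200401, 20200402]
```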