diff --git a/experiments/15-e2e-scenarios-v2-baselines/common_daylong.sh b/experiments/15-e2e-scenarios-v2-baselines/common_daylong.sh
index b655c890..208dfcf5 100644
--- a/experiments/15-e2e-scenarios-v2-baselines/common_daylong.sh
+++ b/experiments/15-e2e-scenarios-v2-baselines/common_daylong.sh
@@ -75,7 +75,7 @@ function start_snowset_repeating_olap_runner() {
     --baseline $ANALYTICS_ENGINE
     --run-for-s $run_for_s
     --output-dir $results_dir
-    --issue-slots 5
+    --issue-slots $analytics_issue_slots
   )
 
   >&2 echo "[Snowset Repeating Analytics] Running with $ra_clients..."
@@ -108,7 +108,7 @@ function start_snowset_txn_runner() {
     --output-dir $results_dir \
     --run-for-s $run_for_s \
     --baseline $TRANSACTION_ENGINE \
-    --issue-slots 5 \
+    --issue-slots $txn_issue_slots \
     --avg-gap-s 0.025 \
     --avg-gap-std-s 0.002 \
     &
@@ -134,7 +134,7 @@ function start_sequence_runner() {
     --avg-gap-std-s $gap_std_s
     --baseline $ANALYTICS_ENGINE
     --output-dir $results_dir
-    --issue-slots 5
+    --issue-slots $analytics_issue_slots
   )
 
   >&2 echo "[Seq Analytics] Running with $num_clients..."
diff --git a/experiments/15-e2e-scenarios-v2-baselines/daylong/run_workload.sh b/experiments/15-e2e-scenarios-v2-baselines/daylong/run_workload.sh
index 657bc7e0..cf11d125 100755
--- a/experiments/15-e2e-scenarios-v2-baselines/daylong/run_workload.sh
+++ b/experiments/15-e2e-scenarios-v2-baselines/daylong/run_workload.sh
@@ -10,6 +10,8 @@ run_for_s=$(bc <<< "scale=0; ($hours * 60 * 60) / 1.0")
 # Add 5 minutes of buffer time.
 total_time_s=$(($run_for_s + 5 * 60))
 clients_multiplier=1
+analytics_issue_slots=5
+txn_issue_slots=1
 max_num_clients=$((10 * $clients_multiplier))
 gap_dist_path="workloads/IMDB_20GB/regular_test/gap_time_dist.npy"
 query_frequency_path="workloads/IMDB_100GB/regular_test/query_frequency.npy"
@@ -40,15 +42,15 @@ log_workload_point "clients_starting"
 start_snowset_repeating_olap_runner $max_num_clients $time_scale_factor $clients_multiplier "ra" $run_for_s
 rana_pid=$runner_pid
 
-# # Transactions.
-# start_snowset_txn_runner $max_num_clients $time_scale_factor $clients_multiplier "t" $run_for_s
-# txn_pid=$runner_pid
+# Transactions.
+start_snowset_txn_runner $max_num_clients $time_scale_factor $clients_multiplier "t" $run_for_s
+txn_pid=$runner_pid
 
-# # Ad-hoc queries.
-# # 2 clients, issuing once per 8 minutes on average with a standard deviation of
-# # 2 minutes.
-# start_sequence_runner 2 $((8 * 60)) $((2 * 60)) "adhoc"
-# adhoc_pid=$runner_pid
+# Ad-hoc queries.
+# 2 clients, issuing once per 8 minutes on average with a standard deviation of
+# 2 minutes.
+start_sequence_runner 2 $((8 * 60)) $((2 * 60)) "adhoc"
+adhoc_pid=$runner_pid
 
 log_workload_point "clients_started"
diff --git a/load_baseline.py b/load_baseline.py
index 9198565f..8723c8c5 100644
--- a/load_baseline.py
+++ b/load_baseline.py
@@ -44,8 +44,8 @@ def main():
     )
     if args.metrics:
         from datetime import datetime, timedelta
-        start_time = datetime(year=2023, month=12, day=27, hour=14, minute=17)
-        end_time = datetime(year=2023, month=12, day=28, hour=2, minute=22)
+        start_time = datetime(year=2024, month=1, day=22, hour=13, minute=38, second=10)
+        end_time = datetime(year=2024, month=1, day=23, hour=1, minute=43, second=10)
         outdir = "expt_out_daylong_redshift_aurora"
         df = loader.fetch_metrics(start_time=start_time, end_time=end_time)
         # Write to csv
diff --git a/workloads/IMDB_extended/run_repeating_analytics.py b/workloads/IMDB_extended/run_repeating_analytics.py
index 4d28b56e..440d0aa6 100644
--- a/workloads/IMDB_extended/run_repeating_analytics.py
+++ b/workloads/IMDB_extended/run_repeating_analytics.py
@@ -232,7 +232,10 @@ def handle_result(result: QueryResult) -> None:
             print(f"QUERY ERROR: {ex}", file=sys.stderr, flush=True)
             if ex.is_transient():
                 verbose_logger.warning("Transient query error: %s", ex.message())
-
+            if "syntax" in ex.message().lower():
+                verbose_logger.warning(f"SYNTAX ERROR: {ex}")
+                print("SYNTAX ERROR", file=sys.stderr, flush=True)
+                exit(1)
             if bh.backoff is None:
                 bh.backoff = RandomizedExponentialBackoff(
                     max_retries=100,
diff --git a/workloads/IMDB_extended/workload_utils/baseline.py b/workloads/IMDB_extended/workload_utils/baseline.py
index d143cfdb..9263af46 100644
--- a/workloads/IMDB_extended/workload_utils/baseline.py
+++ b/workloads/IMDB_extended/workload_utils/baseline.py
@@ -58,7 +58,7 @@ def make_tidb_conn():
         allow_local_infile=True,
     )
     cur = conn.cursor()
-    cur.execute("SET sql_mode = 'ANSI';")
+    cur.execute("SET SESSION sql_mode = 'REAL_AS_FLOAT,PIPES_AS_CONCAT,ANSI_QUOTES,IGNORE_SPACE,ONLY_FULL_GROUP_BY,ANSI';")
     conn.commit()
     cur.close()
     return conn
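
Note on the last hunk: in MySQL-compatible engines, 'ANSI' is a combination sql_mode whose component modes (REAL_AS_FLOAT, PIPES_AS_CONCAT, ANSI_QUOTES, IGNORE_SPACE, and, since MySQL 5.7.5, ONLY_FULL_GROUP_BY) the new statement now lists explicitly, with SESSION scope. In particular, ANSI_QUOTES makes double-quoted strings parse as identifiers, which is one plausible source of the syntax errors the new guard in run_repeating_analytics.py aborts on. Below is a minimal standalone sketch for sanity-checking the setting on a fresh connection; the host, port, user, and password are hypothetical placeholders for a TiDB endpoint, not values taken from this repo.

# Sanity-check sketch for the sql_mode change in make_tidb_conn().
# Assumes mysql-connector-python; connection parameters are placeholders.
import mysql.connector

conn = mysql.connector.connect(
    host="127.0.0.1",  # placeholder
    port=4000,         # TiDB's default MySQL-protocol port
    user="root",       # placeholder
    password="",       # placeholder
    allow_local_infile=True,
)
cur = conn.cursor()
cur.execute(
    "SET SESSION sql_mode = 'REAL_AS_FLOAT,PIPES_AS_CONCAT,ANSI_QUOTES,"
    "IGNORE_SPACE,ONLY_FULL_GROUP_BY,ANSI';"
)
conn.commit()
cur.execute("SELECT @@SESSION.sql_mode")
print(cur.fetchone()[0])  # Should report the component modes set above.
cur.close()
conn.close()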