Skip to content

Commit fbefedf

Browse files
authored
async api should match sync api (#179)
match sync api
1 parent cdebf3a commit fbefedf

File tree

3 files changed

+195
-44
lines changed

3 files changed

+195
-44
lines changed

dune_client/client_async.py

Lines changed: 165 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -24,6 +24,7 @@
2424
ContentTypeError,
2525
TCPConnector,
2626
)
27+
from deprecated import deprecated
2728

2829
from dune_client.api.base import (
2930
DUNE_CSV_NEXT_OFFSET_HEADER,
@@ -196,7 +197,10 @@ async def _request(
196197
return response
197198
return await self._handle_response(response)
198199

199-
async def execute(self, query: QueryBase, performance: str | None = None) -> ExecutionResponse:
200+
async def execute_query(
201+
self, query: QueryBase, performance: str | None = None
202+
) -> ExecutionResponse:
203+
"""Post's to Dune API for execute `query`"""
200204
params = query.request_format()
201205
params["performance"] = performance or self.performance
202206

@@ -210,14 +214,15 @@ async def execute(self, query: QueryBase, performance: str | None = None) -> Exe
210214
except KeyError as err:
211215
raise DuneError(response_json, "ExecutionResponse", err) from err
212216

213-
async def get_status(self, job_id: str) -> ExecutionStatusResponse:
217+
async def get_execution_status(self, job_id: str) -> ExecutionStatusResponse:
218+
"""GET status from Dune API for `job_id` (aka `execution_id`)"""
214219
response_json = await self._get(route=f"/execution/{job_id}/status")
215220
try:
216221
return ExecutionStatusResponse.from_dict(response_json)
217222
except KeyError as err:
218223
raise DuneError(response_json, "ExecutionStatusResponse", err) from err
219224

220-
async def get_result(
225+
async def get_execution_results(
221226
self,
222227
job_id: str,
223228
batch_size: int | None = None,
@@ -226,6 +231,7 @@ async def get_result(
226231
filters: str | None = None,
227232
sort_by: list[str] | None = None,
228233
) -> ResultsResponse:
234+
"""GET results from Dune API for `job_id` (aka `execution_id`)"""
229235
self._validate_sampling(sample_count, batch_size, filters)
230236

231237
if sample_count is None and batch_size is None:
@@ -243,7 +249,7 @@ async def get_result(
243249
self._get_result_by_url,
244250
)
245251

246-
async def get_result_csv(
252+
async def get_execution_results_csv(
247253
self,
248254
job_id: str,
249255
batch_size: int | None = None,
@@ -257,7 +263,7 @@ async def get_result_csv(
257263
258264
this API only returns the raw data in CSV format, it is faster & lightweight
259265
use this method for large results where you want lower CPU and memory overhead
260-
if you need metadata information use get_results() or get_status()
266+
if you need metadata information use get_execution_results() or get_execution_status()
261267
"""
262268
self._validate_sampling(sample_count, batch_size, filters)
263269

@@ -320,11 +326,70 @@ async def cancel_execution(self, job_id: str) -> bool:
320326
else:
321327
return success
322328

329+
#######################
330+
# Deprecated Functions:
331+
#######################
332+
@deprecated(version="1.9.3", reason="Please use execute_query")
333+
async def execute(self, query: QueryBase, performance: str | None = None) -> ExecutionResponse:
334+
"""Post's to Dune API for execute `query`"""
335+
return await self.execute_query(query, performance)
336+
337+
@deprecated(version="1.9.3", reason="Please use get_execution_status")
338+
async def get_status(self, job_id: str) -> ExecutionStatusResponse:
339+
"""GET status from Dune API for `job_id` (aka `execution_id`)"""
340+
return await self.get_execution_status(job_id)
341+
342+
@deprecated(version="1.9.3", reason="Please use get_execution_results")
343+
async def get_result(
344+
self,
345+
job_id: str,
346+
batch_size: int | None = None,
347+
columns: list[str] | None = None,
348+
sample_count: int | None = None,
349+
filters: str | None = None,
350+
sort_by: list[str] | None = None,
351+
) -> ResultsResponse:
352+
"""GET results from Dune API for `job_id` (aka `execution_id`)"""
353+
return await self.get_execution_results(
354+
job_id,
355+
batch_size=batch_size,
356+
columns=columns,
357+
sample_count=sample_count,
358+
filters=filters,
359+
sort_by=sort_by,
360+
)
361+
362+
@deprecated(version="1.9.3", reason="Please use get_execution_results_csv")
363+
async def get_result_csv(
364+
self,
365+
job_id: str,
366+
batch_size: int | None = None,
367+
columns: list[str] | None = None,
368+
sample_count: int | None = None,
369+
filters: str | None = None,
370+
sort_by: list[str] | None = None,
371+
) -> ExecutionResultCSV:
372+
"""
373+
GET results in CSV format from Dune API for `job_id` (aka `execution_id`)
374+
375+
this API only returns the raw data in CSV format, it is faster & lighterweight
376+
use this method for large results where you want lower CPU and memory overhead
377+
if you need metadata information use get_execution_results() or get_execution_status()
378+
"""
379+
return await self.get_execution_results_csv(
380+
job_id,
381+
batch_size=batch_size,
382+
columns=columns,
383+
sample_count=sample_count,
384+
filters=filters,
385+
sort_by=sort_by,
386+
)
387+
323388
########################
324389
# Higher level functions
325390
########################
326391

327-
async def refresh(
392+
async def run_query(
328393
self,
329394
query: QueryBase,
330395
ping_frequency: int = 5,
@@ -343,7 +408,7 @@ async def refresh(
343408
self._validate_sampling(sample_count, batch_size, filters)
344409

345410
job_id = await self._refresh(query, ping_frequency=ping_frequency, performance=performance)
346-
return await self.get_result(
411+
return await self.get_execution_results(
347412
job_id,
348413
columns=columns,
349414
sample_count=sample_count,
@@ -352,7 +417,7 @@ async def refresh(
352417
batch_size=batch_size,
353418
)
354419

355-
async def refresh_csv(
420+
async def run_query_csv(
356421
self,
357422
query: QueryBase,
358423
ping_frequency: int = 5,
@@ -371,7 +436,7 @@ async def refresh_csv(
371436
self._validate_sampling(sample_count, batch_size, filters)
372437

373438
job_id = await self._refresh(query, ping_frequency=ping_frequency, performance=performance)
374-
return await self.get_result_csv(
439+
return await self.get_execution_results_csv(
375440
job_id,
376441
columns=columns,
377442
sample_count=sample_count,
@@ -380,7 +445,7 @@ async def refresh_csv(
380445
batch_size=batch_size,
381446
)
382447

383-
async def refresh_into_dataframe(
448+
async def run_query_dataframe(
384449
self,
385450
query: QueryBase,
386451
performance: str | None = None,
@@ -394,13 +459,13 @@ async def refresh_into_dataframe(
394459
Execute a Dune Query, waits till execution completes,
395460
fetches and returns the result as a Pandas DataFrame
396461
397-
This is a convenience method that uses refresh_csv underneath
462+
This is a convenience method that uses run_query_csv() + pandas.read_csv() underneath
398463
"""
399464
try:
400465
import pandas as pd # noqa: PLC0415
401466
except ImportError as exc:
402467
raise ImportError("dependency failure, pandas is required but missing") from exc
403-
results = await self.refresh_csv(
468+
results = await self.run_query_csv(
404469
query,
405470
performance=performance,
406471
columns=columns,
@@ -411,6 +476,92 @@ async def refresh_into_dataframe(
411476
)
412477
return pd.read_csv(results.data)
413478

479+
######################
480+
# Deprecated Functions
481+
######################
482+
@deprecated(version="1.9.3", reason="Please use run_query")
483+
async def refresh(
484+
self,
485+
query: QueryBase,
486+
ping_frequency: int = 5,
487+
performance: str | None = None,
488+
batch_size: int | None = None,
489+
columns: list[str] | None = None,
490+
sample_count: int | None = None,
491+
filters: str | None = None,
492+
sort_by: list[str] | None = None,
493+
) -> ResultsResponse:
494+
"""
495+
Executes a Dune `query`, waits until execution completes,
496+
fetches and returns the results.
497+
Sleeps `ping_frequency` seconds between each status request.
498+
"""
499+
return await self.run_query(
500+
query,
501+
ping_frequency=ping_frequency,
502+
performance=performance,
503+
batch_size=batch_size,
504+
columns=columns,
505+
sample_count=sample_count,
506+
filters=filters,
507+
sort_by=sort_by,
508+
)
509+
510+
@deprecated(version="1.9.3", reason="Please use run_query_csv")
511+
async def refresh_csv(
512+
self,
513+
query: QueryBase,
514+
ping_frequency: int = 5,
515+
performance: str | None = None,
516+
batch_size: int | None = None,
517+
columns: list[str] | None = None,
518+
sample_count: int | None = None,
519+
filters: str | None = None,
520+
sort_by: list[str] | None = None,
521+
) -> ExecutionResultCSV:
522+
"""
523+
Executes a Dune query, waits till execution completes,
524+
fetches the results in CSV format
525+
(use it to load the data directly in pandas.from_csv() or similar frameworks)
526+
"""
527+
return await self.run_query_csv(
528+
query,
529+
ping_frequency=ping_frequency,
530+
performance=performance,
531+
batch_size=batch_size,
532+
columns=columns,
533+
sample_count=sample_count,
534+
filters=filters,
535+
sort_by=sort_by,
536+
)
537+
538+
@deprecated(version="1.9.3", reason="Please use run_query_dataframe")
539+
async def refresh_into_dataframe(
540+
self,
541+
query: QueryBase,
542+
performance: str | None = None,
543+
batch_size: int | None = None,
544+
columns: list[str] | None = None,
545+
sample_count: int | None = None,
546+
filters: str | None = None,
547+
sort_by: list[str] | None = None,
548+
) -> Any:
549+
"""
550+
Execute a Dune Query, waits till execution completes,
551+
fetches and returns the result as a Pandas DataFrame
552+
553+
This is a convenience method that uses run_query_csv underneath
554+
"""
555+
return await self.run_query_dataframe(
556+
query,
557+
performance=performance,
558+
batch_size=batch_size,
559+
columns=columns,
560+
sample_count=sample_count,
561+
filters=filters,
562+
sort_by=sort_by,
563+
)
564+
414565
#################
415566
# Private Methods
416567
#################
@@ -522,11 +673,11 @@ async def _refresh(
522673
ping_frequency: int = 5,
523674
performance: str | None = None,
524675
) -> str:
525-
job_id = (await self.execute(query=query, performance=performance)).execution_id
676+
job_id = (await self.execute_query(query=query, performance=performance)).execution_id
526677
terminal_states = ExecutionState.terminal_states()
527678

528679
while True:
529-
status = await self.get_status(job_id)
680+
status = await self.get_execution_status(job_id)
530681
if status.state in terminal_states:
531682
if status.state == ExecutionState.FAILED:
532683
self.logger.error(status)

tests/e2e/test_async_client.py

Lines changed: 15 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -18,27 +18,27 @@ def setUp(self) -> None:
1818
async def test_disconnect(self):
1919
dune = AsyncDuneClient()
2020
await dune.connect()
21-
results = (await dune.refresh(self.query)).get_rows()
21+
results = (await dune.run_query(self.query)).get_rows()
2222
assert len(results) > 0
2323
await dune.disconnect()
2424
assert dune._session is None
2525

26-
async def test_refresh_context_manager_singleton(self):
26+
async def test_run_query_context_manager_singleton(self):
2727
dune = AsyncDuneClient()
2828
async with dune as cl:
29-
results = (await cl.refresh(self.query)).get_rows()
29+
results = (await cl.run_query(self.query)).get_rows()
3030
assert len(results) > 0
3131

32-
async def test_refresh_context_manager(self):
32+
async def test_run_query_context_manager(self):
3333
async with AsyncDuneClient() as cl:
34-
results = (await cl.refresh(self.query)).get_rows()
34+
results = (await cl.run_query(self.query)).get_rows()
3535
assert len(results) > 0
3636

37-
async def test_refresh_with_pagination(self):
37+
async def test_run_query_with_pagination(self):
3838
# Arrange
3939
async with AsyncDuneClient() as cl:
4040
# Act
41-
results = (await cl.refresh(self.multi_rows_query, batch_size=1)).get_rows()
41+
results = (await cl.run_query(self.multi_rows_query, batch_size=1)).get_rows()
4242

4343
# Assert
4444
assert results == [
@@ -49,20 +49,20 @@ async def test_refresh_with_pagination(self):
4949
{"number": 5},
5050
]
5151

52-
async def test_refresh_with_filters(self):
52+
async def test_run_query_with_filters(self):
5353
# Arrange
5454
async with AsyncDuneClient() as cl:
5555
# Act
56-
results = (await cl.refresh(self.multi_rows_query, filters="number < 3")).get_rows()
56+
results = (await cl.run_query(self.multi_rows_query, filters="number < 3")).get_rows()
5757

5858
# Assert
5959
assert results == [{"number": 1}, {"number": 2}]
6060

61-
async def test_refresh_csv_with_pagination(self):
61+
async def test_run_query_csv_with_pagination(self):
6262
# Arrange
6363
async with AsyncDuneClient() as cl:
6464
# Act
65-
result_csv = await cl.refresh_csv(self.multi_rows_query, batch_size=1)
65+
result_csv = await cl.run_query_csv(self.multi_rows_query, batch_size=1)
6666

6767
# Assert
6868
assert pd.read_csv(result_csv.data).to_dict(orient="records") == [
@@ -73,11 +73,11 @@ async def test_refresh_csv_with_pagination(self):
7373
{"number": 5},
7474
]
7575

76-
async def test_refresh_csv_with_filters(self):
76+
async def test_run_query_csv_with_filters(self):
7777
# Arrange
7878
async with AsyncDuneClient() as cl:
7979
# Act
80-
result_csv = await cl.refresh_csv(self.multi_rows_query, filters="number < 3")
80+
result_csv = await cl.run_query_csv(self.multi_rows_query, filters="number < 3")
8181

8282
# Assert
8383
assert pd.read_csv(result_csv.data).to_dict(orient="records") == [
@@ -86,9 +86,9 @@ async def test_refresh_csv_with_filters(self):
8686
]
8787

8888
@unittest.skip("Large performance tier doesn't currently work.")
89-
async def test_refresh_context_manager_performance_large(self):
89+
async def test_run_query_context_manager_performance_large(self):
9090
async with AsyncDuneClient() as cl:
91-
results = (await cl.refresh(self.query, performance="large")).get_rows()
91+
results = (await cl.run_query(self.query, performance="large")).get_rows()
9292
assert len(results) > 0
9393

9494
async def test_get_latest_result_with_query_object(self):

0 commit comments

Comments
 (0)