diff --git a/src/questdb/dataframe.pxi b/src/questdb/dataframe.pxi
index 2693250d..1741f097 100644
--- a/src/questdb/dataframe.pxi
+++ b/src/questdb/dataframe.pxi
@@ -2424,14 +2424,16 @@ cdef void_int _dataframe(
                 _ensure_has_gil(&gs)
                 raise c_err_to_py(err)
 
+            was_auto_flush = True
             _dataframe_handle_auto_flush(&af, ls_buf, &gs)
+            was_auto_flush = False
         except Exception as e:
             # It would be an internal bug for this to raise.
             if not line_sender_buffer_rewind_to_marker(ls_buf, &err):
                 raise c_err_to_py(err)
 
             if (isinstance(e, IngressError) and
-                    (e.code == IngressErrorCode.InvalidApiCall)):
+                    (e.code == IngressErrorCode.InvalidApiCall) and not was_auto_flush):
                 # TODO: This should be allowed by the database.
                 # It currently isn't so we have to raise an error.
                 raise IngressError(
diff --git a/test/test_dataframe.py b/test/test_dataframe.py
index a3104cba..7e6b909d 100644
--- a/test/test_dataframe.py
+++ b/test/test_dataframe.py
@@ -1523,6 +1523,18 @@ def test_serializing_in_chunks(self):
                 for i in range(index * 10, (index + 1) * 10))
             self.assertEqual(buf, exp.encode("utf-8"))
 
+    def test_auto_flush_error_msg(self):
+        header = ["x", "y"]
+        x = list(range(10000))
+        y = list(range(10000))
+
+        df = pd.DataFrame(zip(x, y), columns=header)
+
+        with self.assertRaisesRegex(qi.IngressError, 'Could not flush buffer.*exceeds maximum configured allowed size of 1024 bytes'):
+            with qi.Sender.from_conf("http::addr=localhost:9000;auto_flush_rows=1000;max_buf_size=1024;protocol_version=2;") as sender:
+                sender.dataframe(df, table_name='test_df', at=qi.ServerTimestamp)
+                sender.flush()
+
     def test_arrow_chunked_array(self):
         # We build a table with chunked arrow arrays as columns.
         chunks_a = [