Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion .github/workflows/ci.yml
Original file line number Diff line number Diff line change
Expand Up @@ -44,7 +44,7 @@ jobs:
run: uv run ruff format --check . --exclude docs/_extensions --exclude sidemantic-duckdb/extension-ci-tools --exclude sidemantic-duckdb/scripts --exclude sidemantic-duckdb/duckdb --exclude sidemantic/adapters/malloy_grammar --exclude sidemantic/adapters/holistics_grammar

- name: Run tests
run: uv run pytest -v --cov=sidemantic --cov-report=term-missing
run: uv run pytest -v

update-schema:
name: Update JSON Schema
Expand Down
2 changes: 2 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -353,3 +353,5 @@ Sidemantic is an ambitious but young semantic layer project. You could encounter
```bash
uv run pytest -v
```

This prints line coverage for `sidemantic` with missing lines in the terminal.
6 changes: 5 additions & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -170,7 +170,10 @@ pythonpath = ["."]
markers = [
"integration: marks tests as integration tests requiring external services (deselect with '-m \"not integration\"')",
]
addopts = "-m 'not integration'" # Skip integration tests by default
addopts = "-m 'not integration' --cov=sidemantic --cov-report=term-missing" # Skip integration tests by default and show coverage

[tool.coverage.run]
source = ["sidemantic"]

[tool.uv]
prerelease = "if-necessary"
Expand All @@ -191,6 +194,7 @@ dev = [
"pyarrow>=14.0.0",
"pygls>=2.0.0",
"pytest>=8.4.2",
"pytest-cov>=5.0.0",
"ruff>=0.14.0",
"uvicorn>=0.34.0",
]
9 changes: 9 additions & 0 deletions sidemantic/core/semantic_layer.py
Original file line number Diff line number Diff line change
Expand Up @@ -142,6 +142,15 @@ def __exit__(self, exc_type, exc_val, exc_tb):
if hasattr(self.adapter, "close"):
self.adapter.close()

@property
def adapter(self):
    """Database adapter accessor with legacy _adapter compatibility."""
    # Thin pass-through: external callers read `layer.adapter` while the
    # rest of the class keeps storing the adapter on `self._adapter`.
    return self._adapter

@adapter.setter
def adapter(self, value):
    # Mirror of the getter: assignments to `layer.adapter` land on the
    # legacy `_adapter` slot so older code paths keep working.
    self._adapter = value

@property
def conn(self):
"""Get raw database connection for backward compatibility."""
Expand Down
13 changes: 8 additions & 5 deletions sidemantic/server/server.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,10 @@
from sidemantic.server.connection import SemanticLayerConnection


def _sql_string_literal(value: str) -> str:
return "'" + value.replace("'", "''") + "'"


def map_type(duckdb_type: str) -> str:
"""Map DuckDB types to PostgreSQL types."""
type_lower = duckdb_type.lower()
Expand Down Expand Up @@ -72,11 +76,10 @@ def __init__(self, connection_id, executor):

for schema_name, table_name in tbls:
server._server.register_schema("sidemantic", schema_name)
# Use parameterized query to handle names with special characters (e.g., quotes)
cols_info = layer.adapter.raw_connection.execute(
cols_info = layer.adapter.execute(
"SELECT column_name, data_type, is_nullable FROM information_schema.columns "
"WHERE table_schema = ? AND table_name = ?",
[schema_name, table_name],
f"WHERE table_schema = {_sql_string_literal(schema_name)} "
f"AND table_name = {_sql_string_literal(table_name)}"
).fetchall()
columns = []
for col_name, data_type, is_nullable in cols_info:
Expand Down Expand Up @@ -115,7 +118,7 @@ def __init__(self, connection_id, executor):
# Also register the magic 'metrics' table if there are graph-level metrics
if layer.graph.metrics:
metric_columns = []
for metric in layer.graph.metrics:
for metric in layer.graph.metrics.values():
metric_columns.append({metric.name: {"type": "numeric", "nullable": True}})

# Add all dimension columns from all models
Expand Down
258 changes: 258 additions & 0 deletions tests/db/test_adbc_adapter.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,8 @@
The tests will use whichever method has SQLite available.
"""

from types import SimpleNamespace

import pytest

# Check if adbc_driver_manager is available
Expand Down Expand Up @@ -417,3 +419,259 @@ def test_adbc_url_path_based_uri():
adapter2 = ADBCAdapter.from_url(f"adbc://{driver_for_url}/:memory:")
assert adapter2.dialect == "sqlite"
adapter2.close()


class _FakeCursor:
def __init__(self, rows=None, description=None, arrow_table=None, close_error=False):
self.rows = list(rows or [])
self.description = description or [("value",)]
self.arrow_table = arrow_table
self.close_error = close_error
self.closed = False

def fetchone(self):
return self.rows[0] if self.rows else None

def fetchall(self):
return list(self.rows)

def close(self):
self.closed = True
if self.close_error:
raise RuntimeError("close failed")

def fetch_arrow_table(self):
return self.arrow_table


def test_adbc_result_fetch_helpers_close_cursor():
    """ADBCResult's fetch helpers must close the underlying cursor after use."""
    import pyarrow as pa

    from sidemantic.db.adbc import ADBCResult

    # fetchone: returns the first row and the cursor is closed afterwards.
    cursor = _FakeCursor(rows=[(1,)], description=[("x",)], arrow_table=pa.table({"x": [1]}))
    result = ADBCResult(cursor)
    assert result.description == [("x",)]
    assert result.fetchone() == (1,)
    assert cursor.closed is True

    # fetchall: drains every row, then closes.
    cursor2 = _FakeCursor(rows=[(1,), (2,)])
    result2 = ADBCResult(cursor2)
    assert result2.fetchall() == [(1,), (2,)]
    assert cursor2.closed is True

    # fetch_record_batch: cursor.close() raising must not propagate to the
    # caller (close_error=True), yet the cursor is still marked closed.
    cursor3 = _FakeCursor(arrow_table=pa.table({"x": [1, 2]}), close_error=True)
    result3 = ADBCResult(cursor3)
    batch_reader = result3.fetch_record_batch()
    assert batch_reader.read_all().to_pylist() == [{"x": 1}, {"x": 2}]
    assert cursor3.closed is True


def test_adbc_adapter_get_tables_uses_native_metadata():
    """get_tables should read schema/table names from adbc_get_objects when available."""
    from sidemantic.db.adbc import ADBCAdapter

    # Bypass __init__ so no real driver connection is opened.
    adapter = ADBCAdapter.__new__(ADBCAdapter)
    # Fake the nested metadata shape returned by the driver:
    # conn.adbc_get_objects().read_all().to_pydict() -> {"catalog_db_schemas": [[...]]}
    adapter.conn = SimpleNamespace(
        adbc_get_objects=lambda: SimpleNamespace(
            read_all=lambda: SimpleNamespace(
                to_pydict=lambda: {
                    "catalog_db_schemas": [
                        [
                            {
                                "db_schema_name": "analytics",
                                "db_schema_tables": [{"table_name": "orders"}, {"table_name": "customers"}],
                            }
                        ]
                    ]
                }
            )
        )
    )

    tables = adapter.get_tables()

    # Tables are flattened into dicts with their owning schema attached.
    assert tables == [
        {"table_name": "orders", "schema": "analytics"},
        {"table_name": "customers", "schema": "analytics"},
    ]


def test_adbc_adapter_get_tables_falls_back_to_information_schema():
    """When adbc_get_objects raises, get_tables must fall back to information_schema SQL.

    Fix: dropped the unused `monkeypatch` fixture parameter — nothing in the
    test body used it.
    """
    from sidemantic.db.adbc import ADBCAdapter

    adapter = ADBCAdapter.__new__(ADBCAdapter)  # skip __init__ / real connection
    # Native metadata hook raises -> forces the SQL fallback path.
    adapter.conn = SimpleNamespace(adbc_get_objects=lambda: (_ for _ in ()).throw(RuntimeError("no metadata")))
    captured = {}

    class FakeResult:
        def fetchall(self):
            return [("orders", "analytics"), ("customers", "public")]

    def fake_execute(sql):
        # Capture the SQL so we can assert the fallback query was issued.
        captured["sql"] = sql
        return FakeResult()

    adapter.execute = fake_execute

    tables = adapter.get_tables()

    assert "information_schema.tables" in captured["sql"]
    assert tables == [
        {"table_name": "orders", "schema": "analytics"},
        {"table_name": "customers", "schema": "public"},
    ]


def test_adbc_adapter_get_columns_uses_table_schema():
    """get_columns should prefer the driver's native adbc_get_table_schema metadata."""
    import pyarrow as pa

    from sidemantic.db.adbc import ADBCAdapter

    adapter = ADBCAdapter.__new__(ADBCAdapter)  # skip __init__ / real connection
    adapter._driver_name = "sqlite"
    # Return a plain Arrow schema; column names/types come straight from it.
    adapter.conn = SimpleNamespace(
        adbc_get_table_schema=lambda **kwargs: pa.schema([("id", pa.int64()), ("name", pa.string())])
    )

    columns = adapter.get_columns("orders")

    # Arrow type names are passed through as the reported data_type.
    assert columns == [
        {"column_name": "id", "data_type": "int64"},
        {"column_name": "name", "data_type": "string"},
    ]


def test_adbc_adapter_get_columns_uses_objects_metadata_fallback():
    """When adbc_get_table_schema fails, get_columns should fall back to adbc_get_objects."""
    from sidemantic.db.adbc import ADBCAdapter

    adapter = ADBCAdapter.__new__(ADBCAdapter)  # skip __init__ / real connection
    adapter._driver_name = "sqlite"
    adapter.conn = SimpleNamespace(
        # Primary metadata hook raises -> forces the objects-based fallback.
        adbc_get_table_schema=lambda **kwargs: (_ for _ in ()).throw(RuntimeError("no schema")),
        # Fake the nested shape: read_all().to_pydict() with per-table columns.
        adbc_get_objects=lambda **kwargs: SimpleNamespace(
            read_all=lambda: SimpleNamespace(
                to_pydict=lambda: {
                    "catalog_db_schemas": [
                        [
                            {
                                "db_schema_name": "main",
                                "db_schema_tables": [
                                    {
                                        "table_name": "orders",
                                        "table_columns": [
                                            {"column_name": "id", "xdbc_type_name": "INTEGER"},
                                            {"column_name": "name", "xdbc_type_name": "TEXT"},
                                        ],
                                    }
                                ],
                            }
                        ]
                    ]
                }
            )
        ),
    )

    columns = adapter.get_columns("orders", schema="main")

    # data_type is taken from the xdbc_type_name field of the objects metadata.
    assert columns == [
        {"column_name": "id", "data_type": "INTEGER"},
        {"column_name": "name", "data_type": "TEXT"},
    ]


def test_adbc_adapter_get_columns_falls_back_to_sql_for_snowflake():
    """Snowflake: when both native metadata hooks fail, get_columns should issue
    a case-insensitive information_schema query.

    Fix: dropped the unused `monkeypatch` fixture parameter — nothing in the
    test body used it.
    """
    from sidemantic.db.adbc import ADBCAdapter

    adapter = ADBCAdapter.__new__(ADBCAdapter)  # skip __init__ / real connection
    adapter._driver_name = "snowflake"
    # Both driver metadata hooks raise -> forces the SQL fallback.
    adapter.conn = SimpleNamespace(
        adbc_get_table_schema=lambda **kwargs: (_ for _ in ()).throw(RuntimeError("no schema")),
        adbc_get_objects=lambda **kwargs: (_ for _ in ()).throw(RuntimeError("no objects")),
    )
    captured = {}

    class FakeResult:
        def fetchall(self):
            return [("ID", "NUMBER"), ("STATUS", "VARCHAR")]

    def fake_execute(sql):
        captured["sql"] = sql
        return FakeResult()

    adapter.execute = fake_execute

    columns = adapter.get_columns("orders", schema="analytics")

    # Snowflake stores unquoted identifiers upper-cased, so the fallback query
    # must match both the upper- and lower-cased table/schema names.
    assert "table_name IN ('ORDERS', 'orders')" in captured["sql"]
    assert "table_schema IN ('ANALYTICS', 'analytics')" in captured["sql"]
    assert columns == [
        {"column_name": "ID", "data_type": "NUMBER"},
        {"column_name": "STATUS", "data_type": "VARCHAR"},
    ]


def test_adbc_adapter_dialect_strips_package_prefix():
    """dialect should normalize a full driver package name to a plain dialect."""
    from sidemantic.db.adbc import ADBCAdapter

    adapter = ADBCAdapter.__new__(ADBCAdapter)  # no real connection needed
    adapter._driver_name = "adbc_driver_postgresql"

    # "adbc_driver_postgresql" -> "postgres": the adbc_driver_ prefix is
    # stripped and the name is apparently mapped to the sqlglot-style dialect.
    assert adapter.dialect == "postgres"


def test_adbc_adapter_close_calls_connection():
    """close() should delegate to the underlying connection's close()."""
    from sidemantic.db.adbc import ADBCAdapter

    closed = {"value": False}
    adapter = ADBCAdapter.__new__(ADBCAdapter)  # skip __init__ / real connection
    # The flag flips to True only if conn.close() is actually invoked.
    adapter.conn = SimpleNamespace(close=lambda: closed.__setitem__("value", True))

    adapter.close()

    assert closed["value"] is True


def test_adbc_adapter_from_url_sqlite_defaults_to_memory(monkeypatch):
    """adbc://sqlite with no path should default the connection URI to ':memory:'.

    Fix: removed the manual save/restore of ADBCAdapter.__init__ — pytest's
    monkeypatch fixture already undoes setattr patches at teardown, so the
    try/finally re-patch was redundant.
    """
    from sidemantic.db.adbc import ADBCAdapter

    captured = {}

    def fake_init(self, driver, uri=None, **kwargs):
        # Record constructor arguments instead of opening a real connection.
        captured["driver"] = driver
        captured["uri"] = uri
        captured["kwargs"] = kwargs

    # monkeypatch restores the original __init__ automatically at teardown.
    monkeypatch.setattr(ADBCAdapter, "__init__", fake_init)
    adapter = ADBCAdapter.from_url("adbc://sqlite")

    assert isinstance(adapter, ADBCAdapter)
    assert captured["driver"] == "sqlite"
    assert captured["uri"] == ":memory:"


def test_adbc_adapter_from_url_adbc_query_params_become_db_kwargs(monkeypatch):
    """Query-string parameters on adbc:// URLs should be forwarded as db_kwargs.

    Fix: removed the manual save/restore of ADBCAdapter.__init__ — pytest's
    monkeypatch fixture already undoes setattr patches at teardown, so the
    try/finally re-patch was redundant.
    """
    from sidemantic.db.adbc import ADBCAdapter

    captured = {}

    def fake_init(self, driver, uri=None, **kwargs):
        # Record constructor arguments instead of opening a real connection.
        captured["driver"] = driver
        captured["uri"] = uri
        captured["kwargs"] = kwargs

    # monkeypatch restores the original __init__ automatically at teardown.
    monkeypatch.setattr(ADBCAdapter, "__init__", fake_init)
    adapter = ADBCAdapter.from_url("adbc://snowflake?account=myacct&warehouse=wh")

    assert isinstance(adapter, ADBCAdapter)
    assert captured["driver"] == "snowflake"
    assert captured["uri"] is None
    assert captured["kwargs"]["db_kwargs"] == {"account": "myacct", "warehouse": "wh"}
Loading
Loading