datacube: fixing caching for csv
gs-gunjan committed Feb 12, 2025
1 parent 5622f03 commit 9e98725
Showing 1 changed file with 4 additions and 42 deletions.
@@ -93,48 +93,8 @@ export class LegendDataCubeDataCubeCacheManager {
 
     const connection = await this.database.connect();
 
-    const columns: string[] = [];
     const columnNames: string[] = [];
-    result.builder.columns.forEach((col) => {
-      let colType: string;
-      switch (col.type as string) {
-        case PRIMITIVE_TYPE.BOOLEAN: {
-          colType = 'BOOLEAN';
-          break;
-        }
-        case PRIMITIVE_TYPE.INTEGER: {
-          colType = 'INTEGER';
-          break;
-        }
-        case PRIMITIVE_TYPE.NUMBER:
-        case PRIMITIVE_TYPE.DECIMAL:
-        case PRIMITIVE_TYPE.FLOAT: {
-          colType = 'FLOAT';
-          break;
-        }
-        // We don't use type DATE because DuckDB will automatically convert it to a TIMESTAMP
-        case PRIMITIVE_TYPE.STRICTDATE:
-        case PRIMITIVE_TYPE.DATETIME:
-        case PRIMITIVE_TYPE.DATE: {
-          colType = 'VARCHAR';
-          break;
-        }
-        case PRIMITIVE_TYPE.STRING: {
-          colType = 'VARCHAR';
-          break;
-        }
-        default: {
-          throw new UnsupportedOperationError(
-            `Can't initialize cache: failed to find matching DuckDB type for Pure type '${col.type}'`,
-          );
-        }
-      }
-      columns.push(`"${col.name}" ${colType}`);
-      columnNames.push(col.name);
-    });
-
-    const CREATE_TABLE_SQL = `CREATE TABLE ${schema}.${table} (${columns.join(',')})`;
-    await connection.query(CREATE_TABLE_SQL);
+    result.builder.columns.forEach((col) => columnNames.push(col.name));
 
     const data = result.result.rows.map((row) => row.values);
 
@@ -148,7 +108,9 @@ export class LegendDataCubeDataCubeCacheManager {
     await connection.insertCSVFromPath(csvFileName, {
       schema: schema,
       name: table,
-      create: false,
+      create: true,
+      header: true,
+      detect: true,
       escape: `'`,
       quote: `'`,
       delimiter: ',',
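The net effect of the change: instead of pre-creating the table from a hand-rolled Pure-to-DuckDB type mapping, the import now lets DuckDB create the table itself and sniff column types from the data (`create: true`, `detect: true`, with `header: true` since the serialized CSV carries a header row). Below is a minimal standalone sketch of the same pattern against the `@duckdb/duckdb-wasm` API; the `cache` schema, `store` table name, and sample CSV are illustrative, not taken from the commit:

```typescript
import * as duckdb from '@duckdb/duckdb-wasm';

async function cacheCsvSketch(db: duckdb.AsyncDuckDB): Promise<void> {
  // Illustrative payload: single-quoted fields to match the quote/escape
  // options below, and a header row to match `header: true`.
  const csv = `'legalName','revenue'\n'Acme',120.5\n'Globex',98.1`;

  // duckdb-wasm reads from its virtual filesystem, so register the text first.
  await db.registerFileText('cache.csv', csv);

  const connection = await db.connect();
  await connection.query('CREATE SCHEMA IF NOT EXISTS cache');

  // With `create: true` and `detect: true`, DuckDB creates cache.store and
  // infers each column's type from the data -- no manual CREATE TABLE needed.
  await connection.insertCSVFromPath('cache.csv', {
    schema: 'cache',
    name: 'store',
    create: true,
    header: true,
    detect: true,
    escape: `'`,
    quote: `'`,
    delimiter: ',',
  });

  await connection.close();
}
```

One consequence worth noting: the removed code deliberately mapped all Pure date types to VARCHAR to avoid DuckDB's DATE-to-TIMESTAMP conversion, whereas with `detect: true` the column types are now whatever DuckDB's sniffer infers from the CSV contents.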
