diff --git a/packages/legend-application-data-cube/src/components/builder/LegendDataCubeCreator.tsx b/packages/legend-application-data-cube/src/components/builder/LegendDataCubeCreator.tsx
index 924a987137..d5538899ab 100644
--- a/packages/legend-application-data-cube/src/components/builder/LegendDataCubeCreator.tsx
+++ b/packages/legend-application-data-cube/src/components/builder/LegendDataCubeCreator.tsx
@@ -28,8 +28,8 @@ import { LegendQueryDataCubeSourceBuilder } from './source/LegendQueryDataCubeSo
 import { AdhocQueryDataCubeSourceBuilder } from './source/AdhocQueryDataCubeSourceBuilder.js';
 import { AdhocQueryDataCubeSourceBuilderState } from '../../stores/builder/source/AdhocQueryDataCubeSourceBuilderState.js';
 import { useLegendDataCubeBuilderStore } from './LegendDataCubeBuilderStoreProvider.js';
-import { RawFileQueryDataCubeSourceBuilderState } from '../../stores/builder/source/RawFileQueryDataCubeSourceBuilderState.js';
-import { RawFileQueryDataCubeSourceBuilder } from './source/RawFileQueryDataCubeSourceBuilder.js';
+import { CSVFileQueryDataCubeSourceBuilderState } from '../../stores/builder/source/CSVFileQueryDataCubeSourceBuilderState.js';
+import { CSVFileQueryDataCubeSourceBuilder } from './source/CSVFileQueryDataCubeSourceBuilder.js';
 
 export const LegendDataCubeCreator = observer(() => {
   const store = useLegendDataCubeBuilderStore();
@@ -63,7 +63,7 @@ export const LegendDataCubeCreator = observer(() => {
           {[
             LegendDataCubeSourceBuilderType.LEGEND_QUERY,
             LegendDataCubeSourceBuilderType.ADHOC_QUERY,
-            LegendDataCubeSourceBuilderType.RAW_FILE_QUERY,
+            LegendDataCubeSourceBuilderType.CSV_FILE_QUERY,
           ].map((type) => (
@@ … @@
           {sourceBuilder instanceof
-            RawFileQueryDataCubeSourceBuilderState && (
-            <RawFileQueryDataCubeSourceBuilder sourceBuilder={sourceBuilder} />
+            CSVFileQueryDataCubeSourceBuilderState && (
+            <CSVFileQueryDataCubeSourceBuilder sourceBuilder={sourceBuilder} />
           )}
diff --git a/packages/legend-application-data-cube/src/components/builder/source/RawFileQueryDataCubeSourceBuilder.tsx b/packages/legend-application-data-cube/src/components/builder/source/CSVFileQueryDataCubeSourceBuilder.tsx
similarity index 71%
rename from packages/legend-application-data-cube/src/components/builder/source/RawFileQueryDataCubeSourceBuilder.tsx
rename to packages/legend-application-data-cube/src/components/builder/source/CSVFileQueryDataCubeSourceBuilder.tsx
index f64ca3aba2..361c59e1c1 100644
--- a/packages/legend-application-data-cube/src/components/builder/source/RawFileQueryDataCubeSourceBuilder.tsx
+++ b/packages/legend-application-data-cube/src/components/builder/source/CSVFileQueryDataCubeSourceBuilder.tsx
@@ -15,12 +15,12 @@
  */
 
 import { observer } from 'mobx-react-lite';
-import type { RawFileQueryDataCubeSourceBuilderState } from '../../../stores/builder/source/RawFileQueryDataCubeSourceBuilderState.js';
+import type { CSVFileQueryDataCubeSourceBuilderState } from '../../../stores/builder/source/CSVFileQueryDataCubeSourceBuilderState.js';
 import Papa from 'papaparse';
-import { useState } from 'react';
+import { csvStringify } from '@finos/legend-shared';
 
-export const RawFileQueryDataCubeSourceBuilder = observer(
-  (props: { sourceBuilder: RawFileQueryDataCubeSourceBuilderState }) => {
+export const CSVFileQueryDataCubeSourceBuilder = observer(
+  (props: { sourceBuilder: CSVFileQueryDataCubeSourceBuilderState }) => {
     const { sourceBuilder } = props;
 
     const handleFileChange = (e: React.ChangeEvent<HTMLInputElement>) => {
@@ -30,7 +30,11 @@
       Papa.parse(file, {
         complete: (result) => {
           // Set the parsed data to state
-          sourceBuilder.fileData = result.data;
+          sourceBuilder.setFileData(
+            csvStringify(result.data, { escapeChar: `'`, quoteChar: `'` }),
+          );
+          sourceBuilder.setFileName(file.name);
+          sourceBuilder.setRowCount(result.data.length);
         },
         header: true,
         dynamicTyping: true,
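The new handler round-trips the upload through papaparse before anything touches DuckDB: parse with headers and dynamic typing, then re-serialize with `'` as both quote and escape character. A minimal standalone sketch of that round-trip, using `Papa.unparse` directly on the assumption that `csvStringify` from `@finos/legend-shared` is a thin wrapper over it with the same options:

import Papa from 'papaparse';

// Parse the way the builder parses the uploaded file: `header: true` yields
// one object per row keyed by column name; `dynamicTyping: true` coerces
// numbers and booleans.
const raw = 'name,age\nalice,30\nbob,42';
const parsed = Papa.parse<Record<string, unknown>>(raw, {
  header: true,
  dynamicTyping: true,
});

// Re-serialize with `'` as both quoteChar and escapeChar, mirroring the
// options handed to csvStringify above (and to DuckDB later), so quoting
// stays consistent end to end.
const csv = Papa.unparse(parsed.data, { quoteChar: `'`, escapeChar: `'` });

console.log(csv); // name,age\r\nalice,30\r\nbob,42
console.log(parsed.data.length); // 2 — the value recorded via setRowCount()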
diff --git a/packages/legend-application-data-cube/src/stores/LegendDataCubeCacheManager.ts b/packages/legend-application-data-cube/src/stores/LegendDataCubeCacheManager.ts
index c1c9ce01be..4d3813290e 100644
--- a/packages/legend-application-data-cube/src/stores/LegendDataCubeCacheManager.ts
+++ b/packages/legend-application-data-cube/src/stores/LegendDataCubeCacheManager.ts
@@ -34,11 +34,19 @@ import {
 import type { CachedDataCubeSource } from '@finos/legend-data-cube';
 import { Type } from 'apache-arrow';
 
+// TODO: rename this to `LegendEngineDuckDBEngine`
 export class LegendDataCubeDataCubeCacheManager {
   private static readonly DUCKDB_DEFAULT_SCHEMA_NAME = 'main'; // See https://duckdb.org/docs/sql/statements/use.html
-  private static readonly TABLE_NAME_PREFIX = 'cache';
-  private static readonly CSV_FILE_NAME = 'data';
-  private static tableCounter = 0;
+  private static readonly CACHE_TABLE_NAME_PREFIX = 'cache';
+  private static readonly FILE_DATA_TABLE_NAME_PREFIX = 'fileData';
+  private static readonly CACHE_FILE_NAME = 'cacheData';
+  private static readonly FILE_DATA_FILE_NAME = 'ingestData';
+  private static readonly COLUMN_NAME = 'column_name';
+  private static readonly COLUMN_TYPE = 'column_type';
+  private static readonly ESCAPE_CHAR = `'`;
+  private static readonly QUOTE_CHAR = `'`;
+  private static cacheTableCounter = 0;
+  private static fileTableCounter = 0;
 
   private _database?: duckdb.AsyncDuckDB | undefined;
 
@@ -87,9 +95,9 @@ export class LegendDataCubeDataCubeCacheManager {
   async cache(result: TDSExecutionResult) {
     const schema =
       LegendDataCubeDataCubeCacheManager.DUCKDB_DEFAULT_SCHEMA_NAME;
-    LegendDataCubeDataCubeCacheManager.tableCounter += 1;
-    const table = `${LegendDataCubeDataCubeCacheManager.TABLE_NAME_PREFIX}${LegendDataCubeDataCubeCacheManager.tableCounter}`;
-    const csvFileName = `${LegendDataCubeDataCubeCacheManager.CSV_FILE_NAME}${LegendDataCubeDataCubeCacheManager.tableCounter}.csv`;
+    LegendDataCubeDataCubeCacheManager.cacheTableCounter += 1;
+    const table = `${LegendDataCubeDataCubeCacheManager.CACHE_TABLE_NAME_PREFIX}${LegendDataCubeDataCubeCacheManager.cacheTableCounter}`;
+    const csvFileName = `${LegendDataCubeDataCubeCacheManager.CACHE_FILE_NAME}${LegendDataCubeDataCubeCacheManager.cacheTableCounter}.csv`;
 
     const connection = await this.database.connect();
 
@@ -99,8 +107,8 @@ export class LegendDataCubeDataCubeCacheManager {
     const data = result.result.rows.map((row) => row.values);
 
     const csv = csvStringify([columnNames, ...data], {
-      escapeChar: `'`,
-      quoteChar: `'`,
+      escapeChar: LegendDataCubeDataCubeCacheManager.ESCAPE_CHAR,
+      quoteChar: LegendDataCubeDataCubeCacheManager.QUOTE_CHAR,
     });
 
     await this._database?.registerFileText(csvFileName, csv);
@@ -111,19 +119,54 @@ export class LegendDataCubeDataCubeCacheManager {
       create: true,
       header: true,
       detect: true,
-      escape: `'`,
-      quote: `'`,
-      delimiter: ',',
+      escape: LegendDataCubeDataCubeCacheManager.ESCAPE_CHAR,
+      quote: LegendDataCubeDataCubeCacheManager.QUOTE_CHAR,
     });
 
     await connection.close();
 
-    return { table, schema, rowCount: result.result.rows.length };
+    return { schema, table, rowCount: result.result.rows.length };
+  }
+
+  async ingestFileData(csvString: string) {
+    const schema =
+      LegendDataCubeDataCubeCacheManager.DUCKDB_DEFAULT_SCHEMA_NAME;
+    LegendDataCubeDataCubeCacheManager.fileTableCounter += 1;
+    const table = `${LegendDataCubeDataCubeCacheManager.FILE_DATA_TABLE_NAME_PREFIX}${LegendDataCubeDataCubeCacheManager.fileTableCounter}`;
+    const csvFileName = `${LegendDataCubeDataCubeCacheManager.FILE_DATA_FILE_NAME}${LegendDataCubeDataCubeCacheManager.fileTableCounter}.csv`;
+
+    const connection = await this.database.connect();
+
+    await this._database?.registerFileText(csvFileName, csvString);
+
+    await connection.insertCSVFromPath(csvFileName, {
+      schema: schema,
+      name: table,
+      header: true,
+      detect: true,
+      escape: LegendDataCubeDataCubeCacheManager.ESCAPE_CHAR,
+      quote: LegendDataCubeDataCubeCacheManager.QUOTE_CHAR,
+    });
+
+    const dbSchemaResult = await connection.query(
+      `DESCRIBE ${schema}.${table}`,
+    );
+    const dbSchema = dbSchemaResult
+      .toArray()
+      .map((data) => [
+        data[LegendDataCubeDataCubeCacheManager.COLUMN_NAME],
+        data[LegendDataCubeDataCubeCacheManager.COLUMN_TYPE],
+      ]);
+
+    await connection.close();
+
+    return { schema, table, dbSchema };
   }
 
   async runSQLQuery(sql: string) {
     const connection = await this.database.connect();
     const result = await connection.query(sql);
     await connection.close();
     const data = result.toArray();
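For orientation, this is the duckdb-wasm flow `ingestFileData` relies on, reduced to its essentials. Here `db` is an already-instantiated `AsyncDuckDB`, and `people.csv`/`people` are hypothetical names standing in for the generated `ingestData<N>.csv`/`fileData<N>`:

import * as duckdb from '@duckdb/duckdb-wasm';

async function sketchIngest(db: duckdb.AsyncDuckDB, csvString: string) {
  // Expose the CSV text to DuckDB's virtual filesystem under a file name.
  await db.registerFileText('people.csv', csvString);

  const connection = await db.connect();
  // `detect: true` lets DuckDB infer column types while creating the table.
  await connection.insertCSVFromPath('people.csv', {
    schema: 'main',
    name: 'people',
    header: true,
    detect: true,
  });

  // DESCRIBE returns one row per column; `column_name` and `column_type`
  // are the two fields ingestFileData() forwards as `dbSchema`.
  const described = await connection.query('DESCRIBE main.people');
  const dbSchema = described
    .toArray()
    .map((row) => [row.column_name, row.column_type]);

  await connection.close();
  return dbSchema; // e.g. [['name', 'VARCHAR'], ['age', 'BIGINT']]
}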
diff --git a/packages/legend-application-data-cube/src/stores/LegendDataCubeDataCubeEngine.ts b/packages/legend-application-data-cube/src/stores/LegendDataCubeDataCubeEngine.ts
index b21b544367..4f039dd9bf 100644
--- a/packages/legend-application-data-cube/src/stores/LegendDataCubeDataCubeEngine.ts
+++ b/packages/legend-application-data-cube/src/stores/LegendDataCubeDataCubeEngine.ts
@@ -72,6 +72,9 @@ import {
   PackageableElementPointerType,
   DatabaseType,
   PRIMITIVE_TYPE,
+  V1_BigInt,
+  V1_Decimal,
+  V1_Double,
 } from '@finos/legend-graph';
 import {
   _elementPtr,
@@ -111,11 +114,17 @@ import {
   LegendQueryDataCubeSource,
   RawLegendQueryDataCubeSource,
 } from './model/LegendQueryDataCubeSource.js';
-import { deserialize, serialize } from 'serializr';
+import { deserialize, raw, serialize } from 'serializr';
 import {
   resolveVersion,
   type DepotServerClient,
 } from '@finos/legend-server-depot';
+import type { CSVFileQueryDataCubeSourceBuilderState } from './builder/source/CSVFileQueryDataCubeSourceBuilderState.js';
+import {
+  CSV_FILE_QUERY_DATA_CUBE_SOURCE_TYPE,
+  CSVFileQueryDataCubeSource,
+  RawCSVFileQueryDataCubeSource,
+} from './model/CSVFileQueryDataCubeSource.js';
 
 export class LegendDataCubeDataCubeEngine extends DataCubeEngine {
   private readonly _application: LegendDataCubeApplicationStore;
@@ -176,6 +185,40 @@ export class LegendDataCubeDataCubeEngine extends DataCubeEngine {
         }
         return source;
       }
+      case CSV_FILE_QUERY_DATA_CUBE_SOURCE_TYPE: {
+        const rawSource =
+          RawCSVFileQueryDataCubeSource.serialization.fromJson(value);
+        const source = new CSVFileQueryDataCubeSource();
+        source.fileName = rawSource.fileName;
+        source.count = rawSource.count;
+        source.db = rawSource.db;
+        source.model = rawSource.model;
+        source.runtime = rawSource.runtime;
+        source.schema = rawSource.schema;
+        source.table = rawSource.table;
+
+        const query = new V1_ClassInstance();
+        query.type = V1_ClassInstanceType.RELATION_STORE_ACCESSOR;
+        const storeAccessor = new V1_RelationStoreAccessor();
+        storeAccessor.path = [source.db, source.schema, source.table];
+        query.value = storeAccessor;
+        source.query = query;
+
+        try {
+          source.columns = (
+            await this._getLambdaRelationType(
+              this.serializeValueSpecification(_lambda([], [source.query])),
+              serialize(source.model),
+            )
+          ).columns;
+        } catch (error) {
+          assertErrorThrown(error);
+          throw new Error(
+            `Can't get query result columns. Make sure the source query returns a relation (i.e. typed TDS). Error: ${error.message}`,
+          );
+        }
+        return source;
+      }
       case LEGEND_QUERY_DATA_CUBE_SOURCE_TYPE: {
         const rawSource =
           RawLegendQueryDataCubeSource.serialization.fromJson(value);
@@ -455,6 +498,34 @@ export class LegendDataCubeDataCubeEngine extends DataCubeEngine {
         result: await this._cacheManager.runSQLQuery(sql),
         executionTime: endTime - startTime,
       };
+    } else if (source instanceof CSVFileQueryDataCubeSource) {
+      // get the execution plan and extract the generated SQL to run against the local DuckDB
+      const executionPlan = await this._generateExecutionPlan(
+        query,
+        source.model,
+        [],
+        // NOTE: for caching, we're using DuckDB, but its protocol models
+        // are not available in the latest production protocol version V1_33_0, so
+        // we have to force using VX_X_X
+        // once we either cut another protocol version or backport the DuckDB models
+        // to V1_33_0, we can remove this
+        { ...options, clientVersion: PureClientVersion.VX_X_X },
+      );
+      const sql = guaranteeNonNullable(
+        executionPlan instanceof V1_SimpleExecutionPlan
+          ? executionPlan.rootExecutionNode.executionNodes
+              .filter(filterByType(V1_SQLExecutionNode))
+              .at(-1)?.sqlQuery
+          : undefined,
+        `Can't process execution plan: failed to extract generated SQL`,
+      );
+      const endTime = performance.now();
+      return {
+        executedQuery: await queryCodePromise,
+        executedSQL: sql,
+        result: await this._cacheManager.runSQLQuery(sql),
+        executionTime: endTime - startTime,
+      };
     } else {
       throw new UnsupportedOperationError(
         `Can't execute query with unsupported source`,
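The SQL-extraction step above is the crux of the local-execution path: the engine plans the query as if the synthesized DuckDB database were a real store, and only the final SQL node is actually run, via the cache manager. Distilled into a helper for readability — the calls are the same as the inline code; only the `V1_ExecutionPlan` parameter type is an assumption:

import {
  V1_SimpleExecutionPlan,
  V1_SQLExecutionNode,
  type V1_ExecutionPlan,
} from '@finos/legend-graph';
import { filterByType, guaranteeNonNullable } from '@finos/legend-shared';

// Pull the SQL of the last SQL execution node out of a simple plan; anything
// else (composite plans, no SQL node) is treated as unprocessable.
function extractGeneratedSQL(executionPlan: V1_ExecutionPlan): string {
  return guaranteeNonNullable(
    executionPlan instanceof V1_SimpleExecutionPlan
      ? executionPlan.rootExecutionNode.executionNodes
          .filter(filterByType(V1_SQLExecutionNode))
          .at(-1)?.sqlQuery
      : undefined,
    `Can't process execution plan: failed to extract generated SQL`,
  );
}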
schemaName; + schema.tables = [table]; + const database = new V1_Database(); + database.name = 'db'; + database.package = packagePath; + database.schemas = [schema]; + + const connection = new V1_RelationalDatabaseConnection(); + connection.databaseType = DatabaseType.DuckDB; + connection.type = DatabaseType.DuckDB; + const dataSourceSpec = new V1_DuckDBDatasourceSpecification(); + dataSourceSpec.path = '/tmpUploadedFile'; + connection.store = database.path; + connection.datasourceSpecification = dataSourceSpec; + connection.authenticationStrategy = new V1_TestAuthenticationStrategy(); + + const runtime = new V1_EngineRuntime(); + const storeConnections = new V1_StoreConnections(); + storeConnections.store = new V1_PackageableElementPointer( + PackageableElementPointerType.STORE, + database.path, + ); + const identifiedConnection = new V1_IdentifiedConnection(); + identifiedConnection.connection = connection; + identifiedConnection.id = 'c0'; + storeConnections.storeConnections = [identifiedConnection]; + runtime.connections = [storeConnections]; + + const packageableRuntime = new V1_PackageableRuntime(); + packageableRuntime.runtimeValue = runtime; + packageableRuntime.package = packagePath; + packageableRuntime.name = 'rt'; + + const model = new V1_PureModelContextData(); + model.elements = [database, packageableRuntime]; + + const csvFileSource = new CSVFileQueryDataCubeSource(); + csvFileSource.model = model; + csvFileSource.runtime = packageableRuntime.path; + csvFileSource.db = database.path; + csvFileSource.schema = schema.name; + csvFileSource.table = table.name; + return csvFileSource; + } + // ---------------------------------- CACHING -------------------------------------- override async initializeCache( @@ -527,7 +719,7 @@ export class LegendDataCubeDataCubeEngine extends DataCubeEngine { } = await this._cacheManager.cache(result.result); // model - const pacakgePath = 'local'; + const packagePath = 'local'; const table = new V1_Table(); table.name = tableName; @@ -573,7 +765,7 @@ export class LegendDataCubeDataCubeEngine extends DataCubeEngine { schema.tables = [table]; const database = new V1_Database(); database.name = 'db'; - database.package = pacakgePath; + database.package = packagePath; database.schemas = [schema]; const connection = new V1_RelationalDatabaseConnection(); @@ -599,7 +791,7 @@ export class LegendDataCubeDataCubeEngine extends DataCubeEngine { const packageableRuntime = new V1_PackageableRuntime(); packageableRuntime.runtimeValue = runtime; - packageableRuntime.package = pacakgePath; + packageableRuntime.package = packagePath; packageableRuntime.name = 'rt'; const model = new V1_PureModelContextData(); diff --git a/packages/legend-application-data-cube/src/stores/builder/LegendDataCubeCreatorState.tsx b/packages/legend-application-data-cube/src/stores/builder/LegendDataCubeCreatorState.tsx index ad3aac1bbc..491ef2f5b3 100644 --- a/packages/legend-application-data-cube/src/stores/builder/LegendDataCubeCreatorState.tsx +++ b/packages/legend-application-data-cube/src/stores/builder/LegendDataCubeCreatorState.tsx @@ -41,7 +41,7 @@ import { type LegendDataCubeBuilderStore, } from './LegendDataCubeBuilderStore.js'; import { generateBuilderRoute } from '../../__lib__/LegendDataCubeNavigation.js'; -import { RawFileQueryDataCubeSourceBuilderState } from './source/RawFileQueryDataCubeSourceBuilderState.js'; +import { CSVFileQueryDataCubeSourceBuilderState } from './source/CSVFileQueryDataCubeSourceBuilderState.js'; const DEFAULT_SOURCE_TYPE = 
diff --git a/packages/legend-application-data-cube/src/stores/builder/LegendDataCubeCreatorState.tsx b/packages/legend-application-data-cube/src/stores/builder/LegendDataCubeCreatorState.tsx
index ad3aac1bbc..491ef2f5b3 100644
--- a/packages/legend-application-data-cube/src/stores/builder/LegendDataCubeCreatorState.tsx
+++ b/packages/legend-application-data-cube/src/stores/builder/LegendDataCubeCreatorState.tsx
@@ -41,7 +41,7 @@ import {
   type LegendDataCubeBuilderStore,
 } from './LegendDataCubeBuilderStore.js';
 import { generateBuilderRoute } from '../../__lib__/LegendDataCubeNavigation.js';
-import { RawFileQueryDataCubeSourceBuilderState } from './source/RawFileQueryDataCubeSourceBuilderState.js';
+import { CSVFileQueryDataCubeSourceBuilderState } from './source/CSVFileQueryDataCubeSourceBuilderState.js';
 
 const DEFAULT_SOURCE_TYPE = LegendDataCubeSourceBuilderType.LEGEND_QUERY;
@@ -106,8 +106,8 @@ export class LegendDataCubeCreatorState {
           this._application,
           this._engine,
         );
-      case LegendDataCubeSourceBuilderType.RAW_FILE_QUERY:
-        return new RawFileQueryDataCubeSourceBuilderState(
+      case LegendDataCubeSourceBuilderType.CSV_FILE_QUERY:
+        return new CSVFileQueryDataCubeSourceBuilderState(
          this._application,
          this._engine,
        );
diff --git a/packages/legend-application-data-cube/src/stores/builder/source/RawFileQueryDataCubeSourceBuilderState.ts b/packages/legend-application-data-cube/src/stores/builder/source/CSVFileQueryDataCubeSourceBuilderState.ts
similarity index 50%
rename from packages/legend-application-data-cube/src/stores/builder/source/RawFileQueryDataCubeSourceBuilderState.ts
rename to packages/legend-application-data-cube/src/stores/builder/source/CSVFileQueryDataCubeSourceBuilderState.ts
index b6b57f3155..3c9792cb81 100644
--- a/packages/legend-application-data-cube/src/stores/builder/source/RawFileQueryDataCubeSourceBuilderState.ts
+++ b/packages/legend-application-data-cube/src/stores/builder/source/CSVFileQueryDataCubeSourceBuilderState.ts
@@ -24,11 +24,17 @@ import {
   LegendDataCubeSourceBuilderType,
 } from './LegendDataCubeSourceBuilderState.js';
 import type { LegendDataCubeApplicationStore } from '../../LegendDataCubeBaseStore.js';
-import { makeObservable, observable } from 'mobx';
+import { action, makeObservable, observable } from 'mobx';
 import type { LegendDataCubeDataCubeEngine } from '../../LegendDataCubeDataCubeEngine.js';
+import {
+  CSVFileQueryDataCubeSource,
+  RawCSVFileQueryDataCubeSource,
+} from '../../model/CSVFileQueryDataCubeSource.js';
 
-export class RawFileQueryDataCubeSourceBuilderState extends LegendDataCubeSourceBuilderState {
-  fileData!: unknown[] | undefined;
+export class CSVFileQueryDataCubeSourceBuilderState extends LegendDataCubeSourceBuilderState {
+  fileData!: string;
+  fileName!: string;
+  rowCount!: number;
 
   constructor(
     application: LegendDataCubeApplicationStore,
@@ -37,11 +43,28 @@ export class CSVFileQueryDataCubeSourceBuilderState extends LegendDataCubeSource
     super(application, engine);
     makeObservable(this, {
       fileData: observable,
+      fileName: observable,
+      rowCount: observable,
+      setFileData: action,
+      setFileName: action,
+      setRowCount: action,
     });
   }
 
+  setFileData(data: string) {
+    this.fileData = data;
+  }
+
+  setFileName(fileName: string) {
+    this.fileName = fileName;
+  }
+
+  setRowCount(count: number) {
+    this.rowCount = count;
+  }
+
   override get label(): LegendDataCubeSourceBuilderType {
-    return LegendDataCubeSourceBuilderType.RAW_FILE_QUERY;
+    return LegendDataCubeSourceBuilderType.CSV_FILE_QUERY;
   }
 
   override get isValid(): boolean {
@@ -49,9 +72,25 @@
   }
 
   override async generateSourceData(): Promise<PlainObject> {
-    // generate cached data source
-    // const cachedSource = await this._engine.initializeCache();
-    // return RawCachedDataCubeSource.serialization.toJson(guaranteeType(cachedSource, CachedDataCubeSource, `Can't generate source for data`));
-    throw new UnsupportedOperationError('Method not implemented');
+    const csvDataSource = guaranteeType(
+      await this._engine.ingestFileData(this.fileData),
+      CSVFileQueryDataCubeSource,
+      `Can't generate data source`,
+    );
+
+    const rawCsvDataSource = new RawCSVFileQueryDataCubeSource();
+    rawCsvDataSource.count = this.rowCount;
+    rawCsvDataSource.fileName = this.fileName;
+    rawCsvDataSource.db = csvDataSource.db;
+    rawCsvDataSource.model = csvDataSource.model;
+    rawCsvDataSource.schema = csvDataSource.schema;
+    rawCsvDataSource.table = csvDataSource.table;
+    rawCsvDataSource.runtime = csvDataSource.runtime;
+
+    return RawCSVFileQueryDataCubeSource.serialization.toJson(rawCsvDataSource);
   }
 }
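The builder state follows the standard MobX pattern: observable fields plus action-wrapped setters so every mutation is tracked. A self-contained illustration of that pattern (a toy class, not the real state):

import { action, makeObservable, observable } from 'mobx';

class UploadedFileState {
  fileName = '';
  rowCount = 0;

  constructor() {
    // Register the fields as observable and the setters as actions so
    // reactions (e.g. observer components) re-run on each mutation.
    makeObservable(this, {
      fileName: observable,
      rowCount: observable,
      setFileName: action,
      setRowCount: action,
    });
  }

  setFileName(fileName: string) {
    this.fileName = fileName;
  }

  setRowCount(count: number) {
    this.rowCount = count;
  }
}

const state = new UploadedFileState();
state.setFileName('people.csv'); // observers of `fileName` react to this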
diff --git a/packages/legend-application-data-cube/src/stores/builder/source/LegendDataCubeSourceBuilderState.ts b/packages/legend-application-data-cube/src/stores/builder/source/LegendDataCubeSourceBuilderState.ts
index fd81b0146c..44a27f0e59 100644
--- a/packages/legend-application-data-cube/src/stores/builder/source/LegendDataCubeSourceBuilderState.ts
+++ b/packages/legend-application-data-cube/src/stores/builder/source/LegendDataCubeSourceBuilderState.ts
@@ -22,7 +22,7 @@ import type { DataCubeConfiguration } from '@finos/legend-data-cube';
 export enum LegendDataCubeSourceBuilderType {
   LEGEND_QUERY = 'Legend Query',
   ADHOC_QUERY = 'Ad hoc Query',
-  RAW_FILE_QUERY = 'Raw File Query',
+  CSV_FILE_QUERY = 'CSV File Query',
 }
 
 export abstract class LegendDataCubeSourceBuilderState {
diff --git a/packages/legend-application-data-cube/src/stores/model/CSVFileQueryDataCubeSource.ts b/packages/legend-application-data-cube/src/stores/model/CSVFileQueryDataCubeSource.ts
new file mode 100644
index 0000000000..45505ff081
--- /dev/null
+++ b/packages/legend-application-data-cube/src/stores/model/CSVFileQueryDataCubeSource.ts
@@ -0,0 +1,62 @@
+/**
+ * Copyright (c) 2020-present, Goldman Sachs
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import { DataCubeSource } from '@finos/legend-data-cube';
+import {
+  V1_pureModelContextDataPropSchema,
+  type V1_PureModelContextData,
+} from '@finos/legend-graph';
+import {
+  SerializationFactory,
+  usingConstantValueSchema,
+} from '@finos/legend-shared';
+import { createModelSchema, primitive } from 'serializr';
+
+export const CSV_FILE_QUERY_DATA_CUBE_SOURCE_TYPE = 'csvFileQuery';
+
+export class CSVFileQueryDataCubeSource extends DataCubeSource {
+  model!: V1_PureModelContextData;
+  runtime!: string;
+  db!: string;
+  schema!: string;
+  table!: string;
+  count!: number;
+  fileName!: string;
+}
+
+export class RawCSVFileQueryDataCubeSource {
+  model!: V1_PureModelContextData;
+  runtime!: string;
+  db!: string;
+  schema!: string;
+  table!: string;
+  count!: number;
+  fileName!: string;
+
+  static readonly serialization = new SerializationFactory(
+    createModelSchema(RawCSVFileQueryDataCubeSource, {
+      _type: usingConstantValueSchema(CSV_FILE_QUERY_DATA_CUBE_SOURCE_TYPE),
+      model: V1_pureModelContextDataPropSchema,
+      runtime: primitive(),
+      db: primitive(),
+      schema: primitive(),
+      table: primitive(),
+      count: primitive(),
+      fileName: primitive(),
+    }),
+  );
+}
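The persisted shape of `RawCSVFileQueryDataCubeSource` is driven entirely by its serializr model schema; the constant `_type` discriminator is what routes the JSON back through `processSource`. A toy round-trip showing the `SerializationFactory` pattern (two fields only; the real schema additionally carries the model via `V1_pureModelContextDataPropSchema`):

import {
  SerializationFactory,
  usingConstantValueSchema,
} from '@finos/legend-shared';
import { createModelSchema, primitive } from 'serializr';

class ToyRawSource {
  table!: string;
  count!: number;

  static readonly serialization = new SerializationFactory(
    createModelSchema(ToyRawSource, {
      // serialized as a fixed discriminator, ignored on the instance itself
      _type: usingConstantValueSchema('csvFileQuery'),
      table: primitive(),
      count: primitive(),
    }),
  );
}

const source = new ToyRawSource();
source.table = 'fileData1';
source.count = 2;

const json = ToyRawSource.serialization.toJson(source);
// => { _type: 'csvFileQuery', table: 'fileData1', count: 2 }
const restored = ToyRawSource.serialization.fromJson(json);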
diff --git a/packages/legend-data-cube/src/index.tsx b/packages/legend-data-cube/src/index.tsx
index a540d36a53..80471aa63f 100644
--- a/packages/legend-data-cube/src/index.tsx
+++ b/packages/legend-data-cube/src/index.tsx
@@ -19,10 +19,7 @@
 export * from './stores/core/model/DataCubeSource.js';
 export { type DataCubeColumn } from './stores/core/model/DataCubeColumn.js';
 export * from './stores/core/model/DataCubeConfiguration.js';
 export * from './stores/core/model/AdhocQueryDataCubeSource.js';
-export {
-  CachedDataCubeSource,
-  RawCachedDataCubeSource,
-} from './stores/core/model/CachedDataCubeSource.js';
+export { CachedDataCubeSource } from './stores/core/model/CachedDataCubeSource.js';
 
 export * from './stores/core/DataCubeEngine.js';
 export * from './stores/core/DataCubeQueryEngine.js';
diff --git a/packages/legend-data-cube/src/stores/core/DataCubeEngine.tsx b/packages/legend-data-cube/src/stores/core/DataCubeEngine.tsx
index 14c1e8b2f5..fc60c71133 100644
--- a/packages/legend-data-cube/src/stores/core/DataCubeEngine.tsx
+++ b/packages/legend-data-cube/src/stores/core/DataCubeEngine.tsx
@@ -270,6 +270,11 @@ export abstract class DataCubeEngine {
     source: DataCubeSource,
   ): V1_AppliedFunction | undefined;
 
+  // ---------------------------------- FILE UPLOAD ------------------------------
+  async ingestFileData(csvString: string): Promise<DataCubeSource | undefined> {
+    return undefined;
+  }
+
   // ---------------------------------- CACHING ----------------------------------
 
   async initializeCache(
diff --git a/packages/legend-data-cube/src/stores/core/model/CachedDataCubeSource.ts b/packages/legend-data-cube/src/stores/core/model/CachedDataCubeSource.ts
index 51f7d54344..94dfa6d985 100644
--- a/packages/legend-data-cube/src/stores/core/model/CachedDataCubeSource.ts
+++ b/packages/legend-data-cube/src/stores/core/model/CachedDataCubeSource.ts
@@ -14,18 +14,8 @@
  * limitations under the License.
  */
 
-import {
-  V1_pureModelContextDataPropSchema,
-  type V1_PureModelContextData,
-} from '@finos/legend-graph';
+import { type V1_PureModelContextData } from '@finos/legend-graph';
 import { DataCubeSource } from './DataCubeSource.js';
-import {
-  SerializationFactory,
-  usingConstantValueSchema,
-} from '@finos/legend-shared';
-import { createModelSchema, primitive } from 'serializr';
-
-export const CACHED_DATA_CUBE_SOURCE_TYPE = 'cached';
 
 export class CachedDataCubeSource extends DataCubeSource {
   model!: V1_PureModelContextData;
   runtime!: string;
   db!: string;
   schema!: string;
@@ -35,25 +25,3 @@ export class CachedDataCubeSource extends DataCubeSource {
   table!: string;
   count!: number;
 }
-
-export class RawCachedDataCubeSource {
-  model!: V1_PureModelContextData;
-  runtime!: string;
-  db!: string;
-  schema!: string;
-  table!: string;
-  count!: number;
-
-  static readonly serialization = new SerializationFactory(
-    createModelSchema(CachedDataCubeSource, {
-      _type: usingConstantValueSchema(CACHED_DATA_CUBE_SOURCE_TYPE),
-      model: V1_pureModelContextDataPropSchema,
-      query: primitive(),
-      runtime: primitive(),
-      db: primitive(),
-      schema: primitive(),
-      table: primitive(),
-      count: primitive(),
-    }),
-  );
-}