diff --git a/package-lock.json b/package-lock.json
index 290a40a3..b79f31fe 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -1,12 +1,12 @@
 {
   "name": "rosetta-dbt-studio",
-  "version": "1.2.5",
+  "version": "1.2.6",
   "lockfileVersion": 3,
   "requires": true,
   "packages": {
     "": {
       "name": "rosetta-dbt-studio",
-      "version": "1.2.5",
+      "version": "1.2.6",
       "hasInstallScript": true,
       "license": "MIT",
       "dependencies": {
diff --git a/package.json b/package.json
index 0e2dcede..2f1893a8 100644
--- a/package.json
+++ b/package.json
@@ -1,5 +1,5 @@
 {
-  "version": "1.2.5",
+  "version": "1.2.6",
   "name": "rosetta-dbt-studio",
   "description": "Turn Raw Data into Business Insights—Faster with RosettaDB",
   "keywords": [
diff --git a/release/app/package-lock.json b/release/app/package-lock.json
index 9776e2a7..3c84dbc3 100644
--- a/release/app/package-lock.json
+++ b/release/app/package-lock.json
@@ -1,12 +1,12 @@
 {
   "name": "rosetta-dbt-studio",
-  "version": "1.2.5",
+  "version": "1.2.6",
   "lockfileVersion": 2,
   "requires": true,
   "packages": {
     "": {
       "name": "rosetta-dbt-studio",
-      "version": "1.2.5",
+      "version": "1.2.6",
       "hasInstallScript": true,
       "license": "MIT",
       "dependencies": {
diff --git a/release/app/package.json b/release/app/package.json
index d9446434..cc667313 100644
--- a/release/app/package.json
+++ b/release/app/package.json
@@ -1,6 +1,6 @@
 {
   "name": "rosetta-dbt-studio",
-  "version": "1.2.5",
+  "version": "1.2.6",
   "description": "A modern DBT desktop IDE",
   "license": "MIT",
   "author": {
diff --git a/src/main/ipcHandlers/connectors.ipcHandlers.ts b/src/main/ipcHandlers/connectors.ipcHandlers.ts
index eba59e65..0439d2c0 100644
--- a/src/main/ipcHandlers/connectors.ipcHandlers.ts
+++ b/src/main/ipcHandlers/connectors.ipcHandlers.ts
@@ -1,6 +1,10 @@
 import { ipcMain } from 'electron';
 import { ConnectorsService } from '../services';
-import type { ConnectionInput, QueryResponseType } from '../../types/backend';
+import type {
+  ConnectionInput,
+  QueryResponseType,
+  ExecuteStatementType,
+} from '../../types/backend';
 import { ConfigureConnectionBody, UpdateConnectionBody } from '../../types/ipc';
 import { CloudConnection, RecentItem } from '../../types/frontend';

@@ -10,6 +14,7 @@ const handlerChannels = [
   'connector:validate',
   'connector:getJdbcUrl',
   'connector:query',
+  'connector:cancel-query',
   'connector:list',
 ];

@@ -67,10 +72,7 @@ const registerConnectorsHandlers = () => {

   ipcMain.handle(
     'connector:query',
-    async (
-      _event,
-      body: { connection: ConnectionInput; query: string; projectName: string },
-    ): Promise<QueryResponseType> => {
+    async (_event, body: ExecuteStatementType): Promise<QueryResponseType> => {
       try {
         return ConnectorsService.executeSelectStatement(body);
       } catch (error: any) {
@@ -79,6 +81,13 @@
     },
   );

+  ipcMain.handle(
+    'connector:cancel-query',
+    async (_event, queryId: string): Promise<void> => {
+      return ConnectorsService.cancelQuery(queryId);
+    },
+  );
+
   ipcMain.handle(
     'connector:setConnectionEnvVariable',
     async (_event, { key, value }: { key: string; value: string }) => {
diff --git a/src/main/ipcHandlers/duckLake.ipcHandlers.ts b/src/main/ipcHandlers/duckLake.ipcHandlers.ts
index a1e15247..491314b6 100644
--- a/src/main/ipcHandlers/duckLake.ipcHandlers.ts
+++ b/src/main/ipcHandlers/duckLake.ipcHandlers.ts
@@ -104,6 +104,15 @@ const registerDuckLakeHandlers = () => {
     },
   );

+  ipcMain.handle(
+    'ducklake:instance:listSnapshots',
+    async (_event, instanceId: string, params: any) => {
+      // Ensure params has defaults if missing (though Service also defaults)
+      const listParams = params || { page: 1, pageSize: 100 };
+      return DuckLakeService.listInstanceSnapshots(instanceId, listParams);
+    },
+  );
+
   ipcMain.handle(
     'ducklake:snapshot:restore',
     async (
diff --git a/src/main/services/connectors.service.ts b/src/main/services/connectors.service.ts
index 08809e13..76c0cddb 100644
--- a/src/main/services/connectors.service.ts
+++ b/src/main/services/connectors.service.ts
@@ -347,6 +347,17 @@ export default class ConnectorsService {
     }
   }

+  private static runningQueries = new Map<string, () => void>();
+
+  static async cancelQuery(queryId: string): Promise<void> {
+    const cancelFn = this.runningQueries.get(queryId);
+    if (cancelFn) {
+      // Execute the cancellation function (closes connection/client)
+      cancelFn();
+      this.runningQueries.delete(queryId);
+    }
+  }
+
   /**
    * Run a select statement and expect the results and fields
    */
@@ -354,6 +365,7 @@
     connection,
     query,
     projectName,
+    queryId,
   }: ExecuteStatementType): Promise<QueryResponseType> {
     const storeUser = await SecureStorageService.getCredential(
       `db-user-${projectName}`,
@@ -384,25 +396,71 @@
       (connection as any).keyfile = bigQueryKey;
     }

-    switch (connection.type) {
-      case 'postgres':
-        return executePostgresQuery(connection, query);
-      case 'snowflake':
-        return executeSnowflakeQuery(connection, query);
-      case 'bigquery':
-        return executeBigQueryQuery(connection, query);
-      case 'databricks':
-        return executeDatabricksQuery(connection, query);
-      case 'duckdb':
-        return executeDuckDBQuery(connection, query);
-      case 'redshift':
-        return executeRedshiftQuery(connection, query);
-      default:
-        // Use the literal type instead of accessing the property to avoid TypeScript error
-        throw new Error(
-          `Unsupported connection type: ${(connection as any).type}`,
-        );
+    const startTime = Date.now();
+    let response: QueryResponseType;
+
+    // Helper to register cancel callback if queryId is present
+    const registerCancel = queryId
+      ?
(fn: () => void) => { + this.runningQueries.set(queryId, fn); + } + : undefined; + + try { + switch (connection.type) { + case 'postgres': + response = await executePostgresQuery( + connection, + query, + registerCancel, + ); + break; + case 'snowflake': + response = await executeSnowflakeQuery( + connection, + query, + registerCancel, + ); + break; + case 'bigquery': + // BigQuery cancellation not yet implemented in utils + response = await executeBigQueryQuery(connection, query); + break; + case 'databricks': + response = await executeDatabricksQuery( + connection, + query, + registerCancel, + ); + break; + case 'duckdb': + response = await executeDuckDBQuery( + connection, + query, + registerCancel, + ); + break; + case 'redshift': + response = await executeRedshiftQuery( + connection, + query, + registerCancel, + ); + break; + default: + throw new Error( + `Unsupported connection type: ${(connection as any).type}`, + ); + } + } finally { + // Clean up running query registry + if (queryId) { + this.runningQueries.delete(queryId); + } } + + response.duration = Date.now() - startTime; + return response; } static extractDbNameFromPath = (url: string) => { @@ -637,10 +695,6 @@ export default class ConnectorsService { conn: ConnectionInput, ): Promise { const profileConfig = { - config: { - send_anonymous_usage_stats: false, - partial_parse: true, - }, [name]: { target: 'dev', outputs: { diff --git a/src/main/services/duckLake.service.ts b/src/main/services/duckLake.service.ts index c7755778..1b863c24 100644 --- a/src/main/services/duckLake.service.ts +++ b/src/main/services/duckLake.service.ts @@ -17,12 +17,15 @@ import { DuckLakeInstanceHealth, DuckLakeTableInfo, DuckLakeSnapshotInfo, + DuckLakeSnapshotDetail, DuckLakeQueryRequest, DuckLakeQueryResult, DuckLakeMaintenanceTask, DuckLakeCatalogConfig, DuckLakeMaintenanceType, DuckLakeStorageConfig, + DuckLakeSnapshotParams, + DuckLakePaginatedResult, } from '../../types/duckLake'; import { DuckLakeError } from '../../types/duckLakeErrors'; @@ -563,6 +566,24 @@ export default class DuckLakeService { } } + static async listInstanceSnapshots( + instanceId: string, + params: DuckLakeSnapshotParams = { page: 1, pageSize: 100 }, + ): Promise> { + try { + await this.ensureConnected(instanceId); + const adapter = await this.getAdapter(instanceId); + return await adapter.listInstanceSnapshots(params); + } catch (error) { + // eslint-disable-next-line no-console + console.error( + `[DuckLakeService] Failed to list instance snapshots for ${instanceId}:`, + error, + ); + throw error; + } + } + static async restoreSnapshot( instanceId: string, // eslint-disable-next-line @typescript-eslint/no-unused-vars diff --git a/src/main/services/duckLake/adapters/base.adapter.ts b/src/main/services/duckLake/adapters/base.adapter.ts index 06268c09..cee4b6d4 100644 --- a/src/main/services/duckLake/adapters/base.adapter.ts +++ b/src/main/services/duckLake/adapters/base.adapter.ts @@ -9,9 +9,12 @@ import { DuckLakeInstance, DuckLakeTableInfo, DuckLakeSnapshotInfo, + DuckLakeSnapshotDetail, DuckLakeQueryResult, DuckLakeQueryRequest, DuckLakeStorageConfig, + DuckLakeSnapshotParams, + DuckLakePaginatedResult, } from '../../../../types/duckLake'; import { generateGCSBearerToken } from '../../../helpers/cloudAuth.helper'; @@ -96,6 +99,14 @@ export abstract class CatalogAdapter { */ abstract listSnapshots(tableName: string): Promise; + /** + * Get all snapshots for the entire instance (not table-specific) + * Used for instance-wide history view with pagination + */ + 
abstract listInstanceSnapshots(
+    params: DuckLakeSnapshotParams,
+  ): Promise<DuckLakePaginatedResult<DuckLakeSnapshotDetail>>;
+
   /**
    * Get comprehensive table details from DuckLake metadata catalog (Phase 8b)
    * Queries multiple metadata tables to provide complete table information
diff --git a/src/main/services/duckLake/adapters/duckdb.adapter.ts b/src/main/services/duckLake/adapters/duckdb.adapter.ts
index 6a049b90..3bd4c5cf 100644
--- a/src/main/services/duckLake/adapters/duckdb.adapter.ts
+++ b/src/main/services/duckLake/adapters/duckdb.adapter.ts
@@ -16,9 +16,12 @@ import {
   DuckLakeInstance,
   DuckLakeTableInfo,
   DuckLakeSnapshotInfo,
+  DuckLakeSnapshotDetail,
   DuckLakeQueryResult,
   DuckLakeQueryRequest,
   DuckLakeStorageConfig,
+  DuckLakeSnapshotParams,
+  DuckLakePaginatedResult,
 } from '../../../../types/duckLake';
 import { DuckLakeError } from '../../../../types/duckLakeErrors';
 import { normalizeNumericValue } from '../../../../renderer/utils/fileUtils';
@@ -287,10 +290,12 @@ export class DuckDBCatalogAdapter extends CatalogAdapter {
           t.table_uuid,
           cs.snapshot_id as current_snapshot,
           ts.record_count,
-          ts.file_size_bytes
+          ts.file_size_bytes,
+          snap.snapshot_time
         FROM ${quotedMetadataDatabase}.main.ducklake_table t
         JOIN ${quotedMetadataDatabase}.main.ducklake_schema s ON t.schema_id = s.schema_id
         LEFT JOIN ${quotedMetadataDatabase}.main.ducklake_table_stats ts ON ts.table_id = t.table_id
+        LEFT JOIN ${quotedMetadataDatabase}.main.ducklake_snapshot snap ON snap.snapshot_id = t.begin_snapshot
         CROSS JOIN current_snapshot cs
         WHERE cs.snapshot_id >= t.begin_snapshot
           AND (cs.snapshot_id < t.end_snapshot OR t.end_snapshot IS NULL)
@@ -304,15 +309,24 @@
     const tables: DuckLakeTableInfo[] = rows.map((row: any) => {
       if (Array.isArray(row)) {
-        const [, tableName, schemaName, , , recordCount, fileSizeBytes] = row;
+        const [
+          ,
+          tableName,
+          schemaName,
+          ,
+          ,
+          recordCount,
+          fileSizeBytes,
+          snapshotTime,
+        ] = row;
         return {
           name: tableName,
           schema: schemaName || 'main',
           instanceId: '',
           columns: [],
           snapshots: [],
-          createdAt: new Date(),
-          updatedAt: new Date(),
+          createdAt: snapshotTime ? new Date(snapshotTime) : new Date(),
+          updatedAt: snapshotTime ? new Date(snapshotTime) : new Date(),
           rowCount: normalizeNumericValue(recordCount),
           sizeBytes: normalizeNumericValue(fileSizeBytes),
         };
@@ -324,8 +338,12 @@
         instanceId: '',
         columns: [],
         snapshots: [],
-        createdAt: new Date(),
-        updatedAt: new Date(),
+        createdAt: row.snapshot_time
+          ? new Date(row.snapshot_time)
+          : new Date(),
+        updatedAt: row.snapshot_time
+          ?
new Date(row.snapshot_time) + : new Date(), rowCount: normalizeNumericValue(row.record_count), sizeBytes: normalizeNumericValue(row.file_size_bytes), }; @@ -548,6 +566,128 @@ export class DuckDBCatalogAdapter extends CatalogAdapter { } } + async listInstanceSnapshots( + params: DuckLakeSnapshotParams, + ): Promise> { + try { + if (!this.connectionInfo) { + throw new Error('No active connection'); + } + + const { page, pageSize, filter } = params; + const offset = (page - 1) * pageSize; + + // Find the DuckLake metadata database + const databasesQuery = ` + SELECT database_name + FROM duckdb_databases() + WHERE database_name LIKE '__ducklake_metadata_%' + LIMIT 1 + `; + + const databasesResult = + await this.connectionInfo.connection.run(databasesQuery); + const databaseRows = await databasesResult.getRows(); + + if (databaseRows.length === 0) { + throw new Error('DuckLake metadata database not found'); + } + + const metadataDatabase = Array.isArray(databaseRows[0]) + ? databaseRows[0][0] + : (databaseRows[0] as any).database_name; + + // Quote the database name to handle special characters (hyphens, etc.) + const quotedMetadataDatabase = `"${metadataDatabase}"`; + + // Build WHERE clause + let whereClause = ''; + if (filter) { + // Sanitize filter for simple SQL injection prevention + // Escape LIKE wildcards first, then single quotes for SQL + const safeFilter = filter + .replace(/\\/g, '\\\\') + .replace(/%/g, '\\%') + .replace(/_/g, '\\_') + .replace(/'/g, "''"); + whereClause = ` + WHERE CAST(s.snapshot_id AS VARCHAR) LIKE '%${safeFilter}%' ESCAPE '\\' + OR sc.changes_made LIKE '%${safeFilter}%' ESCAPE '\\' + `; + } + + // 1. Get Total Count + const countQuery = ` + SELECT COUNT(*) as total + FROM ${quotedMetadataDatabase}.main.ducklake_snapshot s + LEFT JOIN ${quotedMetadataDatabase}.main.ducklake_snapshot_changes sc + ON s.snapshot_id = sc.snapshot_id + ${whereClause} + `; + + const countResult = await this.connectionInfo.connection.run(countQuery); + const countRows = await countResult.getRows(); + + const totalRaw = Array.isArray(countRows[0]) + ? countRows[0][0] + : countRows[0].total; + const total = Number(String(totalRaw)); + + // 2. 
Get Data with Pagination + const snapshotsQuery = ` + SELECT + s.snapshot_id, + s.snapshot_time, + s.schema_version, + s.next_catalog_id, + s.next_file_id, + sc.changes_made + FROM ${quotedMetadataDatabase}.main.ducklake_snapshot s + LEFT JOIN ${quotedMetadataDatabase}.main.ducklake_snapshot_changes sc + ON s.snapshot_id = sc.snapshot_id + ${whereClause} + ORDER BY s.snapshot_id DESC + LIMIT ${pageSize} OFFSET ${offset} + `; + + const snapshotsResult = + await this.connectionInfo.connection.run(snapshotsQuery); + const rows = await snapshotsResult.getRows(); + + const data = rows.map((row: any) => { + if (Array.isArray(row)) { + return { + snapshotId: row[0], + snapshotTime: new Date(row[1]), + schemaVersion: row[2], + nextCatalogId: row[3], + nextFileId: row[4], + changesMade: row[5], + }; + } + return { + snapshotId: row.snapshot_id, + snapshotTime: new Date(row.snapshot_time), + schemaVersion: row.schema_version, + nextCatalogId: row.next_catalog_id, + nextFileId: row.next_file_id, + changesMade: row.changes_made, + }; + }); + + return { + data, + total, + page, + pageSize, + }; + } catch (error) { + // eslint-disable-next-line no-console + console.error('Failed to list instance snapshots:', error); + throw error; + } + } + /** * Get comprehensive table details from DuckLake metadata catalog (Phase 8b) * Queries multiple metadata tables to provide complete table information @@ -952,8 +1092,49 @@ export class DuckDBCatalogAdapter extends CatalogAdapter { console.debug('No partition info found for table:', tableName); } - // 7. Get snapshots + // 7. Get table-specific snapshots using CTE const snapshotsQuery = ` + WITH table_snapshots AS ( + -- Snapshot when table was created + SELECT t.begin_snapshot as snapshot_id + FROM ${quotedMetadataDatabase}.main.ducklake_table t + WHERE t.table_id = ${tableId} + + UNION + + -- Snapshot when table was deleted (if applicable) + SELECT t.end_snapshot as snapshot_id + FROM ${quotedMetadataDatabase}.main.ducklake_table t + WHERE t.table_id = ${tableId} AND t.end_snapshot IS NOT NULL + + UNION + + -- Snapshots when columns were added/modified + SELECT c.begin_snapshot as snapshot_id + FROM ${quotedMetadataDatabase}.main.ducklake_column c + WHERE c.table_id = ${tableId} + + UNION + + -- Snapshots when columns were dropped + SELECT c.end_snapshot as snapshot_id + FROM ${quotedMetadataDatabase}.main.ducklake_column c + WHERE c.table_id = ${tableId} AND c.end_snapshot IS NOT NULL + + UNION + + -- Snapshots when data files were added + SELECT df.begin_snapshot as snapshot_id + FROM ${quotedMetadataDatabase}.main.ducklake_data_file df + WHERE df.table_id = ${tableId} + + UNION + + -- Snapshots when data files were deleted + SELECT df.end_snapshot as snapshot_id + FROM ${quotedMetadataDatabase}.main.ducklake_data_file df + WHERE df.table_id = ${tableId} AND df.end_snapshot IS NOT NULL + ) SELECT s.snapshot_id, s.snapshot_time, @@ -962,10 +1143,10 @@ export class DuckDBCatalogAdapter extends CatalogAdapter { s.next_file_id, sc.changes_made FROM ${quotedMetadataDatabase}.main.ducklake_snapshot s + INNER JOIN table_snapshots ts ON s.snapshot_id = ts.snapshot_id LEFT JOIN ${quotedMetadataDatabase}.main.ducklake_snapshot_changes sc ON s.snapshot_id = sc.snapshot_id ORDER BY s.snapshot_id DESC - LIMIT 50 `; const snapshotsResult = diff --git a/src/main/services/duckLake/adapters/postgresql.adapter.ts b/src/main/services/duckLake/adapters/postgresql.adapter.ts index 8382b354..25751aa5 100644 --- a/src/main/services/duckLake/adapters/postgresql.adapter.ts +++ 
b/src/main/services/duckLake/adapters/postgresql.adapter.ts @@ -14,9 +14,12 @@ import { DuckLakeInstance, DuckLakeTableInfo, DuckLakeSnapshotInfo, + DuckLakeSnapshotDetail, DuckLakeQueryResult, DuckLakeQueryRequest, DuckLakeStorageConfig, + DuckLakeSnapshotParams, + DuckLakePaginatedResult, } from '../../../../types/duckLake'; import { DuckLakeError } from '../../../../types/duckLakeErrors'; import { normalizeNumericValue } from '../../../../renderer/utils/fileUtils'; @@ -56,14 +59,13 @@ export class PostgreSQLCatalogAdapter extends CatalogAdapter { const connectionString = this.buildPostgreSQLConnectionString(pgConfig); // Attach DuckLake catalog with PostgreSQL backend + // We must escape single quotes in the connection string for SQL safety const attachString = `ducklake:postgres:${connectionString}`; + const escapedAttachString = attachString.replace(/'/g, "''"); - // For S3 storage we expect instance.dataPath to be an s3:// URI such as - // s3://adaptivescale/ducklake_nuri/ and rely on httpfs + secret created - // via createSecrets above. We just pass through instance.dataPath here. await this.attachDuckLakeCatalog( connection, - attachString, + escapedAttachString, instance.name, instance.dataPath, ); @@ -182,10 +184,21 @@ export class PostgreSQLCatalogAdapter extends CatalogAdapter { // Test PostgreSQL connection const pgConfig = config.postgresql!; const connectionString = this.buildPostgreSQLConnectionString(pgConfig); + const escapedConnectionString = connectionString.replace(/'/g, "''"); - // Test basic PostgreSQL connection - const testQuery = `SELECT 1 FROM postgres_query('${connectionString}', 'SELECT 1 as test')`; - await testConnection.run(testQuery); + // Test basic PostgreSQL connection using ATTACH (more reliable than postgres_query for testing) + const tempAlias = `test_pg_${Date.now()}`; + try { + await testConnection.run( + `ATTACH '${escapedConnectionString}' AS ${tempAlias} (TYPE postgres)`, + ); + await testConnection.run(`DETACH ${tempAlias}`); + } catch (err) { + // If ATTACH fails, we still try a direct query as a fallback + // in case the specific DuckDB version doesn't support ATTACH (TYPE postgres) + const testQuery = `SELECT 1 FROM postgres_query('${escapedConnectionString}', 'SELECT 1 as test')`; + await testConnection.run(testQuery); + } const responseTime = Date.now() - startTime; @@ -533,6 +546,126 @@ export class PostgreSQLCatalogAdapter extends CatalogAdapter { } } + async listInstanceSnapshots( + params: DuckLakeSnapshotParams, + ): Promise> { + try { + if (!this.connectionInfo) { + throw new Error('No active connection'); + } + + const { page, pageSize, filter } = params; + const offset = (page - 1) * pageSize; + + // Find the DuckLake metadata database + const databasesQuery = ` + SELECT database_name + FROM duckdb_databases() + WHERE database_name LIKE '__ducklake_metadata_%' + LIMIT 1 + `; + + const databasesResult = + await this.connectionInfo.connection.run(databasesQuery); + const databaseRows = await databasesResult.getRows(); + + if (databaseRows.length === 0) { + throw new Error('DuckLake metadata database not found'); + } + + const metadataDatabase = Array.isArray(databaseRows[0]) + ? 
databaseRows[0][0] + : (databaseRows[0] as any).database_name; + + const quotedMetadataDatabase = `"${metadataDatabase}"`; + + // Build WHERE clause + let whereClause = ''; + if (filter) { + // Escape LIKE wildcards first, then single quotes for SQL + const safeFilter = filter + .replace(/\\/g, '\\\\') + .replace(/%/g, '\\%') + .replace(/_/g, '\\_') + .replace(/'/g, "''"); + whereClause = ` + WHERE CAST(s.snapshot_id AS VARCHAR) LIKE '%${safeFilter}%' ESCAPE '\\' + OR sc.changes_made LIKE '%${safeFilter}%' ESCAPE '\\' + `; + } + + // 1. Get Total Count + const countQuery = ` + SELECT COUNT(*) as total + FROM ${quotedMetadataDatabase}.ducklake_snapshot s + LEFT JOIN ${quotedMetadataDatabase}.ducklake_snapshot_changes sc + ON s.snapshot_id = sc.snapshot_id + ${whereClause} + `; + + const countResult = await this.connectionInfo.connection.run(countQuery); + const countRows = await countResult.getRows(); + + const totalRaw = Array.isArray(countRows[0]) + ? countRows[0][0] + : countRows[0].total; + const total = Number(String(totalRaw)); + + // 2. Get Data with Pagination + const snapshotsQuery = ` + SELECT + s.snapshot_id, + s.snapshot_time, + s.schema_version, + s.next_catalog_id, + s.next_file_id, + sc.changes_made + FROM ${quotedMetadataDatabase}.ducklake_snapshot s + LEFT JOIN ${quotedMetadataDatabase}.ducklake_snapshot_changes sc + ON s.snapshot_id = sc.snapshot_id + ${whereClause} + ORDER BY s.snapshot_id DESC + LIMIT ${pageSize} OFFSET ${offset} + `; + + const snapshotsResult = + await this.connectionInfo.connection.run(snapshotsQuery); + const rows = await snapshotsResult.getRows(); + + const data = rows.map((row: any) => { + if (Array.isArray(row)) { + return { + snapshotId: row[0], + snapshotTime: new Date(row[1]), + schemaVersion: row[2], + nextCatalogId: row[3], + nextFileId: row[4], + changesMade: row[5], + }; + } + return { + snapshotId: row.snapshot_id, + snapshotTime: new Date(row.snapshot_time), + schemaVersion: row.schema_version, + nextCatalogId: row.next_catalog_id, + nextFileId: row.next_file_id, + changesMade: row.changes_made, + }; + }); + + return { + data, + total, + page, + pageSize, + }; + } catch (error) { + // eslint-disable-next-line no-console + console.error('Failed to list instance snapshots:', error); + throw error; + } + } + /** * Get comprehensive table details from DuckLake metadata catalog (Phase 8b) * Queries multiple metadata tables to provide complete table information @@ -720,7 +853,7 @@ export class PostgreSQLCatalogAdapter extends CatalogAdapter { cs.contains_null, cs.contains_nan, cs.min_value, - cs.max_value, + cs.max_value FROM ${quotedMetadataDatabase}.ducklake_table_column_stats cs JOIN ${quotedMetadataDatabase}.ducklake_column c ON cs.column_id = c.column_id @@ -771,7 +904,7 @@ export class PostgreSQLCatalogAdapter extends CatalogAdapter { file_order, begin_snapshot, end_snapshot, - partition_id, + partition_id FROM ${quotedMetadataDatabase}.ducklake_data_file WHERE table_id = ${tableId} AND ${currentSnapshot} >= begin_snapshot @@ -937,20 +1070,61 @@ export class PostgreSQLCatalogAdapter extends CatalogAdapter { console.debug('No partition info found for table:', tableName); } - // 7. Get snapshots + // 7. 
Get table-specific snapshots using CTE const snapshotsQuery = ` + WITH table_snapshots AS ( + -- Snapshot when table was created + SELECT t.begin_snapshot as snapshot_id + FROM ${quotedMetadataDatabase}.ducklake_table t + WHERE t.table_id = ${tableId} + + UNION + + -- Snapshot when table was deleted (if applicable) + SELECT t.end_snapshot as snapshot_id + FROM ${quotedMetadataDatabase}.ducklake_table t + WHERE t.table_id = ${tableId} AND t.end_snapshot IS NOT NULL + + UNION + + -- Snapshots when columns were added/modified + SELECT c.begin_snapshot as snapshot_id + FROM ${quotedMetadataDatabase}.ducklake_column c + WHERE c.table_id = ${tableId} + + UNION + + -- Snapshots when columns were dropped + SELECT c.end_snapshot as snapshot_id + FROM ${quotedMetadataDatabase}.ducklake_column c + WHERE c.table_id = ${tableId} AND c.end_snapshot IS NOT NULL + + UNION + + -- Snapshots when data files were added + SELECT df.begin_snapshot as snapshot_id + FROM ${quotedMetadataDatabase}.ducklake_data_file df + WHERE df.table_id = ${tableId} + + UNION + + -- Snapshots when data files were deleted + SELECT df.end_snapshot as snapshot_id + FROM ${quotedMetadataDatabase}.ducklake_data_file df + WHERE df.table_id = ${tableId} AND df.end_snapshot IS NOT NULL + ) SELECT s.snapshot_id, s.snapshot_time, s.schema_version, s.next_catalog_id, s.next_file_id, - sc.changes_made, + sc.changes_made FROM ${quotedMetadataDatabase}.ducklake_snapshot s + INNER JOIN table_snapshots ts ON s.snapshot_id = ts.snapshot_id LEFT JOIN ${quotedMetadataDatabase}.ducklake_snapshot_changes sc ON s.snapshot_id = sc.snapshot_id ORDER BY s.snapshot_id DESC - LIMIT 50 `; const snapshotsResult = @@ -1130,14 +1304,18 @@ export class PostgreSQLCatalogAdapter extends CatalogAdapter { private buildPostgreSQLConnectionString( config: NonNullable, ): string { + // For DuckDB's postgres extension, the keyword=value format is often more reliable + // across different versions than the URI format. const parts = [ - `dbname=${config.database}`, `host=${config.host}`, `port=${config.port}`, `user=${config.username}`, + `dbname=${config.database}`, ]; if (config.password) { + // For DuckDB's postgres extension, we use the keyword=value format. + // We don't need internal quoting here because we escape the whole string in the SQL literals. 
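Review note: the two comments above describe the one invariant the quoting code relies on — the keyword=value string is built raw and escaped exactly once, at the point it is embedded in a SQL string literal. A minimal sketch of that invariant (helper and alias names are illustrative, not from this PR):

```ts
// Sketch only: escape at the SQL boundary, never inside the DSN parts.
function toSqlStringLiteral(raw: string): string {
  // Double single quotes so the value survives embedding in '...'
  return `'${raw.replace(/'/g, "''")}'`;
}

const dsn = "host=localhost port=5432 user=o'brien dbname=lake";
// Produces: ATTACH 'ducklake:postgres:host=... user=o''brien dbname=lake' AS lake
const attachSql = `ATTACH ${toSqlStringLiteral(`ducklake:postgres:${dsn}`)} AS lake`;
```

Escaping any deeper (for example, quoting each `password=...` part as well) would double-escape and break authentication for credentials that contain quotes.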
parts.push(`password=${config.password}`); } diff --git a/src/main/services/duckLake/adapters/sqlite.adapter.ts b/src/main/services/duckLake/adapters/sqlite.adapter.ts index bfbf9783..225e0da9 100644 --- a/src/main/services/duckLake/adapters/sqlite.adapter.ts +++ b/src/main/services/duckLake/adapters/sqlite.adapter.ts @@ -16,9 +16,12 @@ import { DuckLakeInstance, DuckLakeTableInfo, DuckLakeSnapshotInfo, + DuckLakeSnapshotDetail, DuckLakeQueryResult, DuckLakeQueryRequest, DuckLakeStorageConfig, + DuckLakeSnapshotParams, + DuckLakePaginatedResult, } from '../../../../types/duckLake'; import { DuckLakeError } from '../../../../types/duckLakeErrors'; import { normalizeNumericValue } from '../../../../renderer/utils/fileUtils'; @@ -512,6 +515,130 @@ export class SQLiteCatalogAdapter extends CatalogAdapter { } } + async listInstanceSnapshots( + params: DuckLakeSnapshotParams, + ): Promise> { + try { + if (!this.connectionInfo) { + throw new Error('No active connection'); + } + + const { page, pageSize, filter } = params; + const offset = (page - 1) * pageSize; + + // Find the DuckLake metadata database + const databasesQuery = ` + SELECT database_name + FROM duckdb_databases() + WHERE database_name LIKE '__ducklake_metadata_%' + LIMIT 1 + `; + + const databasesResult = + await this.connectionInfo.connection.run(databasesQuery); + const databaseRows = await databasesResult.getRows(); + + if (databaseRows.length === 0) { + throw new Error('DuckLake metadata database not found'); + } + + const metadataDatabase = Array.isArray(databaseRows[0]) + ? databaseRows[0][0] + : (databaseRows[0] as any).database_name; + + // Quote the database name + const quotedMetadataDatabase = `"${metadataDatabase}"`; + + // Build WHERE clause + let whereClause = ''; + if (filter) { + // Sanitize filter for simple SQL injection prevention (basic) + // In real implementations, use bound parameters if possible, but DuckDB Node bindings might differ + // For text search in snapshots + // Escape LIKE wildcards first, then single quotes for SQL + const safeFilter = filter + .replace(/\\/g, '\\\\') + .replace(/%/g, '\\%') + .replace(/_/g, '\\_') + .replace(/'/g, "''"); + whereClause = ` + WHERE CAST(s.snapshot_id AS VARCHAR) LIKE '%${safeFilter}%' ESCAPE '\\' + OR sc.changes_made LIKE '%${safeFilter}%' ESCAPE '\\' + `; + } + + // 1. Get Total Count + const countQuery = ` + SELECT COUNT(*) as total + FROM ${quotedMetadataDatabase}.main.ducklake_snapshot s + LEFT JOIN ${quotedMetadataDatabase}.main.ducklake_snapshot_changes sc + ON s.snapshot_id = sc.snapshot_id + ${whereClause} + `; + + const countResult = await this.connectionInfo.connection.run(countQuery); + const countRows = await countResult.getRows(); + // Handle count result safely for BigInt/Number + const totalRaw = Array.isArray(countRows[0]) + ? countRows[0][0] + : countRows[0].total; + const total = Number(String(totalRaw)); + + // 2. 
Get Data with Pagination + const snapshotsQuery = ` + SELECT + s.snapshot_id, + s.snapshot_time, + s.schema_version, + s.next_catalog_id, + s.next_file_id, + sc.changes_made + FROM ${quotedMetadataDatabase}.main.ducklake_snapshot s + LEFT JOIN ${quotedMetadataDatabase}.main.ducklake_snapshot_changes sc + ON s.snapshot_id = sc.snapshot_id + ${whereClause} + ORDER BY s.snapshot_id DESC + LIMIT ${pageSize} OFFSET ${offset} + `; + + const snapshotsResult = + await this.connectionInfo.connection.run(snapshotsQuery); + const rows = await snapshotsResult.getRows(); + + const data = rows.map((row: any) => { + if (Array.isArray(row)) { + return { + snapshotId: row[0], + snapshotTime: new Date(row[1]), + schemaVersion: row[2], + nextCatalogId: row[3], + nextFileId: row[4], + changesMade: row[5], + }; + } + return { + snapshotId: row.snapshot_id, + snapshotTime: new Date(row.snapshot_time), + schemaVersion: row.schema_version, + nextCatalogId: row.next_catalog_id, + nextFileId: row.next_file_id, + changesMade: row.changes_made, + }; + }); + + return { + data, + total, + page, + pageSize, + }; + } catch (error) { + // eslint-disable-next-line no-console + console.error('Failed to list instance snapshots:', error); + throw error; + } + } + /** * Get comprehensive table details from DuckLake metadata catalog (Phase 8b) * Queries multiple metadata tables to provide complete table information @@ -916,8 +1043,49 @@ export class SQLiteCatalogAdapter extends CatalogAdapter { console.debug('No partition info found for table:', tableName); } - // 7. Get snapshots + // 7. Get table-specific snapshots using CTE const snapshotsQuery = ` + WITH table_snapshots AS ( + -- Snapshot when table was created + SELECT t.begin_snapshot as snapshot_id + FROM ${quotedMetadataDatabase}.main.ducklake_table t + WHERE t.table_id = ${tableId} + + UNION + + -- Snapshot when table was deleted (if applicable) + SELECT t.end_snapshot as snapshot_id + FROM ${quotedMetadataDatabase}.main.ducklake_table t + WHERE t.table_id = ${tableId} AND t.end_snapshot IS NOT NULL + + UNION + + -- Snapshots when columns were added/modified + SELECT c.begin_snapshot as snapshot_id + FROM ${quotedMetadataDatabase}.main.ducklake_column c + WHERE c.table_id = ${tableId} + + UNION + + -- Snapshots when columns were dropped + SELECT c.end_snapshot as snapshot_id + FROM ${quotedMetadataDatabase}.main.ducklake_column c + WHERE c.table_id = ${tableId} AND c.end_snapshot IS NOT NULL + + UNION + + -- Snapshots when data files were added + SELECT df.begin_snapshot as snapshot_id + FROM ${quotedMetadataDatabase}.main.ducklake_data_file df + WHERE df.table_id = ${tableId} + + UNION + + -- Snapshots when data files were deleted + SELECT df.end_snapshot as snapshot_id + FROM ${quotedMetadataDatabase}.main.ducklake_data_file df + WHERE df.table_id = ${tableId} AND df.end_snapshot IS NOT NULL + ) SELECT s.snapshot_id, s.snapshot_time, @@ -926,10 +1094,10 @@ export class SQLiteCatalogAdapter extends CatalogAdapter { s.next_file_id, sc.changes_made FROM ${quotedMetadataDatabase}.main.ducklake_snapshot s + INNER JOIN table_snapshots ts ON s.snapshot_id = ts.snapshot_id LEFT JOIN ${quotedMetadataDatabase}.main.ducklake_snapshot_changes sc ON s.snapshot_id = sc.snapshot_id ORDER BY s.snapshot_id DESC - LIMIT 50 `; const snapshotsResult = diff --git a/src/main/services/duckLake/instanceStore.service.ts b/src/main/services/duckLake/instanceStore.service.ts index 3ed46a2c..ce085fc9 100644 --- a/src/main/services/duckLake/instanceStore.service.ts +++ 
b/src/main/services/duckLake/instanceStore.service.ts @@ -437,8 +437,14 @@ export default class DuckLakeInstanceStore { dataPath: instance.dataPath, storage: storagePersisted, catalog: catalogPersisted, - createdAt: instance.createdAt.toISOString(), - updatedAt: instance.updatedAt.toISOString(), + createdAt: (instance.createdAt instanceof Date + ? instance.createdAt + : new Date(instance.createdAt) + ).toISOString(), + updatedAt: (instance.updatedAt instanceof Date + ? instance.updatedAt + : new Date(instance.updatedAt) + ).toISOString(), status: instance.status, tags: instance.tags, runtimeOptions: instance.runtimeOptions, diff --git a/src/main/utils/connectors.ts b/src/main/utils/connectors.ts index 2aee6831..3fbc39cc 100644 --- a/src/main/utils/connectors.ts +++ b/src/main/utils/connectors.ts @@ -70,6 +70,7 @@ export async function testRedshiftConnection( export const executePostgresQuery = async ( config: PostgresConnection, query: string, + registerCancel?: (fn: () => void) => void, ): Promise => { const client = new pg.Client({ host: config.host, @@ -80,6 +81,12 @@ export const executePostgresQuery = async ( connectionTimeoutMillis: 5000, }); + if (registerCancel) { + registerCancel(() => { + client.end(); + }); + } + try { await client.connect(); const result = await client.query(query); @@ -99,6 +106,7 @@ export const executePostgresQuery = async ( export const executeRedshiftQuery = async ( config: RedshiftConnection, query: string, + registerCancel?: (fn: () => void) => void, ): Promise => { const clientConfig: any = { host: config.host, @@ -118,6 +126,12 @@ export const executeRedshiftQuery = async ( const client = new pg.Client(clientConfig); + if (registerCancel) { + registerCancel(() => { + client.end(); + }); + } + try { await client.connect(); const result = await client.query(query); @@ -187,9 +201,16 @@ export async function testSnowflakeConnection( export const executeSnowflakeQuery = async ( config: SnowflakeConnection, query: string, + registerCancel?: (fn: () => void) => void, ): Promise => { const connection = createSnowflakeConnection(config); + if (registerCancel) { + registerCancel(() => { + connection.destroy(() => {}); + }); + } + return new Promise((resolve) => { connection.connect((err) => { if (err) { @@ -255,8 +276,23 @@ export async function testDatabricksConnection( export const executeDatabricksQuery = async ( config: DatabricksConnection, query: string, + registerCancel?: (fn: () => void) => void, ): Promise => { const client = new DBSQLClient(); + let session: any; + let queryOperation: any; + + if (registerCancel) { + registerCancel(async () => { + try { + if (queryOperation) await queryOperation.close(); + if (session) await session.close(); + await client.close(); + } catch (e) { + // ignore errors during cancellation + } + }); + } try { const connection = await client.connect({ @@ -265,8 +301,8 @@ export const executeDatabricksQuery = async ( path: config.httpPath, }); - const session = await connection.openSession(); - const queryOperation = await session.executeStatement(query, { + session = await connection.openSession(); + queryOperation = await session.executeStatement(query, { runAsync: true, }); @@ -582,10 +618,32 @@ export async function testDuckDBConnection( export const executeDuckDBQuery = async ( config: DuckDBConnection, query: string, + registerCancel?: (fn: () => void) => void, ): Promise => { let instance: any = null; let connection: any = null; + if (registerCancel) { + registerCancel(async () => { + try { + if (connection) { + if 
(typeof connection.close === 'function') await connection.close(); + else if (typeof connection.closeSync === 'function') + connection.closeSync(); + } + if (instance) { + if (typeof instance.close === 'function') await instance.close(); + else if (typeof instance.closeSync === 'function') + instance.closeSync(); + else if (typeof instance.terminate === 'function') + await instance.terminate(); + } + } catch (e) { + // ignore + } + }); + } + try { instance = await DuckDBInstance.create(config.database_path); connection = await instance.connect(); diff --git a/src/renderer/components/customTable/CustomTableToolbar.tsx b/src/renderer/components/customTable/CustomTableToolbar.tsx index 18bd0a3b..ecf8c704 100644 --- a/src/renderer/components/customTable/CustomTableToolbar.tsx +++ b/src/renderer/components/customTable/CustomTableToolbar.tsx @@ -1,5 +1,5 @@ -import React from 'react'; -import { Toolbar, Typography, Tooltip, TextField } from '@mui/material'; +import React, { ReactNode } from 'react'; +import { Toolbar, Typography, Tooltip, TextField, Box } from '@mui/material'; import { makeStyles } from '@mui/styles'; const useStyles = makeStyles({ @@ -10,34 +10,54 @@ const useStyles = makeStyles({ title: { flex: '1 1 45%', }, + searchContainer: { + paddingTop: '4px', + paddingBottom: '4px', + }, }); type Props = { name: string; handleSearch: (keyword: string) => void; + toolbarContent?: ReactNode; }; -const CustomTableToolbar = ({ name, handleSearch }: Props) => { +const CustomTableToolbar = ({ name, handleSearch, toolbarContent }: Props) => { const classes = useStyles(); return (
-      <Toolbar className={classes.toolbar}>
-        <Typography className={classes.title} variant="h6">
-          {name}
-        </Typography>
+      <Toolbar className={classes.toolbar}>
+        {name ? (
+          <Typography className={classes.title} variant="h6">
+            {name}
+          </Typography>
+        ) : (
+          <Box className={classes.title} />
+        )}
+
+        {toolbarContent && <Box>{toolbarContent}</Box>}
+
        <TextField
          onChange={(event) =>
{ if (customPagination) { diff --git a/src/renderer/components/customTable/types.ts b/src/renderer/components/customTable/types.ts index 6d31f934..fcaaea24 100644 --- a/src/renderer/components/customTable/types.ts +++ b/src/renderer/components/customTable/types.ts @@ -37,4 +37,5 @@ export type CustomTableType = { customPagination?: CustomTablePagination; rowActions?: Array>; containerStyle?: CSSProperties; + toolbarContent?: ReactNode; }; diff --git a/src/renderer/components/dataLake/DataLakeConnectionWizard.tsx b/src/renderer/components/dataLake/DataLakeConnectionWizard.tsx index 128f4668..535d21a1 100644 --- a/src/renderer/components/dataLake/DataLakeConnectionWizard.tsx +++ b/src/renderer/components/dataLake/DataLakeConnectionWizard.tsx @@ -1815,6 +1815,12 @@ export const DataLakeConnectionWizard: React.FC< {wizardData.basics?.description && ( diff --git a/src/renderer/components/dataLake/DataLakeInstanceDetails.tsx b/src/renderer/components/dataLake/DataLakeInstanceDetails.tsx index c90e5d32..1842a49e 100644 --- a/src/renderer/components/dataLake/DataLakeInstanceDetails.tsx +++ b/src/renderer/components/dataLake/DataLakeInstanceDetails.tsx @@ -1,4 +1,5 @@ import React, { useState } from 'react'; +import { useQueryClient } from 'react-query'; import { Box, Typography, @@ -20,6 +21,15 @@ import { Card, CardContent, LinearProgress, + Table, + TableBody, + TableCell, + TableContainer, + TableHead, + TableRow, + TablePagination, + TextField, + InputAdornment, } from '@mui/material'; import { Dataset as Database, @@ -36,6 +46,8 @@ import { Security, Speed, Memory, + Refresh, + Search, } from '@mui/icons-material'; import { useNavigate } from 'react-router-dom'; import moment from 'moment'; @@ -47,50 +59,13 @@ import { import { useRefreshDuckLakeInstanceHealth, useDuckLakeInstanceHealth, + duckLakeKeys, + useDuckLakeInstanceSnapshots, } from '../../controllers/duckLake.controller'; -import { DuckLakeStorageConfig } from '../../../types/duckLake'; - -interface DuckLakeInstance { - id: string; - name: string; - status: 'active' | 'inactive' | 'error'; - dataPath: string; - storage?: DuckLakeStorageConfig; - catalog: { - type: 'duckdb' | 'sqlite' | 'postgresql'; - duckdb?: { metadataPath: string }; - sqlite?: { metadataPath: string }; - postgresql?: { - host: string; - port: number; - database: string; - username: string; - ssl: boolean; - }; - }; - runtime?: { - maxMemory?: string; - threads?: number; - enableOptimizer?: boolean; - tempDirectory?: string; - }; - createdAt: string; - updatedAt: string; - description?: string; - health?: { - catalogConnected: boolean; - dataPathAccessible: boolean; - extensionLoaded: boolean; - lastChecked: string; - error?: string; - }; - stats?: { - tableCount: number; - totalSize: number; - lastQuery: string; - queryCount: number; - }; -} +import { + DuckLakeInstance, + DuckLakeSnapshotDetail, +} from '../../../types/duckLake'; interface DuckLakeInstanceDetailsProps { instance: DuckLakeInstance; @@ -103,14 +78,36 @@ export const DataLakeInstanceDetails: React.FC< DuckLakeInstanceDetailsProps > = ({ instance, onEdit, onDelete, isLoading = false }) => { const navigate = useNavigate(); + const queryClient = useQueryClient(); const [currentTab, setCurrentTab] = useState(0); const [deleteDialogOpen, setDeleteDialogOpen] = useState(false); // Use the health check mutation for test connection const testConnectionMutation = useRefreshDuckLakeInstanceHealth(); + // Snapshot Pagination State + const [snapshotPage, setSnapshotPage] = useState(0); + const 
[snapshotRowsPerPage, setSnapshotRowsPerPage] = useState(10); + const [snapshotFilter, setSnapshotFilter] = useState(''); + // Fetch health data for this instance const healthQuery = useDuckLakeInstanceHealth(instance.id); + const snapshotsQuery = useDuckLakeInstanceSnapshots(instance.id, { + page: snapshotPage + 1, + pageSize: snapshotRowsPerPage, + filter: snapshotFilter, + }); + + const handleChangePage = (_event: unknown, newPage: number) => { + setSnapshotPage(newPage); + }; + + const handleChangeRowsPerPage = ( + event: React.ChangeEvent, + ) => { + setSnapshotRowsPerPage(parseInt(event.target.value, 10)); + setSnapshotPage(0); + }; const getStatusIcon = (status: string) => { switch (status) { @@ -146,6 +143,7 @@ export const DataLakeInstanceDetails: React.FC< }; const formatBytes = (bytes: number) => { + if (bytes <= 0) return '0 Bytes'; const sizes = ['Bytes', 'KB', 'MB', 'GB', 'TB']; const i = Math.floor(Math.log(bytes) / Math.log(1024)); return `${Math.round((bytes / 1024 ** i) * 100) / 100} ${sizes[i]}`; @@ -155,6 +153,17 @@ export const DataLakeInstanceDetails: React.FC< testConnectionMutation.mutate(instance.id); }; + const handleRefresh = () => { + // Invalidate tables, health status, and snapshots for this instance + queryClient.invalidateQueries(duckLakeKeys.tables(instance.id)); + queryClient.invalidateQueries(duckLakeKeys.instanceHealth(instance.id)); + queryClient.invalidateQueries(duckLakeKeys.instance(instance.id)); + // Use exact: false to match parameterized snapshot queries + queryClient.invalidateQueries(duckLakeKeys.instanceSnapshots(instance.id), { + exact: false, + }); + }; + const isStorageHealthy = (value?: boolean) => typeof value !== 'boolean' || value; @@ -409,7 +418,11 @@ export const DataLakeInstanceDetails: React.FC< primary="Storage Path" secondary={healthQuery.data.storageLocation} secondaryTypographyProps={{ - sx: { fontFamily: 'monospace' }, + sx: { + fontFamily: 'monospace', + wordBreak: 'break-all', + overflowWrap: 'anywhere', + }, }} /> @@ -525,7 +538,12 @@ export const DataLakeInstanceDetails: React.FC< primary="Data Path" secondary={instance.dataPath} secondaryTypographyProps={{ - sx: { fontFamily: 'monospace', fontSize: '0.875rem' }, + sx: { + fontFamily: 'monospace', + fontSize: '0.875rem', + wordBreak: 'break-all', + overflowWrap: 'anywhere', + }, }} /> @@ -582,7 +600,12 @@ export const DataLakeInstanceDetails: React.FC< primary="Metadata Path" secondary={instance.catalog.duckdb.metadataPath} secondaryTypographyProps={{ - sx: { fontFamily: 'monospace', fontSize: '0.875rem' }, + sx: { + fontFamily: 'monospace', + fontSize: '0.875rem', + wordBreak: 'break-all', + overflowWrap: 'anywhere', + }, }} /> @@ -593,7 +616,12 @@ export const DataLakeInstanceDetails: React.FC< primary="Metadata Path" secondary={instance.catalog.sqlite.metadataPath} secondaryTypographyProps={{ - sx: { fontFamily: 'monospace', fontSize: '0.875rem' }, + sx: { + fontFamily: 'monospace', + fontSize: '0.875rem', + wordBreak: 'break-all', + overflowWrap: 'anywhere', + }, }} /> @@ -605,6 +633,12 @@ export const DataLakeInstanceDetails: React.FC< @@ -636,7 +670,7 @@ export const DataLakeInstanceDetails: React.FC< {/* Runtime Configuration */} - {instance.runtime && ( + {(instance.runtime || instance.runtimeOptions) && ( - {instance.runtime.maxMemory || 'Default'} + {instance.runtime?.maxMemory || + instance.runtimeOptions?.maxMemory || + 'Default'} @@ -669,7 +705,9 @@ export const DataLakeInstanceDetails: React.FC< Threads - {instance.runtime.threads || 'Auto'} + 
{instance.runtime?.threads || + instance.runtimeOptions?.threads || + 'Auto'} @@ -682,7 +720,8 @@ export const DataLakeInstanceDetails: React.FC< Optimizer - {instance.runtime.enableOptimizer + {(instance.runtime?.enableOptimizer ?? + instance.runtimeOptions?.enableOptimizer) ? 'Enabled' : 'Disabled'} @@ -698,9 +737,16 @@ export const DataLakeInstanceDetails: React.FC< - {instance.runtime.tempDirectory || 'Default'} + {instance.runtime?.tempDirectory || + instance.runtimeOptions?.tempDirectory || + 'Default'} @@ -713,6 +759,30 @@ export const DataLakeInstanceDetails: React.FC< ); + // Safely convert any value to string for React rendering (handles DuckDB hugeint, BigInt, circular objects) + const safeToString = (value: any): string => { + if (value === null || value === undefined) { + return '-'; + } + // Handle BigInt explicitly + if (typeof value === 'bigint') { + return String(value); + } + if (typeof value === 'object') { + // Handle DuckDB hugeint objects + if (value.hugeint !== undefined) { + return String(value.hugeint); + } + // Handle other objects with try/catch for circular references + try { + return JSON.stringify(value); + } catch { + return '-'; + } + } + return String(value); + }; + return ( {/* Header */} @@ -739,11 +809,19 @@ export const DataLakeInstanceDetails: React.FC< {instance.name} - DuckLake Instance • {instance.catalog.type.toUpperCase()} Catalog + DataLake Instance • {instance.catalog.type.toUpperCase()} Catalog +
+ + + Snapshot ID + Time + Schema Version + Changes + + + + {snapshotsQuery.data.data.map( + (snapshot: DuckLakeSnapshotDetail) => ( + + + + + + {moment(snapshot.snapshotTime).format( + 'YYYY-MM-DD HH:mm:ss', + )} + + {moment(snapshot.snapshotTime).fromNow()} + + + + {safeToString(snapshot.schemaVersion)} + + + {snapshot.changesMade || '-'} + + + ), + )} + +
+ + + + )} + + {!snapshotsQuery.isLoading && + !snapshotsQuery.isError && + (!snapshotsQuery.data || + snapshotsQuery.data.data.length === 0) && ( + No snapshot history available + )} + + )} + {currentTab === 3 && ( - Activity history coming soon... + Activity coming soon... )} diff --git a/src/renderer/components/dataLake/DataLakeInstances.tsx b/src/renderer/components/dataLake/DataLakeInstances.tsx index 8da5f5b3..4536cd5f 100644 --- a/src/renderer/components/dataLake/DataLakeInstances.tsx +++ b/src/renderer/components/dataLake/DataLakeInstances.tsx @@ -212,7 +212,9 @@ export const DataLakeInstances: React.FC = ({ variant="outlined" /> - + ; + instances?: DuckLakeInstance[]; } export const DataLakeSidebar: React.FC = ({ diff --git a/src/renderer/components/dataLake/DataLakeTableDetails.tsx b/src/renderer/components/dataLake/DataLakeTableDetails.tsx index 4667923f..8208fe16 100644 --- a/src/renderer/components/dataLake/DataLakeTableDetails.tsx +++ b/src/renderer/components/dataLake/DataLakeTableDetails.tsx @@ -251,9 +251,26 @@ export const DataLakeTableDetails: React.FC = () => { Path - - {tableDetails.path} - {tableDetails.pathIsRelative && ' (relative)'} + + + {tableDetails.path} + + {tableDetails.pathIsRelative && ( + + (relative) + + )} )} @@ -458,7 +475,11 @@ export const DataLakeTableDetails: React.FC = () => { {file.path} @@ -600,9 +621,7 @@ export const DataLakeTableDetails: React.FC = () => { Snapshot ID Time Schema Version - Author Changes - Commit Message @@ -631,9 +650,7 @@ export const DataLakeTableDetails: React.FC = () => { {safeToString(snapshot.schemaVersion)} - {snapshot.author || '-'} {snapshot.changesMade || '-'} - {snapshot.commitMessage || '-'} ), )} diff --git a/src/renderer/components/dataLake/DataLakeTableImportWizard.tsx b/src/renderer/components/dataLake/DataLakeTableImportWizard.tsx index a38cd5bd..bb7792d4 100644 --- a/src/renderer/components/dataLake/DataLakeTableImportWizard.tsx +++ b/src/renderer/components/dataLake/DataLakeTableImportWizard.tsx @@ -108,16 +108,20 @@ export const DataLakeTableImportWizard: React.FC< const handleImport = () => { let sourceQuery = ''; - - switch (sourceType) { - case 'url': - sourceQuery = `CREATE TABLE ${tableName} AS FROM '${sourceUrl}'`; - break; - case 'file': - sourceQuery = `CREATE TABLE ${tableName} AS FROM '${filePath}'`; - break; - default: - sourceQuery = ''; + const src = sourceType === 'url' ? sourceUrl : filePath; + const escapedSrc = src.replace(/'/g, "''"); + // Quote table name as identifier to prevent SQL injection and handle reserved words + const escapedTableName = `"${tableName.replace(/"/g, '""')}"`; + const isCsv = src.toLowerCase().endsWith('.csv'); + + if (isCsv) { + // Use specific CSV reader with error handling options + // ignore_errors=true: Skips rows with parsing errors + // null_padding=true: Pads missing columns with NULL + sourceQuery = `CREATE TABLE ${escapedTableName} AS SELECT * FROM read_csv_auto('${escapedSrc}', ignore_errors=true, null_padding=true)`; + } else { + // Default behavior for other formats (Parquet, JSON, etc.) 
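Review note: for reference, this is roughly what the two branches of this block emit, using the same identifier/literal quoting (table and file names here are hypothetical):

```ts
// Sketch of the generated DuckDB statements, mirroring the wizard's escaping.
const quoteIdent = (name: string) => `"${name.replace(/"/g, '""')}"`; // identifier
const quoteLit = (s: string) => `'${s.replace(/'/g, "''")}'`; // string literal

function buildImportSql(tableName: string, src: string): string {
  if (src.toLowerCase().endsWith('.csv')) {
    // Tolerant CSV path: skip unparseable rows, NULL-pad short ones
    return `CREATE TABLE ${quoteIdent(tableName)} AS SELECT * FROM read_csv_auto(${quoteLit(src)}, ignore_errors=true, null_padding=true)`;
  }
  // DuckDB infers the reader for Parquet/JSON from the file extension
  return `CREATE TABLE ${quoteIdent(tableName)} AS FROM ${quoteLit(src)}`;
}

// buildImportSql('raw events', 'data/2024.csv')
// → CREATE TABLE "raw events" AS SELECT * FROM read_csv_auto('data/2024.csv', ignore_errors=true, null_padding=true)
```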
+ sourceQuery = `CREATE TABLE ${escapedTableName} AS FROM '${escapedSrc}'`; } onImport(tableName, sourceQuery); @@ -242,12 +246,14 @@ export const DataLakeTableImportWizard: React.FC< onChange={(e) => setFilePath(e.target.value)} placeholder="/path/to/file.csv" helperText="Absolute or relative path to CSV, Parquet, or JSON file" - InputProps={{ - endAdornment: ( - - - - ), + slotProps={{ + input: { + endAdornment: ( + + + + ), + }, }} /> diff --git a/src/renderer/components/dataLake/DataLakeTables.tsx b/src/renderer/components/dataLake/DataLakeTables.tsx index c58f7d59..ce9bc81b 100644 --- a/src/renderer/components/dataLake/DataLakeTables.tsx +++ b/src/renderer/components/dataLake/DataLakeTables.tsx @@ -1,4 +1,4 @@ -import React from 'react'; +import React, { useMemo } from 'react'; import { Box, Card, @@ -46,9 +46,18 @@ export const DataLakeTables: React.FC = ({ }) => { const navigate = useNavigate(); - const filteredTables = selectedInstanceId - ? tables.filter((table) => table.instanceId === selectedInstanceId) - : tables; + const filteredTables = useMemo(() => { + const result = selectedInstanceId + ? tables.filter((table) => table.instanceId === selectedInstanceId) + : [...tables]; + + // Sort by createdAt descending (newest first) + return result.sort((a, b) => { + const dateA = new Date(a.createdAt).getTime(); + const dateB = new Date(b.createdAt).getTime(); + return dateB - dateA; + }); + }, [tables, selectedInstanceId]); const formatBytes = (bytes?: number) => { if (!bytes) return 'Unknown'; @@ -105,7 +114,7 @@ export const DataLakeTables: React.FC = ({ {selectedInstanceId && ( t.instanceId === selectedInstanceId)?.instanceName || selectedInstanceId}`} + label={`${tables.find((t) => t.instanceId === selectedInstanceId)?.instanceName || selectedInstanceId}`} variant="outlined" color="primary" /> diff --git a/src/renderer/components/dataLake/DataLakeTablesView.tsx b/src/renderer/components/dataLake/DataLakeTablesView.tsx index 4fae272e..ffe28988 100644 --- a/src/renderer/components/dataLake/DataLakeTablesView.tsx +++ b/src/renderer/components/dataLake/DataLakeTablesView.tsx @@ -115,6 +115,7 @@ export const DataLakeTablesView: React.FC = ({ schema: table.schema, rowCount: table.rowCount, sizeBytes: table.sizeBytes, + lastAccessed: table.updatedAt?.toISOString(), createdAt: table.createdAt?.toISOString() || new Date().toISOString(), })); diff --git a/src/renderer/components/editor/index.tsx b/src/renderer/components/editor/index.tsx index cfee856a..5acd5d3a 100644 --- a/src/renderer/components/editor/index.tsx +++ b/src/renderer/components/editor/index.tsx @@ -151,13 +151,46 @@ export const Editor: React.FC = ({ return () => window.removeEventListener('keydown', handleKeyDown); }, [handleSave]); + // Track the active tab ID and expected content using refs to prevent stale closures + // This is critical because Monaco fires onChange when content prop changes, + // but at that moment the closure might still have the old activeTab + const activeTabIdRef = React.useRef(activeTabId); + const expectedContentRef = React.useRef(activeContent); + + // Update refs synchronously before render completes + activeTabIdRef.current = activeTabId; + expectedContentRef.current = activeContent; + // Content change handler (no auto-save) - const handleChange: OnChange = (value) => { - if (value === undefined || !activeTab || !activeTabId) { - return; - } - onTabContentChange(activeTabId, value); - }; + // Only handle ACTUAL user edits, not programmatic content changes from tab switching + const 
handleChange: OnChange = React.useCallback( + (value) => { + // Ignore undefined values + if (value === undefined) { + return; + } + + // Get the current active tab ID from the ref (always up-to-date) + const currentTabId = activeTabIdRef.current; + if (!currentTabId) { + return; + } + + // Get the expected content for the current tab + const expectedContent = expectedContentRef.current; + + // CRITICAL: If the incoming value matches what we expect for this tab, + // it means Monaco is just syncing to our controlled value (tab switch). + // Only process changes that are DIFFERENT from what we set. + if (value === expectedContent) { + return; + } + + // This is a genuine user edit - update the tab content + onTabContentChange(currentTabId, value); + }, + [onTabContentChange], + ); if (tabs.length === 0) { return ( diff --git a/src/renderer/components/errorMessage/index.tsx b/src/renderer/components/errorMessage/index.tsx index 3283725f..6361c2c7 100644 --- a/src/renderer/components/errorMessage/index.tsx +++ b/src/renderer/components/errorMessage/index.tsx @@ -4,9 +4,9 @@ import { Box, Typography } from '@mui/material'; import ErrorOutlineIcon from '@mui/icons-material/ErrorOutline'; const ErrorContainer = styled(Box)(({ theme }) => ({ - backgroundColor: theme.palette.error.light, - color: theme.palette.error.contrastText, - border: `1px solid ${theme.palette.error.main}`, + backgroundColor: theme.palette.background.paper, + color: theme.palette.text.primary, + border: `1px solid ${theme.palette.divider}`, borderRadius: theme.shape.borderRadius, padding: theme.spacing(2), display: 'flex', @@ -35,7 +35,7 @@ export const ErrorMessage: React.FC = ({ return ( - + diff --git a/src/renderer/components/fileTreeViewer/RenderTree.tsx b/src/renderer/components/fileTreeViewer/RenderTree.tsx index cac1c654..1a266fa8 100644 --- a/src/renderer/components/fileTreeViewer/RenderTree.tsx +++ b/src/renderer/components/fileTreeViewer/RenderTree.tsx @@ -50,6 +50,7 @@ type Props = { copyPathData: string; onPastePath: (source: string, target: string) => void; selectedPath?: string; + onRename?: (oldPath: string, newPath: string) => void; }; const getColorByStatus = (status?: string) => { @@ -85,6 +86,7 @@ const RenderTree: React.FC = ({ onPastePath, copyPathData, selectedPath, + onRename, }) => { const [menuPosition, setMenuPosition] = React.useState(null); @@ -113,6 +115,18 @@ const RenderTree: React.FC = ({ path: node.path, newName, }); + const lastSepIndex = Math.max( + node.path.lastIndexOf('/'), + node.path.lastIndexOf('\\'), + ); + const parentPath = + lastSepIndex > 0 ? 
node.path.substring(0, lastSepIndex + 1) : ''; + const newPath = parentPath + newName; + + if (typeof onRename === 'function') { + onRename(node.path, newPath); + } + if (typeof onRefresh === 'function') { onRefresh(); } @@ -121,7 +135,7 @@ const RenderTree: React.FC = ({ } finally { setRenameOpen(false); } - }, [renameValue, node.path, node.name, onRefresh]); + }, [renameValue, node.path, node.name, onRefresh, onRename]); const fileStatus = fileStatuses[node.path]; const labelColor = getColorByStatus(fileStatus); @@ -416,6 +430,7 @@ const RenderTree: React.FC = ({ onPastePath={onPastePath} copyPathData={copyPathData} selectedPath={selectedPath} + onRename={onRename} /> ))} diff --git a/src/renderer/components/fileTreeViewer/index.tsx b/src/renderer/components/fileTreeViewer/index.tsx index 0d54d51b..d6f66e7c 100644 --- a/src/renderer/components/fileTreeViewer/index.tsx +++ b/src/renderer/components/fileTreeViewer/index.tsx @@ -25,6 +25,7 @@ type Props = { statuses: FileStatus[]; copyPath: (source: string, target: string) => Promise; selectedPath?: string; + onRenameCallback?: (oldPath: string, newPath: string) => void; }; const filterTreeAndCollectExpanded = ( @@ -106,6 +107,7 @@ const FileTreeViewer: React.FC = ({ statuses, copyPath, selectedPath, + onRenameCallback, }) => { const { data: project } = useGetSelectedProject(); const [expandedItems, setExpandedItems] = React.useState([]); @@ -294,6 +296,7 @@ const FileTreeViewer: React.FC = ({ onPastePath={(source, target) => copyPath(source, target)} copyPathData={copyPathData} selectedPath={selectedPath} + onRename={onRenameCallback} /> {(fileModal || folderModal) && ( diff --git a/src/renderer/components/schemaTreeViewer/index.tsx b/src/renderer/components/schemaTreeViewer/index.tsx index 5e56c65e..6a5de98a 100644 --- a/src/renderer/components/schemaTreeViewer/index.tsx +++ b/src/renderer/components/schemaTreeViewer/index.tsx @@ -60,10 +60,22 @@ const SchemaTreeViewer: React.FC = React.memo( - {tables.length === 0 && ( + {isLoadingSchema && ( + + + + )} + {!isLoadingSchema && tables.length === 0 && ( No Schema available )} - {tables.length > 0 && ( + {!isLoadingSchema && tables.length > 0 && ( Promise; onCopyPath: (source: string, target: string) => Promise; onNewFile: (filePath?: string) => void; + onRenameFile?: (oldPath: string, newPath: string) => void; } const ExplorerTab: React.FC = ({ @@ -32,6 +33,7 @@ const ExplorerTab: React.FC = ({ onRefreshFiles, onCopyPath, onNewFile, + onRenameFile, }) => { return ( @@ -46,6 +48,7 @@ const ExplorerTab: React.FC = ({ copyPath={onCopyPath} onNewFileCallback={onNewFile} selectedPath={selectedFilePath} + onRenameCallback={onRenameFile} /> )} @@ -96,6 +99,7 @@ interface ProjectSidebarProps { onRefreshFiles: () => Promise; onCopyPath: (source: string, target: string) => Promise; onNewFile: (filePath?: string) => void; + onRenameFile?: (oldPath: string, newPath: string) => void; // Source Control tab integration with Monaco editor onSourceControlOpenFile?: (filePath: string) => void; @@ -118,6 +122,7 @@ export const ProjectSidebar: React.FC = ({ onRefreshFiles, onCopyPath, onNewFile, + onRenameFile, onSourceControlOpenFile, onSourceControlFileSelect, onSourceControlRefreshFileContent, @@ -272,6 +277,7 @@ export const ProjectSidebar: React.FC = ({ onRefreshFiles={onRefreshFiles} onCopyPath={onCopyPath} onNewFile={onNewFile} + onRenameFile={onRenameFile} /> )} diff --git a/src/renderer/components/sqlEditor/index.tsx b/src/renderer/components/sqlEditor/index.tsx index c5761ca6..a5c5fa6b 100644 --- 
diff --git a/src/renderer/components/sqlEditor/index.tsx b/src/renderer/components/sqlEditor/index.tsx
index c5761ca6..a5c5fa6b 100644
--- a/src/renderer/components/sqlEditor/index.tsx
+++ b/src/renderer/components/sqlEditor/index.tsx
@@ -7,6 +7,7 @@ import { QueryHistoryType } from '../../../types/frontend';
 import { ConnectionInput, Project } from '../../../types/backend';
 import { SqlEditorComponent } from './editorComponent';
 import { QueryHistory } from './queryHistory';
+import { useAppContext } from '../../hooks';

 type Props = {
   completions: Omit[];
@@ -17,6 +18,7 @@ type Props = {
   setLoadingQuery: (v: boolean) => void;
   setQueryResults: (v: any) => void;
   setError: (v: any) => void;
+  onQueryStart?: (queryId: string) => void;
 };

 export const SqlEditor: React.FC<Props> = ({
@@ -28,46 +30,119 @@ export const SqlEditor: React.FC<Props> = ({
   setLoadingQuery,
   setQueryResults,
   setError,
+  onQueryStart,
 }) => {
+  const { fetchSchema } = useAppContext();
   const editorRef = useRef(null);

+  // Helper function to detect DDL operations that modify the schema
+  const isDDLOperation = (query: string): boolean => {
+    const normalizedQuery = query.trim().toUpperCase();
+    const ddlKeywords = [
+      'CREATE TABLE',
+      'DROP TABLE',
+      'ALTER TABLE',
+      'CREATE SCHEMA',
+      'DROP SCHEMA',
+      'CREATE VIEW',
+      'DROP VIEW',
+      'RENAME TABLE',
+      'TRUNCATE TABLE',
+    ];
+    return ddlKeywords.some((keyword) => normalizedQuery.includes(keyword));
+  };
+
+  const getCommandType = (query: string): string => {
+    const normalized = query.trim().toUpperCase();
+    // The helper above uses a strict keyword check; here we also detect DML
+    if (isDDLOperation(query)) return 'DDL';
+    if (
+      normalized.startsWith('INSERT') ||
+      normalized.startsWith('UPDATE') ||
+      normalized.startsWith('DELETE') ||
+      normalized.startsWith('MERGE') ||
+      normalized.startsWith('TRUNCATE')
+    ) {
+      return 'DML';
+    }
+    return 'SELECT';
+  };
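One caveat with the `includes`-based check above: it also fires on queries that merely mention a DDL keyword in a string literal or comment (e.g. `SELECT * FROM audit WHERE note = 'DROP TABLE'` is classified as DDL and triggers a schema refresh). A stricter variant, shown as a sketch only, anchors the match to the statement's first significant token:

```ts
// Sketch: classify by the leading keyword instead of a substring match,
// so SELECTs that mention DDL keywords are not misread.
// Judgment call: bare TRUNCATE is treated as DDL here, since it changes data
// wholesale and many engines restrict it like DDL.
const DDL_STARTS = ['CREATE', 'DROP', 'ALTER', 'RENAME', 'TRUNCATE'];
const DML_STARTS = ['INSERT', 'UPDATE', 'DELETE', 'MERGE'];

function classifyStatement(query: string): 'DDL' | 'DML' | 'SELECT' {
  // Strip leading line/block comments before reading the first keyword.
  const stripped = query
    .replace(/^(\s*(--[^\n]*(\n|$)|\/\*[\s\S]*?\*\/))*\s*/, '')
    .toUpperCase();
  const first = stripped.split(/\s+/, 1)[0] ?? '';
  if (DDL_STARTS.includes(first)) return 'DDL';
  if (DML_STARTS.includes(first)) return 'DML';
  return 'SELECT';
}

// classifyStatement("-- note\nDROP TABLE t")                      === 'DDL'
// classifyStatement("SELECT 1 WHERE note = 'DROP TABLE'")         === 'SELECT'
```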
   const handleRunQuery = async (selectedQuery: string) => {
     if (!connectionInput || !selectedProject) {
       toast.error('No database connection configured for this project');
       return;
     }

+    // Generate a semi-unique ID so this run can be cancelled later
+    const queryId = `query-${Date.now()}-${Math.random()
+      .toString(36)
+      .substr(2, 9)}`;
+
     setError(undefined);
     setLoadingQuery(true);

+    if (onQueryStart) {
+      onQueryStart(queryId);
+    }
+
     try {
       const result = await connectorsServices.queryData({
         connection: connectionInput,
         query: selectedQuery,
         projectName: selectedProject.name,
+        queryId,
       });

       if (result.error) {
-        toast.error(result.error);
         setError(result.error);
         setLoadingQuery(false);
         return;
       }

-      setQueryResults(result);
+      // Check whether this was a DDL operation
+      const wasDDL = isDDLOperation(selectedQuery);
+      const commandType = getCommandType(selectedQuery);
+
+      const enrichedResult = {
+        ...result,
+        isCommand: commandType === 'DDL' || commandType === 'DML',
+        commandType,
+      };
+
+      setQueryResults(enrichedResult);
+
+      // Truncate results for history storage to prevent quota issues:
+      // localStorage has a size limit, so we can't store thousands of rows
+      // per query-history item.
+      const historyResults = {
+        ...result,
+        data: result.data?.slice(0, 10), // Only store the first 10 rows in history
+      };

       const newHistoryItem: QueryHistoryType = {
         id: new Date().toISOString(),
         executedAt: new Date(),
-        results: result,
+        results: historyResults,
         projectId: selectedProject.id,
         projectName: selectedProject.name,
         query: selectedQuery,
       };
-      setQueryHistory([...queryHistory, newHistoryItem]);
+      // Limit history to the last 50 items to prevent storage overflow:
+      // append the new item, then keep only the tail of the array.
+      const updatedHistory = [...queryHistory, newHistoryItem];
+      if (updatedHistory.length > 50) {
+        setQueryHistory(updatedHistory.slice(updatedHistory.length - 50));
+      } else {
+        setQueryHistory(updatedHistory);
+      }
+
+      // Refresh the schema if a DDL operation was executed
+      if (wasDDL) {
+        fetchSchema();
+      }
     } catch (error) {
       toast.error('An unexpected error occurred while executing the query');
       setError(error);

diff --git a/src/renderer/components/sqlEditor/queryHistory/index.tsx b/src/renderer/components/sqlEditor/queryHistory/index.tsx
index 93876f17..d20cb3e9 100644
--- a/src/renderer/components/sqlEditor/queryHistory/index.tsx
+++ b/src/renderer/components/sqlEditor/queryHistory/index.tsx
@@ -23,6 +23,29 @@ import { Container } from './styles';
 import { QueryHistoryType } from '../../../../types/frontend';
 import { projectsServices } from '../../../services';

+const formatQueryPreview = (query: string, maxLength: number = 30) => {
+  if (!query) return '';
+
+  const lines = query.split('\n');
+  // Find the first line that is not a comment and not empty
+  const firstSignificantLine = lines.find((line) => {
+    const trimmed = line.trim();
+    return (
+      trimmed.length > 0 &&
+      !trimmed.startsWith('--') &&
+      !trimmed.startsWith('/*')
+    );
+  });
+
+  // If no significant line is found, fall back to the first non-empty line
+  const preview = firstSignificantLine
+    ? firstSignificantLine.trim()
+    : query.trim().split('\n')[0].trim();
+
+  if (preview.length <= maxLength) return preview;
+  return `${preview.slice(0, maxLength)}...`;
+};
+
 type Props = {
   onQuerySelect: (value: QueryHistoryType) => void;
   queryHistory: QueryHistoryType[];
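For reference, how the new preview helper behaves on a commented query (illustrative inputs, not taken from the source):

```ts
const q = [
  '-- daily active users',
  '/* owner: analytics */',
  'SELECT user_id, COUNT(*) AS events',
  'FROM events GROUP BY 1',
].join('\n');

formatQueryPreview(q, 30);        // 'SELECT user_id, COUNT(*) AS ev...'
formatQueryPreview('-- only a comment'); // '-- only a comment' (fallback path)
```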
@@ -206,48 +229,59 @@ export const QueryHistoryToolbar: React.FC = ({
           maxHeight: 500,
         }}
       >
-        Query History
-
+
         {sortedHistory.map((qh, index) => (
             onClick={() => {
               setSelectedQueryHistory(qh);
               handleClose();
             }}
           >
+
-            {qh.query.trim().slice(0, 16)}...
+            {formatQueryPreview(qh.query, 30)}
-            {moment(qh.executedAt).fromNow()}
+            {moment(qh.executedAt).fromNow(true)} ago
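On the timestamp change: `fromNow(true)` returns the duration without a suffix, so the template appends a literal `ago`. For past dates this reads the same as `fromNow()`, but it pins the suffix even when a clock-skewed `executedAt` lands slightly in the future:

```ts
import moment from 'moment';

moment().subtract(5, 'minutes').fromNow();     // '5 minutes ago'
moment().subtract(5, 'minutes').fromNow(true); // '5 minutes'

// With the hunk's template, a future-skewed timestamp still reads naturally:
`${moment().add(2, 'seconds').fromNow(true)} ago`; // 'a few seconds ago'
// whereas fromNow() alone would render 'in a few seconds'
```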
@@ -415,48 +449,59 @@ const QueryHistory: React.FC = ({
           maxHeight: 500,
         }}
       >
-        Query History
-
+
         {sortedHistory.map((qh, index) => (
             onClick={() => {
               setSelectedQueryHistory(qh);
               handleClose();
             }}
           >
+
-            {qh.query.trim().slice(0, 16)}...
+            {formatQueryPreview(qh.query, 30)}
-            {moment(qh.executedAt).fromNow()}
+            {moment(qh.executedAt).fromNow(true)} ago
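The same preview/timestamp markup is now duplicated in `QueryHistoryToolbar` and `QueryHistory`. A shared list-item component would keep the two menus from drifting apart; a sketch with a hypothetical name, assuming `formatQueryPreview` is exported from the module above and standard MUI components:

```tsx
import React from 'react';
import { MenuItem, Typography } from '@mui/material';
import moment from 'moment';
import { QueryHistoryType } from '../../../../types/frontend';

type ItemProps = {
  item: QueryHistoryType;
  onSelect: (item: QueryHistoryType) => void;
};

// Sketch: one history row, reused by both menus.
export const QueryHistoryItem: React.FC<ItemProps> = ({ item, onSelect }) => (
  <MenuItem onClick={() => onSelect(item)}>
    <Typography variant="body2" noWrap>
      {formatQueryPreview(item.query, 30)}
    </Typography>
    <Typography variant="caption" color="text.secondary">
      {moment(item.executedAt).fromNow(true)} ago
    </Typography>
  </MenuItem>
);
```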
diff --git a/src/renderer/controllers/duckLake.controller.ts b/src/renderer/controllers/duckLake.controller.ts
index 493d42fd..54485d46 100644
--- a/src/renderer/controllers/duckLake.controller.ts
+++ b/src/renderer/controllers/duckLake.controller.ts
@@ -1,6 +1,7 @@
 import { useQuery, useMutation, useQueryClient } from 'react-query';
 import { toast } from 'react-toastify';
 import { DuckLakeService } from '../services/duckLake.service';
+import { DuckLakeSnapshotParams } from '../../types/duckLake';
 import { cloudExplorerKeys } from './cloudExplorer.controller';

 // Query keys for React Query cache management
@@ -18,6 +19,8 @@ export const duckLakeKeys = {
     [...duckLakeKeys.table(instanceId, tableName), 'details'] as const, // Phase 8b
   snapshots: (instanceId: string, tableName: string) =>
     [...duckLakeKeys.table(instanceId, tableName), 'snapshots'] as const,
+  instanceSnapshots: (instanceId: string, params?: DuckLakeSnapshotParams) =>
+    [...duckLakeKeys.instance(instanceId), 'snapshots', params] as const,
   maintenanceTasks: (instanceId: string) =>
     [...duckLakeKeys.instance(instanceId), 'maintenance'] as const,
   maintenanceTask: (taskId: string) =>
@@ -193,6 +196,19 @@ export function useDuckLakeSnapshots(instanceId: string, tableName: string) {
   });
 }

+export function useDuckLakeInstanceSnapshots(
+  instanceId: string,
+  params: DuckLakeSnapshotParams,
+) {
+  return useQuery({
+    queryKey: duckLakeKeys.instanceSnapshots(instanceId, params),
+    queryFn: () => DuckLakeService.listInstanceSnapshots(instanceId, params),
+    enabled: !!instanceId,
+    keepPreviousData: true,
+    staleTime: 30000,
+  });
+}
+
 /**
  * Get comprehensive table details from DuckLake metadata catalog (Phase 8b)
  * Fetches complete table information including schema, statistics, data files,
  * partitions, snapshots, and tags
  */
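Because the params object is part of the query key, each page is cached separately, and `keepPreviousData` keeps the previous page on screen while the next one loads. A minimal consumer of the new hook (hypothetical component; `DuckLakeSnapshotDetail` field names are not shown in this diff, so rendering is left generic):

```tsx
import React from 'react';
import { useDuckLakeInstanceSnapshots } from '../controllers/duckLake.controller';

const SnapshotHistory: React.FC<{ instanceId: string }> = ({ instanceId }) => {
  const [page, setPage] = React.useState(1);
  const pageSize = 100;
  const { data, isFetching } = useDuckLakeInstanceSnapshots(instanceId, {
    page,
    pageSize,
  });

  const hasMore = data ? page * pageSize < data.total : false;
  return (
    <div>
      <span>{data ? `${data.data.length} of ${data.total} snapshots` : '…'}</span>
      <button disabled={!hasMore || isFetching} onClick={() => setPage(page + 1)}>
        Next page
      </button>
    </div>
  );
};
```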
diff --git a/src/renderer/hooks/useTabManager.ts b/src/renderer/hooks/useTabManager.ts
index 8b32dbc7..c87d91e9 100644
--- a/src/renderer/hooks/useTabManager.ts
+++ b/src/renderer/hooks/useTabManager.ts
@@ -168,6 +168,7 @@ export interface UseTabManagerReturn {
   reorderTabs: (fromIndex: number, toIndex: number) => void;
   reset: () => void;
   getTabByPath: (path: string) => EditorTabState | undefined;
+  renameTab: (oldPath: string, newPath: string) => void;
   refreshTabContentByPath: (path: string) => Promise<void>;
   // Unsaved changes dialog support
   pendingClose: PendingCloseState | null;
@@ -270,15 +271,41 @@ const useTabManager = (projectId?: string): UseTabManagerReturn => {
     [performClose],
   );

-  const closeTabByPath = React.useCallback(
-    (path: string) => {
-      const targetTab = tabsRef.current.find((tab) => tab.path === path);
-      if (targetTab) {
-        closeTab(targetTab.id);
+  const closeTabByPath = React.useCallback((path: string) => {
+    const currentTabs = tabsRef.current;
+    const tabsToClose = currentTabs.filter(
+      (tab) =>
+        tab.path === path ||
+        tab.path.startsWith(`${path}/`) ||
+        tab.path.startsWith(`${path}\\`),
+    );
+
+    if (tabsToClose.length === 0) {
+      return;
+    }
+
+    const idsToClose = new Set(tabsToClose.map((t) => t.id));
+
+    setTabs((current) => {
+      const nextTabs = current.filter((t) => !idsToClose.has(t.id));
+      return nextTabs;
+    });
+
+    setActiveTabId((currentId) => {
+      if (currentId && idsToClose.has(currentId)) {
+        // The active tab was closed: try to find a neighbor among the
+        // remaining tabs. tabsRef may not have been updated yet, so derive
+        // the remaining tabs from the current ref value.
+        const remaining = tabsRef.current.filter((t) => !idsToClose.has(t.id));
+        if (remaining.length === 0) {
+          return null;
+        }
+        // Fall back to the last available tab (mirrors performClose).
+        return remaining[remaining.length - 1].id;
       }
-    },
-    [closeTab],
-  );
+      return currentId;
+    });
+  }, []);

   const updateTab = React.useCallback(
     (tabId: EditorTabId, updater: (tab: EditorTabState) => EditorTabState) => {
@@ -365,6 +392,39 @@ const useTabManager = (projectId?: string): UseTabManagerReturn => {
     [markTabSaved],
   );

+  const renameTab = React.useCallback((oldPath: string, newPath: string) => {
+    setTabs((current) =>
+      current.map((tab) => {
+        // Exact match - file rename
+        if (tab.path === oldPath) {
+          return {
+            ...tab,
+            path: newPath,
+            title: deriveTitleFromPath(newPath),
+          };
+        }
+
+        // Check whether the tab is a child of the renamed folder.
+        // Both separators are checked so Windows paths match too.
+        const isChild =
+          tab.path.startsWith(`${oldPath}/`) ||
+          tab.path.startsWith(`${oldPath}\\`);
+
+        if (isChild) {
+          // Replace the old path prefix with the new path
+          const updatedPath = tab.path.replace(oldPath, newPath);
+          return {
+            ...tab,
+            path: updatedPath,
+            title: deriveTitleFromPath(updatedPath),
+          };
+        }
+
+        return tab;
+      }),
+    );
+  }, []);
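One subtlety in `renameTab`: `String.prototype.replace` with a string pattern replaces the first occurrence anywhere in the string. The `startsWith` guard guarantees that occurrence is the prefix here, but slicing states the intent directly and stays safe if the guard ever changes — a sketch:

```ts
// Sketch: prefix rewrite without relying on replace()'s
// first-occurrence semantics.
function rewritePrefix(path: string, oldPrefix: string, newPrefix: string) {
  return newPrefix + path.slice(oldPrefix.length);
}

// Folder 'models/old' renamed to 'models/new':
rewritePrefix('models/old/old.sql', 'models/old', 'models/new');
// => 'models/new/old.sql'
// replace() happens to give the same result, but only because the
// startsWith guard pins the match to index 0.
```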
   const reorderTabs = React.useCallback(
     (fromIndex: number, toIndex: number) => {
       setTabs((current) => {
@@ -412,12 +472,15 @@ const useTabManager = (projectId?: string): UseTabManagerReturn => {
     let isEditable = false;
     let hasInitialContent = false;

+    isEditable = isEditableFile(path);
+    hasInitialContent = typeof options?.content === 'string';
+
     setTabs((current) => {
       const existingTab = current.find((tab) => tab.path === path);
       if (existingTab) {
         targetId = existingTab.id;
         // Update existing tab with new options
-        return current.map((tab) =>
+        const updated = current.map((tab) =>
           tab.path === path
             ? {
                 ...tab,
               }
             : tab,
         );
+        tabsRef.current = updated;
+        return updated;
       }

-      isEditable = isEditableFile(path);
-      hasInitialContent = typeof options?.content === 'string';
       const id = ensureUniqueId(path, current);
+      targetId = id;
+
       const initialContent =
         options?.content ?? (isEditable ? '' : getNonEditableFileMessage(path));
@@ -447,13 +512,13 @@ const useTabManager = (projectId?: string): UseTabManagerReturn => {
         isReadOnly: options?.isReadOnly ?? !isEditable,
       };

-      targetId = id;
       shouldLoadContent = isEditable && !hasInitialContent;

       const updated = [...current, newTab];
       tabsRef.current = updated;
       return updated;
     });

+    // The setTabs callback runs synchronously, so targetId is set before this line
     if (!targetId) {
       return null;
     }
@@ -615,6 +680,7 @@ const useTabManager = (projectId?: string): UseTabManagerReturn => {
     reorderTabs,
     reset,
     getTabByPath,
+    renameTab,
     refreshTabContentByPath,
     // Unsaved changes dialog support
     pendingClose,

diff --git a/src/renderer/screens/dataLake/index.tsx b/src/renderer/screens/dataLake/index.tsx
index cbdd14cf..836e7bb2 100644
--- a/src/renderer/screens/dataLake/index.tsx
+++ b/src/renderer/screens/dataLake/index.tsx
@@ -365,17 +365,7 @@ const DataLake: React.FC = () => {
   };

   return (
-
-        instances={instances.map((i) => ({
-          id: i.id,
-          name: i.name,
-          status: i.status,
-        }))}
-      />
-    }
-  >
+    }>
       {renderContent()}

diff --git a/src/renderer/screens/projectDetails/index.tsx b/src/renderer/screens/projectDetails/index.tsx
index 6cb62920..9d0bc34f 100644
--- a/src/renderer/screens/projectDetails/index.tsx
+++ b/src/renderer/screens/projectDetails/index.tsx
@@ -110,6 +110,7 @@ const ProjectDetails: React.FC = () => {
     reorderTabs,
     reset,
     getTabByPath,
+    renameTab,
     refreshTabContentByPath,
     // Unsaved changes dialog support
     pendingClose,
@@ -745,6 +746,12 @@ const ProjectDetails: React.FC = () => {
       setSelectedFilePath(filePath);
       openTab(filePath);
     }}
+    onRenameFile={(oldPath, newPath) => {
+      renameTab(oldPath, newPath);
+      if (activeTab?.path === oldPath || selectedFilePath === oldPath) {
+        setSelectedFilePath(newPath);
+      }
+    }}
     // Source Control Monaco Editor Integration
     onSourceControlOpenFile={(filePath: string) => {
       setSelectedFilePath(filePath);
diff --git a/src/renderer/screens/sql/index.tsx b/src/renderer/screens/sql/index.tsx
index 03bef3d7..fb3b4b70 100644
--- a/src/renderer/screens/sql/index.tsx
+++ b/src/renderer/screens/sql/index.tsx
@@ -1,6 +1,9 @@
 import React, { useState } from 'react';
 import SplitPane from 'split-pane-react';
-import { Box, useTheme } from '@mui/material';
+import { Box, Button, CircularProgress, useTheme } from '@mui/material';
+import { Stop } from '@mui/icons-material';
+import { toast } from 'react-toastify';
+import { connectorsServices } from '../../services';
 import { useGetConnectionById, useGetSelectedProject } from '../../controllers';
 import { useAppContext, useLocalStorage } from '../../hooks';
 import { QueryHistoryType } from '../../../types/frontend';
@@ -35,9 +38,10 @@ const Sql = () => {
     JSON.stringify([]),
   );
   const [sizes, setSizes] = useState<[number, number]>([
-    window.innerHeight - 250,
-    250,
+    window.innerHeight - 350,
+    350,
   ]);
+  const [activeQueryId, setActiveQueryId] = useState(null);

   const connectionInput = React.useMemo(() => {
     return connection ? getConnectionInput(connection) : undefined;
@@ -47,6 +51,20 @@ const Sql = () => {
     return schema ? utils.generateMonacoCompletions(schema) : [];
   }, [schema]);

+  const handleCancelQuery = async () => {
+    if (activeQueryId) {
+      try {
+        await connectorsServices.cancelQuery(activeQueryId);
+        toast.info('Query execution cancelled');
+      } catch (e) {
+        toast.error('Failed to cancel query');
+      } finally {
+        setActiveQueryId(null);
+        setLoadingQuery(false);
+      }
+    }
+  };
+
   const renderSash = () => (

       setLoadingQuery={setLoadingQuery}
       setQueryResults={setQueryResults}
       setError={setError}
+      onQueryStart={(id) => setActiveQueryId(id)}
     />
         background: theme.palette.background.paper,
       }}
     >
-      {loadingQuery && }
+      {loadingQuery && (
+
+
+
+      )}
       {!loadingQuery && error && (
       )}

diff --git a/src/renderer/screens/sql/queryResult.tsx b/src/renderer/screens/sql/queryResult.tsx
index 818496cb..9531e2ec 100644
--- a/src/renderer/screens/sql/queryResult.tsx
+++ b/src/renderer/screens/sql/queryResult.tsx
@@ -1,8 +1,32 @@
 import React from 'react';
+import { styled } from '@mui/material/styles';
+import { Box, Typography } from '@mui/material';
+import { CheckCircleOutline } from '@mui/icons-material';
 import { QueryResponseType } from '../../../types/backend';
 import { CustomTable } from '../../components/customTable';
 import { underscoreToTitleCase } from '../../helpers/utils';

+const SuccessContainer = styled(Box)(({ theme }) => ({
+  backgroundColor: theme.palette.background.paper,
+  color: theme.palette.text.primary,
+  border: `1px solid ${theme.palette.divider}`,
+  borderRadius: theme.shape.borderRadius,
+  padding: theme.spacing(2),
+  display: 'flex',
+  alignItems: 'center',
+  gap: theme.spacing(2),
+  boxShadow: theme.shadows[2],
+  margin: theme.spacing(2, 0),
+  width: '100%',
+}));
+
+const IconWrapper = styled(Box)(() => ({
+  display: 'flex',
+  alignItems: 'center',
+  justifyContent: 'center',
+  fontSize: '2rem',
+}));
+
 type Props = {
   results: QueryResponseType;
 };
@@ -16,10 +40,65 @@ export const QueryResult: React.FC<Props> = ({ results }) => {
     return results.data ?? [];
   }, [results]);

+  // Use the isCommand flag if available, otherwise fall back to a field check
+  const isCommand =
+    results.isCommand ||
+    ((!results.fields || results.fields.length === 0) && results.success);
+
+  // Show the row count for DML, or for generic commands with rowCount > 0
+  const showRowCount =
+    results.commandType === 'DML' ||
+    (results.commandType !== 'DDL' &&
+      results.rowCount !== undefined &&
+      results.rowCount > 0);
+
+  if (isCommand) {
+    return (
+
+          Command executed successfully
+
+        {showRowCount && results.rowCount !== undefined && (
+            {`${results.rowCount} row${
+              results.rowCount !== 1 ? 's' : ''
+            } affected`}
+        )}
+        {results.duration !== undefined && (
+            Duration:{' '}
+            {results.duration! > 1000
+              ? `${(results.duration! / 1000).toFixed(2)}s`
+              : `${results.duration!}ms`}
+        )}
+
+    );
+  }
+
   return (
       id="query-result"
-      name="Query Result"
+      name=""
+      toolbarContent={
+        results.duration !== undefined ? (
+            {results.duration > 1000
+              ? `${(results.duration / 1000).toFixed(2)}s`
+              : `${results.duration}ms`}
+        ) : null
+      }
       rows={rows as any}
       columns={columns.map((column) => ({
         id: column,
@@ -28,7 +107,7 @@ export const QueryResult: React.FC<Props> = ({ results }) => {
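The ms-versus-seconds formatting now appears three times (success panel, duration line, table toolbar). A tiny shared helper would collapse them; this is a sketch, not part of the diff:

```ts
// Sketch: shared duration formatter for query results.
export function formatDuration(ms: number): string {
  return ms > 1000 ? `${(ms / 1000).toFixed(2)}s` : `${ms}ms`;
}

formatDuration(250);  // '250ms'
formatDuration(1500); // '1.50s'
```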
   const { data } = await client.post<
-    { connection: ConnectionInput; query: string },
+    {
+      connection: ConnectionInput;
+      query: string;
+      projectName: string;
+      queryId?: string;
+    },
     QueryResponseType
   >('connector:query', body);
   return data;
 };

+export const cancelQuery = async (queryId: string): Promise<void> => {
+  await client.post('connector:cancel-query', queryId);
+};
+
 export const setConnectionEnvVariable = async (
   key: string,
   value: string,

diff --git a/src/renderer/services/duckLake.service.ts b/src/renderer/services/duckLake.service.ts
index f4948dfd..3560ca31 100644
--- a/src/renderer/services/duckLake.service.ts
+++ b/src/renderer/services/duckLake.service.ts
@@ -10,11 +10,14 @@ import {
   DuckLakeInstanceHealth,
   DuckLakeTableInfo,
   DuckLakeSnapshotInfo,
+  DuckLakeSnapshotDetail,
   DuckLakeQueryRequest,
   DuckLakeQueryResult,
   DuckLakeMaintenanceTask,
   DuckLakeCatalogConfig,
   DuckLakeStorageConfig,
+  DuckLakeSnapshotParams,
+  DuckLakePaginatedResult,
 } from '../../types/duckLake';

 export namespace DuckLakeService {
@@ -152,6 +155,17 @@ export namespace DuckLakeService {
     );
   }

+  export async function listInstanceSnapshots(
+    instanceId: string,
+    params?: DuckLakeSnapshotParams,
+  ): Promise<DuckLakePaginatedResult<DuckLakeSnapshotDetail>> {
+    return window.electron.ipcRenderer.invoke(
+      'ducklake:instance:listSnapshots',
+      instanceId,
+      params,
+    );
+  }
+
   export async function restoreSnapshot(
     instanceId: string,
     tableName: string,

diff --git a/src/types/backend.ts b/src/types/backend.ts
index 56bddf93..01c51e68 100644
--- a/src/types/backend.ts
+++ b/src/types/backend.ts
@@ -381,6 +381,9 @@ export type QueryResponseType = {
   fields?: { name: string; type: number }[];
   rowCount?: number; // Add rowCount for affected rows in INSERT/UPDATE/DELETE operations
   error?: string;
+  duration?: number;
+  isCommand?: boolean;
+  commandType?: string;
 };

 export type CliUpdateItem = {
@@ -490,6 +493,7 @@ export type ExecuteStatementType = {
   connection: ConnectionInput;
   query: string;
   projectName: string;
+  queryId?: string;
 };

 // AI Provider Types
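`commandType` is typed as a bare `string`, though the renderer only ever produces `'DDL' | 'DML' | 'SELECT'` and `queryResult.tsx` compares against those literals. A narrower type would let the compiler catch typos in those comparisons; a sketch, not part of the diff:

```ts
// Sketch: literal union instead of string for commandType.
export type CommandType = 'DDL' | 'DML' | 'SELECT';

export type QueryResponseTypeStrict = Omit<QueryResponseType, 'commandType'> & {
  commandType?: CommandType;
};

// results.commandType === 'DLL'
// ^ with the union type, this typo becomes a compile-time error,
//   since the literal 'DLL' has no overlap with CommandType.
```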
diff --git a/src/types/duckLake.ts b/src/types/duckLake.ts
index b2f1b280..2de6b9b9 100644
--- a/src/types/duckLake.ts
+++ b/src/types/duckLake.ts
@@ -121,9 +121,6 @@ export interface DuckLakeSnapshotDetail {
   nextCatalogId: number;
   nextFileId: number;
   changesMade?: string;
-  author?: string;
-  commitMessage?: string;
-  commitExtraInfo?: string;
 }

 /**
@@ -227,6 +224,29 @@ export type DuckLakeInstanceStatus =
   | 'error'
   | 'connecting';

+// Health and Status Types
+export interface DuckLakeInstanceMetrics {
+  tableCount: number;
+  totalRows: number;
+  totalSize: number;
+  snapshotCount: number;
+  lastActivity?: Date;
+}
+
+export interface DuckLakeInstanceHealth {
+  instanceId: string;
+  status: DuckLakeInstanceStatus;
+  lastChecked: Date | string;
+  catalogConnected: boolean;
+  extensionLoaded: boolean;
+  dataPathAccessible: boolean;
+  storageConnected?: boolean;
+  storageLocation?: string;
+  errors?: string[];
+  warnings?: string[];
+  metrics?: DuckLakeInstanceMetrics;
+}
+
 export interface DuckLakeRuntimeOptions {
   maxMemory?: string;
   threads?: number;
@@ -241,8 +261,8 @@ export interface DuckLakeInstance {
   dataPath: string;
   storage?: DuckLakeStorageConfig;
   catalog: DuckLakeCatalogConfig;
-  createdAt: Date;
-  updatedAt: Date;
+  createdAt: Date | string;
+  updatedAt: Date | string;
   /**
    * Operational status of the instance configuration
    *
@@ -264,6 +284,16 @@ export interface DuckLakeInstance {
   status: DuckLakeInstanceStatus;
   tags?: string[];
   runtimeOptions?: DuckLakeRuntimeOptions;
+  runtime?: DuckLakeRuntimeOptions; // Alias for UI components
+
+  // Hydrated data for UI
+  health?: DuckLakeInstanceHealth;
+  stats?: {
+    tableCount: number;
+    totalSize: number;
+    lastQuery: string;
+    queryCount: number;
+  };
 }

 // Snapshot and Time Travel Types
@@ -376,29 +406,6 @@ export interface DuckLakeQueryResult {
   snapshotId?: string;
 }

-// Health and Status Types
-export interface DuckLakeInstanceMetrics {
-  tableCount: number;
-  totalRows: number;
-  totalSize: number;
-  snapshotCount: number;
-  lastActivity?: Date;
-}
-
-export interface DuckLakeInstanceHealth {
-  instanceId: string;
-  status: DuckLakeInstanceStatus;
-  lastChecked: Date;
-  catalogConnected: boolean;
-  extensionLoaded: boolean;
-  dataPathAccessible: boolean;
-  storageConnected?: boolean;
-  storageLocation?: string;
-  errors?: string[];
-  warnings?: string[];
-  metrics?: DuckLakeInstanceMetrics;
-}
-
 // Configuration and Settings Types
 export interface DuckLakeMaintenanceSchedule {
   enabled: boolean;
@@ -480,6 +487,19 @@ export interface DuckLakeTableDetails {
   columnTags: DuckLakeColumnTag[];
 }

+export interface DuckLakeSnapshotParams {
+  page: number;
+  pageSize: number;
+  filter?: string;
+}
+
+export interface DuckLakePaginatedResult<T> {
+  data: T[];
+  total: number;
+  page: number;
+  pageSize: number;
+}
+
 // IPC Channel Types (for type-safe IPC communication)
 export interface DuckLakeIpcChannels {
   // Instance Management
@@ -566,6 +586,12 @@ export interface DuckLakeIpcChannels {
     tableName: string,
   ) => Promise;

+  // Instance Snapshots (Phase: History Fix)
+  'ducklake:instance:listSnapshots': (
+    instanceId: string,
+    params: DuckLakeSnapshotParams,
+  ) => Promise<DuckLakePaginatedResult<DuckLakeSnapshotDetail>>;
+
   // Cloud Connection Management (Phase: Connection Integration)
   'ducklake:connection:list': () => Promise; // Returns CloudConnection[]
   'ducklake:connection:get': (id: string) => Promise; // Returns CloudConnection | null

diff --git a/src/types/ipc.ts b/src/types/ipc.ts
index 7ffe68a0..1de524d3 100644
--- a/src/types/ipc.ts
+++ b/src/types/ipc.ts
@@ -65,7 +65,8 @@ export type ConnectorChannels =
   | 'connector:list'
   | 'connector:get'
   | 'connector:update'
-  | 'connector:delete';
+  | 'connector:delete'
+  | 'connector:cancel-query';

 export type SourcesChannels =
   | 'sources:create'
@@ -262,6 +263,7 @@ export type DuckLakeChannels =

   // Snapshot Management
   | 'ducklake:snapshot:list'
+  | 'ducklake:instance:listSnapshots'
   | 'ducklake:snapshot:restore'

   // Query Execution
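A quick illustration of the new pagination contract with hypothetical values; `total` lets callers derive the page count without an extra round trip:

```ts
import {
  DuckLakePaginatedResult,
  DuckLakeSnapshotParams,
} from './duckLake';

// Sketch: derive paging metadata from a DuckLakePaginatedResult.
function pageCount(result: DuckLakePaginatedResult<unknown>): number {
  return Math.ceil(result.total / result.pageSize);
}

const params: DuckLakeSnapshotParams = { page: 1, pageSize: 100 };
// A result of { total: 250, pageSize: 100, ... } spans
// Math.ceil(250 / 100) = 3 pages.
```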