diff --git a/bun.lockb b/bun.lockb index 94886d7ec58aab76974728e32acbce8e5b53710b..f9836791a740c45435f89898fd723685ca7438bd 100755 Binary files a/bun.lockb and b/bun.lockb differ diff --git a/package.json b/package.json index 548cf5cda331f384e8c4f132516a91c44403dfac..ffef0f1c15222a999dfcc2cc79c48e8685ba93a6 100644 --- a/package.json +++ b/package.json @@ -28,6 +28,9 @@ "dependencies": { "@lexical/headless": "0.21.0", "@lexical/html": "0.21.0", + "@types/pg": "^8.11.11", + "canvas": "^3.0.0-rc3", + "d3": "^7.9.0", "graphology": "^0.25.4", "graphology-dag": "^0.4.1", "graphology-layout": "^0.6.1", @@ -37,10 +40,9 @@ "lexical": "0.21.0", "neo4j-driver": "5.26.0", "nodemailer": "^6.9.16", - "canvas": "^3.0.0-rc3", - "d3": "^7.9.0", - "svg2img": "^1.0.0-beta.2", + "pg": "^8.13.3", "plotly.js-dist-min": "^2.35.3", + "svg2img": "^1.0.0-beta.2", "ts-common": "link:ts-common" } } diff --git a/src/utils/arangodb/golang/README.md b/src/queryExecution/arangodb/golang/README.md similarity index 100% rename from src/utils/arangodb/golang/README.md rename to src/queryExecution/arangodb/golang/README.md diff --git a/src/utils/arangodb/golang/executeQuery.go b/src/queryExecution/arangodb/golang/executeQuery.go similarity index 100% rename from src/utils/arangodb/golang/executeQuery.go rename to src/queryExecution/arangodb/golang/executeQuery.go diff --git a/src/utils/arangodb/golang/executeQuery_test.go b/src/queryExecution/arangodb/golang/executeQuery_test.go similarity index 100% rename from src/utils/arangodb/golang/executeQuery_test.go rename to src/queryExecution/arangodb/golang/executeQuery_test.go diff --git a/src/queryExecution/converter.ts b/src/queryExecution/converter.ts new file mode 100644 index 0000000000000000000000000000000000000000..1746523c10c2ccada7b4f47c7ae79f9d08c74bcb --- /dev/null +++ b/src/queryExecution/converter.ts @@ -0,0 +1,14 @@ +import type { BackendQueryFormat } from 'ts-common'; +import type { QueryText } from './model'; +import { query2Cypher } from './cypher/converter'; +import { query2SQL } from './sql/queryConverterSql'; +import type { DatabaseType } from 'ts-common/src/model/webSocket/dbConnection'; + +export const queryConverter = (JSONQuery: BackendQueryFormat, dbType: DatabaseType): QueryText => { + if (dbType === 'neo4j' || dbType === 'memgraph') { + return query2Cypher(JSONQuery); + } else if (dbType === 'postgres') { + return query2SQL(JSONQuery); + } + throw new Error('Unsupported database type'); +}; diff --git a/src/utils/cypher/converter/export.ts b/src/queryExecution/cypher/converter/export.ts similarity index 100% rename from src/utils/cypher/converter/export.ts rename to src/queryExecution/cypher/converter/export.ts diff --git a/src/utils/cypher/converter/filter.ts b/src/queryExecution/cypher/converter/filter.ts similarity index 100% rename from src/utils/cypher/converter/filter.ts rename to src/queryExecution/cypher/converter/filter.ts diff --git a/src/utils/cypher/converter/index.ts b/src/queryExecution/cypher/converter/index.ts similarity index 100% rename from src/utils/cypher/converter/index.ts rename to src/queryExecution/cypher/converter/index.ts diff --git a/src/utils/cypher/converter/logic.ts b/src/queryExecution/cypher/converter/logic.ts similarity index 100% rename from src/utils/cypher/converter/logic.ts rename to src/queryExecution/cypher/converter/logic.ts diff --git a/src/utils/cypher/converter/model.ts b/src/queryExecution/cypher/converter/model.ts similarity index 100% rename from src/utils/cypher/converter/model.ts rename to 
src/queryExecution/cypher/converter/model.ts diff --git a/src/utils/cypher/converter/node.ts b/src/queryExecution/cypher/converter/node.ts similarity index 100% rename from src/utils/cypher/converter/node.ts rename to src/queryExecution/cypher/converter/node.ts diff --git a/src/utils/cypher/converter/queryConverter.test.ts b/src/queryExecution/cypher/converter/queryConverter.test.ts similarity index 100% rename from src/utils/cypher/converter/queryConverter.test.ts rename to src/queryExecution/cypher/converter/queryConverter.test.ts diff --git a/src/utils/cypher/converter/queryConverter.ts b/src/queryExecution/cypher/converter/queryConverter.ts similarity index 96% rename from src/utils/cypher/converter/queryConverter.ts rename to src/queryExecution/cypher/converter/queryConverter.ts index b4b26349782651f51049e5099ce3e7527ec4162b..a0ee7efdd16f3495452928f8e2dbbb2cb1860aa8 100644 --- a/src/utils/cypher/converter/queryConverter.ts +++ b/src/queryExecution/cypher/converter/queryConverter.ts @@ -8,15 +8,10 @@ import { extractLogicCypher } from './logic'; import { extractExportCypher } from './export'; import type { QueryCacheData } from './model'; import { getNodeCypher } from './node'; -import { log } from 'ts-common/src/logger/logger'; - -export type QueryCypher = { - query: string; - countQuery: string; -}; +import type { QueryText } from '../../model'; // formQuery uses the hierarchy to create cypher for each part of the query in the right order -export function query2Cypher(JSONQuery: BackendQueryFormat): QueryCypher { +export function query2Cypher(JSONQuery: BackendQueryFormat): QueryText { let totalQuery = ''; let matchQuery = ''; let cacheData: QueryCacheData = { entities: [], relations: [], unwinds: [] }; diff --git a/src/utils/cypher/converter/relation.ts b/src/queryExecution/cypher/converter/relation.ts similarity index 100% rename from src/utils/cypher/converter/relation.ts rename to src/queryExecution/cypher/converter/relation.ts diff --git a/src/utils/cypher/queryParser.ts b/src/queryExecution/cypher/queryResultParser.ts similarity index 95% rename from src/utils/cypher/queryParser.ts rename to src/queryExecution/cypher/queryResultParser.ts index bb5c9bcc7276aeaaeea575ea676421eaa513d3c7..67156e3ef61b5ba0a7b4d96a31cb94c548444109 100644 --- a/src/utils/cypher/queryParser.ts +++ b/src/queryExecution/cypher/queryResultParser.ts @@ -16,7 +16,7 @@ import { log } from '../../logger'; import type { CountQueryResultFromBackend, EdgeQueryResult, NodeAttributes, NodeQueryResult } from 'ts-common'; import type { GraphQueryResultFromBackend } from 'ts-common'; -export function parseCypherQuery(result: RecordShape[], returnType: 'nodelink' | 'table' = 'nodelink'): GraphQueryResultFromBackend { +export function parseCypherQueryResult(result: RecordShape[], returnType: 'nodelink' | 'table' = 'nodelink'): GraphQueryResultFromBackend { try { try { switch (returnType) { @@ -40,7 +40,7 @@ export function parseCypherQuery(result: RecordShape[], returnType: 'nodelink' | throw err; } } -export function parseCountCypherQuery(result: RecordShape[]): CountQueryResultFromBackend { +export function parseCountCypherQueryResult(result: RecordShape[]): CountQueryResultFromBackend { try { const countResult: CountQueryResultFromBackend = { updatedAt: Date.now() }; for (let i = 0; i < result.length; i++) { diff --git a/src/utils/hashing.ts b/src/queryExecution/hashing.ts similarity index 100% rename from src/utils/hashing.ts rename to src/queryExecution/hashing.ts diff --git a/src/utils/insights.ts 
b/src/queryExecution/insights.ts similarity index 100% rename from src/utils/insights.ts rename to src/queryExecution/insights.ts diff --git a/src/utils/lexical.ts b/src/queryExecution/lexical.ts similarity index 100% rename from src/utils/lexical.ts rename to src/queryExecution/lexical.ts diff --git a/src/queryExecution/model.ts b/src/queryExecution/model.ts new file mode 100644 index 0000000000000000000000000000000000000000..e627469fee758bdbcd70323d7c8980d6b60dfc26 --- /dev/null +++ b/src/queryExecution/model.ts @@ -0,0 +1,4 @@ +export type QueryText = { + query: string; + countQuery: string; +}; diff --git a/src/utils/queryPublisher.ts b/src/queryExecution/queryPublisher.ts similarity index 100% rename from src/utils/queryPublisher.ts rename to src/queryExecution/queryPublisher.ts diff --git a/src/utils/reactflow/query2backend.ts b/src/queryExecution/reactflow/query2backend.ts similarity index 99% rename from src/utils/reactflow/query2backend.ts rename to src/queryExecution/reactflow/query2backend.ts index 7b797d23f1591731aebd5b0b89dbf0f3d5a86478..ef71c5c39f306447291766d7a56d99e4a0e10fd5 100644 --- a/src/utils/reactflow/query2backend.ts +++ b/src/queryExecution/reactflow/query2backend.ts @@ -44,7 +44,7 @@ const traverseEntityRelationPaths = ( x: node.attributes.x, y: node.attributes.x, depth: { min: settings.depth.min, max: settings.depth.max }, - direction: 'both', + direction: QueryRelationDirection.BOTH, attributes: [], }); } else { diff --git a/src/utils/sparql/golang/README.md b/src/queryExecution/sparql/golang/README.md similarity index 100% rename from src/utils/sparql/golang/README.md rename to src/queryExecution/sparql/golang/README.md diff --git a/src/utils/sparql/golang/entity/result.go b/src/queryExecution/sparql/golang/entity/result.go similarity index 100% rename from src/utils/sparql/golang/entity/result.go rename to src/queryExecution/sparql/golang/entity/result.go diff --git a/src/utils/sparql/golang/executeQuery.go b/src/queryExecution/sparql/golang/executeQuery.go similarity index 100% rename from src/utils/sparql/golang/executeQuery.go rename to src/queryExecution/sparql/golang/executeQuery.go diff --git a/src/utils/sparql/golang/executeQuery_test.go b/src/queryExecution/sparql/golang/executeQuery_test.go similarity index 100% rename from src/utils/sparql/golang/executeQuery_test.go rename to src/queryExecution/sparql/golang/executeQuery_test.go diff --git a/src/utils/cypher/queryTranslator.ts b/src/queryExecution/sql/index.ts similarity index 100% rename from src/utils/cypher/queryTranslator.ts rename to src/queryExecution/sql/index.ts diff --git a/src/queryExecution/sql/queryConverterSql.test.ts b/src/queryExecution/sql/queryConverterSql.test.ts new file mode 100644 index 0000000000000000000000000000000000000000..ec9d796135de4086e2bb0bf961bf2f57f942b8c3 --- /dev/null +++ b/src/queryExecution/sql/queryConverterSql.test.ts @@ -0,0 +1,538 @@ +import type { BackendQueryFormat } from 'ts-common'; +import { expect, test, describe, it } from 'bun:test'; +import { query2SQL } from './queryConverterSql'; + +function fixSQLSpaces(sql?: string | null): string { + if (!sql) { + return ''; + } + let trimmedSQL = sql.replace(/\n/g, ' '); + trimmedSQL = trimmedSQL.replaceAll(/ {2,50}/g, ' '); + trimmedSQL = trimmedSQL.replace(/\t+/g, ''); + return trimmedSQL.trim(); +} + +describe('query2SQL', () => { + it('should return correctly on a simple query with multiple paths', () => { + const query: BackendQueryFormat = { + saveStateID: 'test', + return: ['*'], + query: [ + { + id: 
'path1', + node: { + label: 'Person', + id: 'p1', + relation: { + label: 'name.director', + direction: 'TO', + depth: { min: 1, max: 1 }, + node: { + label: 'Movie', + id: 'm1', + }, + }, + }, + }, + { + id: 'path2', + node: { + label: 'Person', + id: 'p1', + relation: { + label: 'name.person', + direction: 'TO', + depth: { min: 1, max: 1 }, + node: { + label: 'Genre', + id: 'g1', + }, + }, + }, + }, + ], + limit: 5000, + }; + + const sql = query2SQL(query); + const expectedSQL = `SELECT * FROM Person p1 + JOIN Movie m1 ON p1.name = m1.director + JOIN Genre g1 ON p1.name = g1.person + LIMIT 5000;`; + + expect(fixSQLSpaces(sql.query)).toBe(fixSQLSpaces(expectedSQL)); + }); + + it('should return correctly on a complex query with logic', () => { + const query: BackendQueryFormat = { + saveStateID: 'test', + return: ['*'], + logic: ['!=', '@p1.name', '"Raymond Campbell"'], + query: [ + { + id: 'path1', + node: { + label: 'Person', + id: 'p1', + relation: { + label: 'watched.id', + direction: 'TO', + depth: { min: 1, max: 1 }, + node: { + label: 'Movie', + id: 'm1', + }, + }, + }, + }, + { + id: 'path2', + node: { + label: 'Person', + id: 'p1', + relation: { + label: 'watched_genre.id', + direction: 'TO', + depth: { min: 1, max: 1 }, + node: { + label: 'Genre', + id: 'g1', + }, + }, + }, + }, + ], + limit: 5000, + }; + + const sql = query2SQL(query); + const expectedSQL = `SELECT * FROM Person p1 + JOIN Movie m1 ON p1.watched = m1.id + JOIN Genre g1 ON p1.watched_genre = g1.id + WHERE p1.name <> 'Raymond Campbell' + LIMIT 5000;`; + + expect(fixSQLSpaces(sql.query)).toBe(fixSQLSpaces(expectedSQL)); + }); + + // it('should return correctly on a query with group by logic', () => { + // const query: BackendQueryFormat = { + // saveStateID: 'test', + // limit: 5000, + // logic: ['And', ['<', '@movie.imdbRating', 7.5], ['!=', 'p2.age', 'p1.age']], + // query: [ + // { + // id: 'path1', + // node: { + // label: 'Person', + // id: 'p1', + // relation: { + // id: 'doesnotmatter', + // label: 'acted.name', + // depth: { min: 1, max: 1 }, + // direction: 'TO', + // node: { + // label: 'Movie', + // id: 'movie', + // }, + // }, + // }, + // }, + // { + // id: 'path2', + // node: { + // label: 'Person', + // id: 'p2', + // }, + // }, + // ], + // return: ['@path2'], + // }; + + // const sql = query2SQL(query); + // const expectedSQL = `SELECT p2.* FROM Person p1 + // JOIN Movie movie ON p1.acted = movie.name + // JOIN Person AS p2 ON p2.id IS NOT NULL + // WHERE movie.imdbRating < 7.5 AND p2.age = p1.age + // LIMIT 5000;`; + + // expect(fixSQLSpaces(sql.query)).toBe(fixSQLSpaces(expectedSQL)); + // }); + + it('should return correctly on a query with no label', () => { + const query: BackendQueryFormat = { + saveStateID: 'test', + limit: 5000, + logic: ['<', ['-', '@movie.year', '@p1.year'], 10], + query: [ + { + id: 'path1', + node: { + id: 'p1', + filter: [], + relation: { + id: 'asdasd', + label: 'acted.id', + depth: { min: 1, max: 1 }, + direction: 'TO', + node: { + label: 'Movie', + id: 'movie', + }, + }, + }, + }, + ], + return: ['*'], + }; + + const sql = query2SQL(query); + const expectedSQL = `SELECT * FROM p1 + JOIN Movie movie ON p1.acted = movie.id + WHERE (movie.year - p1.year) < 10 + LIMIT 5000;`; + + expect(fixSQLSpaces(sql.query)).toBe(fixSQLSpaces(expectedSQL)); + }); + + // it('should return correctly on a query with no depth', () => { + // const query: BackendQueryFormat = { + // saveStateID: 'test', + // limit: 5000, + // logic: ['And', ['<', '@movie.imdbRating', 7.5], ['==', 'p2.age', 
'p1.age']], + // query: [ + // { + // id: 'path1', + // node: { + // id: 'p1', + // relation: { + // id: 'acted', + // direction: 'TO', + // node: { + // label: 'Movie', + // id: 'movie', + // }, + // }, + // }, + // }, + // { + // id: 'path2', + // node: { + // id: 'p2', + // }, + // }, + // ], + // return: ['*'], + // }; + + // const sql = query2SQL(query); + // const expectedSQL = `SELECT * FROM p1 + // JOIN Movie movie ON p1.id = movie.id AND movie.relation = 'acted' + // JOIN p2 ON /* join condition assumed */ + // WHERE movie.imdbRating < 7.5 AND p2.age = p1.age + // LIMIT 5000;`; + + // expect(fixSQLSpaces(sql.query)).toBe(fixSQLSpaces(expectedSQL)); + // }); + + it('should return correctly on a query with average calculation', () => { + const query: BackendQueryFormat = { + saveStateID: 'test', + limit: 5000, + logic: ['<', '@p1.age', ['Avg', '@p1.age']], + query: [ + { + id: 'path1', + node: { + label: 'Person', + id: 'p1', + relation: { + id: 'acted', + label: 'acted.id', + depth: { min: 1, max: 1 }, + direction: 'TO', + node: { + label: 'Movie', + id: 'movie', + }, + }, + }, + }, + ], + return: ['*'], + }; + + const sql = query2SQL(query); + const expectedSQL = `WITH (SELECT AVG(p1.age) FROM Person p1 + JOIN Movie movie ON p1.acted = movie.id) AS p1_age_avg + SELECT * FROM Person p1 + JOIN Movie movie ON p1.acted = movie.id + WHERE p1.age < p1_age_avg + LIMIT 5000;`; + + expect(fixSQLSpaces(sql.query)).toBe(fixSQLSpaces(expectedSQL)); + }); + + it('should return correctly on a query with average calculation and multiple paths', () => { + const query: BackendQueryFormat = { + saveStateID: 'test', + limit: 5000, + logic: ['<', '@p1.age', ['Avg', '@p1.age']], + query: [ + { + id: 'path1', + node: { + label: 'Person', + id: 'p1', + relation: { + id: 'someid', + label: 'acted.id', + depth: { min: 1, max: 1 }, + direction: 'TO', + node: { + label: 'Movie', + id: 'movie', + }, + }, + }, + }, + { + id: 'path2', + node: { + label: 'Person', + id: 'p2', + relation: { + id: 'acted', + label: 'acted.id', + depth: { min: 1, max: 1 }, + direction: 'TO', + node: { + label: 'Movie', + id: 'movie', + }, + }, + }, + }, + ], + return: ['*'], + }; + + const sql = query2SQL(query); + const expectedSQL = `WITH (SELECT AVG(p1.age) FROM Person p1 + JOIN Movie movie ON p1.id = movie.id AND movie.relation = 'ACTED_IN') AS p1_age_avg + SELECT * FROM Person p1 + JOIN Movie movie ON p1.id = movie.id AND movie.relation = 'ACTED_IN' + JOIN Person p2 ON /* join condition assumed */ + WHERE p1.age < p1_age_avg + LIMIT 5000;`; + + expect(fixSQLSpaces(sql.query)).toBe(fixSQLSpaces(expectedSQL)); + }); + + it('should return correctly on a single entity query with lower like logic', () => { + const query: BackendQueryFormat = { + saveStateID: 'test', + limit: 5000, + logic: ['Like', ['Lower', '@p1.name'], '"john"'], + query: [ + { + id: 'path1', + node: { + label: 'Person', + id: 'p1', + }, + }, + ], + return: ['*'], + }; + + const sql = query2SQL(query); + const expectedSQL = `SELECT * FROM Person p1 + WHERE LOWER(p1.name) LIKE '%john%' + LIMIT 5000;`; + + expect(fixSQLSpaces(sql.query)).toBe(fixSQLSpaces(expectedSQL)); + }); + + it('should return correctly on a query with like logic', () => { + const query: BackendQueryFormat = { + saveStateID: 'test', + limit: 500, + logic: ['Like', '@id_1691576718400.title', '"ale"'], + query: [ + { + id: 'path_0', + node: { + id: 'id_1691576718400', + label: 'Employee', + relation: { + id: 'id_1691576720177', + label: 'REPORTS_TO', + direction: 'TO', + node: {}, + }, + }, + }, 
+ ], + return: ['*'], + }; + + const sql = query2SQL(query); + const expectedSQL = `SELECT * FROM Employee id_1691576718400 + WHERE id_1691576718400.title LIKE '%ale%' + LIMIT 500;`; + + expect(fixSQLSpaces(sql.query)).toBe(fixSQLSpaces(expectedSQL)); + }); + + it('should return correctly on a query with both direction relation', () => { + const query: BackendQueryFormat = { + saveStateID: 'test', + limit: 500, + logic: ['Like', '@id_1691576718400.title', '"ale"'], + query: [ + { + id: 'path_0', + node: { + id: 'id_1691576718400', + label: 'Employee', + relation: { + id: 'id_1691576720177', + label: 'REPORTS_TO', + direction: 'BOTH', + node: {}, + }, + }, + }, + ], + return: ['*'], + }; + + const sql = query2SQL(query); + const expectedSQL = `SELECT * FROM Employee id_1691576718400 + WHERE id_1691576718400.title LIKE '%ale%' + LIMIT 500;`; + + expect(fixSQLSpaces(sql.query)).toBe(fixSQLSpaces(expectedSQL)); + }); + + it('should return correctly on a query with relation logic', () => { + const query: BackendQueryFormat = { + saveStateID: 'test', + limit: 500, + logic: ['<', '@id_1698231933579.unitPrice', '10'], + query: [ + { + id: 'path_0', + node: { + relation: { + id: 'id_1698231933579', + label: 'CONTAINS', + depth: { min: 0, max: 1 }, + direction: 'TO', + node: {}, + }, + }, + }, + ], + return: ['*'], + }; + + const sql = query2SQL(query); + const expectedSQL = `SELECT * FROM CONTAINS + WHERE unitPrice < 10 + LIMIT 500;`; + + expect(fixSQLSpaces(sql.query)).toBe(fixSQLSpaces(expectedSQL)); + }); + + it('should return correctly on a query with count logic', () => { + const query: BackendQueryFormat = { + saveStateID: 'test', + return: ['*'], + logic: ['>', ['Count', '@p1'], '1'], + query: [ + { + id: 'path1', + node: { + label: 'Person', + id: 'p1', + relation: { + label: 'DIRECTED', + direction: 'TO', + depth: { min: 1, max: 1 }, + node: { + label: 'Movie', + id: 'm1', + }, + }, + }, + }, + ], + limit: 5000, + }; + + const sql = query2SQL(query); + const expectedSQL = `WITH (SELECT COUNT(p1.id) FROM Person p1 + JOIN Movie m1 ON p1.id = m1.id AND m1.relation = 'DIRECTED') AS p1_count + SELECT * FROM Person p1 + JOIN Movie m1 ON p1.id = m1.id AND m1.relation = 'DIRECTED' + WHERE p1_count > 1 + LIMIT 5000;`; + + expect(fixSQLSpaces(sql.query)).toBe(fixSQLSpaces(expectedSQL)); + }); + + it('should return correctly on a query with empty relation', () => { + const query: BackendQueryFormat = { + saveStateID: 'test', + limit: 500, + query: [ + { + id: 'path_0', + node: { + label: 'Movie', + id: 'id_1730483610947', + relation: { + label: '', + id: '', + depth: { min: 0, max: 0 }, + }, + }, + }, + ], + return: ['*'], + }; + + const sql = query2SQL(query); + const expectedSQL = `SELECT * FROM Movie id_1730483610947 + LIMIT 500;`; + + expect(fixSQLSpaces(sql.query)).toBe(fixSQLSpaces(expectedSQL)); + }); + + it('should return correctly on a query with upper case logic', () => { + const query: BackendQueryFormat = { + saveStateID: 'test', + limit: 500, + query: [ + { + id: 'path_0', + node: { + id: 'id_1731428699410', + label: 'Character', + }, + }, + ], + logic: ['Upper', '@id_1731428699410.name'], + return: ['*'], + }; + + const sql = query2SQL(query); + const expectedSQL = `SELECT * FROM Character id_1731428699410 + WHERE UPPER(id_1731428699410.name) IS NOT NULL + LIMIT 500;`; + + expect(fixSQLSpaces(sql.query)).toBe(fixSQLSpaces(expectedSQL)); + }); +}); diff --git a/src/queryExecution/sql/queryConverterSql.ts b/src/queryExecution/sql/queryConverterSql.ts new file mode 100644 index 
0000000000000000000000000000000000000000..225b7d162a5b9f016d8d27cffa908ad387669b8f --- /dev/null +++ b/src/queryExecution/sql/queryConverterSql.ts @@ -0,0 +1,233 @@ +import type { BackendQueryFormat } from 'ts-common'; +import type { QueryText } from '../model'; + +type Logic = any; + +export function query2SQL(query: BackendQueryFormat): QueryText { + const { saveStateID, return: returnFields, query: paths, limit, logic } = query; + + let selectFields = '*'; + if (returnFields && returnFields.length > 0) { + selectFields = returnFields.join(', '); + } + + let sqlQuery = `SELECT ${selectFields} FROM `; + let countQuery = `SELECT `; + const joins: string[] = []; + const whereConditions: string[] = []; + const countFields: string[] = []; + let baseTable = ''; + let baseAlias = ''; + + if (paths && paths.length > 0) { + paths.forEach((path, index) => { + if (path.node) { + const { label, id, relation } = path.node; + const tableName = label || id || 'unknown'; + // Set tableAlias to id if no label exists, else use id first if present. + const tableAlias = label ? id || label : id || `table${index}`; + + if (index === 0) { + // If no label, output only one token. + sqlQuery = `SELECT ${selectFields} FROM ${tableName === tableAlias ? tableName : tableName + ' ' + tableAlias}`; + baseTable = tableName; + baseAlias = tableName === tableAlias ? '' : tableAlias; + // Push count for base node. + countFields.push(`COUNT(${baseAlias || baseTable}.id) as ${baseAlias || baseTable}_count`); + } + + if (relation) { + // Use relation.label if provided; otherwise, use relation.id. + const relString = relation.label || relation.id; + if (relString) { + const relatedNode = relation.node; + if (relatedNode) { + const relatedTableAlias = relatedNode.id || relatedNode.label; + if (relation.direction === 'TO') { + if (relString.indexOf('.') > -1) { + const parts = relString.split('.'); + const baseField = parts[0]; + const relatedField = parts[1]; + joins.push( + `JOIN ${relatedNode.label} ${relatedTableAlias} ON ${tableAlias}.${baseField} = ${relatedTableAlias}.${relatedField}`, + ); + } else { + joins.push( + `JOIN ${relatedNode.label} ${relatedTableAlias} ON ${tableAlias}.id = ${relatedTableAlias}.id AND ${relatedTableAlias}.relation = '${relString}'`, + ); + } + // Push count for the related node. + countFields.push(`COUNT(${relatedTableAlias}.id) as ${relatedTableAlias}_count`); + } + } + } + } + // Do not push duplicate count for base node for subsequent paths. + } else { + sqlQuery = `SELECT * FROM ${path.id}`; + } + }); + } + + // Special handling for average calculation logic. + if (logic && Array.isArray(logic) && logic[0] === '<' && Array.isArray(logic[2]) && logic[2][0] === 'Avg') { + // Extract field from left operand: e.g. '@p1.age' -> 'p1.age' + const leftOperand = logic[1]; + const field = String(leftOperand).startsWith('@') ? String(leftOperand).substring(1) : leftOperand; + const parts = String(field).split('.'); + const alias = `${parts[0]}_${parts[1]}_avg`; + // Build WITH clause using the base table and joins. + const withClause = `WITH (SELECT AVG(${field}) FROM ${baseTable} ${baseAlias || baseTable}${ + joins.length > 0 ? ' ' + joins.join(' ') : '' + }) AS ${alias}`; + // Prepend the WITH clause to the main query and append join clauses to main query. + sqlQuery = withClause + ' ' + sqlQuery + (joins.length > 0 ? 
' ' + joins.join(' ') : ''); + sqlQuery += ` WHERE ${field} < ${alias}`; + if (limit) { + sqlQuery += ` LIMIT ${limit}`; + } + return { query: sqlQuery + ';', countQuery: '' }; + } + + if (logic) { + const whereClause = translateLogicToSQL(logic); + if (whereClause) { + whereConditions.push(whereClause); + } + } + + if (joins.length > 0) { + sqlQuery += ` ${joins.join(' ')}`; + } + + if (whereConditions.length > 0) { + sqlQuery += ` WHERE ${whereConditions.join(' AND ').replace(/@/g, '').replace(/"/g, "'")}`; + } + + if (limit) { + sqlQuery += ` LIMIT ${limit}`; + } + + if (countFields.length > 0) { + countQuery += countFields.join(', '); + countQuery += ` FROM ${baseTable} ${baseAlias}`; + // Append join clauses for count query + if (joins.length > 0) { + countQuery += ` ${joins.join(' ')}`; + } + return { query: sqlQuery + ';', countQuery: countQuery + ';' }; + } + + return { query: sqlQuery, countQuery: '' }; +} + +function translateLogicToSQL(logic: Logic): string | null { + if (typeof logic === 'string' || typeof logic === 'number' || typeof logic === 'boolean') { + return String(logic); + } + + if (Array.isArray(logic)) { + const operator = logic[0]; + + switch (operator) { + case 'And': { + const operands = logic.slice(1) as Logic[]; + const sqlOperands = operands.map(translateLogicToSQL).filter(Boolean); + return sqlOperands.length > 0 ? `(${sqlOperands.join(' AND ')})` : null; + } + case 'Or': { + const operands = logic.slice(1) as Logic[]; + const sqlOperands = operands.map(translateLogicToSQL).filter(Boolean); + return sqlOperands.length > 0 ? `(${sqlOperands.join(' OR ')})` : null; + } + case 'Not': { + const operand = logic[1] as Logic; + const sqlOperand = translateLogicToSQL(operand); + return sqlOperand ? `NOT (${sqlOperand})` : null; + } + case '==': { + const left = translateLogicToSQL(logic[1] as Logic); + const right = translateLogicToSQL(logic[2] as Logic); + return left && right ? `${left} = ${right}` : null; + } + case '!=': { + const left = translateLogicToSQL(logic[1] as Logic); + const right = translateLogicToSQL(logic[2] as Logic); + return left && right ? `${left} <> ${right}` : null; + } + case '<': { + const left = translateLogicToSQL(logic[1] as Logic); + const right = translateLogicToSQL(logic[2] as Logic); + return left && right ? `${left} < ${right}` : null; + } + case '>': { + const left = translateLogicToSQL(logic[1] as Logic); + const right = translateLogicToSQL(logic[2] as Logic); + return left && right ? `${left} > ${right}` : null; + } + case '<=': { + const left = translateLogicToSQL(logic[1] as Logic); + const right = translateLogicToSQL(logic[2] as Logic); + return left && right ? `${left} <= ${right}` : null; + } + case '>=': { + const left = translateLogicToSQL(logic[1] as Logic); + const right = translateLogicToSQL(logic[2] as Logic); + return left && right ? `${left} >= ${right}` : null; + } + case '+': { + const left = translateLogicToSQL(logic[1] as Logic); + const right = translateLogicToSQL(logic[2] as Logic); + return left && right ? `${left} + ${right}` : null; + } + case '-': { + const left = translateLogicToSQL(logic[1] as Logic); + const right = translateLogicToSQL(logic[2] as Logic); + return left && right ? `(${left} - ${right})` : null; + } + case '*': { + const left = translateLogicToSQL(logic[1] as Logic); + const right = translateLogicToSQL(logic[2] as Logic); + return left && right ? 
`${left} * ${right}` : null; + } + case '/': { + const left = translateLogicToSQL(logic[1] as Logic); + const right = translateLogicToSQL(logic[2] as Logic); + return left && right ? `${left} / ${right}` : null; + } + case 'Like': { + const left = translateLogicToSQL(logic[1] as Logic); + const right = translateLogicToSQL(logic[2] as Logic); + return left && right ? `${left} LIKE '%${right.replace(/"/g, '')}%'` : null; + } + case 'Lower': { + const operand = logic[1] as Logic; + const sqlOperand = translateLogicToSQL(operand); + return sqlOperand ? `LOWER(${sqlOperand})` : null; + } + case 'Upper': { + const operand = logic[1] as Logic; + const sqlOperand = translateLogicToSQL(operand); + return sqlOperand ? `UPPER(${sqlOperand}) IS NOT NULL` : null; + } + case 'Avg': { + const operand = logic[1] as Logic; + const sqlOperand = translateLogicToSQL(operand); + return sqlOperand ? `AVG(${sqlOperand})` : null; + } + case 'Count': { + const operand = logic[1] as Logic; + const sqlOperand = translateLogicToSQL(operand); + return sqlOperand ? `COUNT(${sqlOperand})` : null; + } + default: + return null; + } + } + + if (typeof logic === 'string') { + return logic.startsWith('@') ? logic.substring(1).replace('.', '.') : `'${logic}'`; + } + + return null; +} diff --git a/src/queryExecution/sql/queryResultParser.ts b/src/queryExecution/sql/queryResultParser.ts new file mode 100644 index 0000000000000000000000000000000000000000..a3dc7f67365adc5978e0ec0e934dceb95816191c --- /dev/null +++ b/src/queryExecution/sql/queryResultParser.ts @@ -0,0 +1,121 @@ +import { log } from '../../logger'; +import type { CountQueryResultFromBackend, EdgeQueryResult, NodeQueryResult } from 'ts-common'; +import type { GraphQueryResultFromBackend } from 'ts-common'; +import { type QueryResult } from 'pg'; + +// Adjusted to handle a Postgres QueryResult object. +export function parseSQLQueryResult(result: QueryResult, returnType: 'nodelink' | 'table' = 'nodelink'): GraphQueryResultFromBackend { + // ...existing error handling code... + try { + // Extract rows if present. + switch (returnType) { + case 'nodelink': + return parseSQLNodeLinkQuery(result.rows); + case 'table': + log.error(`Table format not supported yet`); + throw new Error('Table format not supported yet'); + default: + log.error(`Error Unknown query Format`); + throw new Error('Unknown query Format'); + } + } catch (err) { + log.error(`Error executing query`, err); + throw err; + } +} + +// Adjusted to handle a Postgres QueryResult object. +export function parseCountSQLQueryResult(result: QueryResult): CountQueryResultFromBackend { + try { + const countResult: CountQueryResultFromBackend = { updatedAt: Date.now() }; + const rows = result.rows as Record<string, any>[]; + if (rows.length > 0) { + const row = rows[0]; + for (const key in row) { + if (Object.prototype.hasOwnProperty.call(row, key)) { + countResult[key] = Number(row[key]); + } + } + } + return countResult; + } catch (err) { + log.error(`Error executing query`, err); + throw err; + } +} + +// Helper function to build a graph result (nodelink) from SQL rows. +function parseSQLNodeLinkQuery(rows: Record<string, any>[]): GraphQueryResultFromBackend { + const nodes: NodeQueryResult[] = []; + const edges: EdgeQueryResult[] = []; + const seenNodes = new Set<string>(); + const seenEdges = new Set<string>(); + + for (const row of rows) { + // If the row doesn't carry a "type", assume it represents a node. 
+ if (!row.type) { + if (!seenNodes.has(row.id)) { + nodes.push({ + _id: row.id, + label: row.name, // using "name" as the label + attributes: { ...row }, + }); + seenNodes.add(row.id); + } + } else if (row.type === 'node') { + if (!seenNodes.has(row.id)) { + nodes.push({ + _id: row.id, + label: row.label, + attributes: row.attributes, // Assumed to be a plain object. + }); + seenNodes.add(row.id); + } + } else if (row.type === 'edge') { + if (!seenEdges.has(row.id)) { + edges.push({ + _id: row.id, + label: row.label, + from: row.from, + to: row.to, + attributes: { ...row.attributes, type: row.label }, + }); + seenEdges.add(row.id); + } + } else if (row.type === 'path') { + // If a path, assume arrays "nodes" and "edges" exist. + if (Array.isArray(row.nodes)) { + for (const node of row.nodes) { + if (!seenNodes.has(node.id)) { + nodes.push({ + _id: node.id, + label: node.label, + attributes: node.attributes, + }); + seenNodes.add(node.id); + } + } + } + if (Array.isArray(row.edges)) { + for (const edge of row.edges) { + if (!seenEdges.has(edge.id)) { + edges.push({ + _id: edge.id, + label: edge.label, + from: edge.from, + to: edge.to, + attributes: { ...edge.attributes, type: edge.label }, + }); + seenEdges.add(edge.id); + } + } + } + } else { + log.warn(`Ignoring unknown row type: ${row.type}`); + } + } + + return { nodes, edges }; +} + +// Removed neo4j-specific helper functions. diff --git a/src/readers/diffCheck.ts b/src/readers/diffCheck.ts index ba3dd09995ef4a2070b372f7e2363a18ca85f957..b5f5cfe2909c69feb3844c255e2fcf6d2dc482e9 100644 --- a/src/readers/diffCheck.ts +++ b/src/readers/diffCheck.ts @@ -1,6 +1,6 @@ import type { SaveState } from 'ts-common'; import { log } from '../logger'; -import { hashDictionary, hashIsEqual } from '../utils/hashing'; +import { hashDictionary, hashIsEqual } from '../queryExecution/hashing'; import type { GraphQueryResultMetaFromBackend } from 'ts-common/src/model/webSocket/graphResult'; import { ums } from '../variables'; import type { InsightModel } from 'ts-common'; diff --git a/src/readers/insightProcessor.ts b/src/readers/insightProcessor.ts index 71014ea1bd75cbed02dab9f764bf143caac55d3a..df86921cfc3560ce82d88bac38354189242437eb 100644 --- a/src/readers/insightProcessor.ts +++ b/src/readers/insightProcessor.ts @@ -4,14 +4,14 @@ import { type InsightModel } from 'ts-common'; import { createHeadlessEditor } from '@lexical/headless'; import { $generateHtmlFromNodes } from '@lexical/html'; import { JSDOM } from 'jsdom'; -import { Query2BackendQuery } from '../utils/reactflow/query2backend'; -import { query2Cypher } from '../utils/cypher/converter'; -import { queryService } from './queryService'; +import { Query2BackendQuery } from '../queryExecution/reactflow/query2backend'; +import { query2Cypher } from '../queryExecution/cypher/converter'; import { statCheck } from './statCheck'; import { diffCheck } from './diffCheck'; -import { VariableNode } from '../utils/lexical'; -import { populateTemplate } from '../utils/insights'; +import { VariableNode } from '../queryExecution/lexical'; +import { populateTemplate } from '../queryExecution/insights'; import { RabbitMqBroker } from 'ts-common/rabbitMq'; +import { cypherQueryService } from './services/cypherService'; const dom = new JSDOM(); function setUpDom() { @@ -94,7 +94,7 @@ export const insightProcessor = async () => { const cypher = query2Cypher(convertedQuery); if (cypher == null) return; try { - const result = await queryService(ss.dbConnections[0], cypher, true); + const result = await 
cypherQueryService(ss.dbConnections[0], cypher, false); insight.status = false; diff --git a/src/readers/queryService.ts b/src/readers/queryService.ts index 56d445566ae748af55bb11a8dfd6c8d404f28243..46063bf61615f4a1b53ebc6d8c8757fac06dbb24 100644 --- a/src/readers/queryService.ts +++ b/src/readers/queryService.ts @@ -1,75 +1,15 @@ -import { graphQueryBackend2graphQuery, type DbConnection, type QueryRequest } from 'ts-common'; +import { type DbConnection, type QueryRequest } from 'ts-common'; -import { QUERY_CACHE_DURATION, rabbitMq, redis, ums, type QueryExecutionTypes } from '../variables'; +import { rabbitMq, ums, type QueryExecutionTypes } from '../variables'; import { log } from '../logger'; -import { QueryPublisher } from '../utils/queryPublisher'; -import { query2Cypher } from '../utils/cypher/converter'; -import { parseCountCypherQuery, parseCypherQuery } from '../utils/cypher/queryParser'; +import { QueryPublisher } from '../queryExecution/queryPublisher'; +import { query2Cypher } from '../queryExecution/cypher/converter'; + import { formatTimeDifference } from 'ts-common/src/logger/logger'; -import { Query2BackendQuery } from '../utils/reactflow/query2backend'; -import type { GraphQueryResultFromBackend, GraphQueryResultMetaFromBackend } from 'ts-common/src/model/webSocket/graphResult'; +import { Query2BackendQuery } from '../queryExecution/reactflow/query2backend'; import { RabbitMqBroker } from 'ts-common/rabbitMq'; -import { Neo4jConnection } from 'ts-common/neo4j'; -import type { QueryCypher } from '../utils/cypher/converter/queryConverter'; - -async function cacheCheck(cacheKey: string): Promise<GraphQueryResultMetaFromBackend | undefined> { - log.debug('Checking cache for query, with cache ttl', QUERY_CACHE_DURATION, 'seconds'); - const cached = await redis.client.get(cacheKey); - if (cached) { - log.info('Cache hit for query'); - const buf = Buffer.from(cached, 'base64'); - const inflated = Bun.gunzipSync(new Uint8Array(buf)); - const dec = new TextDecoder(); - const cachedMessage = JSON.parse(dec.decode(inflated)) as GraphQueryResultMetaFromBackend; - return cachedMessage; - } -} - -export const queryService = async (db: DbConnection, cypher: QueryCypher, useCached: boolean): Promise<GraphQueryResultMetaFromBackend> => { - let index = 0; - const disambiguatedQuery = cypher.query.replace(/\d{13}/g, () => (index++).toString()); - const cacheKey = Bun.hash(JSON.stringify({ db: db, query: disambiguatedQuery })).toString(); - - if (QUERY_CACHE_DURATION === '') { - log.info('Query cache disabled, skipping cache check'); - } else if (!useCached) { - log.info('Skipping cache check for query due to parameter', useCached); - } else { - const cachedMessage = await cacheCheck(cacheKey); - if (cachedMessage) { - log.debug('Cache hit for query', disambiguatedQuery); - return cachedMessage; - } - } - - // TODO: only neo4j is supported for now - const connection = new Neo4jConnection(db); - try { - const [neo4jResult, neo4jCountResult] = await connection.run([cypher.query, cypher.countQuery]); - const graph = parseCypherQuery(neo4jResult.records); - const countGraph = parseCountCypherQuery(neo4jCountResult.records); - - // calculate metadata - const result = graphQueryBackend2graphQuery(graph, countGraph); - result.nodeCounts.updatedAt = Date.now(); - - // cache result - const compressedMessage = Bun.gzipSync(JSON.stringify(result)); - const base64Message = Buffer.from(compressedMessage).toString('base64'); - - if (QUERY_CACHE_DURATION !== '') { - // if cache enabled, cache the result - 
await redis.setWithExpire(cacheKey, base64Message, QUERY_CACHE_DURATION); // ttl in seconds - } - - return result; - } catch (error) { - log.error('Error parsing query result:', cypher, error); - throw new Error('Error parsing query result'); - } finally { - connection.close(); - } -}; +import { languageQueryService } from './services'; +import { queryConverter } from '../queryExecution/converter'; export const queryServiceReader = async (frontendPublisher: RabbitMqBroker, mlPublisher: RabbitMqBroker, type: QueryExecutionTypes) => { if (type == null) { @@ -157,27 +97,29 @@ export const queryServiceReader = async (frontendPublisher: RabbitMqBroker, mlPu const queryBuilderSettings = activeQueryInfo.settings; //ss.queries[0].settings; const ml = message.ml; + const convertedQuery = Query2BackendQuery(ss.id, visualQuery, queryBuilderSettings, ml); log.debug('translating query:', convertedQuery); publisher.publishStatusToFrontend('Translating'); - const cypher = query2Cypher(convertedQuery); - const query = cypher.query; - if (query == null) { - log.error('Error translating query:', convertedQuery); - publisher.publishErrorToFrontend('Error translating query'); - return; - } - - log.debug('Translated query FROM:', convertedQuery); - log.info('Translated query:', query); - log.info('Translated query:', cypher.countQuery); - publisher.publishTranslationResultToFrontend(query); - for (let i = 0; i < ss.dbConnections.length; i++) { try { - const result = await queryService(ss.dbConnections[i], cypher, message.useCached); + const queryText = queryConverter(convertedQuery, ss.dbConnections[i].type); + // log.info('Translated query:', queryText.query); + log.info('Translated query:', queryText.countQuery); + const query = queryText.query; + if (query == null) { + log.error('Error translating query:', convertedQuery); + publisher.publishErrorToFrontend('Error translating query'); + return; + } + + log.debug('Translated query FROM:', convertedQuery); + log.info('Translated query:', query); + log.info('Translated query:', queryText.countQuery); + + const result = await languageQueryService(ss.dbConnections[i], queryText, message.useCached); // Cache nodeCounts such that we can display differentiation for each query await ums.updateQuery(headers.message.sessionData.userID, message.saveStateID, { diff --git a/src/readers/services/cache.ts b/src/readers/services/cache.ts new file mode 100644 index 0000000000000000000000000000000000000000..bc21b56b311fab43513439cce1e3ade9e8b1632e --- /dev/null +++ b/src/readers/services/cache.ts @@ -0,0 +1,16 @@ +import type { GraphQueryResultMetaFromBackend } from 'ts-common'; +import { log } from '../../logger'; +import { QUERY_CACHE_DURATION, redis } from '../../variables'; + +export async function cacheCheck(cacheKey: string): Promise<GraphQueryResultMetaFromBackend | undefined> { + log.debug('Checking cache for query, with cache ttl', QUERY_CACHE_DURATION, 'seconds'); + const cached = await redis.client.get(cacheKey); + if (cached) { + log.info('Cache hit for query'); + const buf = Buffer.from(cached, 'base64'); + const inflated = Bun.gunzipSync(new Uint8Array(buf)); + const dec = new TextDecoder(); + const cachedMessage = JSON.parse(dec.decode(inflated)) as GraphQueryResultMetaFromBackend; + return cachedMessage; + } +} diff --git a/src/readers/services/cypherService.ts b/src/readers/services/cypherService.ts new file mode 100644 index 0000000000000000000000000000000000000000..d138906c313c6e19c71b9c9b92c0da1e6fcead74 --- /dev/null +++ 
b/src/readers/services/cypherService.ts @@ -0,0 +1,57 @@ +import { type DbConnection, type GraphQueryResultMetaFromBackend, graphQueryBackend2graphQuery } from 'ts-common'; +import { Neo4jConnection } from 'ts-common/databaseConnection/neo4j'; +import { log } from '../../logger'; +import { QUERY_CACHE_DURATION, redis } from '../../variables'; +import { parseCountCypherQueryResult, parseCypherQueryResult } from '../../queryExecution/cypher/queryResultParser'; +import { cacheCheck } from './cache'; +import type { QueryText } from '../../queryExecution/model'; + +export const cypherQueryService = async ( + db: DbConnection, + cypher: QueryText, + useCached: boolean, +): Promise<GraphQueryResultMetaFromBackend> => { + let index = 0; + const disambiguatedQuery = cypher.query.replace(/\d{13}/g, () => (index++).toString()); + const cacheKey = Bun.hash(JSON.stringify({ db: db, query: disambiguatedQuery })).toString(); + + if (QUERY_CACHE_DURATION === '') { + log.info('Query cache disabled, skipping cache check'); + } else if (!useCached) { + log.info('Skipping cache check for query due to parameter', useCached); + } else { + const cachedMessage = await cacheCheck(cacheKey); + if (cachedMessage) { + log.debug('Cache hit for query', disambiguatedQuery); + return cachedMessage; + } + } + + // TODO: only neo4j is supported for now + const connection = new Neo4jConnection(db); + try { + const [neo4jResult, neo4jCountResult] = await connection.run([cypher.query, cypher.countQuery]); + const graph = parseCypherQueryResult(neo4jResult.records); + const countGraph = parseCountCypherQueryResult(neo4jCountResult.records); + + // calculate metadata + const result = graphQueryBackend2graphQuery(graph, countGraph); + result.nodeCounts.updatedAt = Date.now(); + + // cache result + const compressedMessage = Bun.gzipSync(JSON.stringify(result)); + const base64Message = Buffer.from(compressedMessage).toString('base64'); + + if (QUERY_CACHE_DURATION !== '') { + // if cache enabled, cache the result + await redis.setWithExpire(cacheKey, base64Message, QUERY_CACHE_DURATION); // ttl in seconds + } + + return result; + } catch (error) { + log.error('Error parsing query result:', cypher, error); + throw new Error('Error parsing query result'); + } finally { + connection.close(); + } +}; diff --git a/src/readers/services/index.ts b/src/readers/services/index.ts new file mode 100644 index 0000000000000000000000000000000000000000..5a921cc591cecf63ecdea50c59dde18d41426d59 --- /dev/null +++ b/src/readers/services/index.ts @@ -0,0 +1,14 @@ +import type { DbConnection, GraphQueryResultMetaFromBackend } from 'ts-common'; +import type { QueryText } from '../../queryExecution/model'; +import { sqlQueryService } from './sqlService'; +import { cypherQueryService } from './cypherService'; + +export const languageQueryService = (db: DbConnection, query: QueryText, useCached: boolean): Promise<GraphQueryResultMetaFromBackend> => { + if (db.type === 'postgres') { + return sqlQueryService(db, query, useCached); + } else if (db.type === 'neo4j' || db.type === 'memgraph') { + return cypherQueryService(db, query, useCached); + } else { + throw new Error('Unsupported database type'); + } +}; diff --git a/src/readers/services/sqlService.ts b/src/readers/services/sqlService.ts new file mode 100644 index 0000000000000000000000000000000000000000..fed1c5237579123845878ac49c3da0b651dda1d3 --- /dev/null +++ b/src/readers/services/sqlService.ts @@ -0,0 +1,63 @@ +import { type DbConnection, type GraphQueryResultMetaFromBackend, 
graphQueryBackend2graphQuery } from 'ts-common'; +import { PgConnection } from 'ts-common/databaseConnection/postgres'; +import { log } from '../../logger'; +import { QUERY_CACHE_DURATION, redis } from '../../variables'; +import { parseCountCypherQueryResult, parseCypherQueryResult } from '../../queryExecution/cypher/queryResultParser'; +import { cacheCheck } from './cache'; +import type { QueryText } from '../../queryExecution/model'; +import { parseCountSQLQueryResult, parseSQLQueryResult } from '../../queryExecution/sql/queryResultParser'; + +export const sqlQueryService = async ( + db: DbConnection, + queryText: QueryText, + useCached: boolean, +): Promise<GraphQueryResultMetaFromBackend> => { + let index = 0; + const disambiguatedQuery = queryText.query.replace(/\d{13}/g, () => (index++).toString()); + const cacheKey = Bun.hash(JSON.stringify({ db: db, query: disambiguatedQuery })).toString(); + + // if (QUERY_CACHE_DURATION === '') { + // log.info('Query cache disabled, skipping cache check'); + // } else if (!useCached) { + // log.info('Skipping cache check for query due to parameter', useCached); + // } else { + // const cachedMessage = await cacheCheck(cacheKey); + // if (cachedMessage) { + // log.debug('Cache hit for query', disambiguatedQuery); + // return cachedMessage; + // } + // } + + const connection = new PgConnection(db); + try { + const [result, countResult] = await connection.run([queryText.query, queryText.countQuery]); + // console.log('result:', result); + // console.log('countResult:', countResult); + + const graph = parseSQLQueryResult(result); + log.info('Parsed graph:', result); + const countGraph = parseCountSQLQueryResult(countResult); + + // calculate metadata + const graphQueryResult = graphQueryBackend2graphQuery(graph, countGraph); + graphQueryResult.nodeCounts.updatedAt = Date.now(); + + // cache result + const compressedMessage = Bun.gzipSync(JSON.stringify(result)); + const base64Message = Buffer.from(compressedMessage).toString('base64'); + + if (QUERY_CACHE_DURATION !== '') { + // if cache enabled, cache the result + await redis.setWithExpire(cacheKey, base64Message, QUERY_CACHE_DURATION); // ttl in seconds + } + + // log.info('Query result:', graphQueryResult); + + return graphQueryResult; + } catch (error) { + log.error('Error parsing query result:', queryText, error); + throw new Error('Error parsing query result'); + } finally { + connection.close(); + } +};
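
Usage sketch (not part of the patch): the two new entry points introduced above are queryConverter (src/queryExecution/converter.ts), which turns a BackendQueryFormat into a dialect-specific QueryText, and languageQueryService (src/readers/services/index.ts), which routes that QueryText to cypherQueryService or sqlQueryService based on the connection type. The snippet below strings them together the same way the reworked queryServiceReader does, written as if it sat alongside src/readers/queryService.ts so the relative imports match the diff. The file placement, the example query, and the runExample wrapper are illustrative assumptions, not code from this change.

// Hypothetical wiring sketch for the new query-execution layer (assumed file: src/readers/example.ts).
import type { BackendQueryFormat, DbConnection } from 'ts-common';
import { queryConverter } from '../queryExecution/converter';
import { languageQueryService } from './services';

// Minimal single-entity query; field shapes follow queryConverterSql.test.ts.
const exampleQuery: BackendQueryFormat = {
  saveStateID: 'example',
  return: ['*'],
  limit: 500,
  query: [{ id: 'path_0', node: { id: 'p1', label: 'Person' } }],
};

export async function runExample(db: DbConnection) {
  // neo4j/memgraph -> query2Cypher, postgres -> query2SQL; any other type throws.
  const queryText = queryConverter(exampleQuery, db.type);

  // Dispatches to cypherQueryService or sqlQueryService and returns a
  // GraphQueryResultMetaFromBackend either way. Passing `true` allows the Redis
  // cache check on the Cypher path; the SQL path's cache check is currently commented out.
  return languageQueryService(db, queryText, /* useCached */ true);
}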