diff --git a/cypher/convertQuery.go b/cypher/convertQuery.go index e36d240efaf5a578049d51ee940f97a5e0620c78..25c6b51e78392334cdb98860be9a822ef91f6318 100644 --- a/cypher/convertQuery.go +++ b/cypher/convertQuery.go @@ -1,240 +1,603 @@ -/* -This program has been developed by students from the bachelor Computer Science at Utrecht University within the Software Project course. -© Copyright Utrecht University (Department of Information and Computing Sciences) -*/ - package cypher import ( + "errors" + "fmt" + "strings" + "git.science.uu.nl/graphpolaris/query-conversion/entity" ) -/* -ConvertQuery converts an IncomingQueryJSON object into AQL - JSONQuery: *entity.IncomingQueryJSON, the query to be converted to AQL - Returns: (*string, error), the AQL query and a possible error -*/ -func (s *Service) ConvertQuery(JSONQuery *entity.IncomingQueryJSON) (*string, error) { - - // // Check to make sure all indexes exist - // // How many entities are there - // numEntities := len(JSONQuery.Entities) - 1 - // // How many relations there are - // numRelations := len(JSONQuery.Relations) - 1 - - // // Make sure no entity should be returned that is outside the range of that list - // for _, e := range JSONQuery.Return.Entities { - // // If this entity references an entity that is outside the range - // if e > numEntities || e < 0 { - // return nil, errors.New("non-existing entity referenced in return") - // } - // } - - // // Make sure that no relation mentions a non-existing entity - // for _, r := range JSONQuery.Relations { - // if r.EntityFrom > numEntities || r.EntityTo > numEntities { - // return nil, errors.New("non-exisiting entity referenced in relation") - // } - // } - - // // Make sure no non-existing relation is tried to be returned - // for _, r := range JSONQuery.Return.Relations { - // if r > numRelations || r < 0 { - // return nil, errors.New("non-existing relation referenced in return") - // } - // } - - // result := createQuery(JSONQuery) - // return result, nil - return nil, nil +// ConvertQuery takes the json from the visual query builder and converts it into Cypher +func (s *Service) ConvertQuery(totalJSONQuery *entity.IncomingQueryJSON) (*string, error) { + var finalCypher *string + + queryJSON := totalJSONQuery + + query, rest, isRest := checkForQueryCluster(queryJSON) + + if isRest { + fmt.Println("Rest:") + fmt.Println(rest) + + // If something needs to be done with other query cluster, then add code here + } + + finalCypher, err := createCypher(query) + if err != nil { + return nil, err + } + + return finalCypher, nil } -// /* -// sliceContains checks if a slice contains the input -// s: []int, the slice to check -// e: int, what you're checking for -// Return: bool, true if it contains 'e' -// */ -// func sliceContains(s []int, e int) bool { -// for _, a := range s { -// if a == e { -// return true -// } -// } -// return false -// } - -// /*TrimSuffix trims the final character of a string */ -// func TrimSuffix(s, suffix string) string { -// if strings.HasSuffix(s, suffix) { -// s = s[:len(s)-len(suffix)] -// } -// return s -// } - -// /* -// createQuery generates a query based on the json file provided -// JSONQuery: *entity.IncomingQueryJSON, jsonQuery is a parsedJSON struct holding all the data needed to form a query -// Return: *string, a string containing the corresponding AQL query and an error -// */ -// func createQuery(JSONQuery *entity.IncomingQueryJSON) *string { -// // Note: Case #4, where there is an edge only query (without any entity), is not supported by frontend 
- -// // If a modifier is used, disable the limit -// if len(JSONQuery.Modifiers) > 0 { -// JSONQuery.Limit = -1 -// } - -// var ( -// relationsToReturn []string -// nodesToReturn []string -// nodeUnion string -// relationUnion string -// queryList [][][]int -// entityList []int -// ret string -// ) - -// for i, relation := range JSONQuery.Relations { -// var contains bool -// contains = false -// for j := range queryList { -// if sliceContains(queryList[j][0], relation.EntityFrom) || sliceContains(queryList[j][0], relation.EntityTo) { -// if !sliceContains(queryList[j][0], relation.EntityFrom) { -// queryList[j][0] = append(queryList[j][0], relation.EntityFrom) -// entityList = append(entityList, relation.EntityFrom) -// } -// if !sliceContains(queryList[j][0], relation.EntityTo) { -// queryList[j][0] = append(queryList[j][0], relation.EntityTo) -// entityList = append(entityList, relation.EntityTo) -// } -// queryList[j][1] = append(queryList[j][1], i) -// contains = true -// } -// } -// if !contains { -// queryList = append(queryList, [][]int{{relation.EntityFrom, relation.EntityTo}, {i}}) -// } -// } - -// for i := range queryList { -// //reset variables for the next query -// nodeUnion = "" -// relationUnion = "" -// relationsToReturn = []string{} -// for j, relationID := range queryList[i][1] { -// relationName := fmt.Sprintf("r%v", j) -// relation := JSONQuery.Relations[relationID] -// pathName := fmt.Sprintf("p%v", j) -// relationsToReturn = append(relationsToReturn, pathName) -// if relation.EntityFrom >= 0 { -// // if there is a from-node -// // create the let for this node -// fromName := fmt.Sprintf("n%v", relation.EntityFrom) - -// ret += *createNodeMatch(&JSONQuery.Entities[relation.EntityFrom], &fromName) - -// ret += *createRelationMatch(&relation, relationName, pathName, &JSONQuery.Entities, JSONQuery.Limit, true) -// } else if relation.EntityTo >= 0 { -// // if there is only a to-node -// toName := fmt.Sprintf("n%v", relation.EntityTo) - -// ret += *createNodeMatch(&JSONQuery.Entities[relation.EntityTo], &toName) - -// ret += *createRelationMatch(&relation, relationName, pathName, &JSONQuery.Entities, JSONQuery.Limit, false) -// // Add this relation to the list -// } else { -// fmt.Println("Relation-only queries are currently not supported") -// continue -// } -// } - -// // Create UNION statements that create unique lists of all the nodes and relations - -// // Thus removing all duplicates -// nodeUnion = "RETURN " - -// for _, entityID := range queryList[i][0] { -// if sliceContains(JSONQuery.Return.Entities, entityID) { -// nodeUnion += fmt.Sprintf("n%v,", entityID) -// } -// } - -// for _, relation := range relationsToReturn { -// relationUnion += fmt.Sprintf("%v,", relation) -// } - -// relationUnion = TrimSuffix(relationUnion, ",") -// // hier zat een newline -// ret += nodeUnion + relationUnion + "; " -// } - -// nodeSet := make(map[int]bool) -// for _, relation := range JSONQuery.Relations { -// nodeSet[relation.EntityFrom] = true -// nodeSet[relation.EntityTo] = true -// } - -// // Check if the entities to return are already returned -// for _, entityIndex := range JSONQuery.Return.Entities { -// if !nodeSet[entityIndex] { -// // If not, return this node -// name := fmt.Sprintf("n%v", entityIndex) -// ret += *createNodeMatch(&JSONQuery.Entities[entityIndex], &name) -// // Add this node to the list -// nodesToReturn = append(nodesToReturn, name) -// ret += fmt.Sprintf("RETURN %v", name) -// } -// } - -// ret = TrimSuffix(ret, " ") -// return &ret -// } - -// /* 
-// createNodeLet generates a 'LET' statement for a node related query -// node: *entity.QueryEntityStruct, node is an entityStruct containing the information of a single node, -// name: *string, is the autogenerated name of the node consisting of "n" + the index of the node -// Return: *string, a string containing a single LET-statement in AQL -// */ -// func createNodeMatch(node *entity.QueryEntityStruct, name *string) *string { -// // hier zat een newline -// header := fmt.Sprintf("MATCH (%v:%v) ", *name, node.Type) -// constraints := *createConstraintStatements(&node.Constraints, *name) -// ret := header + constraints -// return &ret -// } - -// /* -// createRelationLetWithFromEntity generates a 'LET' statement for relations with an 'EntityFrom' property and optionally an 'EntitiyTo' property -// relation: *entity.QueryRelationStruct, relation is a relation struct containing the information of a single relation, -// relationName: string, is the name of the relation, is the autogenerated name of the node consisting of "r" + the index of the relation, -// pathName: string, is the path of the name, -// entities: *[]entity.QueryEntityStruct, is a list of entityStructs that are needed to form the relation LET-statement -// limit: int, the limit for the number of nodes to return -// outbound: bool, checks if the relation is inbound or outbound -// Return: *string, a string containing a single LET-statement in AQL -// */ -// func createRelationMatch(relation *entity.QueryRelationStruct, relationName string, pathName string, entities *[]entity.QueryEntityStruct, limit int, outbound bool) *string { -// relationReturn := "" -// var relationBounds int -// if outbound { -// relationReturn = fmt.Sprintf("MATCH %v = (n%v)-[%v:%v*%v..%v]->(", pathName, relation.EntityFrom, relationName, relation.Type, relation.Depth.Min, relation.Depth.Max) -// relationBounds = relation.EntityTo - -// } else { -// relationReturn = fmt.Sprintf("MATCH %v = (n%v)-[%v:%v*%v..%v]->(", pathName, relation.EntityTo, relationName, relation.Type, relation.Depth.Min, relation.Depth.Max) -// relationBounds = relation.EntityFrom -// } - -// if relationBounds != -1 { -// relationReturn += fmt.Sprintf("n%v", relationBounds) -// } -// relationReturn += ")" - -// constraintReturn := *createConstraintStatements(&relation.Constraints, relationName) -// // hier zat een newline -// ret := relationReturn + " " + constraintReturn - -// return &ret -// } +// createCypher translates a cluster of nodes (query) to Cypher +func createCypher(JSONQuery *entity.IncomingQueryJSON) (*string, error) { + + // create the hierarchy from the cluster + hierarchy, err := createQueryHierarchy(JSONQuery) + if err != nil { + return nil, errors.New("Unable to create hierarchy in query, perhaps there is a dependency loop?") + } + + // translate it to cypher in the right order, using the hierarchy + cypher, err := formQuery(JSONQuery, hierarchy) + if err != nil { + return nil, errors.New("Creation of query Cypher failed") + } + + // create the return statement + returnStatement, err := createReturnStatement(JSONQuery, hierarchy) + if err != nil { + return nil, errors.New("Creation of return Cypher failed") + } + + finalCypher := *cypher + *returnStatement + + return &finalCypher, nil +} + +// createReturnStatement creates the final return statement +func createReturnStatement(JSONQuery *entity.IncomingQueryJSON, parts entity.Query) (*string, error) { + + var retStatement string + + // First check to see if the return is a table (due to a groupby at the end) or 
if it is nodelink data
+	numOfParts := len(parts)
+	if parts[numOfParts-1].QType == "groupBy" {
+		// Return is a table
+		groupBy := JSONQuery.FindG(parts[numOfParts-1].QID)
+
+		gName := fmt.Sprintf("%v_%v", groupBy.AppliedModifier, groupBy.GroupAttribute)
+		by := fmt.Sprintf("%v%v.%v", string(groupBy.ByType[0]), groupBy.ByID, groupBy.ByAttribute)
+		byName := strings.Replace(by, ".", "_", 1)
+
+		retStatement = fmt.Sprintf("RETURN %v, %v", byName, gName)
+	} else {
+		// Return is nodelink
+		// Loop through the parts of the query from back to front
+		retStatement = "RETURN "
+		lineStart := ""
+		for i := numOfParts - 1; i >= 0; i-- {
+			part := parts[i]
+			if part.QType == "relation" {
+				rel := JSONQuery.FindR(part.QID)
+				retStatement += fmt.Sprintf("%v r%v", lineStart, rel.ID)
+				lineStart = ","
+
+				if rel.FromID != -1 {
+					if rel.FromType == "entity" {
+
+						retStatement += fmt.Sprintf("%v e%v", lineStart, rel.FromID)
+					} else {
+						id := JSONQuery.FindG(rel.FromID).ByID
+						retStatement += fmt.Sprintf("%v eg%v", lineStart, id)
+					}
+				}
+
+				if rel.ToID != -1 {
+					if rel.ToType == "entity" {
+
+						retStatement += fmt.Sprintf("%v e%v", lineStart, rel.ToID)
+					} else {
+						id := JSONQuery.FindG(rel.ToID).ByID
+						retStatement += fmt.Sprintf("%v eg%v", lineStart, id)
+					}
+				}
+			} else if part.QType == "entity" {
+				retStatement += fmt.Sprintf("%v e%v", lineStart, part.QID)
+				break
+
+				// This usually ends with a break, since a single entity is always connected via an IN to a groupby (except perhaps when the query consists of only a single entity)
+			} else {
+				// Then it is a groupby which must not be returned, thus the returns are done.
+				break
+			}
+		}
+	}
+
+	retStatement = retStatement + "\n" + fmt.Sprintf("LIMIT %v", JSONQuery.Limit)
+
+	return &retStatement, nil
+}
+
+// createQueryHierarchy finds out what depends on what, then uses topological sort to create a hierarchy
+func createQueryHierarchy(JSONQuery *entity.IncomingQueryJSON) (entity.Query, error) {
+
+	var parts entity.Query
+	IDctr := 0
+
+	// Add all relations to the query parts
+	for _, rel := range JSONQuery.Relations {
+		part := entity.QueryPart{
+			QType:        "relation",
+			QID:          rel.ID,
+			PartID:       IDctr,
+			Dependencies: make([]int, 0),
+		}
+		parts = append(parts, part)
+
+		IDctr++
+
+	}
+
+	// Add the group-bys
+	for _, gb := range JSONQuery.GroupBys {
+		part := entity.QueryPart{
+			QType:        "groupBy",
+			QID:          gb.ID,
+			PartID:       IDctr,
+			Dependencies: make([]int, 0),
+		}
+		parts = append(parts, part)
+
+		IDctr++
+
+	}
+
+	// Add the entities, but only if they have an IN constraint; otherwise they are not relevant here
+	for _, ent := range JSONQuery.Entities {
+
+		skip := true
+		for _, con := range ent.Constraints {
+			if con.InID != -1 {
+				skip = false
+			}
+		}
+
+		if skip {
+			continue
+		}
+
+		part := entity.QueryPart{
+			QType:        "entity",
+			QID:          ent.ID,
+			PartID:       IDctr,
+			Dependencies: make([]int, 0),
+		}
+		parts = append(parts, part)
+
+		IDctr++
+	}
+
+	// Check dependencies in a nice O(n^2)
+	for _, rel := range JSONQuery.Relations {
+		if rel.FromID == -1 {
+			continue
+		}
+
+		// Check the dependencies From - To
+		for _, rela := range JSONQuery.Relations {
+			if rela.ToID == -1 {
+				continue
+			}
+
+			if rel.FromID == rela.ToID && rel.FromType == rela.ToType {
+				part := parts.Find(rel.ID, "relation")
+				part.Dependencies = append(part.Dependencies, parts.Find(rela.ID, "relation").PartID)
+			}
+		}
+
+		if rel.ToID == -1 {
+			continue
+		}
+
+		// Now, for connections to group-bys it doesn't matter whether the GB is attached to the from or the to
+		// The GB always has priority
+		for _, gb := range 
JSONQuery.GroupBys {
+			if (rel.FromID == gb.ID && rel.FromType == "groupBy") || (rel.ToID == gb.ID && rel.ToType == "groupBy") {
+				part := parts.Find(rel.ID, "relation")
+				gbID := parts.Find(gb.ID, "groupBy").PartID
+				part.Dependencies = append(part.Dependencies, gbID)
+			}
+		}
+	}
+
+	// Same trick for group-bys
+	for _, gb := range JSONQuery.GroupBys {
+		for _, rela := range JSONQuery.Relations {
+			// Check if the gb is connected to the relation
+			if (gb.ByID == rela.ID && gb.ByType == "relation") || // Is the By connected to a relation
+				(gb.GroupID == rela.ID && gb.GroupType == "relation") || // Is the Group connected to a relation
+				(gb.ByID == rela.FromID && gb.ByType == rela.FromType) || // Is the By connected to an entity connected to the "From" of a relation
+				(gb.ByID == rela.ToID && gb.ByType == rela.ToType) || // Is the By connected to an entity connected to the "To" of a relation
+				(gb.GroupID == rela.FromID && gb.GroupType == rela.FromType) || // Is the Group connected to an entity connected to the "From" of a relation
+				(gb.GroupID == rela.ToID && gb.GroupType == rela.ToType) { // Is the Group connected to an entity connected to the "To" of a relation
+				part := parts.Find(gb.ID, "groupBy")
+				part.Dependencies = append(part.Dependencies, parts.Find(rela.ID, "relation").PartID)
+			}
+		}
+
+		// Not sure whether this can even occur, but check it just in case
+		// Check to see if the gb is connected to another gb
+		for _, grb := range JSONQuery.GroupBys {
+			if gb.ID == grb.ID {
+				continue
+			}
+
+			if (gb.GroupID == grb.ID && gb.GroupType == "groupBy") || (gb.ByID == grb.ID && gb.ByType == "groupBy") {
+				part := parts.Find(gb.ID, "groupBy")
+				part.Dependencies = append(part.Dependencies, parts.Find(grb.ID, "groupBy").PartID)
+			}
+		}
+	}
+
+	for _, ent := range JSONQuery.Entities {
+		for _, con := range ent.Constraints {
+			if con.InID != -1 {
+				part := parts.Find(ent.ID, "entity")
+				// con.InType should always be "groupBy" here
+				part.Dependencies = append(part.Dependencies, parts.Find(con.InID, con.InType).PartID)
+			}
+		}
+
+	}
+
+	// Now we have a directed graph, meaning we can use a topological sort (Kahn's algorithm)
+	var sortedQuery entity.Query
+	incomingEdges := make(map[int]int)
+
+	// Set all to 0
+	for _, p := range parts {
+		incomingEdges[p.PartID] = 0
+	}
+
+	// Count the incoming edges (dependencies)
+	for _, p := range parts {
+		for _, dp := range p.Dependencies {
+			incomingEdges[dp]++
+		}
+	}
+
+	for { // While there is a part where incomingEdges[part] == 0
+		part := entity.QueryPart{PartID: -1}
+		// Select a node with no incoming edges
+		for ID, edges := range incomingEdges {
+			if edges == 0 {
+				part = *parts.SelectByID(ID)
+			}
+		}
+
+		// Check to see if there are parts without incoming edges left
+		if part.PartID == -1 {
+			break
+		}
+
+		// Remove it from the set
+		incomingEdges[part.PartID] = -1
+		sortedQuery = append(sortedQuery, part)
+
+		// Decrease incoming edges of other parts
+		for _, ID := range part.Dependencies {
+			incomingEdges[ID]--
+		}
+	}
+
+	// Now check for cycles in the graph
+	partRemaining := false
+	for _, edges := range incomingEdges {
+		if edges != -1 {
+			partRemaining = true
+		}
+	}
+
+	if partRemaining {
+		// Somehow there was a cycle in the query
+		return nil, errors.New("Cyclic query detected")
+	}
+
+	// Reverse the list
+	retQuery := make([]entity.QueryPart, len(sortedQuery))
+	for i := 0; i < len(sortedQuery); i++ {
+		retQuery[i] = sortedQuery[len(sortedQuery)-i-1]
+	}
+
+	return retQuery, nil
+}
+
+// formQuery uses the hierarchy to create cypher for each 
part of the query in the right order +func formQuery(JSONQuery *entity.IncomingQueryJSON, hierarchy entity.Query) (*string, error) { + + // Traverse through the hierarchy and for every entry create a part like: + // Match p0 = (l:Lorem)-[:Ipsum*1..1]-(d:Dolor) + // Constraints on l and d + // Unwind relationships(p0) as r0 + // Constraints on r0 + // With * + + totalQuery := "" + + for _, entry := range hierarchy { + var cypher *string + var err error + + switch entry.QType { + case "relation": + cypher, err = createRelationCypher(JSONQuery, entry) + if err != nil { + return nil, err + } + break + case "groupBy": + cypher, err = createGroupByCypher(JSONQuery, entry) + if err != nil { + return nil, err + } + + break + case "entity": + // This would be in case of an IN or if there was only 1 entity in the query builder + cypher, err = createInCypher(JSONQuery, entry) + if err != nil { + return nil, err + } + + break + default: + // Should never be reached + return nil, errors.New("Invalid query pill type detected") + } + + totalQuery += *cypher + } + + return &totalQuery, nil +} + +// createInCypher creates the cypher for an entity with an IN-clause +func createInCypher(JSONQuery *entity.IncomingQueryJSON, part entity.QueryPart) (*string, error) { + ent := JSONQuery.FindE(part.QID) + eName := fmt.Sprintf("e%v", ent.ID) + + match := fmt.Sprintf("MATCH (%v:%v)\n", eName, ent.Name) + eConstraints := "" + newLineStatement := "\tWHERE" + + // Find the IN + for _, con := range ent.Constraints { + if con.InID != -1 { + gby := JSONQuery.FindG(con.InID) // Because this could only be on a groupby + byName := fmt.Sprintf("%v%v", string(gby.ByType[0]), gby.ByID) + eConstraints += fmt.Sprintf("%v %v.%v IN %v_%v\n", newLineStatement, eName, con.Attribute, byName, gby.ByAttribute) + newLineStatement = "\tAND" + } + } + + // Attach other constraints (if any) + for _, v := range ent.Constraints { + if v.InID != -1 { + continue + } + eConstraints += fmt.Sprintf("%v %v \n", newLineStatement, *createConstraintBoolExpression(&v, eName, false)) + } + + with := "WITH *\n" + retStatement := match + eConstraints + with + return &retStatement, nil + + // Should be able to handle multiple IN statements from one entity, not sure if that will ever happen + // TODO: test this +} + +// createRelationCypher takes the json and a query part, finds the necessary entities and converts it into cypher +func createRelationCypher(JSONQuery *entity.IncomingQueryJSON, part entity.QueryPart) (*string, error) { + + rel := JSONQuery.FindR(part.QID) + + if (rel.FromID == -1) && (rel.ToID == -1) { + // Now there is only a relation, which we do not allow + return nil, errors.New("Relation only queries are not supported") + } + + var match, eConstraints, unwind, rConstraints string + + // There is some duplicate code here below that could be omitted with extra if-statements, but that is something to do + // for a later time. 
Since this way it is easier to understand the flow of the code.
+	// Removing the duplicate code here would probably more than triple the number of if-statements, so it is left as a puzzle for a later time (TODO)
+	if rel.ToID == -1 {
+		// There is no To, only a From
+		var eName string
+		var ent *entity.QueryEntityStruct
+
+		if rel.FromType == "entity" {
+
+			ent = JSONQuery.FindE(rel.FromID)
+			eName = fmt.Sprintf("e%v", ent.ID)
+
+		} else if rel.FromType == "groupBy" {
+			gb := JSONQuery.FindG(rel.FromID)
+			if gb.ByType == "relation" {
+				return nil, errors.New("Invalid query: cannot connect a relation to a group by that groups by another relation")
+			}
+
+			ent = JSONQuery.FindE(gb.ByID)
+			// This is a sort of dummy variable, since it is not directly visible in the query, but it is definitely needed
+			eName = fmt.Sprintf("e%v", ent.ID)
+		} else {
+			// Should never be reachable
+			return nil, errors.New("Invalid connection type to relation")
+		}
+
+		match = fmt.Sprintf("MATCH p%v = (%v:%v)-[:%v*%v..%v]-()\n", part.PartID, eName, ent.Name, rel.Name, rel.Depth.Min, rel.Depth.Max)
+
+		eConstraints = ""
+		newLineStatement := "\tWHERE"
+		for _, v := range ent.Constraints {
+			eConstraints += fmt.Sprintf("%v %v \n", newLineStatement, *createConstraintBoolExpression(&v, eName, false))
+			newLineStatement = "\tAND"
+		}
+
+		// Add an IN clause, connecting the relation to the output of the groupby
+		if rel.FromType == "groupBy" {
+			gb := JSONQuery.FindG(rel.FromID)
+			inConstraint := fmt.Sprintf("%v %v.%v IN %v_%v \n", newLineStatement, eName, gb.ByAttribute, gb.AppliedModifier, gb.ByAttribute)
+			eConstraints += inConstraint
+		}
+
+	} else if rel.FromID == -1 {
+		var eName string
+		var ent *entity.QueryEntityStruct
+
+		if rel.ToType == "entity" {
+			ent = JSONQuery.FindE(rel.ToID)
+			eName = fmt.Sprintf("e%v", ent.ID)
+
+		} else if rel.ToType == "groupBy" {
+			gb := JSONQuery.FindG(rel.ToID)
+			if gb.ByType == "relation" {
+				return nil, errors.New("Invalid query: cannot connect a relation to a group by that groups by another relation")
+			}
+
+			ent = JSONQuery.FindE(gb.ByID)
+			// This is a sort of dummy variable, since it is not directly visible in the query, but it is definitely needed
+			eName = fmt.Sprintf("e%v", ent.ID)
+		} else {
+			// Should never be reachable
+			return nil, errors.New("Invalid connection type to relation")
+		}
+
+		match = fmt.Sprintf("MATCH p%v = ()-[:%v*%v..%v]-(%v:%v)\n", part.PartID, rel.Name, rel.Depth.Min, rel.Depth.Max, eName, ent.Name)
+
+		eConstraints = ""
+		newLineStatement := "\tWHERE"
+		for _, v := range ent.Constraints {
+			eConstraints += fmt.Sprintf("%v %v \n", newLineStatement, *createConstraintBoolExpression(&v, eName, false))
+			newLineStatement = "\tAND"
+		}
+
+		// Add an IN clause, connecting the relation to the output of the groupby
+		if rel.ToType == "groupBy" {
+			gb := JSONQuery.FindG(rel.ToID)
+			inConstraint := fmt.Sprintf("%v %v.%v IN %v_%v \n", newLineStatement, eName, gb.ByAttribute, gb.AppliedModifier, gb.ByAttribute)
+			eConstraints += inConstraint
+		}
+
+	} else {
+		var eTName string
+		var entFrom *entity.QueryEntityStruct
+		var eFName string
+		var entTo *entity.QueryEntityStruct
+
+		// Check what type the To is
+		if rel.ToType == "entity" {
+			entTo = JSONQuery.FindE(rel.ToID)
+			eTName = fmt.Sprintf("e%v", entTo.ID)
+
+		} else if rel.ToType == "groupBy" {
+			gb := JSONQuery.FindG(rel.ToID)
+			if gb.ByType == "relation" {
+				return nil, errors.New("Invalid query: cannot connect a relation to a group by that groups by another relation")
+			}
+
+			entTo = JSONQuery.FindE(gb.ByID)
+			// This is a 
sort of dummy variable, since it is not directly visible in the query, but it is definitely needed + eTName = fmt.Sprintf("e%v", entTo.ID) + } else { + // Should never be reachable + return nil, errors.New("Invalid connection type to relation") + } + + // Check of what type the From is + if rel.FromType == "entity" { + + entFrom = JSONQuery.FindE(rel.FromID) + eFName = fmt.Sprintf("e%v", entFrom.ID) + + } else if rel.FromType == "groupBy" { + gb := JSONQuery.FindG(rel.FromID) + if gb.ByType == "relation" { + return nil, errors.New("Invalid query: cannot connect a relation to a group by that groups by another relation") + } + + entFrom = JSONQuery.FindE(gb.ByID) + // This is a sort of dummy variable, since it is not directly visible in the query, but it is definitely needed + eFName = fmt.Sprintf("eg%v", entFrom.ID) + } else { + // Should never be reachable + return nil, errors.New("Invalid connection type to relation") + } + + match = fmt.Sprintf("MATCH p%v = (%v:%v)-[:%v*%v..%v]-(%v:%v)\n", part.PartID, eFName, entFrom.Name, rel.Name, rel.Depth.Min, rel.Depth.Max, eTName, entTo.Name) + + eConstraints = "" + newLineStatement := "\tWHERE" + for _, v := range entFrom.Constraints { + eConstraints += fmt.Sprintf("%v %v \n", newLineStatement, *createConstraintBoolExpression(&v, eFName, false)) + newLineStatement = "\tAND" + } + for _, v := range entTo.Constraints { + eConstraints += fmt.Sprintf("%v %v \n", newLineStatement, *createConstraintBoolExpression(&v, eTName, false)) + newLineStatement = "\tAND" + } + + // Add an IN clause, connecting the relation to the output of the groupby + if rel.ToType == "groupBy" { + gb := JSONQuery.FindG(rel.ToID) + inConstraint := fmt.Sprintf("%v %v.%v IN %v_%v \n", newLineStatement, eTName, gb.ByAttribute, strings.Replace(eFName, "g", "", 1), gb.ByAttribute) + eConstraints += inConstraint + newLineStatement = "\tAND" + } + + if rel.FromType == "groupBy" { + gb := JSONQuery.FindG(rel.FromID) + inConstraint := fmt.Sprintf("%v %v.%v IN %v_%v \n", newLineStatement, eFName, gb.ByAttribute, strings.Replace(eFName, "g", "", 1), gb.ByAttribute) + eConstraints += inConstraint + } + } + + rName := fmt.Sprintf("r%v", part.QID) + unwind = fmt.Sprintf("UNWIND relationships(p%v) as %v \nWITH *\n", part.PartID, rName) + + rConstraints = "" + newLineStatement := "\tWHERE" + for _, v := range rel.Constraints { + rConstraints += fmt.Sprintf("%v %v \n", newLineStatement, *createConstraintBoolExpression(&v, rName, false)) + newLineStatement = "\tAND" + } + + retString := match + eConstraints + unwind + rConstraints + return &retString, nil + +} + +// createGroupByCypher takes the json and a query part, finds the group by and converts it into cypher +func createGroupByCypher(JSONQuery *entity.IncomingQueryJSON, part entity.QueryPart) (*string, error) { + groupBy := JSONQuery.FindG(part.QID) + + gName := fmt.Sprintf("%v_%v", groupBy.AppliedModifier, groupBy.GroupAttribute) + by := fmt.Sprintf("%v%v.%v", string(groupBy.ByType[0]), groupBy.ByID, groupBy.ByAttribute) + byName := strings.Replace(by, ".", "_", 1) + group := fmt.Sprintf("%v%v.%v", string(groupBy.GroupType[0]), groupBy.GroupID, groupBy.GroupAttribute) + + // If you do not use a *, then everything needs to be aliased + with := fmt.Sprintf("WITH %v AS %v, %v(%v) AS %v \n", by, byName, groupBy.AppliedModifier, group, gName) + + gConstraints := "" + newLineStatement := "\tWHERE" + for _, v := range groupBy.Constraints { + gConstraints += fmt.Sprintf("%v %v \n", newLineStatement, *createConstraintBoolExpression(&v, gName, 
true)) + newLineStatement = "\tAND" + } + + retString := with + gConstraints + return &retString, nil +} diff --git a/cypher/convertQueryNew.go b/cypher/convertQueryNew.go deleted file mode 100644 index 885e169177d3d998f732b3656e5bef8a977fef26..0000000000000000000000000000000000000000 --- a/cypher/convertQueryNew.go +++ /dev/null @@ -1,614 +0,0 @@ -package cypher - -import ( - "errors" - "fmt" - "strings" - - "git.science.uu.nl/graphpolaris/query-conversion/entity" -) - -// ConvertQuery2 -func (s *Service) ConvertQuery2(totalJSONQuery *entity.IncomingQueryJSON) (*string, error) { - var finalCypher *string - - queryJSON := totalJSONQuery - - query, rest, isRest := checkForQueryCluster(queryJSON) - - if isRest { - fmt.Println("Rest:") - fmt.Println(rest) - - // If something needs to be done with other query cluster, then add code here - } - - finalCypher, err := createCypher(query) - if err != nil { - return nil, err - } - - return finalCypher, nil -} - -// createCypher creates queries without the return statement, due to the possibility of multiple disconnected queries -func createCypher(JSONQuery *entity.IncomingQueryJSON) (*string, error) { - - hierarchy, err := createQueryHierarchy(JSONQuery) - if err != nil { - //TODO - return nil, errors.New("") - } - - cypher, err := formQuery(JSONQuery, hierarchy) - if err != nil { - return nil, errors.New("Creation of query Cypher failed") - } - - returnStatement, err := createReturnStatement(JSONQuery, hierarchy) - if err != nil { - return nil, errors.New("Creation of return Cypher failed") - } - - finalCypher := *cypher + *returnStatement - - return &finalCypher, nil -} - -// NOTE MOET MISSCHIEN ANDERS -// createReturnStatement creates the final return statement, connecting all previous cypher together -func createReturnStatement(JSONQuery *entity.IncomingQueryJSON, parts entity.Query) (*string, error) { - - var retStatement string - - // First check to see if the return is a table (due to a groupby at the end) or if it is nodelink data - numOfParts := len(parts) - if parts[numOfParts-1].QType == "groupBy" { - // Return is a table - groupBy := JSONQuery.FindG(parts[numOfParts-1].QID) - - gName := fmt.Sprintf("%v_%v", groupBy.AppliedModifier, groupBy.GroupAttribute) - by := fmt.Sprintf("%v%v.%v", string(groupBy.ByType[0]), groupBy.ByID, groupBy.ByAttribute) - byName := strings.Replace(by, ".", "_", 1) - - retStatement = fmt.Sprintf("RETURN %v, %v", byName, gName) - } else { - // Return is nodelink - // Loop through the parts of the query from back to front - retStatement = "RETURN " - lineStart := "" - for i := numOfParts - 1; i >= 0; i-- { - part := parts[i] - if part.QType == "relation" { - rel := JSONQuery.FindR(part.QID) - retStatement += fmt.Sprintf("%v r%v", lineStart, rel.ID) - lineStart = "," - - if rel.FromID != -1 { - if rel.FromType == "entity" { - - retStatement += fmt.Sprintf("%v e%v", lineStart, rel.FromID) - } else { - id := JSONQuery.FindG(rel.FromID).ByID - retStatement += fmt.Sprintf("%v eg%v", lineStart, id) - } - } - - if rel.ToID != -1 { - if rel.ToType == "entity" { - - retStatement += fmt.Sprintf("%v e%v", lineStart, rel.ToID) - } else { - id := JSONQuery.FindG(rel.ToID).ByID - retStatement += fmt.Sprintf("%v eg%v", lineStart, id) - } - } - } else if part.QType == "entity" { - retStatement += fmt.Sprintf("%v e%v", lineStart, part.QID) - break - - // Probably ends with a break to, since a single entity is always connected via an IN to a groupby? 
(maybe not in case of ONLY having an entity as the entire query) - } else { - // Then it is a groupby which must not be returned, thus the returns are done. - break - } - } - } - - retStatement = retStatement + "\n" + fmt.Sprintf("LIMIT %v", JSONQuery.Limit) - - return &retStatement, nil -} - -func createQueryHierarchy(JSONQuery *entity.IncomingQueryJSON) (entity.Query, error) { - // Pak de relations met de entities samen, en vorm groepjes van ent-rel-ent - // Als A-rel-B-rel-C, dan wordt dat A-rel-B en B-rel-C, waarbij BC na AB moet komen, dus BC _depends_ on AB - // Idee is dat je de hele lijst achterstevoren door kan lopen en dat je eerst een depend tegen komt en daarna het gene waarop gedepend wordt - - // ** HOUD NOG GEEN REKENING MET INS, MOET NOG WEL - // maar is wat lastiger omdat dat wat extra checks vergt - - var parts entity.Query - IDctr := 0 - - // Add them all to query parts - for _, rel := range JSONQuery.Relations { - part := entity.QueryPart{ - QType: "relation", - QID: rel.ID, - PartID: IDctr, - Dependencies: make([]int, 0), - } - parts = append(parts, part) - - IDctr++ - - } - - for _, gb := range JSONQuery.GroupBys { - part := entity.QueryPart{ - QType: "groupBy", - QID: gb.ID, - PartID: IDctr, - Dependencies: make([]int, 0), - } - parts = append(parts, part) - - IDctr++ - - } - - for _, ent := range JSONQuery.Entities { - - skip := true - for _, con := range ent.Constraints { - if con.InID != -1 { - skip = false - } - } - - if skip { - continue - } - - part := entity.QueryPart{ - QType: "entity", - QID: ent.ID, - PartID: IDctr, - Dependencies: make([]int, 0), - } - parts = append(parts, part) - - IDctr++ - } - - // Check dependencies in a nice O(n^2) - for _, rel := range JSONQuery.Relations { - if rel.FromID == -1 { - continue - } - - // Check the dependencies From - To - for _, rela := range JSONQuery.Relations { - if rela.ToID == -1 { - continue - } - - if rel.FromID == rela.ToID && rel.FromType == rela.ToType { - part := parts.Find(rel.ID, "relation") - part.Dependencies = append(part.Dependencies, parts.Find(rela.ID, "relation").PartID) - } - } - - if rel.ToID == -1 { - continue - } - - // Now for connections to group by's it doesnt matter if the GB is attached to the from or the to - // The GB always has priority - for _, gb := range JSONQuery.GroupBys { - if (rel.FromID == gb.ID && rel.FromType == "groupBy") || (rel.ToID == gb.ID && rel.ToType == "groupBy") { - part := parts.Find(rel.ID, "relation") - gbID := parts.Find(gb.ID, "groupBy").PartID - part.Dependencies = append(part.Dependencies, gbID) - } - } - } - - // Same trick for group by's - for _, gb := range JSONQuery.GroupBys { - for _, rela := range JSONQuery.Relations { - // Check if the gb is connected to the relation - if (gb.ByID == rela.ID && gb.ByType == "relation") || // Is the By connected to a relation - (gb.GroupID == rela.ID && gb.GroupType == "relation") || // is the Group connected to a relation - (gb.ByID == rela.FromID && gb.ByType == rela.FromType) || // Is the by connected to an entity connected to the "From" of a relation - (gb.ByID == rela.ToID && gb.ByType == rela.ToType) || // Is the by connected to an entity connected to the "To" of a relation - (gb.GroupID == rela.FromID && gb.GroupType == rela.FromType) || // Is the group connected to an entity connected to the "From" of arelation - (gb.GroupID == rela.ToID && gb.GroupType == rela.ToType) { // Is the group connected to an entity connected to the "To" of a relation - part := parts.Find(gb.ID, "groupBy") - part.Dependencies = 
append(part.Dependencies, parts.Find(rela.ID, "relation").PartID) - } - } - - // Not sure if this is even possible, but hey who knows - // Check to see if the gb is connected to another gb - for _, grb := range JSONQuery.GroupBys { - if gb.ID == grb.ID { - continue - } - - if (gb.GroupID == grb.ID && gb.GroupType == "groupBy") || (gb.ByID == grb.ID && gb.ByType == "groupBy") { - part := parts.Find(gb.ID, "groupBy") - part.Dependencies = append(part.Dependencies, parts.Find(grb.ID, "groupBy").PartID) - } - } - } - - for _, ent := range JSONQuery.Entities { - for _, con := range ent.Constraints { - if con.InID != -1 { - part := parts.Find(ent.ID, "entity") // Should always be groupBy - part.Dependencies = append(part.Dependencies, parts.Find(con.InID, con.InType).PartID) - } - } - - } - - // Now we have a directed graph, meaning we can use some topological sort (Kahn's algorithm) - var sortedQuery entity.Query - incomingEdges := make(map[int]int) - - // Set all to 0 - for _, p := range parts { - incomingEdges[p.PartID] = 0 - } - - // Count the incoming edges (dependencies) - for _, p := range parts { - for _, dp := range p.Dependencies { - incomingEdges[dp]++ - } - } - - for { // While there is a someone where incomingEdges[someone] == 0 - part := entity.QueryPart{PartID: -1} - // Select a node with no incoming edges - for ID, edges := range incomingEdges { - if edges == 0 { - part = *parts.SelectByID(ID) - } - } - - // Check to see if there are parts withouth incoming edges left - if part.PartID == -1 { - break - } - - // Remove it from the set - incomingEdges[part.PartID] = -1 - sortedQuery = append(sortedQuery, part) - - // Decrease incoming edges of other parts - for _, ID := range part.Dependencies { - incomingEdges[ID]-- - } - } - - // Now check for cycles in the graph - partRemaining := false - for _, edges := range incomingEdges { - if edges != -1 { - partRemaining = true - } - } - - if partRemaining { - // Somehow there was a cycle in the query, - return nil, errors.New("Cyclic query detected") - } - - // Reverse the list - retQuery := make([]entity.QueryPart, len(sortedQuery)) - for i := 0; i < len(sortedQuery); i++ { - retQuery[i] = sortedQuery[len(sortedQuery)-i-1] - } - - return retQuery, nil - - // ** HOUD NOG GEEN REKENING MET INS (van entities naar groupbys), MOET NOG WEL - // maar is wat lastiger omdat dat wat extra checks vergt, alle code voor ins moet ook in de volgende functies worden gestopt - - // Maak van alle rels en gb's een query part - // Loop door alle rels heen en kijk of hun FROM een TO is van een andere rel --> dependency - // Als de from of de to een Group by is, dan is ie ook direct dependent - // Als een GB aan een relation (A) zit, komen alle andere relations die aan A vastzitten ook eerst - - // ** Het geval van wanneer een entity vast zit aan een group by? via een IN? 
- - // Returned I guess een list van query parts met de dependencies naar beneden zodat je van boven naar beneden de lijst kan doorlopen - // om de query te maken -} - -func formQuery(JSONQuery *entity.IncomingQueryJSON, hierarchy entity.Query) (*string, error) { - - // ** NOTE: this does not create a return statement, that has yet to be made (which will also probably use the hierarchy) - - // Traverse through the hierarchy and for every entry create a part like: - // Match (deel 1) - // Constraints op entities - // Unwind as r0 - // With * - // Constraints op r0 - totalQuery := "" - - for _, entry := range hierarchy { - var cypher *string - var err error - - switch entry.QType { - case "relation": - cypher, err = createRelationCypher(JSONQuery, entry) - if err != nil { - return nil, err - } - break - case "groupBy": - cypher, err = createGroupByCypher(JSONQuery, entry) - if err != nil { - return nil, err - } - - break - case "entity": - // This would be in case of an IN or if there was only 1 entity in the query builder - cypher, err = createInCypher(JSONQuery, entry) - if err != nil { - return nil, err - } - - break - default: - // Should never be reached - return nil, errors.New("Invalid query pill type detected") - } - - totalQuery += *cypher - } - - return &totalQuery, nil -} - -func createInCypher(JSONQuery *entity.IncomingQueryJSON, part entity.QueryPart) (*string, error) { - ent := JSONQuery.FindE(part.QID) - eName := fmt.Sprintf("e%v", ent.ID) - - match := fmt.Sprintf("MATCH (%v:%v)\n", eName, ent.Name) - eConstraints := "" - newLineStatement := "\tWHERE" - - for _, con := range ent.Constraints { - if con.InID != -1 { - gby := JSONQuery.FindG(con.InID) // Because this could only be on a groupby - byName := fmt.Sprintf("%v%v", string(gby.ByType[0]), gby.ByID) - eConstraints += fmt.Sprintf("%v %v.%v IN %v_%v\n", newLineStatement, eName, con.Attribute, byName, gby.ByAttribute) - newLineStatement = "\tAND" - } - } - - for _, v := range ent.Constraints { - if v.InID != -1 { - continue - } - eConstraints += fmt.Sprintf("%v %v \n", newLineStatement, *createConstraintBoolExpression(&v, eName, false)) - } - - with := "WITH *\n" - retStatement := match + eConstraints + with - return &retStatement, nil - - // Should be able to test multiple IN statements from one entity, not sure if that will ever happen (maybe needs to be tested) -} - -// createRelationCypher takes the json and a query part, finds the necessary entities and converts it into cypher -func createRelationCypher(JSONQuery *entity.IncomingQueryJSON, part entity.QueryPart) (*string, error) { - - rel := JSONQuery.FindR(part.QID) - - if (rel.FromID == -1) && (rel.ToID == -1) { - // Now there is only a relation, which we do not allow - return nil, errors.New("Relation only queries are not supported") - } - - var match, eConstraints, unwind, rConstraints string - - // There is some duplicate code here below that could be omitted with extra if-statements, but that is something to do - // for a later time. 
Since this way it is easier to understand the flow of the code - if rel.ToID == -1 { - // There is no To, only a From - var eName string - var ent *entity.QueryEntityStruct - - if rel.FromType == "entity" { - - ent = JSONQuery.FindE(rel.ToID) - eName = fmt.Sprintf("e%v", ent.ID) - - } else if rel.FromType == "groupBy" { - gb := JSONQuery.FindG(rel.FromID) - if gb.ByType == "relation" { - return nil, errors.New("Invalid query: cannot connect a relation to a group by that groups by another relation") - } - - ent = JSONQuery.FindE(gb.ByID) - // this is a sort of dummy variable, since it is not directly visible in the query, but it is definitely needed - eName = fmt.Sprintf("e%v", ent.ID) - } else { - // Should never be reachable - return nil, errors.New("Invalid connection type to relation") - } - - match = fmt.Sprintf("MATCH p%v = (%v:%v)-[:%v*%v..%v]-()\n", part.PartID, eName, ent.Name, rel.Name, rel.Depth.Min, rel.Depth.Max) - - eConstraints = "" - newLineStatement := "\tWHERE" - for _, v := range ent.Constraints { - eConstraints += fmt.Sprintf("%v %v \n", newLineStatement, *createConstraintBoolExpression(&v, eName, false)) - newLineStatement = "\tAND" - } - - // Add an IN clause, connecting the relation to the output of the groupby - if rel.FromType == "groupBy" { - gb := JSONQuery.FindG(rel.FromID) - inConstraint := fmt.Sprintf("%v %v.%v IN %v_%v \n", newLineStatement, eName, gb.ByAttribute, gb.AppliedModifier, gb.ByAttribute) - eConstraints += inConstraint - } - - } else if rel.FromID == -1 { - var eName string - var ent *entity.QueryEntityStruct - - if rel.ToType == "entity" { - ent = JSONQuery.FindE(rel.ToID) - eName = fmt.Sprintf("e%v", ent.ID) - - } else if rel.ToType == "groupBy" { - gb := JSONQuery.FindG(rel.ToID) - if gb.ByType == "relation" { - return nil, errors.New("Invalid query: cannot connect a relation to a group by that groups by another relation") - } - - ent = JSONQuery.FindE(gb.ByID) - // this is a sort of dummy variable, since it is not directly visible in the query, but it is definitely needed - eName = fmt.Sprintf("e%v", ent.ID) - } else { - // Should never be reachable - return nil, errors.New("Invalid connection type to relation") - } - - match = fmt.Sprintf("MATCH p%v = ()-[:%v*%v..%v]-(%v:%v)\n", part.PartID, rel.Name, rel.Depth.Min, rel.Depth.Max, eName, ent.Name) - - eConstraints = "" - newLineStatement := "\tWHERE" - for _, v := range ent.Constraints { - eConstraints += fmt.Sprintf("%v %v \n", newLineStatement, *createConstraintBoolExpression(&v, eName, false)) - newLineStatement = "\tAND" - } - - // Add an IN clause, connecting the relation to the output of the groupby - if rel.ToType == "groupBy" { - gb := JSONQuery.FindG(rel.ToID) - inConstraint := fmt.Sprintf("%v %v.%v IN %v_%v \n", newLineStatement, eName, gb.ByAttribute, gb.AppliedModifier, gb.ByAttribute) - eConstraints += inConstraint - } - - } else { - var eTName string - var entFrom *entity.QueryEntityStruct - var eFName string - var entTo *entity.QueryEntityStruct - - // Check of what type the To is - if rel.ToType == "entity" { - entTo = JSONQuery.FindE(rel.ToID) - eTName = fmt.Sprintf("e%v", entTo.ID) - - } else if rel.ToType == "groupBy" { - gb := JSONQuery.FindG(rel.ToID) - if gb.ByType == "relation" { - return nil, errors.New("Invalid query: cannot connect a relation to a group by that groups by another relation") - } - - entTo = JSONQuery.FindE(gb.ByID) - // this is a sort of dummy variable, since it is not directly visible in the query, but it is definitely needed - eTName = 
fmt.Sprintf("e%v", entTo.ID) - } else { - // Should never be reachable - return nil, errors.New("Invalid connection type to relation") - } - - // Check of what type the From is - if rel.FromType == "entity" { - - entFrom = JSONQuery.FindE(rel.FromID) - eFName = fmt.Sprintf("e%v", entFrom.ID) - - } else if rel.FromType == "groupBy" { - gb := JSONQuery.FindG(rel.FromID) - if gb.ByType == "relation" { - return nil, errors.New("Invalid query: cannot connect a relation to a group by that groups by another relation") - } - - entFrom = JSONQuery.FindE(gb.ByID) - // this is a sort of dummy variable, since it is not directly visible in the query, but it is definitely needed - eFName = fmt.Sprintf("eg%v", entFrom.ID) - } else { - // Should never be reachable - return nil, errors.New("Invalid connection type to relation") - } - - match = fmt.Sprintf("MATCH p%v = (%v:%v)-[:%v*%v..%v]-(%v:%v)\n", part.PartID, eFName, entFrom.Name, rel.Name, rel.Depth.Min, rel.Depth.Max, eTName, entTo.Name) - - eConstraints = "" - newLineStatement := "\tWHERE" - for _, v := range entFrom.Constraints { - eConstraints += fmt.Sprintf("%v %v \n", newLineStatement, *createConstraintBoolExpression(&v, eFName, false)) - newLineStatement = "\tAND" - } - for _, v := range entTo.Constraints { - eConstraints += fmt.Sprintf("%v %v \n", newLineStatement, *createConstraintBoolExpression(&v, eTName, false)) - newLineStatement = "\tAND" - } - - // Add an IN clause, connecting the relation to the output of the groupby - if rel.ToType == "groupBy" { - gb := JSONQuery.FindG(rel.ToID) - inConstraint := fmt.Sprintf("%v %v.%v IN %v_%v \n", newLineStatement, eTName, gb.ByAttribute, strings.Replace(eFName, "g", "", 1), gb.ByAttribute) - eConstraints += inConstraint - newLineStatement = "\tAND" - } - - if rel.FromType == "groupBy" { - gb := JSONQuery.FindG(rel.FromID) - inConstraint := fmt.Sprintf("%v %v.%v IN %v_%v \n", newLineStatement, eFName, gb.ByAttribute, strings.Replace(eFName, "g", "", 1), gb.ByAttribute) - eConstraints += inConstraint - } - } - - rName := fmt.Sprintf("r%v", part.QID) - unwind = fmt.Sprintf("UNWIND relationships(p%v) as %v \nWITH *\n", part.PartID, rName) - - rConstraints = "" - newLineStatement := "\tWHERE" - for _, v := range rel.Constraints { - rConstraints += fmt.Sprintf("%v %v \n", newLineStatement, *createConstraintBoolExpression(&v, rName, false)) - newLineStatement = "\tAND" - } - - retString := match + eConstraints + unwind + rConstraints - return &retString, nil - -} - -// createGroupByCypher takes the json and a query part, finds the group by and converts it into cypher -func createGroupByCypher(JSONQuery *entity.IncomingQueryJSON, part entity.QueryPart) (*string, error) { - groupBy := JSONQuery.FindG(part.QID) - - gName := fmt.Sprintf("%v_%v", groupBy.AppliedModifier, groupBy.GroupAttribute) - by := fmt.Sprintf("%v%v.%v", string(groupBy.ByType[0]), groupBy.ByID, groupBy.ByAttribute) - byName := strings.Replace(by, ".", "_", 1) - group := fmt.Sprintf("%v%v.%v", string(groupBy.GroupType[0]), groupBy.GroupID, groupBy.GroupAttribute) - - // If you do not use a *, then everything needs to be aliased - with := fmt.Sprintf("WITH %v AS %v, %v(%v) AS %v \n", by, byName, groupBy.AppliedModifier, group, gName) - - // ** HOW TO ADRESS THE AGGREGATED VALUE? 
- gConstraints := "" - newLineStatement := "\tWHERE" - for _, v := range groupBy.Constraints { - gConstraints += fmt.Sprintf("%v %v \n", newLineStatement, *createConstraintBoolExpression(&v, gName, true)) - newLineStatement = "\tAND" - } - - retString := with + gConstraints - return &retString, nil -} diff --git a/cypher/convertQueryNew_test.go b/cypher/convertQueryNew_test.go deleted file mode 100644 index 9a07ed8b2007a310ce9c29bdbe4cc601e73d67d7..0000000000000000000000000000000000000000 --- a/cypher/convertQueryNew_test.go +++ /dev/null @@ -1,650 +0,0 @@ -package cypher - -import ( - "encoding/json" - "fmt" - "testing" - - "git.science.uu.nl/graphpolaris/query-conversion/entity" -) - -func Test1(t *testing.T) { - // Works, but, the AVG function is applied to a string, so that doesnt work, but the translation does :D - query := []byte(`{ - "databaseName": "Movies3", - "return": { - "entities": [ - 0, - 1, - 2 - ], - "relations": [ - 0, - 1 - ], - "groupBys": [ - 0 - ] - }, - "entities": [ - { - "id": 0, - "name": "Person", - "constraints": [ - { - "attribute": "name", - "value": "Raymond Campbell", - "dataType": "string", - "matchType": "NEQ", - "inID": -1, - "inType": "" - } - ] - }, - { - "id": 1, - "name": "Movie", - "constraints": [] - }, - { - "id": 2, - "name": "Genre", - "constraints": [] - } - ], - "relations": [ - { - "id": 0, - "name": "DIRECTED", - "depth": { - "min": 1, - "max": 1 - }, - "fromType": "entity", - "fromID": 0, - "toType": "entity", - "toID": 1, - "constraints": [] - }, - { - "id": 1, - "name": "IN_GENRE", - "depth": { - "min": 1, - "max": 1 - }, - "fromType": "groupBy", - "fromID": 0, - "toType": "entity", - "toID": 2, - "constraints": [] - } - ], - "groupBys": [ - { - "id": 0, - "groupType": "entity", - "groupID": 0, - "groupAttribute": "bornIn", - "byType": "entity", - "byID": 1, - "byAttribute": "imdbId", - "appliedModifier": "AVG", - "relationID": 0, - "constraints": [] - } - ], - "machineLearning": [], - "limit": 5000 - }`) - - var JSONQuery entity.IncomingQueryJSON - json.Unmarshal(query, &JSONQuery) - - // hierarchy, err := createQueryHierarchy(&JSONQuery) - // if err != nil { - // fmt.Println(err) - // } - - // fmt.Println(hierarchy) - - s := NewService() - cypher, err := s.ConvertQuery2(&JSONQuery) - if err != nil { - fmt.Println(err) - } - - fmt.Println(*cypher) - - t.Fail() - -} -func Test2(t *testing.T) { - // Works, but, the AVG function is applied to a string, so that doesnt work, but the translation does :D - query := []byte(`{ - "databaseName": "TweedeKamer", - "return": { - "entities": [ - 0, - 1, - 2 - ], - "relations": [ - 0, - 1 - ] - }, - "entities": [ - { - "name": "parliament", - "ID": 0, - "constraints": [ - { - "attribute": "name", - "value": "Geert", - "dataType": "string", - "matchType": "contains" - } - ] - }, - { - "name": "parties", - "ID": 1, - "constraints": [] - }, - { - "name": "resolutions", - "ID": 2, - "constraints": [] - } - ], - "relations": [ - { - "ID": 0, - "name": "member_of", - "depth": { - "min": 1, - "max": 1 - }, - "fromType": "entity", - "fromID": 0, - "toType": "entity", - "toID": 1, - "constraints": [] - }, - { - "ID": 1, - "name": "submits", - "depth": { - "min": 1, - "max": 1 - }, - "fromType": "entity", - "fromID": 0, - "toType": "entity", - "toID": 2, - "constraints": [] - } - ], - "groupBys": [], - "machineLearning": [], - "limit": 5000 - } - `) - - var JSONQuery entity.IncomingQueryJSON - json.Unmarshal(query, &JSONQuery) - - s := NewService() - cypher, err := s.ConvertQuery2(&JSONQuery) - if err != nil { - 
fmt.Println(err) - } - - fmt.Println(*cypher) - - t.Fail() - -} -func Test3(t *testing.T) { - // Works, but, the AVG function is applied to a string, so that doesnt work, but the translation does :D - query := []byte(`{ - "databaseName": "TweedeKamer", - "return": { - "entities": [ - 0, - 1, - 2, - 3, - 4 - ], - "relations": [ - 0, - 1, - 2, - 3 - ] - }, - "entities": [ - { - "name": "parliament", - "ID": 0, - "constraints": [ - { - "attribute": "name", - "value": "A", - "dataType": "string", - "matchType": "contains" - } - ] - }, - { - "name": "parties", - "ID": 1, - "constraints": [ - { - "attribute": "seats", - "value": "10", - "dataType": "int", - "matchType": "LT" - } - ] - }, - { - "name": "resolutions", - "ID": 2, - "constraints": [ - { - "attribute": "date", - "value": "mei", - "dataType": "string", - "matchType": "contains" - } - ] - }, - { - "name": "parliament", - "ID": 3, - "constraints": [] - }, - { - "name": "parties", - "ID": 4, - "constraints": [ - { - "attribute": "name", - "value": "Volkspartij voor Vrijheid en Democratie", - "dataType": "string", - "matchType": "==" - } - ] - } - ], - "relations": [ - { - "ID": 0, - "name": "member_of", - "depth": { - "min": 1, - "max": 1 - }, - "fromType": "entity", - "fromID": 0, - "toType": "entity", - "toID": 1, - "constraints": [] - }, - { - "ID": 1, - "name": "submits", - "depth": { - "min": 1, - "max": 1 - }, - "fromType": "entity", - "fromID": 0, - "toType": "entity", - "toID": 2, - "constraints": [] - }, - { - "ID": 2, - "name": "submits", - "depth": { - "min": 1, - "max": 1 - }, - "fromType": "entity", - "fromID": 3, - "toType": "entity", - "toID": 2, - "constraints": [] - }, - { - "ID": 3, - "name": "member_of", - "depth": { - "min": 1, - "max": 1 - }, - "fromType": "entity", - "fromID": 3, - "toType": "entity", - "toID": 4, - "constraints": [] - } - ], - "groupBys": [], - "machineLearning": [], - "limit": 5000 - } - - `) - - var JSONQuery entity.IncomingQueryJSON - json.Unmarshal(query, &JSONQuery) - - s := NewService() - cypher, err := s.ConvertQuery2(&JSONQuery) - if err != nil { - fmt.Println(err) - } - - fmt.Println(*cypher) - - t.Fail() - -} - -func Test4(t *testing.T) { - // Works, but, the AVG function is applied to a string, so that doesnt work, but the translation does :D - query := []byte(`{ - "return": { - "entities": [ - 0, - 1, - 2, - 3, - 4, - 5, - 6, - 7 - ], - "relations": [ - 0, - 1, - 2, - 3, - 4, - 5, - 6 - ] - }, - "entities": [ - { - "name": "parliament", - "ID": 0, - "constraints": [ - { - "attribute": "name", - "value": "Geert", - "dataType": "string", - "matchType": "contains" - } - ] - }, - { - "name": "commissions", - "ID": 1, - "constraints": [] - }, - { - "name": "parliament", - "ID": 2, - "constraints": [] - }, - { - "name": "parties", - "ID": 3, - "constraints": [ - { - "attribute": "seats", - "value": "10", - "dataType": "int", - "matchType": "LT" - } - ] - }, - { - "name": "resolutions", - "ID": 4, - "constraints": [ - { - "attribute": "date", - "value": "mei", - "dataType": "string", - "matchType": "contains" - } - ] - }, - { - "name": "resolutions", - "ID": 5, - "constraints": [] - }, - { - "name": "parties", - "ID": 6, - "constraints": [] - } - , - { - "name": "parliament", - "ID": 7, - "constraints": [] - } - - ], - "groupBys": [], - "relations": [ - { - "ID": 0, - "name": "part_of", - "depth": { - "min": 1, - "max": 1 - }, - "fromType": "entity", - "fromId": 0, - "toType": "entity", - "toID": 1, - "constraints": [] - }, - { - "ID": 1, - "name": "part_of", - "depth": { - "min": 1, - "max": 
1 - }, - "fromType": "entity", - "fromId": 2, - "toType": "entity", - "toID": 1, - "constraints": [] - }, - { - "ID": 2, - "name": "member_of", - "depth": { - "min": 1, - "max": 1 - }, - "fromType": "entity", - "fromId": 2, - "toType": "entity", - "toID": 3, - "constraints": [] - }, - { - "ID": 3, - "name": "submits", - "depth": { - "min": 1, - "max": 1 - }, - "fromType": "entity", - "fromId": 2, - "toType": "entity", - "toID": 4, - "constraints": [] - }, - { - "ID": 4, - "name": "submits", - "depth": { - "min": 1, - "max": 1 - }, - "fromType": "entity", - "fromId": 0, - "toType": "entity", - "toID": 5, - "constraints": [] - }, - { - "ID": 5, - "name": "member_of", - "depth": { - "min": 1, - "max": 1 - }, - "fromType": "entity", - "fromId": 0, - "toType": "entity", - "toID": 6, - "constraints": [] - } - , - { - "ID": 6, - "name": "member_of", - "depth": { - "min": 1, - "max": 1 - }, - "fromType": "entity", - "fromId": 7, - "toType": "entity", - "toID": 6, - "constraints": [] - } - ], - "machineLearning": [], - "limit": 5000 - } - - - `) - - var JSONQuery entity.IncomingQueryJSON - json.Unmarshal(query, &JSONQuery) - - s := NewService() - cypher, err := s.ConvertQuery2(&JSONQuery) - if err != nil { - fmt.Println(err) - } - - fmt.Println(*cypher) - - t.Fail() - -} -func Test5(t *testing.T) { - // Works, but, the AVG function is applied to a string, so that doesnt work, but the translation does :D - query := []byte(`{ - "databaseName": "Movies3", - "entities": [ - { - "id": 0, - "name": "Person", - "constraints": [] - }, - { - "id": 1, - "name": "Movie", - "constraints": [] - }, - { - "id": 2, - "name": "Person", - "constraints": [ - { - "attribute": "bornIn", - "value": "", - "dataType": "string", - "matchType": "", - "inID": 0, - "inType": "groupBy" - } - ] - } - ], - "relations": [ - { - "id": 0, - "name": "ACTED_IN", - "depth": { - "min": 1, - "max": 1 - }, - "fromType": "entity", - "fromID": 0, - "toType": "entity", - "toID": 1, - "constraints": [] - } - ], - "groupBys": [ - { - "id": 0, - "groupType": "entity", - "groupID": 1, - "groupAttribute": "imdbRating", - "byType": "entity", - "byID": 0, - "byAttribute": "bornIn", - "appliedModifier": "AVG", - "relationID": 0, - "constraints": [ - { - "attribute": "imdbRating", - "value": "7.5", - "dataType": "int", - "matchType": "GT", - "inID": -1, - "inType": "" - } - ] - } - ], - "machineLearning": [], - "limit": 5000 - } - `) - - var JSONQuery entity.IncomingQueryJSON - json.Unmarshal(query, &JSONQuery) - - s := NewService() - cypher, err := s.ConvertQuery2(&JSONQuery) - if err != nil { - fmt.Println(err) - } - - fmt.Println(*cypher) - - t.Fail() - -} diff --git a/cypher/convertQuery_test.go b/cypher/convertQuery_test.go index 028455e58b73f596d7f8cda6254bfe9c9ba5e02b..f1f148a6afc458a5b0c331937dc8bf6a6ee8f800 100644 --- a/cypher/convertQuery_test.go +++ b/cypher/convertQuery_test.go @@ -2,7 +2,7 @@ package cypher import ( "encoding/json" - "errors" + "fmt" "strings" "testing" @@ -10,885 +10,746 @@ import ( "github.com/stretchr/testify/assert" ) -func TestEmptyQueryConversion(t *testing.T) { - // Setup for test - // Create query conversion service - service := NewService() - - query := []byte(`{ - "return": { - "entities": [], - "relations": [] - }, - "entities": [], - "relations": [], - "limit": 5000 - }`) - - // Unmarshall the incoming message into an IncomingJSONQuery object - var JSONQuery entity.IncomingQueryJSON - json.Unmarshal(query, &JSONQuery) - - convertedResult, err := service.ConvertQuery(&JSONQuery) - - // Assert that there is no 
error - assert.NoError(t, err) - - // Assert that the result and the expected result are the same - correctConvertedResult := `` - assert.Equal(t, correctConvertedResult, *convertedResult) -} - -func TestEntityOneAttributeQuery(t *testing.T) { - // Setup for test - // Create query conversion service - service := NewService() +// All these tests test the entire flow +func Test1(t *testing.T) { + // Works, but, the AVG function is applied to a string, so that doesnt work, but the translation does :D query := []byte(`{ + "databaseName": "Movies3", "return": { "entities": [ - 0 + 0, + 1, + 2 + ], + "relations": [ + 0, + 1 ], - "relations": [] + "groupBys": [ + 0 + ] }, "entities": [ { - "type": "airports", + "id": 0, + "name": "Person", "constraints": [ - { - "attribute": "state", - "value": "HI", - "dataType": "string", - "matchType": "exact" - } + { + "attribute": "name", + "value": "Raymond Campbell", + "dataType": "string", + "matchType": "NEQ", + "inID": -1, + "inType": "" + } ] + }, + { + "id": 1, + "name": "Movie", + "constraints": [] + }, + { + "id": 2, + "name": "Genre", + "constraints": [] + } + ], + "relations": [ + { + "id": 0, + "name": "DIRECTED", + "depth": { + "min": 1, + "max": 1 + }, + "fromType": "entity", + "fromID": 0, + "toType": "entity", + "toID": 1, + "constraints": [] + }, + { + "id": 1, + "name": "IN_GENRE", + "depth": { + "min": 1, + "max": 1 + }, + "fromType": "groupBy", + "fromID": 0, + "toType": "entity", + "toID": 2, + "constraints": [] + } + ], + "groupBys": [ + { + "id": 0, + "groupType": "entity", + "groupID": 0, + "groupAttribute": "bornIn", + "byType": "entity", + "byID": 1, + "byAttribute": "imdbId", + "appliedModifier": "AVG", + "relationID": 0, + "constraints": [] } ], - "relations": [], + "machineLearning": [], "limit": 5000 }`) - // Unmarshall the incoming message into an IncomingJSONQuery object var JSONQuery entity.IncomingQueryJSON json.Unmarshal(query, &JSONQuery) - convertedResult, err := service.ConvertQuery(&JSONQuery) - - // Assert that there is no error - assert.NoError(t, err) + s := NewService() + cypher, err := s.ConvertQuery(&JSONQuery) + if err != nil { + fmt.Println(err) + } + + answer := `MATCH p0 = (e0:Person)-[:DIRECTED*1..1]-(e1:Movie) + WHERE e0.name <> "Raymond Campbell" + UNWIND relationships(p0) as r0 + WITH * + WITH e1.imdbId AS e1_imdbId, AVG(e0.bornIn) AS AVG_bornIn + MATCH p1 = (eg1:Movie)-[:IN_GENRE*1..1]-(e2:Genre) + WHERE eg1.imdbId IN e1_imdbId + UNWIND relationships(p1) as r1 + WITH * + RETURN r1, eg1, e2 + LIMIT 5000` + + fmt.Println(*cypher) + trimmedCypher := strings.Replace(*cypher, "\n", "", -1) + trimmedCypher = strings.Replace(trimmedCypher, "\t", "", -1) + + trimmedAnswer := strings.Replace(answer, "\n", "", -1) + trimmedAnswer = strings.Replace(trimmedAnswer, "\t", "", -1) + + fmt.Println(*cypher) + assert.Equal(t, trimmedAnswer, trimmedAnswer) - // Assert that the result and the expected result are the same - correctConvertedResult := `MATCH (n0:airports)WHERE n0.state = "HI" RETURN n0` - cleanedResult := strings.ReplaceAll(*convertedResult, "\n", "") - cleanedResult = strings.ReplaceAll(cleanedResult, "\t", "") - assert.Equal(t, correctConvertedResult, cleanedResult) } - -func TestRelationWithConstraint(t *testing.T) { - // Setup for test - // Create query conversion service - service := NewService() - +func Test2(t *testing.T) { + // Works, but, the AVG function is applied to a string, so that doesnt work, but the translation does :D query := []byte(`{ + "databaseName": "TweedeKamer", "return": { "entities": 
[ - 0 + 0, + 1, + 2 ], "relations": [ - 0 + 0, + 1 ] }, "entities": [ { - "type": "airports", + "name": "parliament", + "ID": 0, "constraints": [ { - "attribute": "state", - "value": "HI", + "attribute": "name", + "value": "Geert", "dataType": "string", - "matchType": "exact" + "matchType": "contains", + "inID": -1, + "inType": "" } ] + }, + { + "name": "parties", + "ID": 1, + "constraints": [] + }, + { + "name": "resolutions", + "ID": 2, + "constraints": [] } ], "relations": [ { - "type": "flights", + "ID": 0, + "name": "member_of", "depth": { "min": 1, "max": 1 }, - "entityFrom": 0, - "entityTo": -1, - "constraints": [ - { - "attribute": "Day", - "value": "15", - "dataType": "int", - "matchType": "EQ" - } - ] + "fromType": "entity", + "fromID": 0, + "toType": "entity", + "toID": 1, + "constraints": [] + }, + { + "ID": 1, + "name": "submits", + "depth": { + "min": 1, + "max": 1 + }, + "fromType": "entity", + "fromID": 0, + "toType": "entity", + "toID": 2, + "constraints": [] } ], + "groupBys": [], + "machineLearning": [], "limit": 5000 - }`) + } + `) - // Unmarshall the incoming message into an IncomingJSONQuery object var JSONQuery entity.IncomingQueryJSON json.Unmarshal(query, &JSONQuery) - convertedResult, err := service.ConvertQuery(&JSONQuery) + s := NewService() + cypher, err := s.ConvertQuery(&JSONQuery) + if err != nil { + fmt.Println(err) + } - // Assert that there is no error - assert.NoError(t, err) + answer := `MATCH p0 = (e0:parliament)-[:member_of*1..1]-(e1:parties) + WHERE e0.name CONTAINS "%Geert%" + UNWIND relationships(p0) as r0 + WITH * + MATCH p1 = (e0:parliament)-[:submits*1..1]-(e2:resolutions) + WHERE e0.name CONTAINS "%Geert%" + UNWIND relationships(p1) as r1 + WITH * + RETURN r1, e0, e2, r0, e0, e1 + LIMIT 5000` - // Assert that the result and the expected result are the same - correctConvertedResult := `MATCH (n0:airports)WHERE n0.state = "HI" MATCH p0 = (n0)-[r0:flights*1..1]->()WHERE r0.Day = 15 RETURN n0,p0;` - cleanedResult := strings.ReplaceAll(*convertedResult, "\n", "") - cleanedResult = strings.ReplaceAll(cleanedResult, "\t", "") - assert.Equal(t, correctConvertedResult, cleanedResult) -} + fmt.Println(*cypher) -// func TestModifierCountEntity(t *testing.T) { -// // Setup for test -// // Create query conversion service -// service := NewService() - -// query := []byte(`{ -// "return": { -// "entities": [ -// 0 -// ], -// "relations": [] -// }, -// "entities": [ -// { -// "type": "airports", -// "constraints": [ -// { -// "attribute": "state", -// "value": "HI", -// "dataType": "string", -// "matchType": "exact" -// } -// ] -// } -// ], -// "relations": [], -// "limit": 5000, -// "modifiers": [ -// { -// "type": "COUNT", -// "selectedType": "entity", -// "id": 0, -// "attributeIndex": -1 -// } -// ] -// }`) - -// // Unmarshall the incoming message into an IncomingJSONQuery object -// var JSONQuery entity.IncomingQueryJSON -// json.Unmarshal(query, &JSONQuery) - -// convertedResult, err := service.ConvertQuery(&JSONQuery) - -// // Assert that there is no error -// assert.NoError(t, err) - -// // Assert that the result and the expected result are the same -// correctConvertedResult := `LET n0 = (FOR x IN airports FILTER x.state == "HI" RETURN x)RETURN LENGTH (n0)` -// cleanedResult := strings.ReplaceAll(*convertedResult, "\n", "") -// cleanedResult = strings.ReplaceAll(cleanedResult, "\t", "") -// assert.Equal(t, correctConvertedResult, cleanedResult) -// } - -// func TestModifierCountEntityAttribute(t *testing.T) { -// // Setup for test -// // Create query 
conversion service -// service := NewService() - -// query := []byte(`{ -// "return": { -// "entities": [ -// 0 -// ], -// "relations": [] -// }, -// "entities": [ -// { -// "type": "airports", -// "constraints": [ -// { -// "attribute": "state", -// "value": "HI", -// "dataType": "string", -// "matchType": "exact" -// } -// ] -// } -// ], -// "relations": [], -// "limit": 5000, -// "modifiers": [ -// { -// "type": "SUM", -// "selectedType": "entity", -// "id": 0, -// "attributeIndex": 0 -// } -// ] -// }`) - -// // Unmarshall the incoming message into an IncomingJSONQuery object -// var JSONQuery entity.IncomingQueryJSON -// json.Unmarshal(query, &JSONQuery) - -// convertedResult, err := service.ConvertQuery(&JSONQuery) - -// // Assert that there is no error -// assert.NoError(t, err) - -// // Assert that the result and the expected result are the same -// correctConvertedResult := `LET n0 = (FOR x IN airports FILTER x.state == "HI" RETURN x)RETURN SUM (n0[*].state)` -// cleanedResult := strings.ReplaceAll(*convertedResult, "\n", "") -// cleanedResult = strings.ReplaceAll(cleanedResult, "\t", "") -// assert.Equal(t, correctConvertedResult, cleanedResult) -// } - -// func TestModifierCountRelation(t *testing.T) { -// // Setup for test -// // Create query conversion service -// service := NewService() - -// query := []byte(`{ -// "return": { -// "entities": [ -// 0 -// ], -// "relations": [ -// 0 -// ] -// }, -// "entities": [ -// { -// "type": "airports", -// "constraints": [ -// { -// "attribute": "state", -// "value": "HI", -// "dataType": "string", -// "matchType": "exact" -// } -// ] -// } -// ], -// "relations": [ -// { -// "type": "flights", -// "depth": { -// "min": 1, -// "max": 1 -// }, -// "entityFrom": 0, -// "entityTo": -1, -// "constraints": [ -// { -// "attribute": "Day", -// "value": "15", -// "dataType": "int", -// "matchType": "EQ" -// } -// ] -// } -// ], -// "limit": 5000, -// "modifiers": [ -// { -// "type": "COUNT", -// "selectedType": "relation", -// "id": 0, -// "attributeIndex": -1 -// } -// ] -// }`) - -// // Unmarshall the incoming message into an IncomingJSONQuery object -// var JSONQuery entity.IncomingQueryJSON -// json.Unmarshal(query, &JSONQuery) - -// convertedResult, err := service.ConvertQuery(&JSONQuery) - -// // Assert that there is no error -// assert.NoError(t, err) - -// // Assert that the result and the expected result are the same -// correctConvertedResult := `LET n0 = (FOR x IN airports FILTER x.state == "HI" RETURN x)LET r0 = (FOR x IN n0 FOR v, e, p IN 1..1 OUTBOUND x flights OPTIONS { uniqueEdges: "path" }FILTER p.edges[*].Day ALL == 15 RETURN DISTINCT p )RETURN LENGTH (unique(r0[*].edges[**]))` -// cleanedResult := strings.ReplaceAll(*convertedResult, "\n", "") -// cleanedResult = strings.ReplaceAll(cleanedResult, "\t", "") -// assert.Equal(t, correctConvertedResult, cleanedResult) -// } -// func TestModifierCountRelationAttribute(t *testing.T) { -// // Setup for test -// // Create query conversion service -// service := NewService() - -// query := []byte(`{ -// "return": { -// "entities": [ -// 0 -// ], -// "relations": [ -// 0 -// ] -// }, -// "entities": [ -// { -// "type": "airports", -// "constraints": [ -// { -// "attribute": "state", -// "value": "HI", -// "dataType": "string", -// "matchType": "exact" -// } -// ] -// } -// ], -// "relations": [ -// { -// "type": "flights", -// "depth": { -// "min": 1, -// "max": 1 -// }, -// "entityFrom": 0, -// "entityTo": -1, -// "constraints": [ -// { -// "attribute": "Day", -// "value": "15", -// 
"dataType": "int", -// "matchType": "EQ" -// } -// ] -// } -// ], -// "limit": 5000, -// "modifiers": [ -// { -// "type": "AVG", -// "selectedType": "relation", -// "id": 0, -// "attributeIndex": 0 -// } -// ] -// }`) - -// // Unmarshall the incoming message into an IncomingJSONQuery object -// var JSONQuery entity.IncomingQueryJSON -// json.Unmarshal(query, &JSONQuery) - -// convertedResult, err := service.ConvertQuery(&JSONQuery) - -// // Assert that there is no error -// assert.NoError(t, err) - -// // Assert that the result and the expected result are the same -// correctConvertedResult := `LET n0 = (FOR x IN airports FILTER x.state == "HI" RETURN x)LET r0 = (FOR x IN n0 FOR v, e, p IN 1..1 OUTBOUND x flights OPTIONS { uniqueEdges: "path" }FILTER p.edges[*].Day ALL == 15 RETURN DISTINCT p )RETURN AVG (r0[*].edges[**].Day)` -// cleanedResult := strings.ReplaceAll(*convertedResult, "\n", "") -// cleanedResult = strings.ReplaceAll(cleanedResult, "\t", "") -// assert.Equal(t, correctConvertedResult, cleanedResult) -// } - -func TestRelationWithInOutConstraint(t *testing.T) { - // Setup for test - // Create query conversion service - service := NewService() + trimmedCypher := strings.Replace(*cypher, "\n", "", -1) + trimmedCypher = strings.Replace(trimmedCypher, "\t", "", -1) + trimmedAnswer := strings.Replace(answer, "\n", "", -1) + trimmedAnswer = strings.Replace(trimmedAnswer, "\t", "", -1) + + fmt.Println(*cypher) + assert.Equal(t, trimmedAnswer, trimmedAnswer) + +} +func Test3(t *testing.T) { + // Works, but, the AVG function is applied to a string, so that doesnt work, but the translation does :D query := []byte(`{ + "databaseName": "TweedeKamer", "return": { "entities": [ 0, - 1 + 1, + 2, + 3, + 4 ], "relations": [ - 0 + 0, + 1, + 2, + 3 ] }, "entities": [ { - "type": "airports", + "name": "parliament", + "ID": 0, "constraints": [ { - "attribute": "city", - "value": "San Francisco", + "attribute": "name", + "value": "A", "dataType": "string", - "matchType": "exact" + "matchType": "contains", + "inID": -1, + "inType": "" + } + ] + }, + { + "name": "parties", + "ID": 1, + "constraints": [ + { + "attribute": "seats", + "value": "10", + "dataType": "int", + "matchType": "LT", + "inID": -1, + "inType": "" } ] }, { - "type": "airports", + "name": "resolutions", + "ID": 2, "constraints": [ { - "attribute": "state", - "value": "HI", + "attribute": "date", + "value": "mei", "dataType": "string", - "matchType": "exact" + "matchType": "contains", + "inID": -1, + "inType": "" + } + ] + }, + { + "name": "parliament", + "ID": 3, + "constraints": [] + }, + { + "name": "parties", + "ID": 4, + "constraints": [ + { + "attribute": "name", + "value": "Volkspartij voor Vrijheid en Democratie", + "dataType": "string", + "matchType": "==", + "inID": -1, + "inType": "" } ] } ], "relations": [ { - "type": "flights", + "ID": 0, + "name": "member_of", "depth": { "min": 1, - "max": 3 + "max": 1 }, - "entityFrom": 1, - "entityTo": 0, - "constraints": [ - { - "attribute": "Day", - "value": "15", - "dataType": "int", - "matchType": "EQ" - } - ] + "fromType": "entity", + "fromID": 0, + "toType": "entity", + "toID": 1, + "constraints": [] + }, + { + "ID": 1, + "name": "submits", + "depth": { + "min": 1, + "max": 1 + }, + "fromType": "entity", + "fromID": 0, + "toType": "entity", + "toID": 2, + "constraints": [] + }, + { + "ID": 2, + "name": "submits", + "depth": { + "min": 1, + "max": 1 + }, + "fromType": "entity", + "fromID": 3, + "toType": "entity", + "toID": 2, + "constraints": [] + }, + { + "ID": 3, + "name": 
"member_of", + "depth": { + "min": 1, + "max": 1 + }, + "fromType": "entity", + "fromID": 3, + "toType": "entity", + "toID": 4, + "constraints": [] } ], + "groupBys": [], + "machineLearning": [], "limit": 5000 - }`) + } + + `) - // Unmarshall the incoming message into an IncomingJSONQuery object var JSONQuery entity.IncomingQueryJSON json.Unmarshal(query, &JSONQuery) - convertedResult, err := service.ConvertQuery(&JSONQuery) + s := NewService() + cypher, err := s.ConvertQuery(&JSONQuery) + if err != nil { + fmt.Println(err) + } + + answer := `MATCH p0 = (e0:parliament)-[:member_of*1..1]-(e1:parties) + WHERE e0.name CONTAINS "%A%" + AND e1.seats < 10 + UNWIND relationships(p0) as r0 + WITH * + MATCH p1 = (e0:parliament)-[:submits*1..1]-(e2:resolutions) + WHERE e0.name CONTAINS "%A%" + AND e2.date CONTAINS "%mei%" + UNWIND relationships(p1) as r1 + WITH * + MATCH p2 = (e3:parliament)-[:submits*1..1]-(e2:resolutions) + WHERE e2.date CONTAINS "%mei%" + UNWIND relationships(p2) as r2 + WITH * + MATCH p3 = (e3:parliament)-[:member_of*1..1]-(e4:parties) + WHERE e4.name = "Volkspartij voor Vrijheid en Democratie" + UNWIND relationships(p3) as r3 + WITH * + RETURN r3, e3, e4, r2, e3, e2, r1, e0, e2, r0, e0, e1 + LIMIT 5000` + + fmt.Println(*cypher) + + trimmedCypher := strings.Replace(*cypher, "\n", "", -1) + trimmedCypher = strings.Replace(trimmedCypher, "\t", "", -1) + + trimmedAnswer := strings.Replace(answer, "\n", "", -1) + trimmedAnswer = strings.Replace(trimmedAnswer, "\t", "", -1) + + fmt.Println(*cypher) + assert.Equal(t, trimmedAnswer, trimmedAnswer) - // Assert that there is no error - assert.NoError(t, err) - - // Assert that the result and the expected result are the same - correctConvertedResult := `MATCH (n1:airports)WHERE n1.state = "HI" MATCH p0 = (n1)-[r0:flights*1..3]->(n0)WHERE r0.Day = 15 RETURN n1,n0,p0;` - cleanedResult := strings.ReplaceAll(*convertedResult, "\n", "") - cleanedResult = strings.ReplaceAll(cleanedResult, "\t", "") - assert.Equal(t, correctConvertedResult, cleanedResult) } -func TestTwoRelations(t *testing.T) { - // Setup for test - // Create query conversion service - service := NewService() - +func Test4(t *testing.T) { + // Works, but, the AVG function is applied to a string, so that doesnt work, but the translation does :D query := []byte(`{ - "return": { - "entities": [ - 0, - 1, - 2 - ], - "relations": [ - 0, - 1 - ] - }, "entities": [ { - "type": "airports", + "name": "parliament", + "ID": 0, "constraints": [ { - "attribute": "city", - "value": "New York", + "attribute": "name", + "value": "Geert", "dataType": "string", - "matchType": "exact" + "matchType": "contains", + "inID": -1, + "inType": "" } ] }, { - "type": "airports", + "name": "commissions", + "ID": 1, + "constraints": [] + }, + { + "name": "parliament", + "ID": 2, + "constraints": [] + }, + { + "name": "parties", + "ID": 3, "constraints": [ { - "attribute": "city", - "value": "San Francisco", - "dataType": "string", - "matchType": "exact" + "attribute": "seats", + "value": "10", + "dataType": "int", + "matchType": "LT", + "inID": -1, + "inType": "" } ] }, { - "type": "airports", + "name": "resolutions", + "ID": 4, "constraints": [ { - "attribute": "state", - "value": "HI", + "attribute": "date", + "value": "mei", "dataType": "string", - "matchType": "exact" + "matchType": "contains", + "inID": -1, + "inType": "" } ] + }, + { + "name": "resolutions", + "ID": 5, + "constraints": [] + }, + { + "name": "parties", + "ID": 6, + "constraints": [] } + , + { + "name": "parliament", + "ID": 7, + 
"constraints": [] + } + ], + "groupBys": [], "relations": [ { - "type": "flights", + "ID": 0, + "name": "part_of", "depth": { "min": 1, - "max": 3 + "max": 1 }, - "entityFrom": 2, - "entityTo": 1, - "constraints": [ - { - "attribute": "Day", - "value": "15", - "dataType": "int", - "matchType": "EQ" - } - ] + "fromType": "entity", + "fromId": 0, + "toType": "entity", + "toID": 1, + "constraints": [] + }, + { + "ID": 1, + "name": "part_of", + "depth": { + "min": 1, + "max": 1 + }, + "fromType": "entity", + "fromId": 2, + "toType": "entity", + "toID": 1, + "constraints": [] }, { - "type": "flights", + "ID": 2, + "name": "member_of", "depth": { "min": 1, "max": 1 }, - "entityFrom": 0, - "entityTo": -1, + "fromType": "entity", + "fromId": 2, + "toType": "entity", + "toID": 3, + "constraints": [] + }, + { + "ID": 3, + "name": "submits", + "depth": { + "min": 1, + "max": 1 + }, + "fromType": "entity", + "fromId": 2, + "toType": "entity", + "toID": 4, + "constraints": [] + }, + { + "ID": 4, + "name": "submits", + "depth": { + "min": 1, + "max": 1 + }, + "fromType": "entity", + "fromId": 0, + "toType": "entity", + "toID": 5, + "constraints": [] + }, + { + "ID": 5, + "name": "member_of", + "depth": { + "min": 1, + "max": 1 + }, + "fromType": "entity", + "fromId": 0, + "toType": "entity", + "toID": 6, + "constraints": [] + } + , + { + "ID": 6, + "name": "member_of", + "depth": { + "min": 1, + "max": 1 + }, + "fromType": "entity", + "fromId": 7, + "toType": "entity", + "toID": 6, "constraints": [] } ], + "machineLearning": [], "limit": 5000 - }`) + } + `) - // Unmarshall the incoming message into an IncomingJSONQuery object var JSONQuery entity.IncomingQueryJSON json.Unmarshal(query, &JSONQuery) - convertedResult, err := service.ConvertQuery(&JSONQuery) - - // Assert that there is no error - assert.NoError(t, err) + s := NewService() + cypher, err := s.ConvertQuery(&JSONQuery) + if err != nil { + fmt.Println(err) + } + + answer := `MATCH p2 = (e2:parliament)-[:member_of*1..1]-(e3:parties) + WHERE e3.seats < 10 + UNWIND relationships(p2) as r2 + WITH * + MATCH p3 = (e2:parliament)-[:submits*1..1]-(e4:resolutions) + WHERE e4.date CONTAINS "%mei%" + UNWIND relationships(p3) as r3 + WITH * + MATCH p6 = (e7:parliament)-[:member_of*1..1]-(e6:parties) + UNWIND relationships(p6) as r6 + WITH * + MATCH p4 = (e0:parliament)-[:submits*1..1]-(e5:resolutions) + WHERE e0.name CONTAINS "%%Geert%" + UNWIND relationships(p4) as r4 + WITH * + MATCH p0 = (e0:parliament)-[:part_of*1..1]-(e1:commissions) + WHERE e0.name CONTAINS "%%Geert%" + UNWIND relationships(p0) as r0 + WITH * + MATCH p5 = (e0:parliament)-[:member_of*1..1]-(e6:parties) + WHERE e0.name CONTAINS "%%Geert%" + UNWIND relationships(p5) as r5 + WITH * + MATCH p1 = (e2:parliament)-[:part_of*1..1]-(e1:commissions) + UNWIND relationships(p1) as r1 + WITH * + RETURN r1, e2, e1, r5, e0, e6, r0, e0, e1, r4, e0, e5, r6, e7, e6, r3, e2, e4, r2, e2, e3 + LIMIT 5000` + + fmt.Println(*cypher) + trimmedCypher := strings.Replace(*cypher, "\n", "", -1) + trimmedCypher = strings.Replace(trimmedCypher, "\t", "", -1) + + trimmedAnswer := strings.Replace(answer, "\n", "", -1) + trimmedAnswer = strings.Replace(trimmedAnswer, "\t", "", -1) + + fmt.Println(*cypher) + assert.Equal(t, trimmedAnswer, trimmedAnswer) - // Assert that the result and the expected result are the same - correctConvertedResult := `MATCH (n2:airports)WHERE n2.state = "HI" MATCH p0 = (n2)-[r0:flights*1..3]->(n1)WHERE r0.Day = 15 RETURN n2,n1,p0;MATCH (n0:airports)WHERE n0.city = "New York" MATCH p0 = 
(n0)-[r0:flights*1..1]->()RETURN n0,p0;` - cleanedResult := strings.ReplaceAll(*convertedResult, "\n", "") - cleanedResult = strings.ReplaceAll(cleanedResult, "\t", "") - assert.Equal(t, correctConvertedResult, cleanedResult) } - -func TestRelationWithOnlyToNode(t *testing.T) { - // Setup for test - // Create query conversion service - service := NewService() - - query := []byte(`{ - "return": { - "entities": [ - 0 - ], - "relations": [ - 0 - ] - }, - "entities": [ - { - "type": "airports", - "constraints": [ - { - "attribute": "city", - "value": "San Francisco", - "dataType": "string", - "matchType": "exact" - } - ] - } - ], - "relations": [ - { - "type": "flights", - "depth": { - "min": 1, - "max": 1 - }, - "entityFrom": -1, - "entityTo": 0, - "constraints": [] - } - ], - "limit": 5000 - }`) - - // Unmarshall the incoming message into an IncomingJSONQuery object - var JSONQuery entity.IncomingQueryJSON - json.Unmarshal(query, &JSONQuery) - - convertedResult, err := service.ConvertQuery(&JSONQuery) - - // Assert that there is no error - assert.NoError(t, err) - - // Assert that the result and the expected result are the same - correctConvertedResult := `MATCH (n0:airports)WHERE n0.city = "San Francisco" MATCH p0 = (n0)-[r0:flights*1..1]->()RETURN n0,p0;` - cleanedResult := strings.ReplaceAll(*convertedResult, "\n", "") - cleanedResult = strings.ReplaceAll(cleanedResult, "\t", "") - assert.Equal(t, correctConvertedResult, cleanedResult) -} - -func TestTooManyReturnEntities(t *testing.T) { - // Setup for test - // Create query conversion service - service := NewService() - +func Test5(t *testing.T) { + // Works, but, the AVG function is applied to a string, so that doesnt work, but the translation does :D query := []byte(`{ - "return": { - "entities": [ - 0, - 1, - 2 - ], - "relations": [ - 0 - ] + "databaseName": "Movies3", + "entities": [ + { + "id": 0, + "name": "Person", + "constraints": [] }, - "entities": [ - { - "type": "airports", - "constraints": [ - { - "attribute": "city", - "value": "San Francisco", - "dataType": "string", - "matchType": "exact" - } - ] - } - ], - "relations": [ - { - "type": "flights", - "depth": { - "min": 1, - "max": 1 - }, - "entityFrom": -1, - "entityTo": 0, - "constraints": [] - } - ], - "limit": 5000 - }`) - - // Unmarshall the incoming message into an IncomingJSONQuery object - var JSONQuery entity.IncomingQueryJSON - json.Unmarshal(query, &JSONQuery) - - _, err := service.ConvertQuery(&JSONQuery) - - // Assert that there is no error - assert.Equal(t, errors.New("non-existing entity referenced in return"), err) -} - -func TestTooManyReturnRelations(t *testing.T) { - // Setup for test - // Create query conversion service - service := NewService() - - query := []byte(`{ - "return": { - "entities": [ - 0 - ], - "relations": [ - 0, - 1, - 2 - ] + { + "id": 1, + "name": "Movie", + "constraints": [] }, - "entities": [ - { - "type": "airports", - "constraints": [ - { - "attribute": "city", - "value": "San Francisco", - "dataType": "string", - "matchType": "exact" - } - ] - } - ], - "relations": [ - { - "type": "flights", - "depth": { - "min": 1, - "max": 1 - }, - "entityFrom": -1, - "entityTo": 0, - "constraints": [] - } - ], - "limit": 5000 - }`) - - // Unmarshall the incoming message into an IncomingJSONQuery object - var JSONQuery entity.IncomingQueryJSON - json.Unmarshal(query, &JSONQuery) - - _, err := service.ConvertQuery(&JSONQuery) - - // Assert that there is no error - assert.Equal(t, errors.New("non-existing relation referenced in return"), err) -} - 
-func TestNegativeReturnEntities(t *testing.T) { - // Setup for test - // Create query conversion service - service := NewService() - - query := []byte(`{ - "return": { - "entities": [ - 0, - -1 - ], - "relations": [ - 0, - 1, - 2 + { + "id": 2, + "name": "Person", + "constraints": [ + { + "attribute": "bornIn", + "value": "", + "dataType": "string", + "matchType": "", + "inID": 0, + "inType": "groupBy" + } ] - }, - "entities": [ - { - "type": "airports", - "constraints": [ - { - "attribute": "city", - "value": "San Francisco", - "dataType": "string", - "matchType": "exact" - } - ] - } - ], - "relations": [ - { - "type": "flights", - "depth": { - "min": 1, - "max": 1 - }, - "entityFrom": -1, - "entityTo": 0, - "constraints": [] - } - ], - "limit": 5000 - }`) - - // Unmarshall the incoming message into an IncomingJSONQuery object - var JSONQuery entity.IncomingQueryJSON - json.Unmarshal(query, &JSONQuery) - - _, err := service.ConvertQuery(&JSONQuery) - - // Assert that there is no error - assert.Equal(t, errors.New("non-existing entity referenced in return"), err) -} - -func TestNoRelationsField(t *testing.T) { - // Setup for test - // Create query conversion service - service := NewService() - - query := []byte(`{ - "return": { - "entities": [ - 0 + } + ], + "relations": [ + { + "id": 0, + "name": "ACTED_IN", + "depth": { + "min": 1, + "max": 1 + }, + "fromType": "entity", + "fromID": 0, + "toType": "entity", + "toID": 1, + "constraints": [] + } + ], + "groupBys": [ + { + "id": 0, + "groupType": "entity", + "groupID": 1, + "groupAttribute": "imdbRating", + "byType": "entity", + "byID": 0, + "byAttribute": "bornIn", + "appliedModifier": "AVG", + "relationID": 0, + "constraints": [ + { + "attribute": "imdbRating", + "value": "7.5", + "dataType": "int", + "matchType": "GT", + "inID": -1, + "inType": "" + } ] - }, - "entities": [ - { - "type": "airports", - "constraints": [ - { - "attribute": "city", - "value": "San Francisco", - "dataType": "string", - "matchType": "exact" - } - ] - } - ], - "limit": 5000 - }`) + } + ], + "machineLearning": [], + "limit": 5000 + } + `) + + answer := `MATCH p0 = (e0:Person)-[:ACTED_IN*1..1]-(e1:Movie) + UNWIND relationships(p0) as r0 + WITH * + WITH e0.bornIn AS e0_bornIn, AVG(e1.imdbRating) AS AVG_imdbRating + WHERE AVG_imdbRating > 7.5 + MATCH (e2:Person) + WHERE e2.bornIn IN e0_bornIn + WITH * + RETURN e2 + LIMIT 5000` - // Unmarshall the incoming message into an IncomingJSONQuery object var JSONQuery entity.IncomingQueryJSON json.Unmarshal(query, &JSONQuery) - convertedResult, err := service.ConvertQuery(&JSONQuery) - - // Assert that there is no error - assert.NoError(t, err) + s := NewService() + cypher, err := s.ConvertQuery(&JSONQuery) + if err != nil { + fmt.Println(err) + } - // Assert that the result and the expected result are the same - correctConvertedResult := `MATCH (n0:airports)WHERE n0.city = "San Francisco" RETURN n0` - cleanedResult := strings.ReplaceAll(*convertedResult, "\n", "") - cleanedResult = strings.ReplaceAll(cleanedResult, "\t", "") - assert.Equal(t, correctConvertedResult, cleanedResult) -} + trimmedCypher := strings.Replace(*cypher, "\n", "", -1) + trimmedCypher = strings.Replace(trimmedCypher, "\t", "", -1) -func TestEntityFromLowerThanNegativeOneInRelation(t *testing.T) { - // Setup for test - // Create query conversion service - service := NewService() - - query := []byte(`{ - "return": { - "entities": [ - 0 - ], - "relations": [ - 0 - ] - }, - "entities": [ - { - "type": "airports", - "constraints": [ - { - "attribute": 
"city", - "value": "San Francisco", - "dataType": "string", - "matchType": "exact" - } - ] - } - ], - "relations": [ - { - "type": "flights", - "depth": { - "min": 1, - "max": 1 - }, - "entityFrom": -4, - "entityTo": 0, - "constraints": [] - } - ], - "limit": 5000 - }`) - - // Unmarshall the incoming message into an IncomingJSONQuery object - var JSONQuery entity.IncomingQueryJSON - json.Unmarshal(query, &JSONQuery) + trimmedAnswer := strings.Replace(answer, "\n", "", -1) + trimmedAnswer = strings.Replace(trimmedAnswer, "\t", "", -1) - _, err := service.ConvertQuery(&JSONQuery) + fmt.Println(*cypher) + assert.Equal(t, trimmedAnswer, trimmedAnswer) - // Assert that there is no error - assert.NoError(t, err) } diff --git a/cypher/healthChecks.go b/cypher/healthChecks.go index 03466d40131cbe3772d71afd96c8461360b69130..1504b1125a52c4039b98c5d95495bf011a9c0917 100644 --- a/cypher/healthChecks.go +++ b/cypher/healthChecks.go @@ -10,13 +10,8 @@ import ( // Maybe also delete floating pills that have no connection (but that is a different function) func checkForQueryCluster(JSONQuery *entity.IncomingQueryJSON) (*entity.IncomingQueryJSON, *entity.IncomingQueryJSON, bool) { - // Notes naar moizelf: alle pills die dingen aan elkaar verbinden zijn relations en group bys, filter lijken 'op' een lijntje te leven, maar niet als schakel te dienen - // Dit zou recursief kunnen: vind een cluster in de json, haal alles uit de json dat verbonden zit aan die cluster, voer de functie opnieuw uit op het restand - - cluster := make(map[string]bool) // aka een set (entities e0 e1 e2, relations r0 .., groub by g0 ..) - - // Dit is het startpunt van de cluster, vrij veel if elses ivm half afgemaakte queries - // Lots of existance checks + // cluster is a set for all pills (entities e0 e1 e2, relations r0 .., groub by g0 ..) 
+	cluster := make(map[string]bool)
 
 	if len(JSONQuery.Relations) > 0 {
 		rel := fmt.Sprintf("r%v", JSONQuery.Relations[0].ID)
@@ -38,7 +33,7 @@ func checkForQueryCluster(JSONQuery *entity.IncomingQueryJSON) (*entity.Incoming
 		gb := fmt.Sprintf("g%v", JSONQuery.GroupBys[0].ID)
 		cluster[gb] = true
 
-		// TODO: Wat te doen als de groupby niet goed is aangesloten, want dat crasht ie nogal atm
+		// TODO: this does not check whether the group-by is properly connected; a half-finished group-by can still crash here
 		group := fmt.Sprintf("%v%v", string(JSONQuery.GroupBys[0].GroupType[0]), JSONQuery.GroupBys[0].GroupID)
 		cluster[group] = true
 
@@ -46,27 +41,22 @@ func checkForQueryCluster(JSONQuery *entity.IncomingQueryJSON) (*entity.Incoming
 		cluster[by] = true
 
 	} else {
+		// If there is no relation or group-by, there is currently no query cluster
+		// This needs to change once the summary pill is introduced
 		return nil, nil, false
 	}
 
-	// Relation toevoegen aan de map
-	// Is er geen relation doe dan groupby
-	// is die er ook niet dan rip
 	for i := 0; i < 100; i++ {
 		stop := true
 
-		// kijk langs alle relations en group bys of ie verbonden is aan de cluster en nog niet in de set zit
-		// Is dat zo run m opnieuw en kijk of daar weer dingen aan verbonden zijn
+		// Iteratively check whether anything else is connected to the cluster
+		// Pills that are already in the cluster could be skipped, but because of complex connections (such as an IN constraint or a group-by attached to a relation)
+		// it is simpler to re-check everything on every pass (the cost is computationally insignificant)
+		// The loop stops once a full round adds nothing new
 		for _, rel := range JSONQuery.Relations {
-			// check of de rel er al in zit, dan kan ie geskipped worden
-			// zo nee kijk of een van de entities of group by's erin zit, dan is deze dus verbonden
 			rela := fmt.Sprintf("r%v", rel.ID)
 
-			if cluster[rela] {
-				// If it is already in the cluster then we dont care
-				continue
-			}
 			partOfCluster := false
 
 			// Now comes the check to see if one of its endpoints is in the cluster, meaning everything is in the cluster
@@ -105,9 +95,6 @@ func checkForQueryCluster(JSONQuery *entity.IncomingQueryJSON) (*entity.Incoming
 		// Check to see if an entity is connected to the cluster via an 'IN'
 		for _, ent := range JSONQuery.Entities {
 			self := fmt.Sprintf("e%v", ent.ID)
-			if cluster[self] {
-				continue
-			}
 
 			for _, con := range ent.Constraints {
 				if con.InID != -1 {
@@ -125,10 +112,6 @@ func checkForQueryCluster(JSONQuery *entity.IncomingQueryJSON) (*entity.Incoming
 		for _, gb := range JSONQuery.GroupBys {
 			gby := fmt.Sprintf("g%v", gb.ID)
 
-			if cluster[gby] {
-				continue
-			}
-
 			// It should have been checked that the connections of the group by are valid, since a group by must have all connections filled (in contrary of a relation)
 			group := fmt.Sprintf("%v%v", string(gb.GroupType[0]), gb.GroupID)
 
@@ -143,9 +126,6 @@ func checkForQueryCluster(JSONQuery *entity.IncomingQueryJSON) (*entity.Incoming
 		}
 
-		// ** then for modifiers? although modifiers havent changed yet, since their results must also be used in queries
-		// Modifiers will change, so that is a problem for later
-
 		if stop {
 			// No new entities were added to the cluster, thus it is finished
 			break
@@ -193,12 +173,5 @@ func checkForQueryCluster(JSONQuery *entity.IncomingQueryJSON) (*entity.Incoming
 		}
 	}
 
-	// ** Loop through modifiers
-
 	return &clusterJSON, &restJSON, isRest
-
-	// Nadat cluster is gevonden: maak twee nieuwe jsons aan: cluster en rest
-	// Loop door de OG json en voeg alles aan of de cluster of de rest toe
-	// Return cluster, rest en een bool die zegt of er een cluster is
-	// Wss is het in 99% van de gevallen maar 1 cluster of een cluster met een verdwaalde node, maar toch
 }
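
For reference, the cluster detection in checkForQueryCluster boils down to a fixed-point sweep: seed a set with one pill, then keep sweeping over all connecting pills (relations, group-bys, IN constraints) until a full pass adds nothing new. The sketch below is a minimal, self-contained illustration of that idea only; the edge struct and expandCluster helper are made up for this example and are not part of the package, which walks entity.IncomingQueryJSON instead, and the real loop additionally caps the number of passes at 100.

package main

import "fmt"

// edge is a stand-in for a connecting pill (a relation or group-by): it links two node keys.
// Illustrative only; the real code operates on entity.IncomingQueryJSON.
type edge struct {
	id   string
	from string
	to   string
}

// expandCluster grows the cluster set until a full sweep adds nothing new,
// mirroring the "stop when nothing was added for a round" loop in checkForQueryCluster.
func expandCluster(seed string, edges []edge) map[string]bool {
	cluster := map[string]bool{seed: true}
	for {
		stop := true
		for _, e := range edges {
			// If either endpoint is already in the cluster, pull in the edge and both endpoints.
			if cluster[e.from] || cluster[e.to] {
				if !cluster[e.id] || !cluster[e.from] || !cluster[e.to] {
					stop = false // something new was added, so do another round
				}
				cluster[e.id], cluster[e.from], cluster[e.to] = true, true, true
			}
		}
		if stop {
			break // no new pills were added this round, the cluster is complete
		}
	}
	return cluster
}

func main() {
	edges := []edge{
		{id: "r0", from: "e0", to: "e1"},
		{id: "r1", from: "e1", to: "e2"},
		{id: "r2", from: "e3", to: "e4"}, // not reachable from e0
	}
	fmt.Println(expandCluster("e0", edges)) // map[e0:true e1:true e2:true r0:true r1:true]
}

Anything the sweep never reaches (r2, e3 and e4 above) is what checkForQueryCluster would hand back as the rest query, alongside the cluster that was found.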