Commit 832cf13a authored by Joris

Test added

Test added to see if the hierarchy function works (which it appears to do)
parent 9d878436
Merge request !2: Groupby overhaul
...@@ -6,10 +6,6 @@ This program has been developed by students from the bachelor Computer Science a ...@@ -6,10 +6,6 @@ This program has been developed by students from the bachelor Computer Science a
package cypher package cypher
import ( import (
"errors"
"fmt"
"strings"
"git.science.uu.nl/graphpolaris/query-conversion/entity" "git.science.uu.nl/graphpolaris/query-conversion/entity"
) )
...@@ -20,224 +16,225 @@ ConvertQuery converts an IncomingQueryJSON object into AQL ...@@ -20,224 +16,225 @@ ConvertQuery converts an IncomingQueryJSON object into AQL
*/ */
func (s *Service) ConvertQuery(JSONQuery *entity.IncomingQueryJSON) (*string, error) { func (s *Service) ConvertQuery(JSONQuery *entity.IncomingQueryJSON) (*string, error) {
// Check to make sure all indexes exist // // Check to make sure all indexes exist
// How many entities are there // // How many entities are there
numEntities := len(JSONQuery.Entities) - 1 // numEntities := len(JSONQuery.Entities) - 1
// How many relations there are // // How many relations there are
numRelations := len(JSONQuery.Relations) - 1 // numRelations := len(JSONQuery.Relations) - 1
// Make sure no entity should be returned that is outside the range of that list // // Make sure no entity should be returned that is outside the range of that list
for _, e := range JSONQuery.Return.Entities { // for _, e := range JSONQuery.Return.Entities {
// If this entity references an entity that is outside the range // // If this entity references an entity that is outside the range
if e > numEntities || e < 0 { // if e > numEntities || e < 0 {
return nil, errors.New("non-existing entity referenced in return") // return nil, errors.New("non-existing entity referenced in return")
} // }
} // }
// Make sure that no relation mentions a non-existing entity // // Make sure that no relation mentions a non-existing entity
for _, r := range JSONQuery.Relations { // for _, r := range JSONQuery.Relations {
if r.EntityFrom > numEntities || r.EntityTo > numEntities { // if r.EntityFrom > numEntities || r.EntityTo > numEntities {
return nil, errors.New("non-exisiting entity referenced in relation") // return nil, errors.New("non-exisiting entity referenced in relation")
} // }
} // }
// Make sure no non-existing relation is requested in the return // // Make sure no non-existing relation is requested in the return
for _, r := range JSONQuery.Return.Relations { // for _, r := range JSONQuery.Return.Relations {
if r > numRelations || r < 0 { // if r > numRelations || r < 0 {
return nil, errors.New("non-existing relation referenced in return") // return nil, errors.New("non-existing relation referenced in return")
} // }
} // }
result := createQuery(JSONQuery) // result := createQuery(JSONQuery)
return result, nil // return result, nil
} return nil, nil
/*
sliceContains checks if a slice contains the input
s: []int, the slice to check
e: int, what you're checking for
Return: bool, true if it contains 'e'
*/
func sliceContains(s []int, e int) bool {
for _, a := range s {
if a == e {
return true
}
}
return false
}
/*TrimSuffix trims the given suffix from the end of a string */
func TrimSuffix(s, suffix string) string {
if strings.HasSuffix(s, suffix) {
s = s[:len(s)-len(suffix)]
}
return s
}
/*
createQuery generates a query based on the json file provided
JSONQuery: *entity.IncomingQueryJSON, jsonQuery is a parsedJSON struct holding all the data needed to form a query
Return: *string, a string containing the corresponding AQL query and an error
*/
func createQuery(JSONQuery *entity.IncomingQueryJSON) *string {
// Note: Case #4, where there is an edge only query (without any entity), is not supported by frontend
// If a modifier is used, disable the limit
if len(JSONQuery.Modifiers) > 0 {
JSONQuery.Limit = -1
}
var (
relationsToReturn []string
nodesToReturn []string
nodeUnion string
relationUnion string
queryList [][][]int
entityList []int
ret string
)
for i, relation := range JSONQuery.Relations {
var contains bool
contains = false
for j := range queryList {
if sliceContains(queryList[j][0], relation.EntityFrom) || sliceContains(queryList[j][0], relation.EntityTo) {
if !sliceContains(queryList[j][0], relation.EntityFrom) {
queryList[j][0] = append(queryList[j][0], relation.EntityFrom)
entityList = append(entityList, relation.EntityFrom)
}
if !sliceContains(queryList[j][0], relation.EntityTo) {
queryList[j][0] = append(queryList[j][0], relation.EntityTo)
entityList = append(entityList, relation.EntityTo)
}
queryList[j][1] = append(queryList[j][1], i)
contains = true
}
}
if !contains {
queryList = append(queryList, [][]int{{relation.EntityFrom, relation.EntityTo}, {i}})
}
}
for i := range queryList {
//reset variables for the next query
nodeUnion = ""
relationUnion = ""
relationsToReturn = []string{}
for j, relationID := range queryList[i][1] {
relationName := fmt.Sprintf("r%v", j)
relation := JSONQuery.Relations[relationID]
pathName := fmt.Sprintf("p%v", j)
relationsToReturn = append(relationsToReturn, pathName)
if relation.EntityFrom >= 0 {
// if there is a from-node
// create the let for this node
fromName := fmt.Sprintf("n%v", relation.EntityFrom)
ret += *createNodeMatch(&JSONQuery.Entities[relation.EntityFrom], &fromName)
ret += *createRelationMatch(&relation, relationName, pathName, &JSONQuery.Entities, JSONQuery.Limit, true)
} else if relation.EntityTo >= 0 {
// if there is only a to-node
toName := fmt.Sprintf("n%v", relation.EntityTo)
ret += *createNodeMatch(&JSONQuery.Entities[relation.EntityTo], &toName)
ret += *createRelationMatch(&relation, relationName, pathName, &JSONQuery.Entities, JSONQuery.Limit, false)
// Add this relation to the list
} else {
fmt.Println("Relation-only queries are currently not supported")
continue
}
}
// Create UNION statements that create unique lists of all the nodes and relations
// Thus removing all duplicates
nodeUnion = "RETURN "
for _, entityID := range queryList[i][0] {
if sliceContains(JSONQuery.Return.Entities, entityID) {
nodeUnion += fmt.Sprintf("n%v,", entityID)
}
}
for _, relation := range relationsToReturn {
relationUnion += fmt.Sprintf("%v,", relation)
}
relationUnion = TrimSuffix(relationUnion, ",")
// there used to be a newline here
ret += nodeUnion + relationUnion + "; "
}
nodeSet := make(map[int]bool)
for _, relation := range JSONQuery.Relations {
nodeSet[relation.EntityFrom] = true
nodeSet[relation.EntityTo] = true
}
// Check if the entities to return are already returned
for _, entityIndex := range JSONQuery.Return.Entities {
if !nodeSet[entityIndex] {
// If not, return this node
name := fmt.Sprintf("n%v", entityIndex)
ret += *createNodeMatch(&JSONQuery.Entities[entityIndex], &name)
// Add this node to the list
nodesToReturn = append(nodesToReturn, name)
ret += fmt.Sprintf("RETURN %v", name)
}
}
ret = TrimSuffix(ret, " ")
return &ret
} }
/* // /*
createNodeLet generates a 'LET' statement for a node related query // sliceContains checks if a slice contains the input
node: *entity.QueryEntityStruct, node is an entityStruct containing the information of a single node, // s: []int, the slice to check
name: *string, is the autogenerated name of the node consisting of "n" + the index of the node // e: int, what you're checking for
Return: *string, a string containing a single LET-statement in AQL // Return: bool, true if it contains 'e'
*/ // */
func createNodeMatch(node *entity.QueryEntityStruct, name *string) *string { // func sliceContains(s []int, e int) bool {
// there used to be a newline here
header := fmt.Sprintf("MATCH (%v:%v) ", *name, node.Type) // if a == e {
constraints := *createConstraintStatements(&node.Constraints, *name) // return true
ret := header + constraints // }
return &ret // }
} // return false
// }
/*
createRelationLetWithFromEntity generates a 'LET' statement for relations with an 'EntityFrom' property and optionally an 'EntityTo' property // /*TrimSuffix trims the given suffix from the end of a string */
relation: *entity.QueryRelationStruct, relation is a relation struct containing the information of a single relation, // func TrimSuffix(s, suffix string) string {
relationName: string, is the name of the relation, is the autogenerated name of the node consisting of "r" + the index of the relation, // if strings.HasSuffix(s, suffix) {
pathName: string, is the name of the path, // if strings.HasSuffix(s, suffix) {
entities: *[]entity.QueryEntityStruct, is a list of entityStructs that are needed to form the relation LET-statement // }
limit: int, the limit for the number of nodes to return // return s
outbound: bool, checks if the relation is inbound or outbound // }
Return: *string, a string containing a single LET-statement in AQL
*/ // /*
func createRelationMatch(relation *entity.QueryRelationStruct, relationName string, pathName string, entities *[]entity.QueryEntityStruct, limit int, outbound bool) *string { // createQuery generates a query based on the json file provided
relationReturn := "" // JSONQuery: *entity.IncomingQueryJSON, jsonQuery is a parsedJSON struct holding all the data needed to form a query
var relationBounds int // Return: *string, a string containing the corresponding AQL query and an error
if outbound { // */
relationReturn = fmt.Sprintf("MATCH %v = (n%v)-[%v:%v*%v..%v]->(", pathName, relation.EntityFrom, relationName, relation.Type, relation.Depth.Min, relation.Depth.Max) // func createQuery(JSONQuery *entity.IncomingQueryJSON) *string {
relationBounds = relation.EntityTo // // Note: Case #4, where there is an edge only query (without any entity), is not supported by frontend
} else { // // If a modifier is used, disable the limit
relationReturn = fmt.Sprintf("MATCH %v = (n%v)-[%v:%v*%v..%v]->(", pathName, relation.EntityTo, relationName, relation.Type, relation.Depth.Min, relation.Depth.Max) // if len(JSONQuery.Modifiers) > 0 {
relationBounds = relation.EntityFrom // JSONQuery.Limit = -1
} // }
if relationBounds != -1 { // var (
relationReturn += fmt.Sprintf("n%v", relationBounds) // relationsToReturn []string
} // nodesToReturn []string
relationReturn += ")" // nodeUnion string
// relationUnion string
constraintReturn := *createConstraintStatements(&relation.Constraints, relationName) // queryList [][][]int
// there used to be a newline here
ret := relationReturn + " " + constraintReturn // ret string
// )
return &ret
} // for i, relation := range JSONQuery.Relations {
// var contains bool
// contains = false
// for j := range queryList {
// if sliceContains(queryList[j][0], relation.EntityFrom) || sliceContains(queryList[j][0], relation.EntityTo) {
// if !sliceContains(queryList[j][0], relation.EntityFrom) {
// queryList[j][0] = append(queryList[j][0], relation.EntityFrom)
// entityList = append(entityList, relation.EntityFrom)
// }
// if !sliceContains(queryList[j][0], relation.EntityTo) {
// queryList[j][0] = append(queryList[j][0], relation.EntityTo)
// entityList = append(entityList, relation.EntityTo)
// }
// queryList[j][1] = append(queryList[j][1], i)
// contains = true
// }
// }
// if !contains {
// queryList = append(queryList, [][]int{{relation.EntityFrom, relation.EntityTo}, {i}})
// }
// }
// for i := range queryList {
// //reset variables for the next query
// nodeUnion = ""
// relationUnion = ""
// relationsToReturn = []string{}
// for j, relationID := range queryList[i][1] {
// relationName := fmt.Sprintf("r%v", j)
// relation := JSONQuery.Relations[relationID]
// pathName := fmt.Sprintf("p%v", j)
// relationsToReturn = append(relationsToReturn, pathName)
// if relation.EntityFrom >= 0 {
// // if there is a from-node
// // create the let for this node
// fromName := fmt.Sprintf("n%v", relation.EntityFrom)
// ret += *createNodeMatch(&JSONQuery.Entities[relation.EntityFrom], &fromName)
// ret += *createRelationMatch(&relation, relationName, pathName, &JSONQuery.Entities, JSONQuery.Limit, true)
// } else if relation.EntityTo >= 0 {
// // if there is only a to-node
// toName := fmt.Sprintf("n%v", relation.EntityTo)
// ret += *createNodeMatch(&JSONQuery.Entities[relation.EntityTo], &toName)
// ret += *createRelationMatch(&relation, relationName, pathName, &JSONQuery.Entities, JSONQuery.Limit, false)
// // Add this relation to the list
// } else {
// fmt.Println("Relation-only queries are currently not supported")
// continue
// }
// }
// // Create UNION statements that create unique lists of all the nodes and relations
// // Thus removing all duplicates
// nodeUnion = "RETURN "
// for _, entityID := range queryList[i][0] {
// if sliceContains(JSONQuery.Return.Entities, entityID) {
// nodeUnion += fmt.Sprintf("n%v,", entityID)
// }
// }
// for _, relation := range relationsToReturn {
// relationUnion += fmt.Sprintf("%v,", relation)
// }
// relationUnion = TrimSuffix(relationUnion, ",")
// // there used to be a newline here
// ret += nodeUnion + relationUnion + "; "
// }
// nodeSet := make(map[int]bool)
// for _, relation := range JSONQuery.Relations {
// nodeSet[relation.EntityFrom] = true
// nodeSet[relation.EntityTo] = true
// }
// // Check if the entities to return are already returned
// for _, entityIndex := range JSONQuery.Return.Entities {
// if !nodeSet[entityIndex] {
// // If not, return this node
// name := fmt.Sprintf("n%v", entityIndex)
// ret += *createNodeMatch(&JSONQuery.Entities[entityIndex], &name)
// // Add this node to the list
// nodesToReturn = append(nodesToReturn, name)
// ret += fmt.Sprintf("RETURN %v", name)
// }
// }
// ret = TrimSuffix(ret, " ")
// return &ret
// }
// /*
// createNodeLet generates a 'LET' statement for a node related query
// node: *entity.QueryEntityStruct, node is an entityStruct containing the information of a single node,
// name: *string, is the autogenerated name of the node consisting of "n" + the index of the node
// Return: *string, a string containing a single LET-statement in AQL
// */
// func createNodeMatch(node *entity.QueryEntityStruct, name *string) *string {
// // there used to be a newline here
// header := fmt.Sprintf("MATCH (%v:%v) ", *name, node.Type)
// constraints := *createConstraintStatements(&node.Constraints, *name)
// ret := header + constraints
// return &ret
// }
// /*
// createRelationLetWithFromEntity generates a 'LET' statement for relations with an 'EntityFrom' property and optionally an 'EntityTo' property
// relation: *entity.QueryRelationStruct, relation is a relation struct containing the information of a single relation,
// relationName: string, is the name of the relation, is the autogenerated name of the node consisting of "r" + the index of the relation,
// pathName: string, is the name of the path,
// entities: *[]entity.QueryEntityStruct, is a list of entityStructs that are needed to form the relation LET-statement
// limit: int, the limit for the number of nodes to return
// outbound: bool, checks if the relation is inbound or outbound
// Return: *string, a string containing a single LET-statement in AQL
// */
// func createRelationMatch(relation *entity.QueryRelationStruct, relationName string, pathName string, entities *[]entity.QueryEntityStruct, limit int, outbound bool) *string {
// relationReturn := ""
// var relationBounds int
// if outbound {
// relationReturn = fmt.Sprintf("MATCH %v = (n%v)-[%v:%v*%v..%v]->(", pathName, relation.EntityFrom, relationName, relation.Type, relation.Depth.Min, relation.Depth.Max)
// relationBounds = relation.EntityTo
// } else {
// relationReturn = fmt.Sprintf("MATCH %v = (n%v)-[%v:%v*%v..%v]->(", pathName, relation.EntityTo, relationName, relation.Type, relation.Depth.Min, relation.Depth.Max)
// relationBounds = relation.EntityFrom
// }
// if relationBounds != -1 {
// relationReturn += fmt.Sprintf("n%v", relationBounds)
// }
// relationReturn += ")"
// constraintReturn := *createConstraintStatements(&relation.Constraints, relationName)
// // there used to be a newline here
// ret := relationReturn + " " + constraintReturn
// return &ret
// }
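For reference, a minimal sketch (not part of this commit) of the Cypher text the commented-out createNodeMatch and createRelationMatch built, using only the format strings visible above and a hypothetical Person -DIRECTED-> Movie pattern; constraint rendering via createConstraintStatements is not shown in this diff and is left out here.

package main

import "fmt"

// Sketch: mirrors the fmt.Sprintf patterns from createNodeMatch/createRelationMatch.
func main() {
	nodeMatch := fmt.Sprintf("MATCH (%v:%v) ", "n0", "Person")
	relationMatch := fmt.Sprintf("MATCH %v = (n%v)-[%v:%v*%v..%v]->(", "p0", 0, "r0", "DIRECTED", 1, 1)
	relationMatch += fmt.Sprintf("n%v", 1) + ")"
	fmt.Println(nodeMatch)     // "MATCH (n0:Person) " (constraints would follow)
	fmt.Println(relationMatch) // "MATCH p0 = (n0)-[r0:DIRECTED*1..1]->(n1)"
	// createQuery would then append a return clause such as "RETURN n0,n1,p0; "
}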
package cypher package cypher
import ( import (
"errors"
"fmt" "fmt"
"git.science.uu.nl/graphpolaris/query-conversion/entity" "git.science.uu.nl/graphpolaris/query-conversion/entity"
...@@ -47,16 +48,12 @@ func (s *Service) ConvertQuery2(totalJSONQuery *entity.IncomingQueryJSON) (*stri ...@@ -47,16 +48,12 @@ func (s *Service) ConvertQuery2(totalJSONQuery *entity.IncomingQueryJSON) (*stri
// else: // else:
// Code that checks to see if the disconnected pieces are valid queries // Code that checks to see if the disconnected pieces are valid queries
// code that builds the queries // code that builds the queries
} return nil, nil
// Very placeholdery
func magicHierarchyFunction(JSONQuery *entity.IncomingQueryJSON) []pdictList {
return nil
} }
// createCypher creates queries without the return statement, due to the possibility of multiple disconnected queries // createCypher creates queries without the return statement, due to the possibility of multiple disconnected queries
func createCypher(JSONQuery *entity.IncomingQueryJSON) *string { func createCypher(JSONQuery *entity.IncomingQueryJSON) *string {
queryHierarchy := magicHierarchyFunction(JSONQuery) //queryHierarchy := magicHierarchyFunction(JSONQuery)
/* /*
Match (part 1)
...@@ -67,24 +64,21 @@ func createCypher(JSONQuery *entity.IncomingQueryJSON) *string { ...@@ -67,24 +64,21 @@ func createCypher(JSONQuery *entity.IncomingQueryJSON) *string {
Then continue
*/ */
return nil
} }
// NOTE: MIGHT NEED TO BE DONE DIFFERENTLY
// createReturnStatement creates the final return statement, connecting all previous cypher together // createReturnStatement creates the final return statement, connecting all previous cypher together
func createReturnStatement(JSONQuery *entity.IncomingQueryJSON) *string { func createReturnStatement(JSONQuery *entity.IncomingQueryJSON) *string {
// Here again the question whether things above a GROUP BY should be returned; I don't think so
} return nil
func createFilterStatement(filter *entity.QueryFilterStruct) *string {
// I assume the 'r0 = relationships(p0)' has already been declared
} }
type queryPart struct { type queryPart struct {
qType string // Eg if it is a relation or groupby qType string // Eg if it is a relation or groupby
qID int // ID of said relation/gb qID int // ID of said relation/gb
partID int // Custom ID used for dependancy partID int // Custom ID used for dependency
dependancies []int // List of partID's that need to come before dependencies []int // List of partID's that need to come before
} }
type query []queryPart type query []queryPart
...@@ -98,11 +92,23 @@ func (q query) find(qID int, qType string) *queryPart { ...@@ -98,11 +92,23 @@ func (q query) find(qID int, qType string) *queryPart {
return nil return nil
} }
func createQueryHierarchy(JSONQuery *entity.IncomingQueryJSON) { func (q query) selectByID(ID int) *queryPart {
for _, part := range q {
if part.partID == ID {
return &part
}
}
return nil
}
func createQueryHierarchy(JSONQuery *entity.IncomingQueryJSON) (query, error) {
// Take the relations together with the entities and form groups of ent-rel-ent
// If A-rel-B-rel-C, that becomes A-rel-B and B-rel-C, where BC has to come after AB, so BC _depends_ on AB
// The idea is that you can walk the whole list back to front: you first encounter a dependent part and only then the part it depends on
// ** DOES NOT YET TAKE INs INTO ACCOUNT, STILL NEEDS TO
// but that is a bit trickier because it requires some extra checks
var parts query var parts query
IDctr := 0 IDctr := 0
...@@ -112,7 +118,7 @@ func createQueryHierarchy(JSONQuery *entity.IncomingQueryJSON) { ...@@ -112,7 +118,7 @@ func createQueryHierarchy(JSONQuery *entity.IncomingQueryJSON) {
qType: "relation", qType: "relation",
qID: rel.ID, qID: rel.ID,
partID: IDctr, partID: IDctr,
dependancies: make([]int, 0), dependencies: make([]int, 0),
} }
parts = append(parts, part) parts = append(parts, part)
...@@ -125,7 +131,7 @@ func createQueryHierarchy(JSONQuery *entity.IncomingQueryJSON) { ...@@ -125,7 +131,7 @@ func createQueryHierarchy(JSONQuery *entity.IncomingQueryJSON) {
qType: "groupBy", qType: "groupBy",
qID: gb.ID, qID: gb.ID,
partID: IDctr, partID: IDctr,
dependancies: make([]int, 0), dependencies: make([]int, 0),
} }
parts = append(parts, part) parts = append(parts, part)
...@@ -133,13 +139,13 @@ func createQueryHierarchy(JSONQuery *entity.IncomingQueryJSON) { ...@@ -133,13 +139,13 @@ func createQueryHierarchy(JSONQuery *entity.IncomingQueryJSON) {
} }
// Check dependancies in a nice O(n^2) // Check dependencies in a nice O(n^2)
for _, rel := range JSONQuery.Relations { for _, rel := range JSONQuery.Relations {
if rel.FromID == -1 { if rel.FromID == -1 {
continue continue
} }
// Check the dependancies From - To // Check the dependencies From - To
for _, rela := range JSONQuery.Relations { for _, rela := range JSONQuery.Relations {
if rela.ToID == -1 { if rela.ToID == -1 {
continue continue
...@@ -147,7 +153,7 @@ func createQueryHierarchy(JSONQuery *entity.IncomingQueryJSON) { ...@@ -147,7 +153,7 @@ func createQueryHierarchy(JSONQuery *entity.IncomingQueryJSON) {
if rel.FromID == rela.ToID && rel.FromType == "relation" { if rel.FromID == rela.ToID && rel.FromType == "relation" {
part := parts.find(rel.ID, "relation") part := parts.find(rel.ID, "relation")
part.dependancies = append(part.dependancies, parts.find(rela.ID, "relation").partID) part.dependencies = append(part.dependencies, parts.find(rela.ID, "relation").partID)
} }
} }
...@@ -160,7 +166,7 @@ func createQueryHierarchy(JSONQuery *entity.IncomingQueryJSON) { ...@@ -160,7 +166,7 @@ func createQueryHierarchy(JSONQuery *entity.IncomingQueryJSON) {
for _, gb := range JSONQuery.GroupBys { for _, gb := range JSONQuery.GroupBys {
if (rel.FromID == gb.ID && rel.FromType == "groupBy") || (rel.ToID == gb.ID && rel.ToType == "groupBy") { if (rel.FromID == gb.ID && rel.FromType == "groupBy") || (rel.ToID == gb.ID && rel.ToType == "groupBy") {
part := parts.find(rel.ID, "relation") part := parts.find(rel.ID, "relation")
part.dependancies = append(part.dependancies, parts.find(gb.ID, "groupBy").partID) part.dependencies = append(part.dependencies, parts.find(gb.ID, "groupBy").partID)
} }
} }
} }
...@@ -174,26 +180,89 @@ func createQueryHierarchy(JSONQuery *entity.IncomingQueryJSON) { ...@@ -174,26 +180,89 @@ func createQueryHierarchy(JSONQuery *entity.IncomingQueryJSON) {
((gb.ByID == rela.FromID || gb.ByID == rela.ToID) && gb.ByType == "entity") || // Is the by connected to an entity connected to the relation ((gb.ByID == rela.FromID || gb.ByID == rela.ToID) && gb.ByType == "entity") || // Is the by connected to an entity connected to the relation
((gb.GroupID == rela.FromID || gb.GroupID == rela.ToID) && gb.GroupType == "entity") { // Is the Group connected to an entity connected to the relation ((gb.GroupID == rela.FromID || gb.GroupID == rela.ToID) && gb.GroupType == "entity") { // Is the Group connected to an entity connected to the relation
part := parts.find(gb.ID, "groupBy") part := parts.find(gb.ID, "groupBy")
part.dependancies = append(part.dependancies, parts.find(rela.ID, "relation").partID) part.dependencies = append(part.dependencies, parts.find(rela.ID, "relation").partID)
}
}
// Not sure if this is even possible, but hey who knows
// Check to see if the gb is connected to another gb
for _, grb := range JSONQuery.GroupBys {
if gb.ID == grb.ID {
continue
}
if (gb.GroupID == grb.ID && gb.GroupType == "groupBy") || (gb.ByID == grb.ID && gb.ByType == "groupBy") {
part := parts.find(gb.ID, "groupBy")
part.dependencies = append(part.dependencies, parts.find(grb.ID, "groupBy").partID)
} }
} }
}
// Now we have a directed graph, meaning we can use some topological sort (Kahn's algorithm)
var sortedQuery query
incomingEdges := make(map[int]int)
// Set all to 0
for _, p := range parts {
incomingEdges[p.partID] = 0
}
// Count the incoming edges (dependencies)
for _, p := range parts {
for _, dp := range p.dependencies {
incomingEdges[dp]++
}
} }
// ** MODIFIERS? for { // While there is still some part with incomingEdges[part] == 0
part := queryPart{partID: -1}
// Select a node with no incoming edges
for ID, edges := range incomingEdges {
if edges == 0 {
part = *parts.selectByID(ID)
}
}
// Check to see if there are any parts without incoming edges left
if part.partID == -1 {
break
}
// Remove it from the set
incomingEdges[part.partID] = -1
sortedQuery = append(sortedQuery, part)
// Decrease incoming edges of other parts
for _, ID := range part.dependencies {
incomingEdges[ID]--
}
}
// Now check for cycles in the graph
partRemaining := false
for _, edges := range incomingEdges {
if edges != -1 {
partRemaining = true
}
}
if partRemaining {
// Somehow there was a cycle in the query,
return nil, errors.New("Cyclic query detected")
}
return sortedQuery, nil
// ** DOES NOT YET TAKE INs INTO ACCOUNT, STILL NEEDS TO
// but that is a bit trickier because it requires some extra checks
// Turn all rels and gb's into a query part
// Loop through all rels and check whether their FROM is a TO of another rel --> dependency
// If the from or the to is a Group by, it is also directly dependent
// If a GB is attached to a relation (A), all other relations attached to A also come first
// ** What about the case where an entity is attached to a group by? via an IN?
// Returns, I guess, a list of query parts with the dependencies pointing downwards, so you can walk the list from top to bottom
// to build the query
// After this, i.e. while building the query per relation/group by, check whether there is a filter on one of the connections
// I don't see a practical use in putting the filters in here already, because as far as I can tell they can only work one way
// ** CHECK HOW A FILTER WORKS ON A RELATION BY ITSELF, because then it does not live on a connection
} }
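For reference, a minimal sketch (not part of this commit, assuming the queryPart/query types and selectByID above) of how the Kahn-style loop orders the A-rel-B-rel-C case from the comments: the dependent part is emitted first, so the sorted list is read back to front when building the actual query.

func sketchTopologicalOrder() query {
	parts := query{
		{qType: "relation", qID: 0, partID: 0, dependencies: []int{}},  // A-rel-B
		{qType: "relation", qID: 1, partID: 1, dependencies: []int{0}}, // B-rel-C, depends on A-rel-B
	}
	// Count incoming edges exactly like createQueryHierarchy does.
	incomingEdges := make(map[int]int)
	for _, p := range parts {
		incomingEdges[p.partID] = 0
	}
	for _, p := range parts {
		for _, dp := range p.dependencies {
			incomingEdges[dp]++
		}
	}
	var sorted query
	for {
		next := -1
		for id, edges := range incomingEdges {
			if edges == 0 {
				next = id
			}
		}
		if next == -1 {
			break
		}
		incomingEdges[next] = -1
		part := *parts.selectByID(next)
		sorted = append(sorted, part)
		for _, id := range part.dependencies {
			incomingEdges[id]--
		}
	}
	return sorted // part 1 (B-rel-C) first, then part 0 (A-rel-B)
}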
package cypher
import (
"encoding/json"
"fmt"
"testing"
"git.science.uu.nl/graphpolaris/query-conversion/entity"
)
func Test1(t *testing.T) {
query := []byte(`{
"databaseName": "Movies3",
"return": {
"entities": [
0,
1,
2
],
"relations": [
0,
1
],
"groupBys": [
0
]
},
"entities": [
{
"id": 0,
"type": "Person",
"constraints": [
{
"attribute": "name",
"value": "Raymond Campbell",
"dataType": "string",
"matchType": "EQ",
"inID": -1,
"inType": ""
}
]
},
{
"id": 1,
"type": "Movie",
"constraints": []
},
{
"id": 2,
"type": "Genre",
"constraints": []
}
],
"relations": [
{
"id": 0,
"name": "DIRECTED",
"depth": {
"min": 1,
"max": 1
},
"fromType": "entity",
"fromID": 0,
"toType": "entity",
"toID": 1,
"constraints": []
},
{
"id": 1,
"name": "IN_GENRE",
"depth": {
"min": 1,
"max": 1
},
"fromType": "groupBy",
"fromID": 0,
"toType": "entity",
"toID": 2,
"constraints": []
}
],
"groupBys": [
{
"id": 0,
"groupType": "entity",
"groupID": 0,
"groupAttribute": "age??????",
"byType": "entity",
"byID": 1,
"byAttribute": "ID????????",
"appliedModifier": "AVG",
"relationID": 0,
"constraints": []
}
],
"machineLearning": [],
"limit": 5000
}`)
var JSONQuery entity.IncomingQueryJSON
err := json.Unmarshal(query, &JSONQuery)
if err != nil {
t.Error(err)
}
hierarchy, err := createQueryHierarchy(&JSONQuery)
if err != nil {
fmt.Println(err)
}
fmt.Println(hierarchy)
t.Fail()
}
...@@ -52,6 +52,7 @@ func checkForQueryCluster(JSONQuery *entity.IncomingQueryJSON) (*entity.Incoming ...@@ -52,6 +52,7 @@ func checkForQueryCluster(JSONQuery *entity.IncomingQueryJSON) (*entity.Incoming
// This is the starting point of the cluster; quite a few if-elses because of half-finished queries
// Lots of existence checks
if len(JSONQuery.Relations) > 0 { if len(JSONQuery.Relations) > 0 {
fmt.Println("he")
rel := fmt.Sprintf("r%v", JSONQuery.Relations[0].ID) rel := fmt.Sprintf("r%v", JSONQuery.Relations[0].ID)
cluster[rel] = true cluster[rel] = true
...@@ -78,11 +79,7 @@ func checkForQueryCluster(JSONQuery *entity.IncomingQueryJSON) (*entity.Incoming ...@@ -78,11 +79,7 @@ func checkForQueryCluster(JSONQuery *entity.IncomingQueryJSON) (*entity.Incoming
by := fmt.Sprintf("%v%v", JSONQuery.GroupBys[0].ByType[0], JSONQuery.GroupBys[0].ByID) by := fmt.Sprintf("%v%v", JSONQuery.GroupBys[0].ByType[0], JSONQuery.GroupBys[0].ByID)
cluster[by] = true cluster[by] = true
} else if len(JSONQuery.Modifiers) > 0 {
// I guess you could also count a single entity with certain constraints or something? or average it
// TODO
} }
// Add the relation to the map
// If there is no relation, then use the groupby
// if that isn't there either, then rip
...@@ -135,6 +132,25 @@ func checkForQueryCluster(JSONQuery *entity.IncomingQueryJSON) (*entity.Incoming ...@@ -135,6 +132,25 @@ func checkForQueryCluster(JSONQuery *entity.IncomingQueryJSON) (*entity.Incoming
stop = false stop = false
} }
// Check to see if an entity is connected to the cluster via an 'IN'
for _, ent := range JSONQuery.Entities {
self := fmt.Sprintf("e%v", ent.ID)
if cluster[self] {
continue
}
for _, con := range ent.Constraints {
if con.InID != -1 {
in := fmt.Sprintf("%v%v", con.InType[0], con.InID)
if cluster[in] {
cluster[self] = true
stop = false
}
}
}
}
} }
// Now the same for Group by's // Now the same for Group by's
...@@ -217,20 +233,6 @@ func checkForQueryCluster(JSONQuery *entity.IncomingQueryJSON) (*entity.Incoming ...@@ -217,20 +233,6 @@ func checkForQueryCluster(JSONQuery *entity.IncomingQueryJSON) (*entity.Incoming
// ** Loop through modifiers // ** Loop through modifiers
// Loop through filters
// Filters were not done in the clustering, since they live on top of a connection, meaning they do not extend the cluster
// This also means that if a From or a To is in the cluster, the other (and thus the filter) is in the cluster as well
for _, filter := range JSONQuery.Filters {
from := fmt.Sprintf("%v%v", filter.FromType[0], filter.FromID)
if cluster[from] {
clusterJSON.Filters = append(clusterJSON.Filters, filter)
} else {
restJSON.Filters = append(restJSON.Filters, filter)
isRest = true
}
}
return &clusterJSON, &restJSON, isRest return &clusterJSON, &restJSON, isRest
// After the cluster has been found: create two new JSONs: cluster and rest
......
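A side note on the key construction above (an observation, not part of this commit): indexing a Go string yields a byte, and %v renders that byte as its decimal value, so keys built with patterns like fmt.Sprintf("%v%v", ByType[0], ByID) come out numeric rather than letter-prefixed like the "e%v" / "r%v" keys used elsewhere. A tiny sketch:

package main

import "fmt"

func main() {
	byType := "entity"
	fmt.Println(fmt.Sprintf("%v%v", byType[0], 1)) // "1011" — byType[0] is the byte 'e' (101)
	fmt.Println(fmt.Sprintf("%c%v", byType[0], 1)) // "e1"   — %c prints the character instead
}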
// THIS IS LEGACY CODE FROM SWP THAT I WILL ALSO NEED LATER, BUT IN ITS CURRENT STATE I AM NOT USING IT
// FOR REFERENCE ONLY
// Because global scope vars, YIKES
package cypher
import (
"fmt"
"strconv"
"git.science.uu.nl/graphpolaris/query-conversion/entity"
)
type pdict struct {
typename string
pointer int
}
type pdictList []pdict
func (p pdictList) Len() int {
return len(p)
}
func (p pdictList) Less(i, j int) bool {
if p[i].typename < p[j].typename {
return true
} else if p[i].typename == p[j].typename && p[i].pointer < p[j].pointer {
return true
} else {
return false
}
}
func (p pdictList) Swap(i, j int) {
p[i], p[j] = p[j], p[i]
}
var listoflists []pdictList
var reldone map[int]bool
var entdone map[int]bool
var funcdone map[int]bool
var relfuncdone map[int]bool
var filterDone map[int]bool
func search(JSONQuery *entity.IncomingQueryJSON, index int) {
listoflists = []pdictList{}
reldone = make(map[int]bool)
entdone = make(map[int]bool)
funcdone = make(map[int]bool)
relfuncdone = make(map[int]bool)
filterDone = make(map[int]bool)
var s pdictList
//printSlice(s)
//layercounter = 0
initent := pdict{
typename: "entity",
pointer: index,
}
s = append(s, initent)
listoflists = append(listoflists, s)
EntToRel(JSONQuery, initent)
for i := range listoflists {
for j := range listoflists[i] {
fmt.Println(listoflists[i][j])
}
fmt.Println("")
}
AddFilters(JSONQuery)
fmt.Println(listoflists)
}
/*
RelToEnt Get the entities connected to a relation and recursively constructs part of the hierarchy
Entities always get added IN FRONT OF their respective relation in the hierarchy
JSONQuery: *entity.IncomingQueryJSON, the query in JSON format
rel: pdict, the relation to find all connected entities for
*/
func RelToEnt(JSONQuery *entity.IncomingQueryJSON, rel pdict) {
var newlist pdictList
layercounter := FindCurrentLayer(listoflists, rel)
// Loop over all entities
// If an entity is already in the entdone dict we already added it to the hierarchy, so we don't have to add it again
// If an entity matches either the from or to in a relation we can add it to the newlist
for i := range JSONQuery.Entities {
if _, ok := entdone[i]; !ok {
if JSONQuery.Relations[rel.pointer].FromID == i && JSONQuery.Relations[rel.pointer].FromType == "entity" {
fromentity := pdict{
typename: "entity",
pointer: i,
}
newlist = append(newlist, fromentity)
} else if JSONQuery.Relations[rel.pointer].ToID == i && JSONQuery.Relations[rel.pointer].ToType == "entity" {
toentity := pdict{
typename: "entity",
pointer: i,
}
newlist = append(newlist, toentity)
}
}
}
// This relation has found all its entities so we can set its ID to true
reldone[rel.pointer] = true
// If the newlist is empty, we can just skip the recursion
// This is effectively our base case
if len(newlist) != 0 {
// If our layercounter is equal to 0 we are in the first "layer" of the hierarchy
// Because we add the entities IN FRONT OF their respective relation we don't have to move the layercounter before prepending
// If our layercounter is not equal to 0 we lower the layercounter and then add each item to the newly selected layer
if layercounter == 0 {
listoflists = prepend(listoflists, newlist)
fmt.Println("RelToEnt Layercounter 0 prepend entity")
} else {
layercounter--
for i := range newlist {
listoflists[layercounter] = append(listoflists[layercounter], newlist[i])
fmt.Println("RelToEnt Layercounter " + strconv.Itoa(layercounter) + " append to layer above us, appending type: " + newlist[i].typename + " with pointer: " + strconv.Itoa(newlist[i].pointer))
}
}
// After getting a list of entities we can only go towards a list of relation
// So we recurse by calling EntToRel
for i := range newlist {
fmt.Println("EntToRel being called with index?: " + strconv.Itoa(newlist[i].pointer))
EntToRel(JSONQuery, newlist[i])
}
}
}
/*
EntToRel Get the relations connected to an entity and recursively constructs part of the hierarchy
Relation always get added BEHIND their respective entity in the hierarchy
JSONQuery: *entity.IncomingQueryJSON, the query in JSON format
ent: pdict, the entity to find all connected relations for
*/
func EntToRel(JSONQuery *entity.IncomingQueryJSON, ent pdict) {
var newlist pdictList
layercounter := FindCurrentLayer(listoflists, ent)
// Loop over all relations
// If a relation is already in the reldone dict we already added it to the hierarchy, so we don't have to add it again
// If a relation matches either the from or to with the entity we can add it to the newlist
for i := range JSONQuery.Relations {
if _, ok := reldone[i]; !ok {
if JSONQuery.Relations[i].FromID == ent.pointer && JSONQuery.Relations[i].FromType == "entity" {
rel := pdict{
typename: "relation",
pointer: i,
}
newlist = append(newlist, rel)
} else if JSONQuery.Relations[i].ToID == ent.pointer && JSONQuery.Relations[i].ToType == "entity" {
rel := pdict{
typename: "relation",
pointer: i,
}
newlist = append(newlist, rel)
}
}
}
// This entity has found all its relations so we can set its ID to true
entdone[ent.pointer] = true
if len(newlist) != 0 {
// If our layercounter is equal to the length of the hierarchy - 1 we are in the last "layer" of the hierarchy
// Because we add the relations BEHIND their respective entities we don't have to move the layercounter before appending
// TODO TAKE OUT UNNEEDED LAYERCOUNTER INCREMENTS AND DECREMENTS
// If our layercounter is any other value we increase the layercounter and then add each item to the newly selected layer
if layercounter == len(listoflists)-1 {
listoflists = append(listoflists, newlist)
layercounter++
fmt.Println("EntToRel Layercounter last appending below: type relation")
} else {
layercounter++
for i := range newlist {
listoflists[layercounter] = append(listoflists[layercounter], newlist[i])
fmt.Println("EntToRel Layercounter " + strconv.Itoa(layercounter) + " append to layer below us, appending type: " + newlist[i].typename + " with pointer: " + strconv.Itoa(newlist[i].pointer))
}
}
// After getting a list of relations we can only go towards a list of entities or a list of functions
// So we recurse by calling RelToEnt and RelToAllFunc
for i := range newlist {
fmt.Println("RelToEnt being called with index?: " + strconv.Itoa(newlist[i].pointer))
RelToEnt(JSONQuery, newlist[i])
fmt.Println("RelToAllFunc being called with index?: " + strconv.Itoa(newlist[i].pointer))
RelToAllFunc(JSONQuery, newlist[i])
}
}
}
/*
RelToAllFunc Get the functions connected (both functions that are applied to a subquery a relation is a part of and functions the relation is connected to itself)
to a relation and recursively constructs part of the hierarchy
If a function is applied to a subquery the relation is a part of, we add it BEHIND its respective relation
If a function is connected to a relation (relation uses the results from the function), we add it IN FRONT OF its respective relation
JSONQuery: *entity.IncomingQueryJSON, the query in JSON format
rel: pdict, the relation to find all connected functions for
*/
func RelToAllFunc(JSONQuery *entity.IncomingQueryJSON, rel pdict) {
var funcappliedtosubquery pdictList
var functowhichrelapplies pdictList
layercounter := FindCurrentLayer(listoflists, rel)
// Loop over all functions
// If a relation is already in the relfuncdone dict we already added it to the hierarchy, so we don't have to add it again
// If a function's relationID matches the current relation then the function is applied to a subquery
// If the relation's functionpointer matches a function's ID then the relation is connected to the function
// Depending on the case they get put in a different list and are put in different places in the hierarchy
for i := range JSONQuery.GroupBys {
if _, ok := relfuncdone[rel.pointer]; !ok {
if _, ok := funcdone[i]; !ok {
if JSONQuery.GroupBys[i].RelationID == rel.pointer {
relfunc := pdict{
typename: "groupBy",
pointer: i,
}
funcappliedtosubquery = append(funcappliedtosubquery, relfunc)
fmt.Println("I AM HERE 1")
}
if JSONQuery.Relations[rel.pointer].FromID == i && JSONQuery.Relations[rel.pointer].FromType == "groupBy" {
fromfunc := pdict{
typename: "groupBy",
pointer: i,
}
functowhichrelapplies = append(functowhichrelapplies, fromfunc)
fmt.Println("I AM HERE 2")
} else if JSONQuery.Relations[rel.pointer].ToID == i && JSONQuery.Relations[rel.pointer].ToType == "groupBy" {
tofunc := pdict{
typename: "groupBy",
pointer: i,
}
functowhichrelapplies = append(functowhichrelapplies, tofunc)
fmt.Println("I AM HERE 3")
}
}
}
}
relfuncdone[rel.pointer] = true
layercountertwo := layercounter
layercounterthree := layercounter
// See main function comment to see which sublist gets put where in the hierarchy
if len(functowhichrelapplies) != 0 {
if layercountertwo == 0 {
listoflists = prepend(listoflists, functowhichrelapplies)
fmt.Println("RellToAllFunc Layercounter 0 prepend, prepending functowhichrelapplies")
} else {
layercountertwo--
for i := range functowhichrelapplies {
listoflists[layercountertwo] = append(listoflists[layercountertwo], functowhichrelapplies[i])
fmt.Println("RellToAllFunc Layercounter " + strconv.Itoa(layercountertwo) + " append to layer below us, appending type: " + functowhichrelapplies[i].typename + " with pointer: " + strconv.Itoa(functowhichrelapplies[i].pointer))
}
}
for i := range functowhichrelapplies {
fmt.Println("FuncToAllRell being called with index?: " + strconv.Itoa(functowhichrelapplies[i].pointer))
FuncToAllRel(JSONQuery, functowhichrelapplies[i])
}
}
if len(funcappliedtosubquery) != 0 {
//newlayercounter := layercounter
if layercounterthree == len(listoflists)-1 {
listoflists = append(listoflists, funcappliedtosubquery)
layercounterthree++
fmt.Println("RellToAllFunc Layercounter last prepend, appending funcappliedtosubquery")
} else {
layercounterthree++
for i := range funcappliedtosubquery {
listoflists[layercounterthree] = append(listoflists[layercounterthree], funcappliedtosubquery[i])
fmt.Println("RellToAllFunc Layercounter " + strconv.Itoa(layercounterthree) + " append to layer below us, appending type: " + funcappliedtosubquery[i].typename + " with pointer: " + strconv.Itoa(funcappliedtosubquery[i].pointer))
}
}
for i := range funcappliedtosubquery {
fmt.Println("FuncToAllRel being called with index?: " + strconv.Itoa(funcappliedtosubquery[i].pointer))
FuncToAllRel(JSONQuery, funcappliedtosubquery[i])
}
}
}
/*
FuncToAllRel Get the relations connected (both relations that are in a subquery a function is applied to and relations that are connected to the function itself)
to a function and recursively constructs part of the hierarchy
If a relation is in a subquery that the function is applied to, we add the relation IN FRONT OF its respective function
If a relation is connected to a function, we add the relation BEHIND its respective function
JSONQuery: *entity.IncomingQueryJSON, the query in JSON format
function: pdict, the function to find all connected relations for
*/
func FuncToAllRel(JSONQuery *entity.IncomingQueryJSON, function pdict) {
var funcappliedtosubquery pdictList
var relattachedtofunc pdictList
layercounter := FindCurrentLayer(listoflists, function)
for i := range JSONQuery.Relations {
if _, ok := funcdone[function.pointer]; !ok {
if _, ok := relfuncdone[i]; !ok {
// The func is attached to this relation
if JSONQuery.GroupBys[function.pointer].RelationID == i {
funcrel := pdict{
typename: "relation",
pointer: i,
}
funcappliedtosubquery = append(funcappliedtosubquery, funcrel)
}
if JSONQuery.Relations[i].FromID == function.pointer && JSONQuery.Relations[i].FromType == "groupBy" {
fromrel := pdict{
typename: "relation",
pointer: i,
}
relattachedtofunc = append(relattachedtofunc, fromrel)
} else if JSONQuery.Relations[i].ToID == function.pointer && JSONQuery.Relations[i].ToType == "groupBy" {
torel := pdict{
typename: "relation",
pointer: i,
}
relattachedtofunc = append(relattachedtofunc, torel)
}
}
}
}
funcdone[function.pointer] = true
layercountertwo := layercounter
layercounterthree := layercounter
if len(funcappliedtosubquery) != 0 {
//newlayercounter := layercounter
if layercountertwo == 0 {
listoflists = prepend(listoflists, funcappliedtosubquery)
fmt.Println("FuncToAllRel Layercounter 0 prepend, prepending funcappliedtosubquery")
} else {
layercountertwo--
for i := range funcappliedtosubquery {
listoflists[layercountertwo] = append(listoflists[layercountertwo], funcappliedtosubquery[i])
fmt.Println("FuncToAllRel Layercounter " + strconv.Itoa(layercountertwo) + " append to layer below us, appending type: " + funcappliedtosubquery[i].typename + " with pointer: " + strconv.Itoa(funcappliedtosubquery[i].pointer))
}
}
for i := range funcappliedtosubquery {
fmt.Println("RelToEnt being called with index?: " + strconv.Itoa(funcappliedtosubquery[i].pointer))
RelToEnt(JSONQuery, funcappliedtosubquery[i])
fmt.Println("RelToAllFunc being called with index?: " + strconv.Itoa(funcappliedtosubquery[i].pointer))
RelToAllFunc(JSONQuery, funcappliedtosubquery[i])
}
}
if len(relattachedtofunc) != 0 {
if layercounterthree == len(listoflists)-1 {
listoflists = append(listoflists, relattachedtofunc)
layercounterthree++
fmt.Println("FuncToAllRel Layercounter last append, appending relattachedtofunc")
} else {
layercounterthree++
for i := range relattachedtofunc {
listoflists[layercounterthree] = append(listoflists[layercounterthree], relattachedtofunc[i])
fmt.Println("FuncToAllRel Layercounter " + strconv.Itoa(layercounterthree) + " append to layer below us, appending type: " + relattachedtofunc[i].typename + " with pointer: " + strconv.Itoa(relattachedtofunc[i].pointer))
}
}
for i := range relattachedtofunc {
fmt.Println("RelToEnt being called with index?: " + strconv.Itoa(relattachedtofunc[i].pointer))
RelToEnt(JSONQuery, relattachedtofunc[i])
fmt.Println("RelToAllFunc being called with index?: " + strconv.Itoa(relattachedtofunc[i].pointer))
RelToAllFunc(JSONQuery, relattachedtofunc[i])
}
}
}
func AddFilters(JSONQuery *entity.IncomingQueryJSON) {
for i, filter := range JSONQuery.Filters {
if _, ok := filterDone[i]; !ok {
p := pdict{
typename: filter.FromType,
pointer: filter.FromID,
}
f := pdict{
typename: "filter",
pointer: filter.ID,
}
addOneFilter(f, JSONQuery, p, &filterDone)
}
}
}
func addOneFilter(filterPDict pdict, JSONQuery *entity.IncomingQueryJSON, p pdict, filterDone *map[int]bool) {
if p.typename == "filter" && (*filterDone)[p.pointer] {
l := FindCurrentLayer(listoflists, p)
k := pdictList{}
if len(listoflists) > l+1 && listoflists[l+1][0].typename == "filter" {
listoflists[l+1] = append(listoflists[l+1], filterPDict)
} else {
listoflists = BelowAppend(listoflists, l, k)
}
(*filterDone)[filterPDict.pointer] = true
} else if p.typename == "filter" {
pnew := pdict{
typename: JSONQuery.Filters[p.pointer].FromType,
pointer: JSONQuery.Filters[p.pointer].FromID,
}
addOneFilter(p, JSONQuery, pnew, filterDone)
l := FindCurrentLayer(listoflists, p)
k := pdictList{filterPDict}
if len(listoflists) > l+1 && listoflists[l+1][0].typename == "filter" {
listoflists[l+1] = append(listoflists[l+1], filterPDict)
} else {
listoflists = BelowAppend(listoflists, l, k)
}
(*filterDone)[filterPDict.pointer] = true
} else {
l := FindCurrentLayer(listoflists, p)
k := pdictList{filterPDict}
if len(listoflists) > l+1 && listoflists[l+1][0].typename == "filter" {
listoflists[l+1] = append(listoflists[l+1], filterPDict)
} else {
listoflists = BelowAppend(listoflists, l, k)
}
(*filterDone)[filterPDict.pointer] = true
}
}
// A function that appends 1 level above (if index is 0 this won't work)
func AboveAppend(list []pdictList, index int, value pdictList) []pdictList {
if index == 0 {
return prepend(list, value)
}
return BelowAppend(list, index-1, value)
}
// A function that appends 1 level below
func BelowAppend(lists []pdictList, index int, value pdictList) []pdictList {
if len(lists)-1 == index { // nil or empty slice or after last element
return append(lists, value)
}
k := make([]pdictList, len(lists[index+1:]))
copy(k, lists[index+1:])
l := make([]pdictList, len(lists[:index+1]))
copy(l, lists[:index+1])
lists = append(l, value) // index < len(a)
return append(lists, k...)
}
// A simple double-for loop that finds the layer in which an element resides in the hierarchy
// Because we only append elements relative to another element, we can freely use this to keep track of layers
func FindCurrentLayer(list []pdictList, element pdict) int {
currlayer := -1
for i, sublist := range list {
for j := range sublist {
if sublist[j].pointer == element.pointer && sublist[j].typename == element.typename {
currlayer = i
//break
}
}
}
return currlayer
}
// Adds a list of pdicts to the hierarchy, but IN FRONT OF the current layer
// Only needed when an entire new list has to be inserted in front of the hierarchy
// Prepending to existing layers can be done by decreasing the layercounter and appending
// See XToY functions for example usage
func prepend(list []pdictList, element pdictList) []pdictList {
var dummylist pdictList
dummy := pdict{
typename: "dummy",
pointer: -1,
}
dummylist = append(dummylist, dummy)
list = append(list, dummylist)
copy(list[1:], list)
list[0] = element
return list
}
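A minimal usage sketch (not part of this commit, assuming the same package) of how the layer helpers compose:

func sketchLayerHelpers() {
	layers := []pdictList{
		{{typename: "entity", pointer: 0}},
	}
	// Insert a relation layer directly below layer 0.
	layers = BelowAppend(layers, 0, pdictList{{typename: "relation", pointer: 0}})
	// Put a brand-new layer in front of everything.
	layers = prepend(layers, pdictList{{typename: "groupBy", pointer: 0}})
	// The relation now lives in layer 2.
	fmt.Println(FindCurrentLayer(layers, pdict{typename: "relation", pointer: 0})) // prints 2
}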
package cypher
import (
"encoding/json"
"fmt"
"sort"
"testing"
"git.science.uu.nl/graphpolaris/query-conversion/entity"
"github.com/stretchr/testify/assert"
)
func TestHierarchyBasic(t *testing.T) {
// Setup for test
// Create query conversion service
query := []byte(`{
"return": {
"entities": [
0,
1
],
"relations": [
0
],
"groupBys": []
},
"entities": [
{
"name": "parliament",
"ID": 0
},
{
"name": "parties",
"ID": 1
}
],
"relations": [
{
"ID": 0,
"name": "member_of",
"depth": {
"min": 1,
"max": 1
},
"fromType": "entity",
"fromID": 0,
"toType": "entity",
"toID": 1
}
],
"groupBys": [],
"filters": [
{
"ID": 0,
"fromType": "entity",
"fromID": 0,
"toType": "relation",
"toID": 0,
"attribute": "age",
"value": "45",
"dataType": "number",
"matchType": "GT",
"inType": "",
"inID": -1
},
{
"ID": 1,
"fromType": "relation",
"fromID": 0,
"toType": "relation",
"toID": 1,
"attribute": "isChairman",
"value": "45",
"dataType": "number",
"matchType": "GT",
"inType": "",
"inID": -1
}
],
"limit": 5000
}
`)
// Unmarshal the incoming message into an IncomingQueryJSON object
var JSONQuery entity.IncomingQueryJSON
json.Unmarshal(query, &JSONQuery)
search(&JSONQuery, 0)
// Assert that the result and the expected result are the same
correctResult := `[[{entity 0} {entity 1}] [{filter 0}] [{relation 0}] [{filter 1}]]`
assert.Equal(t, correctResult, fmt.Sprint(listoflists))
t.Fail()
}
func TestHierarchyRandomStart(t *testing.T) {
// Setup for test
// Create query conversion service
query := []byte(`{
"return": {
"entities": [
0,
1
],
"relations": [
0
],
"groupBys": []
},
"entities": [
{
"name": "parties",
"ID": 1
},
{
"name": "parliament",
"ID": 0
}
],
"relations": [
{
"ID": 0,
"name": "member_of",
"depth": {
"min": 1,
"max": 1
},
"fromType": "entity",
"fromID": 0,
"toType": "entity",
"toID": 1
}
],
"groupBys": [],
"filters": [
{
"ID": 0,
"fromType": "entity",
"fromID": 0,
"toType": "relation",
"toID": 0,
"attribute": "age",
"value": "45",
"dataType": "number",
"matchType": "GT",
"inType": "",
"inID": -1
},
{
"ID": 1,
"fromType": "relation",
"fromID": 0,
"toType": "relation",
"toID": 1,
"attribute": "isChairman",
"value": "45",
"dataType": "number",
"matchType": "GT",
"inType": "",
"inID": -1
}
],
"limit": 5000
}
`)
// Unmarshal the incoming message into an IncomingQueryJSON object
var JSONQuery entity.IncomingQueryJSON
json.Unmarshal(query, &JSONQuery)
correctResult := make([]pdictList, 4)
correctResult[0] = pdictList{{typename: "entity", pointer: 0}, {typename: "entity", pointer: 1}}
correctResult[1] = pdictList{{typename: "filter", pointer: 0}}
correctResult[2] = pdictList{{typename: "relation", pointer: 0}}
correctResult[3] = pdictList{{typename: "filter", pointer: 1}}
for i := range JSONQuery.Entities {
search(&JSONQuery, i)
sortedListOfLists := make([]pdictList, len(listoflists))
for i, list := range listoflists {
k := make(pdictList, list.Len())
copy(k, list)
sort.Sort(k)
sortedListOfLists[i] = k
}
assert.Equal(t, fmt.Sprint(correctResult), fmt.Sprint(sortedListOfLists))
}
}
func TestHierarchyWithGroupby(t *testing.T) {
// Setup for test
// Create query conversion service
query := []byte(`{
"return": {
"entities": [
0,
1,
2,
3
],
"relations": [
0,
1,
2
]
},
"entities": [
{
"ID": 0,
"name": "parliament"
},
{
"ID": 1,
"name": "commissions"
},
{
"ID": 2,
"name": "parliament"
},
{
"ID": 3,
"name": "resolutions"
}
],
"relations": [
{
"type": "part_of",
"depth": {
"min": 1,
"max": 1
},
"fromType": "entity",
"fromId": 0,
"toType": "entity",
"toID": 1
},
{
"type": "part_of",
"depth": {
"min": 1,
"max": 1
},
"fromType": "groupBy",
"fromID": 0,
"toType": "entity",
"toID": 2
},
{
"type": "submits",
"depth": {
"min": 1,
"max": 1
},
"fromType": "entity",
"fromID": 2,
"toType": "entity",
"toID": 3
}
],
"groupBys": [
{
"ID": 0,
"groupType": "entity",
"groupID": 0,
"groupAttribute": "age",
"byType": "entity",
"byID": 1,
"byAttribute": "name",
"appliedModifier": "AVG",
"relationID": 0,
"constraints": [
{
"attribute": "age",
"value": "45",
"dataType": "number",
"matchType": "GT",
"functionPointer": {
"from": -1,
"to": -1
}
}
]
}
],
"filters": [
{
"ID": 0,
"fromType": "groupBy",
"fromID": 0,
"toType": "relation",
"toID": 1,
"attribute": "age",
"value": "45",
"dataType": "number",
"matchType": "GT",
"inType": "",
"inID": -1
}
],
"limit": 5000,
"modifiers": [],
"databaseName": "TweedeKamer"
}
`)
// Unmarshal the incoming message into an IncomingQueryJSON object
var JSONQuery entity.IncomingQueryJSON
json.Unmarshal(query, &JSONQuery)
correctResult := make([]pdictList, 5)
correctResult[0] = pdictList{{typename: "entity", pointer: 0}, {typename: "entity", pointer: 1}}
correctResult[1] = pdictList{{typename: "relation", pointer: 0}}
correctResult[2] = pdictList{{typename: "entity", pointer: 2}, {typename: "entity", pointer: 3}, {typename: "groupBy", pointer: 0}}
correctResult[3] = pdictList{{typename: "filter", pointer: 0}}
correctResult[4] = pdictList{{typename: "relation", pointer: 1}, {typename: "relation", pointer: 2}}
for i := range JSONQuery.Entities {
search(&JSONQuery, i)
fmt.Println(listoflists)
sortedListOfLists := make([]pdictList, len(listoflists))
for i, list := range listoflists {
k := make(pdictList, list.Len())
copy(k, list)
sort.Sort(k)
sortedListOfLists[i] = k
}
assert.Equal(t, fmt.Sprint(correctResult), fmt.Sprint(sortedListOfLists))
}
}
...@@ -2,66 +2,74 @@ package entity ...@@ -2,66 +2,74 @@ package entity
// IncomingQueryJSON describes the query coming into the service in JSON format // IncomingQueryJSON describes the query coming into the service in JSON format
type IncomingQueryJSON struct { type IncomingQueryJSON struct {
DatabaseName string DatabaseName string `json:"databaseName"`
Return QueryReturnStruct Return QueryReturnStruct `json:"return"`
Entities []QueryEntityStruct Entities []QueryEntityStruct `json:"entities"`
Relations []QueryRelationStruct Relations []QueryRelationStruct `json:"relations"`
GroupBys []QueryGroupByStruct GroupBys []QueryGroupByStruct `json:"groupBys"`
Filters []QueryFilterStruct MachineLearning []QueryMLStruct `json:"machineLearning"`
// Limit is for limiting the amount of paths AQL will return in a relation let statement // Limit is for limiting the amount of paths AQL will return in a relation let statement
Limit int Limit int `json:"limit"`
Modifiers []QueryModifierStruct //Modifiers []QueryModifierStruct
} }
// QueryReturnStruct holds the indices of the entities and relations that need to be returned // QueryReturnStruct holds the indices of the entities and relations that need to be returned
type QueryReturnStruct struct { type QueryReturnStruct struct {
Entities []int Entities []int `json:"entities"`
Relations []int Relations []int `json:"relations"`
GroupBys []int GroupBys []int `json:"groupBys"`
//Modifiers []int //Modifiers []int
} }
// QueryEntityStruct encapsulates a single entity with its corresponding constraints // QueryEntityStruct encapsulates a single entity with its corresponding constraints
type QueryEntityStruct struct { type QueryEntityStruct struct {
ID int ID int `json:"id"`
Name string Name string `json:"name"`
Constraints []QueryConstraintStruct `json:"constraints"`
} }
// QueryRelationStruct encapsulates a single relation with its corresponding constraints // QueryRelationStruct encapsulates a single relation with its corresponding constraints
type QueryRelationStruct struct { type QueryRelationStruct struct {
ID int ID int `json:"id"`
Name string Name string `json:"name"`
FromType string Depth QuerySearchDepthStruct `json:"depth"`
FromID int FromType string `json:"fromType"`
ToType string FromID int `json:"fromID"`
ToID int ToType string `json:"toType"`
Depth QuerySearchDepthStruct ToID int `json:"toID"`
QueryConstraintStruct []QueryConstraintStruct `json:"constraints"`
} }
type QueryGroupByStruct struct { type QueryGroupByStruct struct {
ID int ID int `json:"id"`
GroupType string GroupType string `json:"groupType"`
GroupID int GroupID int `json:"groupID"`
GroupAttribute string GroupAttribute string `json:"groupAttribute"`
ByType string ByType string `json:"byType"`
ByID int ByID int `json:"byID"`
ByAttribute string ByAttribute string `json:"byAttribute"`
AppliedModifier string AppliedModifier string `json:"appliedModifier"`
RelationID int RelationID int `json:"relationID"`
Constraints []QueryConstraintStruct `json:"constraints"`
} }
type QueryFilterStruct struct { // QueryConstraintStruct holds the information of the constraint
ID int // Constraint datatypes
FromType string // string MatchTypes: exact/contains/startswith/endswith
FromID int // int MatchTypes: GT/LT/EQ
ToType string // bool MatchTypes: EQ/NEQ
ToID int type QueryConstraintStruct struct {
Attribute string Attribute string `json:"attribute"`
DataType string Value string `json:"value"`
MatchType string DataType string `json:"dataType"`
Value string MatchType string `json:"matchType"`
InType string InID int `json:"inID"`
InID int InType string `json:"inType"`
}
type QueryMLStruct struct {
Queuename string
Parameters []string
} }
// QueryModifierStruct encapsulates a single modifier with its corresponding constraints // QueryModifierStruct encapsulates a single modifier with its corresponding constraints
...@@ -74,12 +82,32 @@ type QueryModifierStruct struct { ...@@ -74,12 +82,32 @@ type QueryModifierStruct struct {
// QuerySearchDepthStruct holds the range of traversals for the relation // QuerySearchDepthStruct holds the range of traversals for the relation
type QuerySearchDepthStruct struct { type QuerySearchDepthStruct struct {
Min int Min int `json:"min"`
Max int Max int `json:"max"`
} }
// QueryConstraintStruct holds the information of the constraint // Might still need to be changed or split up, because the interface{} is annoying
// Constraint datatypes func (JSONQuery IncomingQueryJSON) find(qID int, qType string) interface{} {
// string MatchTypes: exact/contains/startswith/endswith
// int MatchTypes: GT/LT/EQ/ if qType == "entity" {
// bool MatchTypes: EQ/NEQ for _, part := range JSONQuery.Entities {
if part.ID == qID {
return &part
}
}
} else if qType == "relation" {
for _, part := range JSONQuery.Relations {
if part.ID == qID {
return &part
}
}
} else if qType == "groupBy" {
for _, part := range JSONQuery.GroupBys {
if part.ID == qID {
return &part
}
}
}
return nil
}
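A hypothetical usage sketch (not part of this commit, same package assumed): since find returns interface{}, a caller has to type-assert the result back to the concrete struct it asked for:

func exampleFindUsage(q IncomingQueryJSON) string {
	part := q.find(0, "entity")
	if ent, ok := part.(*QueryEntityStruct); ok {
		return ent.Name
	}
	return ""
}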