From f536bf160bfb489f698a387b4d14e683ec20e5bc Mon Sep 17 00:00:00 2001
From: "Lelieveld,J.R.J. (Joris)" <j.r.j.lelieveld@uu.nl>
Date: Wed, 8 Dec 2021 15:31:44 +0000
Subject: [PATCH] Groupby overhaul

---
 cypher/clustering.go                     |  301 ++++
 cypher/convertQuery.go                   |  714 ++++++--
 cypher/convertQueryBenchmark_test.go     |    8 +-
 cypher/convertQuery_test.go              | 2088 +++++++++++++++-------
 cypher/createConstraints.go              |   15 +-
 cypher/hierarchy.go                      |  247 +++
 entity/entitycypher/queryStructCypher.go |   62 -
 entity/queryStruct.go                    |  150 +-
 entity/queryStructValidator.go           |   18 +-
 interface.go                             |    4 -
 10 files changed, 2607 insertions(+), 1000 deletions(-)
 create mode 100644 cypher/clustering.go
 create mode 100644 cypher/hierarchy.go
 delete mode 100644 entity/entitycypher/queryStructCypher.go

diff --git a/cypher/clustering.go b/cypher/clustering.go
new file mode 100644
index 0000000..433b77e
--- /dev/null
+++ b/cypher/clustering.go
@@ -0,0 +1,301 @@
+package cypher
+
+import (
+	"errors"
+	"fmt"
+
+	"git.science.uu.nl/graphpolaris/query-conversion/entity"
+)
+
+// checkForQueryCluster detects whether there are multiple disjoint queries in the query panel and tries to separate them.
+// Maybe also delete floating pills that have no connection (but that is a different function)
+func checkForQueryCluster(JSONQuery *entity.IncomingQueryJSON) (*entity.IncomingQueryJSON, *entity.IncomingQueryJSON, bool) {
+
+	// cluster is a set for all pills (entities e0 e1 e2, relations r0 .., group by g0 ..)
+	cluster := make(map[string]bool)
+
+	if len(JSONQuery.Relations) > 0 {
+		rel := fmt.Sprintf("r%v", JSONQuery.Relations[0].ID)
+		cluster[rel] = true
+
+		if JSONQuery.Relations[0].ToID != -1 {
+
+			// Take the first letter: entities with ID 0 -> e0
+			to := fmt.Sprintf("%v%v", string(JSONQuery.Relations[0].ToType[0]), JSONQuery.Relations[0].ToID)
+			cluster[to] = true
+		}
+
+		if JSONQuery.Relations[0].FromID != -1 {
+			from := fmt.Sprintf("%v%v", string(JSONQuery.Relations[0].FromType[0]), JSONQuery.Relations[0].FromID)
+			cluster[from] = true
+		}
+
+	} else if len(JSONQuery.GroupBys) > 0 {
+		gb := fmt.Sprintf("g%v", JSONQuery.GroupBys[0].ID)
+		cluster[gb] = true
+
+		group := fmt.Sprintf("%v%v", string(JSONQuery.GroupBys[0].GroupType[0]), JSONQuery.GroupBys[0].GroupID)
+		cluster[group] = true
+
+		by := fmt.Sprintf("%v%v", string(JSONQuery.GroupBys[0].ByType[0]), JSONQuery.GroupBys[0].ByID)
+		cluster[by] = true
+
+	} else {
+		// If there is no relation or groupby then there is no query cluster atm
+		// Needs to change when the summary pill is introduced
+		return nil, nil, false
+	}
+
+	for i := 0; i < 100; i++ {
+		stop := true
+
+		// Iteratively check to see if something is connected to the cluster
+		// It should have skips for when something has already been added to the cluster, but due to complex connections (like an IN or groupby attached to a relation)
+	// It is easier to just try everything every time (and it's computationally insignificant)
+		// The loop stops when nothing was added for a round
+
+		for _, rel := range JSONQuery.Relations {
+
+			rela := fmt.Sprintf("r%v", rel.ID)
+
+			partOfCluster := false
+			// Now comes the check to see if one of its endpoints is in the cluster, meaning everything is in the cluster
+			if rel.ToID != -1 {
+				to := fmt.Sprintf("%v%v", string(rel.ToType[0]), rel.ToID)
+
+				if cluster[to] {
+					partOfCluster = true
+				}
+			}
+
+			if rel.FromID != -1 {
+				from := fmt.Sprintf("%v%v", string(rel.FromType[0]), rel.FromID)
+
+				if cluster[from] {
+					partOfCluster = true
+				}
+			}
+
+			if partOfCluster {
+				if rel.ToID != -1 {
+					to := fmt.Sprintf("%v%v", string(rel.ToType[0]), rel.ToID)
+					cluster[to] = true
+				}
+
+				if rel.FromID != -1 {
+					from := fmt.Sprintf("%v%v", string(rel.FromType[0]), rel.FromID)
+					cluster[from] = true
+				}
+
+				cluster[rela] = true
+				stop = false
+			}
+		}
+
+		// Check to see if an entity is connected to the cluster via an 'IN'
+		for _, ent := range JSONQuery.Entities {
+			self := fmt.Sprintf("e%v", ent.ID)
+
+			for _, con := range ent.Constraints {
+				if con.InID != -1 {
+					in := fmt.Sprintf("%v%v", string(con.InType[0]), con.InID)
+
+					if cluster[in] {
+						cluster[self] = true
+						stop = false
+					}
+				}
+			}
+		}
+
+		// Now the same for Group by's
+		for _, gb := range JSONQuery.GroupBys {
+			gby := fmt.Sprintf("g%v", gb.ID)
+
+			// It should have been checked that the connections of the group by are valid,
+			// since a group by must have all connections filled (in contrary of a relation)
+
+			// Check all by ID's
+			connected := false
+
+			for _, byID := range gb.ByID {
+				by := fmt.Sprintf("%v%v", string(gb.ByType[0]), byID)
+
+				if cluster[by] {
+					connected = true
+				}
+			}
+
+			// Check all group ID's
+			if !connected {
+
+				for _, gID := range gb.GroupID {
+					group := fmt.Sprintf("%v%v", string(gb.GroupType[0]), gID)
+
+					if cluster[group] {
+						connected = true
+					}
+				}
+			}
+
+			if connected {
+				cluster[gby] = true
+
+				for _, gID := range gb.GroupID {
+					group := fmt.Sprintf("%v%v", string(gb.GroupType[0]), gID)
+					cluster[group] = true
+				}
+
+				for _, byID := range gb.ByID {
+					by := fmt.Sprintf("%v%v", string(gb.ByType[0]), byID)
+					cluster[by] = true
+				}
+				stop = false
+			}
+
+		}
+
+		if stop {
+			// No new entities were added to the cluster, thus it is finished
+			break
+		}
+	}
+
+	// Now walk through the JSON and divide it into the cluster and rest
+	restJSON := entity.IncomingQueryJSON{DatabaseName: JSONQuery.DatabaseName, Limit: JSONQuery.Limit}
+	clusterJSON := entity.IncomingQueryJSON{DatabaseName: JSONQuery.DatabaseName, Limit: JSONQuery.Limit}
+	isRest := false
+
+	// Loop through entities
+	for _, ent := range JSONQuery.Entities {
+		name := fmt.Sprintf("e%v", ent.ID)
+
+		if cluster[name] {
+			clusterJSON.Entities = append(clusterJSON.Entities, ent)
+		} else {
+			restJSON.Entities = append(restJSON.Entities, ent)
+			isRest = true
+		}
+	}
+
+	// Loop through relations
+	for _, rel := range JSONQuery.Relations {
+		name := fmt.Sprintf("r%v", rel.ID)
+
+		if cluster[name] {
+			clusterJSON.Relations = append(clusterJSON.Relations, rel)
+		} else {
+			restJSON.Relations = append(restJSON.Relations, rel)
+			isRest = true
+		}
+	}
+
+	// Loop through groupby's
+	for _, gb := range JSONQuery.GroupBys {
+		name := fmt.Sprintf("g%v", gb.ID)
+
+		if cluster[name] {
+			clusterJSON.GroupBys = append(clusterJSON.GroupBys, gb)
+		} else {
+			restJSON.GroupBys = append(restJSON.GroupBys, gb)
+			isRest = true
+		}
+	}
+
+	return &clusterJSON, &restJSON, isRest
+}
+
+// checkNoDeadEnds checks that all from- and to-endpoints referenced in the query actually exist
+func checkNoDeadEnds(JSONQuery *entity.IncomingQueryJSON) (bool, error) {
+
+	// Check for all the connections of a relation
+	for _, rel := range JSONQuery.Relations {
+		if rel.FromID != -1 {
+			if rel.FromType == "entity" {
+				ent := JSONQuery.FindE(rel.FromID)
+				if ent == nil {
+					return false, errors.New("Invalid query")
+				}
+			} else if rel.FromType == "groupBy" {
+				gb := JSONQuery.FindG(rel.FromID)
+				if gb == nil {
+					return false, errors.New("Invalid query")
+				}
+			}
+		}
+
+		if rel.ToID != -1 {
+			if rel.ToType == "entity" {
+				ent := JSONQuery.FindE(rel.ToID)
+				if ent == nil {
+					return false, errors.New("Invalid query")
+				}
+			} else if rel.ToType == "groupBy" {
+				gb := JSONQuery.FindG(rel.ToID)
+				if gb == nil {
+					return false, errors.New("Invalid query")
+				}
+			}
+		}
+	}
+
+	// Check for all the connections of a group by
+	for _, gb := range JSONQuery.GroupBys {
+		if gb.GroupType == "entity" {
+
+			for _, gID := range gb.GroupID {
+				ent := JSONQuery.FindE(gID)
+				if ent == nil {
+					return false, errors.New("Invalid query")
+				}
+			}
+		}
+
+		if gb.GroupType == "relation" {
+			for _, gID := range gb.GroupID {
+				rel := JSONQuery.FindR(gID)
+				if rel == nil {
+					return false, errors.New("Invalid query")
+				}
+			}
+		}
+
+		if gb.ByType == "entity" {
+			for _, gID := range gb.ByID {
+				ent := JSONQuery.FindE(gID)
+				if ent == nil {
+					return false, errors.New("Invalid query")
+				}
+			}
+		}
+
+		if gb.ByType == "relation" {
+			for _, gID := range gb.ByID {
+				rel := JSONQuery.FindR(gID)
+				if rel == nil {
+					return false, errors.New("Invalid query")
+				}
+			}
+		}
+	}
+
+	// Check all the connections of IN-statements
+	for _, ent := range JSONQuery.Entities {
+		if len(ent.Constraints) == 0 {
+			continue
+		}
+
+		for _, cons := range ent.Constraints {
+			if cons.InID == -1 {
+				continue
+			}
+
+			gb := JSONQuery.FindG(cons.InID)
+			if gb == nil {
+				return false, errors.New("Invalid query")
+			}
+		}
+	}
+
+	return true, nil
+}
diff --git a/cypher/convertQuery.go b/cypher/convertQuery.go
index 0284e58..69877f2 100644
--- a/cypher/convertQuery.go
+++ b/cypher/convertQuery.go
@@ -1,8 +1,3 @@
-/*
-This program has been developed by students from the bachelor Computer Science at Utrecht University within the Software Project course.
-© Copyright Utrecht University (Department of Information and Computing Sciences)
-*/
-
 package cypher
 
 import (
@@ -10,234 +5,589 @@ import (
 	"fmt"
 	"strings"
 
-	"git.science.uu.nl/graphpolaris/query-conversion/entity/entitycypher"
+	"git.science.uu.nl/graphpolaris/query-conversion/entity"
 )
 
-/*
-ConvertQuery converts an IncomingQueryJSON object into AQL
-	JSONQuery: *entity.IncomingQueryJSON, the query to be converted to AQL
-	Returns: (*string, error), the AQL query and a possible error
-*/
-func (s *Service) ConvertQuery(JSONQuery *entitycypher.IncomingQueryJSON) (*string, error) {
+// ConvertQuery takes the json from the visual query builder and converts it into Cypher
+func (s *Service) ConvertQuery(totalJSONQuery *entity.IncomingQueryJSON) (*string, error) {
+	var finalCypher *string
 
-	// Check to make sure all indexes exist
-	// How many entities are there
-	numEntities := len(JSONQuery.Entities) - 1
-	// How many relations there are
-	numRelations := len(JSONQuery.Relations) - 1
+	queryJSON := totalJSONQuery
 
-	// Make sure no entity should be returned that is outside the range of that list
-	for _, e := range JSONQuery.Return.Entities {
-		// If this entity references an entity that is outside the range
-		if e > numEntities || e < 0 {
-			return nil, errors.New("non-existing entity referenced in return")
-		}
+	// If you want to query the other cluster as well, remove the underscores
+	query, _, _ := checkForQueryCluster(queryJSON)
+
+	if query == nil {
+		return nil, errors.New("Invalid query")
 	}
 
-	// Make sure that no relation mentions a non-existing entity
-	for _, r := range JSONQuery.Relations {
-		if r.EntityFrom > numEntities || r.EntityTo > numEntities {
-			return nil, errors.New("non-exisiting entity referenced in relation")
-		}
+	ok, err := checkNoDeadEnds(query)
+	if !ok {
+		return nil, err
 	}
 
-	// Make sure no non-existing relation is tried to be returned
-	for _, r := range JSONQuery.Return.Relations {
-		if r > numRelations || r < 0 {
-			return nil, errors.New("non-existing relation referenced in return")
-		}
+	finalCypher, err = createCypher(query)
+	if err != nil {
+		return nil, err
 	}
 
-	result := createQuery(JSONQuery)
-	return result, nil
+	return finalCypher, nil
 }
 
-/*
-sliceContains checks if a slice contains the input
-	s: []int, the slice to check
-	e: int, what you're checking for
-	Return: bool, true if it contains 'e'
-*/
-func sliceContains(s []int, e int) bool {
-	for _, a := range s {
-		if a == e {
-			return true
+// createCypher translates a cluster of nodes (query) to Cypher
+func createCypher(JSONQuery *entity.IncomingQueryJSON) (*string, error) {
+
+	// create the hierarchy from the cluster
+	hierarchy, err := createQueryHierarchy(JSONQuery)
+	if err != nil {
+		return nil, err
+	}
+
+	// translate it to cypher in the right order, using the hierarchy
+	cypher, err := formQuery(JSONQuery, hierarchy)
+	if err != nil {
+		return nil, err
+	}
+
+	// create the return statement
+	returnStatement, err := createReturnStatement(JSONQuery, hierarchy)
+	if err != nil {
+		return nil, errors.New("Creation of return Cypher failed")
+	}
+
+	finalCypher := *cypher + *returnStatement
+
+	return &finalCypher, nil
+}
+
+// createReturnStatement creates the final return statement
+func createReturnStatement(JSONQuery *entity.IncomingQueryJSON, parts entity.Query) (*string, error) {
+
+	// new plan: add them all to a list
+	// MATCH (e0:Person)-[r0:DIRECTED]-(e1:Movie)
+	// WHERE  e0.name <> "Raymond Campbell"
+	// WITH *
+	// MATCH (e0:Person)-[r1:ACTED_IN]-(e3:Movie)
+	// UNWIND [e0, e1, e3] as e
+	// UNWIND [r0, r1] as r
+	// return count(distinct e), count(distinct r)
+
+	var retStatement string
+	var retType string // This is a marker attached to the end, for ease of parsing in the executor
+
+	// First check to see if the return is a table (due to a groupby at the end) or if it is nodelink data
+	numOfParts := len(parts)
+	if numOfParts == 0 {
+		return nil, errors.New("No parts found in return statement")
+	}
+
+	if parts[numOfParts-1].QType == "groupBy" {
+		// Return is a table
+		groupBy := JSONQuery.FindG(parts[numOfParts-1].QID)
+
+		gName := fmt.Sprintf("%v_%v", groupBy.AppliedModifier, groupBy.GroupAttribute)
+		byID := fmt.Sprint(groupBy.ByID[0])
+
+		if len(groupBy.ByID) > 1 {
+			byID = ""
+
+			for _, x := range groupBy.ByID {
+				byID += fmt.Sprint(x)
+			}
+			byID += "L"
+		}
+		by := fmt.Sprintf("%v%v.%v", string(groupBy.ByType[0]), byID, groupBy.ByAttribute)
+		byName := strings.Replace(by, ".", "_", 1)
+
+		retStatement = fmt.Sprintf("RETURN %v, %v", byName, gName)
+		retType = ";table"
+	} else {
+
+		returnlist := make([]string, 0)
+
+		// Return is nodelink
+		// Loop through the parts of the query from back to front
+		for i := numOfParts - 1; i >= 0; i-- {
+			part := parts[i]
+			if part.QType == "relation" {
+				rel := JSONQuery.FindR(part.QID)
+				returnlist = append(returnlist, fmt.Sprintf("r%v", rel.ID))
+
+				if rel.FromID != -1 {
+					if rel.FromType == "entity" {
+
+						returnlist = append(returnlist, fmt.Sprintf("e%v", rel.FromID))
+					} else {
+						id := JSONQuery.FindG(rel.FromID).ByID
+						idstr := fmt.Sprint(id[0])
+						if len(id) > 1 {
+							idstr = ""
+
+							for _, x := range id {
+								idstr += fmt.Sprint(x)
+							}
+							idstr += "L"
+						}
+						returnlist = append(returnlist, fmt.Sprintf("eg%v", idstr)) // TODO: something still goes wrong here
+					}
+				}
+
+				if rel.ToID != -1 {
+					if rel.ToType == "entity" {
+						returnlist = append(returnlist, fmt.Sprintf("e%v", rel.ToID))
+					} else {
+						id := JSONQuery.FindG(rel.ToID).ByID
+						idstr := fmt.Sprint(id[0])
+						if len(id) > 1 {
+							idstr = ""
+
+							for _, x := range id {
+								idstr += fmt.Sprint(x)
+							}
+							idstr += "L"
+						}
+						returnlist = append(returnlist, fmt.Sprintf("eg%v", idstr)) // TODO: probably wrong here too; check how relations attached to a group-by behave
+					}
+				}
+			} else if part.QType == "entity" {
+				returnlist = append(returnlist, fmt.Sprintf("e%v", part.QID))
+				break
+
+				// Probably ends with a break, since a single entity is always connected via an IN to a groupby? (maybe not in case of ONLY having an entity as the entire query)
+			} else {
+				// Then it is a groupby which must not be returned, thus the returns are done.
+				break
+			}
+		}
+
+		// Format nodes
+		lineStart := ""
+		unwindStatement := "UNWIND ["
+		for _, node := range returnlist {
+			unwindStatement += fmt.Sprintf("%v%v", lineStart, node)
+			lineStart = ","
 		}
+		unwindStatement += "] AS x \n"
+
+		retStatement = unwindStatement + "RETURN DISTINCT x"
+
+		retType = ";nodelink"
 	}
-	return false
+
+	retStatement = retStatement + "\n" + fmt.Sprintf("LIMIT %v", JSONQuery.Limit) + retType
+
+	return &retStatement, nil
 }
 
-/*TrimSuffix trims the final character of a string */
-func TrimSuffix(s, suffix string) string {
-	if strings.HasSuffix(s, suffix) {
-		s = s[:len(s)-len(suffix)]
+// formQuery uses the hierarchy to create cypher for each part of the query in the right order
+func formQuery(JSONQuery *entity.IncomingQueryJSON, hierarchy entity.Query) (*string, error) {
+
+	// Traverse through the hierarchy and for every entry create a part like:
+	// Match p0 = (l:Lorem)-[:Ipsum*1..1]-(d:Dolor)
+	// Constraints on l and d
+	// Unwind relationships(p0) as r0
+	// Constraints on r0
+	// With *
+
+	totalQuery := ""
+
+	for _, entry := range hierarchy {
+		var cypher *string
+		var err error
+
+		switch entry.QType {
+		case "relation":
+			cypher, err = createRelationCypher(JSONQuery, entry)
+			if err != nil {
+				return nil, err
+			}
+			break
+		case "groupBy":
+			cypher, err = createGroupByCypher(JSONQuery, entry)
+			if err != nil {
+				return nil, err
+			}
+
+			break
+		case "entity":
+			// This would be in case of an IN or if there was only 1 entity in the query builder
+			cypher, err = createInCypher(JSONQuery, entry)
+			if err != nil {
+				return nil, err
+			}
+
+			break
+		default:
+			// Should never be reached
+			return nil, errors.New("Invalid query pill type detected")
+		}
+
+		totalQuery += *cypher
 	}
-	return s
+
+	return &totalQuery, nil
 }
 
-/*
-createQuery generates a query based on the json file provided
-	JSONQuery: *entity.IncomingQueryJSON, jsonQuery is a parsedJSON struct holding all the data needed to form a query
-	Return: *string, a string containing the corresponding AQL query and an error
-*/
-func createQuery(JSONQuery *entitycypher.IncomingQueryJSON) *string {
-	// Note: Case #4, where there is an edge only query (without any entity), is not supported by frontend
-
-	// If a modifier is used, disable the limit
-	if len(JSONQuery.Modifiers) > 0 {
-		JSONQuery.Limit = -1
-	}
-
-	var (
-		relationsToReturn []string
-		nodesToReturn     []string
-		nodeUnion         string
-		relationUnion     string
-		queryList         [][][]int
-		entityList        []int
-		ret               string
-	)
-
-	for i, relation := range JSONQuery.Relations {
-		var contains bool
-		contains = false
-		for j := range queryList {
-			if sliceContains(queryList[j][0], relation.EntityFrom) || sliceContains(queryList[j][0], relation.EntityTo) {
-				if !sliceContains(queryList[j][0], relation.EntityFrom) {
-					queryList[j][0] = append(queryList[j][0], relation.EntityFrom)
-					entityList = append(entityList, relation.EntityFrom)
-				}
-				if !sliceContains(queryList[j][0], relation.EntityTo) {
-					queryList[j][0] = append(queryList[j][0], relation.EntityTo)
-					entityList = append(entityList, relation.EntityTo)
+// createInCypher creates the cypher for an entity with an IN-clause
+func createInCypher(JSONQuery *entity.IncomingQueryJSON, part entity.QueryPart) (*string, error) {
+	ent := JSONQuery.FindE(part.QID)
+	eName := fmt.Sprintf("e%v", ent.ID)
+
+	match := fmt.Sprintf("MATCH (%v:%v)\n", eName, ent.Name)
+	eConstraints := ""
+	newLineStatement := "\tWHERE"
+
+	// Find the IN
+	for _, con := range ent.Constraints {
+		if con.InID != -1 {
+			gby := JSONQuery.FindG(con.InID) // Because this could only be on a groupby
+			byID := fmt.Sprint(gby.ByID[0])
+
+			if len(gby.ByID) > 1 {
+				byID = ""
+
+				for _, x := range gby.ByID {
+					byID += fmt.Sprint(x)
 				}
-				queryList[j][1] = append(queryList[j][1], i)
-				contains = true
+				byID += "L"
 			}
+
+			byName := fmt.Sprintf("%v%v", string(gby.ByType[0]), byID)
+			eConstraints += fmt.Sprintf("%v %v.%v IN %v_%v\n", newLineStatement, eName, con.Attribute, byName, gby.ByAttribute)
+			newLineStatement = "\tAND"
 		}
-		if !contains {
-			queryList = append(queryList, [][]int{{relation.EntityFrom, relation.EntityTo}, {i}})
+	}
+
+	// Attach other constraints (if any)
+	for _, v := range ent.Constraints {
+		if v.InID != -1 {
+			continue
 		}
+		eConstraints += fmt.Sprintf("%v %v \n", newLineStatement, *createConstraintBoolExpression(&v, eName, false))
+	}
+
+	with := "WITH *\n"
+	retStatement := match + eConstraints + with
+	return &retStatement, nil
+
+}
+
+// createRelationCypher takes the json and a query part, finds the necessary entities and converts it into cypher
+func createRelationCypher(JSONQuery *entity.IncomingQueryJSON, part entity.QueryPart) (*string, error) {
+
+	rel := JSONQuery.FindR(part.QID)
+
+	if (rel.FromID == -1) && (rel.ToID == -1) {
+		// Now there is only a relation, which we do not allow
+		return nil, errors.New("Relation only queries are not supported")
 	}
 
-	for i := range queryList {
-		//reset variables for the next query
-		nodeUnion = ""
-		relationUnion = ""
-		relationsToReturn = []string{}
-		for j, relationID := range queryList[i][1] {
-			relationName := fmt.Sprintf("r%v", j)
-			relation := JSONQuery.Relations[relationID]
-			pathName := fmt.Sprintf("p%v", j)
-			relationsToReturn = append(relationsToReturn, pathName)
-			if relation.EntityFrom >= 0 {
-				// if there is a from-node
-				// create the let for this node
-				fromName := fmt.Sprintf("n%v", relation.EntityFrom)
+	var match, eConstraints, unwind, rConstraints string
 
-				ret += *createNodeMatch(&JSONQuery.Entities[relation.EntityFrom], &fromName)
+	// There is some duplicate code here below that could be omitted with extra if-statements, but that is something to do
+	// for a later time. Since this way it is easier to understand the flow of the code
+	// Removing the duplicate code here, probably more than triples the if-statements and is a puzzle for a later time (TODO)
+	if rel.ToID == -1 {
+		// There is no To, only a From
+		var eName string
+		var entFrom *entity.QueryEntityStruct
+		var entFromType string
 
-				ret += *createRelationMatch(&relation, relationName, pathName, &JSONQuery.Entities, JSONQuery.Limit, true)
-			} else if relation.EntityTo >= 0 {
-				// if there is only a to-node
-				toName := fmt.Sprintf("n%v", relation.EntityTo)
+		if rel.FromType == "entity" {
 
-				ret += *createNodeMatch(&JSONQuery.Entities[relation.EntityTo], &toName)
+			entFrom = JSONQuery.FindE(rel.FromID)
+			entFromType = entFrom.Name
+			eName = fmt.Sprintf("e%v", entFrom.ID)
+
+		} else if rel.FromType == "groupBy" {
+			gb := JSONQuery.FindG(rel.FromID)
+			if gb.ByType == "relation" {
+				return nil, errors.New("Invalid query: cannot connect a relation to a group by that groups by another relation")
+			}
 
-				ret += *createRelationMatch(&relation, relationName, pathName, &JSONQuery.Entities, JSONQuery.Limit, false)
-				// Add this relation to the list
+			if len(gb.ByID) == 1 {
+				// This is a sort of dummy variable, since it is not directly visible in the query, but it is definitely needed
+				eName = fmt.Sprintf("eg%v", gb.ByID[0])
 			} else {
-				fmt.Println("Relation-only queries are currently not supported")
-				continue
+				byID := ""
+
+				for _, x := range gb.ByID {
+					byID += fmt.Sprint(x)
+				}
+				byID += "L"
+				eName = fmt.Sprintf("eg%v", byID)
 			}
+			entFromType = JSONQuery.FindE(gb.ByID[0]).Name
+
+		} else {
+			// Should never be reachable
+			return nil, errors.New("Invalid connection type to relation")
 		}
 
-		// Create UNION statements that create unique lists of all the nodes and relations
+		match = fmt.Sprintf("MATCH p%v = (%v:%v)-[:%v*%v..%v]-()\n", part.PartID, eName, entFromType, rel.Name, rel.Depth.Min, rel.Depth.Max)
 
-		// Thus removing all duplicates
-		nodeUnion = "RETURN "
+		eConstraints = ""
+		newLineStatement := "\tWHERE"
 
-		for _, entityID := range queryList[i][0] {
-			if sliceContains(JSONQuery.Return.Entities, entityID) {
-				nodeUnion += fmt.Sprintf("n%v,", entityID)
+		// The nil-check is there in case it is connected to a groupby
+		if entFrom != nil {
+			for _, v := range entFrom.Constraints {
+				eConstraints += fmt.Sprintf("%v %v \n", newLineStatement, *createConstraintBoolExpression(&v, eName, false))
+				newLineStatement = "\tAND"
 			}
 		}
 
-		for _, relation := range relationsToReturn {
-			relationUnion += fmt.Sprintf("%v,", relation)
+		// Add an IN clause, connecting the relation to the output of the groupby
+		if rel.FromType == "groupBy" {
+			gb := JSONQuery.FindG(rel.FromID)
+			inConstraint := fmt.Sprintf("%v %v.%v IN %v_%v \n", newLineStatement, eName, gb.ByAttribute, gb.AppliedModifier, gb.ByAttribute)
+			eConstraints += inConstraint
 		}
 
-		relationUnion = TrimSuffix(relationUnion, ",")
-		// hier zat een newline
-		ret += nodeUnion + relationUnion + "; "
-	}
+	} else if rel.FromID == -1 {
+		var eName string
+		var entToType string
+		var entTo *entity.QueryEntityStruct
 
-	nodeSet := make(map[int]bool)
-	for _, relation := range JSONQuery.Relations {
-		nodeSet[relation.EntityFrom] = true
-		nodeSet[relation.EntityTo] = true
-	}
+		if rel.ToType == "entity" {
+			entTo = JSONQuery.FindE(rel.ToID)
+			entToType = entTo.Name
+			eName = fmt.Sprintf("e%v", entTo.ID)
+
+		} else if rel.ToType == "groupBy" {
+			gb := JSONQuery.FindG(rel.ToID)
+			if gb.ByType == "relation" {
+				return nil, errors.New("Invalid query: cannot connect a relation to a group by that groups by another relation")
+			}
+
+			if len(gb.ByID) == 1 {
+				// This is a sort of dummy variable, since it is not directly visible in the query, but it is definitely needed
+				eName = fmt.Sprintf("eg%v", gb.ByID[0])
+			} else {
+				byID := ""
 
-	// Check if the entities to return are already returned
-	for _, entityIndex := range JSONQuery.Return.Entities {
-		if !nodeSet[entityIndex] {
-			// If not, return this node
-			name := fmt.Sprintf("n%v", entityIndex)
-			ret += *createNodeMatch(&JSONQuery.Entities[entityIndex], &name)
-			// Add this node to the list
-			nodesToReturn = append(nodesToReturn, name)
-			ret += fmt.Sprintf("RETURN %v", name)
+				for _, x := range gb.ByID {
+					byID += fmt.Sprint(x)
+				}
+				byID += "L"
+				eName = fmt.Sprintf("eg%v", byID)
+			}
+			entToType = JSONQuery.FindE(gb.ByID[0]).Name
+
+		} else {
+			// Should never be reachable
+			return nil, errors.New("Invalid connection type to relation")
 		}
-	}
 
-	ret = TrimSuffix(ret, " ")
-	return &ret
-}
+		match = fmt.Sprintf("MATCH p%v = ()-[:%v*%v..%v]-(%v:%v)\n", part.PartID, rel.Name, rel.Depth.Min, rel.Depth.Max, eName, entToType)
 
-/*
-createNodeLet generates a 'LET' statement for a node related query
-	node: *entity.QueryEntityStruct, node is an entityStruct containing the information of a single node,
-	name: *string, is the autogenerated name of the node consisting of "n" + the index of the node
-	Return: *string, a string containing a single LET-statement in AQL
-*/
-func createNodeMatch(node *entitycypher.QueryEntityStruct, name *string) *string {
-	// hier zat een newline
-	header := fmt.Sprintf("MATCH (%v:%v) ", *name, node.Type)
-	constraints := *createConstraintStatements(&node.Constraints, *name)
-	ret := header + constraints
-	return &ret
-}
+		eConstraints = ""
+		newLineStatement := "\tWHERE"
 
-/*
-createRelationLetWithFromEntity generates a 'LET' statement for relations with an 'EntityFrom' property and optionally an 'EntitiyTo' property
-	relation: *entity.QueryRelationStruct, relation is a relation struct containing the information of a single relation,
-	relationName: string, is the name of the relation, is the autogenerated name of the node consisting of "r" + the index of the relation,
-	pathName: string, is the path of the name,
-	entities: *[]entity.QueryEntityStruct, is a list of entityStructs that are needed to form the relation LET-statement
-	limit: int, the limit for the number of nodes to return
-	outbound: bool, checks if the relation is inbound or outbound
-	Return: *string, a string containing a single LET-statement in AQL
-*/
-func createRelationMatch(relation *entitycypher.QueryRelationStruct, relationName string, pathName string, entities *[]entitycypher.QueryEntityStruct, limit int, outbound bool) *string {
-	relationReturn := ""
-	var relationBounds int
-	if outbound {
-		relationReturn = fmt.Sprintf("MATCH %v = (n%v)-[%v:%v*%v..%v]->(", pathName, relation.EntityFrom, relationName, relation.Type, relation.Depth.Min, relation.Depth.Max)
-		relationBounds = relation.EntityTo
+		if entTo != nil {
+			for _, v := range entTo.Constraints {
+				eConstraints += fmt.Sprintf("%v %v \n", newLineStatement, *createConstraintBoolExpression(&v, eName, false))
+				newLineStatement = "\tAND"
+			}
+		}
+
+		// Add an IN clause, connecting the relation to the output of the groupby
+		if rel.ToType == "groupBy" {
+			gb := JSONQuery.FindG(rel.ToID)
+			inConstraint := fmt.Sprintf("%v %v.%v IN %v_%v \n", newLineStatement, eName, gb.ByAttribute, gb.AppliedModifier, gb.ByAttribute)
+			eConstraints += inConstraint
+		}
 
 	} else {
-		relationReturn = fmt.Sprintf("MATCH %v = (n%v)-[%v:%v*%v..%v]->(", pathName, relation.EntityTo, relationName, relation.Type, relation.Depth.Min, relation.Depth.Max)
-		relationBounds = relation.EntityFrom
+		var eTName string
+		var entFromType string
+		var eFName string
+		var entToType string
+		var entFrom, entTo *entity.QueryEntityStruct
+
+		// Check of what type the To is
+		if rel.ToType == "entity" {
+			entTo = JSONQuery.FindE(rel.ToID)
+			entToType = entTo.Name
+			eTName = fmt.Sprintf("e%v", entTo.ID)
+
+		} else if rel.ToType == "groupBy" {
+			gb := JSONQuery.FindG(rel.ToID)
+			if gb.ByType == "relation" {
+				return nil, errors.New("Invalid query: cannot connect a relation to a group by that groups by another relation")
+			}
+
+			if len(gb.ByID) == 1 {
+				// This is a sort of dummy variable, since it is not directly visible in the query, but it is definitely needed
+				eTName = fmt.Sprintf("eg%v", gb.ByID[0])
+			} else {
+				byID := ""
+
+				for _, x := range gb.ByID {
+					byID += fmt.Sprint(x)
+				}
+				byID += "L"
+				eTName = fmt.Sprintf("eg%v", byID)
+			}
+			entToType = JSONQuery.FindE(gb.ByID[0]).Name
+
+		} else {
+			// Should never be reachable
+			return nil, errors.New("Invalid connection type to relation")
+		}
+
+		// Check of what type the From is
+		if rel.FromType == "entity" {
+
+			entFrom = JSONQuery.FindE(rel.FromID)
+			entFromType = entFrom.Name
+			eFName = fmt.Sprintf("e%v", entFrom.ID)
+
+		} else if rel.FromType == "groupBy" {
+			gb := JSONQuery.FindG(rel.FromID)
+			if gb.ByType == "relation" {
+				return nil, errors.New("Invalid query: cannot connect a relation to a group by that groups by another relation")
+			}
+
+			if len(gb.ByID) == 1 {
+				// This is a sort of dummy variable, since it is not directly visible in the query, but it is definitely needed
+				eFName = fmt.Sprintf("eg%v", gb.ByID[0])
+			} else {
+				byID := ""
+
+				for _, x := range gb.ByID {
+					byID += fmt.Sprint(x)
+				}
+				byID += "L"
+				eFName = fmt.Sprintf("eg%v", byID)
+			}
+
+			entFromType = JSONQuery.FindE(gb.ByID[0]).Name
+
+		} else {
+			// Should never be reachable
+			return nil, errors.New("Invalid connection type to relation")
+		}
+
+		match = fmt.Sprintf("MATCH p%v = (%v:%v)-[:%v*%v..%v]-(%v:%v)\n", part.PartID, eFName, entFromType, rel.Name, rel.Depth.Min, rel.Depth.Max, eTName, entToType)
+
+		eConstraints = ""
+		newLineStatement := "\tWHERE"
+		if entFrom != nil {
+			for _, v := range entFrom.Constraints {
+				eConstraints += fmt.Sprintf("%v %v \n", newLineStatement, *createConstraintBoolExpression(&v, eFName, false))
+				newLineStatement = "\tAND"
+			}
+		}
+		if entTo != nil {
+			for _, v := range entTo.Constraints {
+				eConstraints += fmt.Sprintf("%v %v \n", newLineStatement, *createConstraintBoolExpression(&v, eTName, false))
+				newLineStatement = "\tAND"
+			}
+		}
+
+		// Add an IN clause, connecting the relation to the output of the groupby
+		if rel.ToType == "groupBy" {
+			gb := JSONQuery.FindG(rel.ToID)
+			inConstraint := fmt.Sprintf("%v %v.%v IN %v_%v \n", newLineStatement, eTName, gb.ByAttribute, strings.Replace(eFName, "g", "", 1), gb.ByAttribute)
+			eConstraints += inConstraint
+			newLineStatement = "\tAND"
+		}
+
+		if rel.FromType == "groupBy" {
+			gb := JSONQuery.FindG(rel.FromID)
+			inConstraint := fmt.Sprintf("%v %v.%v IN %v_%v \n", newLineStatement, eFName, gb.ByAttribute, strings.Replace(eFName, "g", "", 1), gb.ByAttribute)
+			eConstraints += inConstraint
+		}
 	}
 
-	if relationBounds != -1 {
-		relationReturn += fmt.Sprintf("n%v", relationBounds)
+	rName := fmt.Sprintf("r%v", part.QID)
+	unwind = fmt.Sprintf("UNWIND relationships(p%v) as %v \nWITH *\n", part.PartID, rName)
+
+	rConstraints = ""
+	newLineStatement := "\tWHERE"
+	for _, v := range rel.Constraints {
+		rConstraints += fmt.Sprintf("%v %v \n", newLineStatement, *createConstraintBoolExpression(&v, rName, false))
+		newLineStatement = "\tAND"
+	}
+
+	retString := match + eConstraints + unwind + rConstraints
+	return &retString, nil
+
+}
+
+// createGroupByCypher takes the json and a query part, finds the group by and converts it into cypher
+func createGroupByCypher(JSONQuery *entity.IncomingQueryJSON, part entity.QueryPart) (*string, error) {
+	groupBy := JSONQuery.FindG(part.QID)
+	var group, by string
+
+	gName := fmt.Sprintf("%v_%v", groupBy.AppliedModifier, groupBy.GroupAttribute)
+	byID := fmt.Sprint(groupBy.ByID[0])
+	groupID := fmt.Sprint(groupBy.GroupID[0])
+	unwindBy := ""
+	unwindGroup := ""
+	gType := string(groupBy.GroupType[0])
+	bType := string(groupBy.ByType[0])
+
+	// Due to this concatenation, there is a chance that an ID of 1, an ID of 2 and an ID of 12 could create a conflict.
+	// Thus an "L" is appended to the end, indicating that it is a list
+	if len(groupBy.ByID) > 1 {
+		byID = ""
+		unwindBy = "UNWIND ["
+		linestart := ""
+
+		for _, x := range groupBy.ByID {
+			byID += fmt.Sprint(x)
+			unwindBy += linestart + bType + fmt.Sprint(x)
+			linestart = ","
+		}
+		byID += "L"
+		unwindBy += "] AS " + bType + byID + "\n"
+	}
+
+	if len(groupBy.GroupID) > 1 {
+		groupID = ""
+		unwindGroup = "UNWIND ["
+		linestart := ""
+
+		for _, x := range groupBy.GroupID {
+			groupID += fmt.Sprint(x)
+			unwindGroup += linestart + gType + fmt.Sprint(x)
+			linestart = ","
+		}
+
+		groupID += "L"
+		unwindGroup += "] AS " + gType + groupID + "\n"
 	}
-	relationReturn += ")"
 
-	constraintReturn := *createConstraintStatements(&relation.Constraints, relationName)
-	// hier zat een newline
-	ret := relationReturn + " " + constraintReturn
+	group = fmt.Sprintf("%v%v.%v", gType, groupID, groupBy.GroupAttribute)
+	by = fmt.Sprintf("%v%v.%v", bType, byID, groupBy.ByAttribute)
+	byName := strings.Replace(by, ".", "_", 1)
 
-	return &ret
+	// If you do not use a *, then everything needs to be aliased
+	with := fmt.Sprintf("WITH %v AS %v, %v(%v) AS %v \n", by, byName, groupBy.AppliedModifier, group, gName)
+
+	gConstraints := ""
+	newLineStatement := "\tWHERE"
+	for _, v := range groupBy.Constraints {
+		gConstraints += fmt.Sprintf("%v %v \n", newLineStatement, *createConstraintBoolExpression(&v, gName, true))
+		newLineStatement = "\tAND"
+	}
+
+	retString := unwindBy + unwindGroup + with + gConstraints
+	return &retString, nil
 }
+
+// // Approach for group-bys on merged entities
+// CALL {
+// MATCH p0 = (e11:Person)-[:DIRECTED*1..1]-(e12:Movie)
+// UNWIND relationships(p0) as r10
+// Return e11 as e1, e12 as e2
+// UNION
+// MATCH p1 = (e13:Person)-[:ACTED_IN]-(e14:Movie)
+// UNWIND relationships(p1) as r11
+// Return e13 as e1, e14 as e2
+// }
+// WITH e1.bornIn AS e1_bornIn, AVG(e2.budget) AS AVG_budget
+// RETURN e1_bornIn, AVG_budget
+// LIMIT 5000
diff --git a/cypher/convertQueryBenchmark_test.go b/cypher/convertQueryBenchmark_test.go
index 5e14480..d1a5397 100644
--- a/cypher/convertQueryBenchmark_test.go
+++ b/cypher/convertQueryBenchmark_test.go
@@ -4,7 +4,7 @@ import (
 	"encoding/json"
 	"testing"
 
-	"git.science.uu.nl/graphpolaris/query-conversion/entity/entitycypher"
+	"git.science.uu.nl/graphpolaris/query-conversion/entity"
 )
 
 func BenchmarkConvertEmptyQuery(b *testing.B) {
@@ -23,7 +23,7 @@ func BenchmarkConvertEmptyQuery(b *testing.B) {
 	}`)
 
 	// Unmarshall the incoming message into an IncomingJSONQuery object
-	var JSONQuery entitycypher.IncomingQueryJSON
+	var JSONQuery entity.IncomingQueryJSON
 	json.Unmarshal(query, &JSONQuery)
 
 	b.ResetTimer()
@@ -63,7 +63,7 @@ func BenchmarkConvertOneAttributeQuery(b *testing.B) {
 	}`)
 
 	// Unmarshall the incoming message into an IncomingJSONQuery object
-	var JSONQuery entitycypher.IncomingQueryJSON
+	var JSONQuery entity.IncomingQueryJSON
 	json.Unmarshal(query, &JSONQuery)
 
 	b.ResetTimer()
@@ -158,7 +158,7 @@ func BenchmarkConvertTwoRelationQuery(b *testing.B) {
 	}`)
 
 	// Unmarshall the incoming message into an IncomingJSONQuery object
-	var JSONQuery entitycypher.IncomingQueryJSON
+	var JSONQuery entity.IncomingQueryJSON
 	json.Unmarshal(query, &JSONQuery)
 
 	b.ResetTimer()
diff --git a/cypher/convertQuery_test.go b/cypher/convertQuery_test.go
index fffb0a0..39e0303 100644
--- a/cypher/convertQuery_test.go
+++ b/cypher/convertQuery_test.go
@@ -3,892 +3,1596 @@ package cypher
 import (
 	"encoding/json"
 	"errors"
+	"fmt"
 	"strings"
 	"testing"
 
-	"git.science.uu.nl/graphpolaris/query-conversion/entity/entitycypher"
+	"git.science.uu.nl/graphpolaris/query-conversion/entity"
 	"github.com/stretchr/testify/assert"
 )
 
-func TestEmptyQueryConversion(t *testing.T) {
-	// Setup for test
-	// Create query conversion service
-	service := NewService()
+// All these tests test the entire flow
 
+func TestGroupBy(t *testing.T) {
 	query := []byte(`{
+		"databaseName": "Movies3",
 		"return": {
-			"entities": [],
-			"relations": []
+			"entities": [
+				0,
+				1,
+				2
+			],
+			"relations": [
+				0,
+				1
+			],
+			"groupBys": [
+				0
+			]
 		},
-		"entities": [],
-		"relations": [],
+		"entities": [
+			{
+				"id": 0,
+				"name": "Person",
+				"constraints": [
+				{
+					"attribute": "name",
+					"value": "Raymond Campbell",
+					"dataType": "string",
+					"matchType": "NEQ",
+					"inID": -1,
+					"inType": ""
+				}
+				]
+			},
+			{
+				"id": 1,
+				"name": "Movie",
+				"constraints": []
+			},
+			{
+				"id": 2,
+				"name": "Genre",
+				"constraints": []
+			}
+		],
+		"relations": [
+			{
+				"id": 0,
+				"name": "DIRECTED",
+				"depth": {
+					"min": 1,
+					"max": 1
+				},
+				"fromType": "entity",
+				"fromID": 0,
+				"toType": "entity",
+				"toID": 1,
+				"constraints": []
+			},
+			{
+				"id": 1,
+				"name": "IN_GENRE",
+				"depth": {
+					"min": 1,
+					"max": 1
+				},
+				"fromType": "groupBy",
+				"fromID": 0,
+				"toType": "entity",
+				"toID": 2,
+				"constraints": []
+			}
+		],
+		"groupBys": [
+			{
+				"id": 0,
+				"groupType": "entity",
+				"groupID": [0],
+				"groupAttribute": "bornIn",
+				"byType": "entity",
+				"byID": [1],
+				"byAttribute": "imdbId",
+				"appliedModifier": "AVG",
+				"relationID": 0,
+				"constraints": []
+			}
+		],
+		"machineLearning": [],
 		"limit": 5000
 	}`)
 
-	// Unmarshall the incoming message into an IncomingJSONQuery object
-	var JSONQuery entitycypher.IncomingQueryJSON
+	var JSONQuery entity.IncomingQueryJSON
 	json.Unmarshal(query, &JSONQuery)
 
-	convertedResult, err := service.ConvertQuery(&JSONQuery)
-
-	// Assert that there is no error
-	assert.NoError(t, err)
+	s := NewService()
+	cypher, err := s.ConvertQuery(&JSONQuery)
+	if err != nil {
+		fmt.Println(err)
+	}
+
+	answer := `MATCH p0 = (e0:Person)-[:DIRECTED*1..1]-(e1:Movie)
+	WHERE  e0.name <> "Raymond Campbell" 
+	UNWIND relationships(p0) as r0 
+	WITH *
+	WITH e1.imdbId AS e1_imdbId, AVG(e0.bornIn) AS AVG_bornIn 
+	MATCH p1 = (eg1:Movie)-[:IN_GENRE*1..1]-(e2:Genre)
+	WHERE eg1.imdbId IN e1_imdbId UNWIND relationships(p1) as r1 
+	WITH *
+	UNWIND [r1,eg1,e2] AS x 
+	RETURN DISTINCT x
+	LIMIT 5000;nodelink`
+
+	trimmedCypher := strings.Replace(*cypher, "\n", "", -1)
+	trimmedCypher = strings.Replace(trimmedCypher, "\t", "", -1)
+
+	trimmedAnswer := strings.Replace(answer, "\n", "", -1)
+	trimmedAnswer = strings.Replace(trimmedAnswer, "\t", "", -1)
+
+	fmt.Println(*cypher)
+	assert.Equal(t, trimmedAnswer, trimmedCypher)
 
-	// Assert that the result and the expected result are the same
-	correctConvertedResult := ``
-	assert.Equal(t, correctConvertedResult, *convertedResult)
 }
-
-func TestEntityOneAttributeQuery(t *testing.T) {
-	// Setup for test
-	// Create query conversion service
-	service := NewService()
-
+func TestSmallChain(t *testing.T) {
 	query := []byte(`{
+		"databaseName": "TweedeKamer",
 		"return": {
 			"entities": [
-				0
+				0,
+				1,
+				2
 			],
-			"relations": []
+			"relations": [
+				0,
+				1
+			]
 		},
 		"entities": [
 			{
-				"type": "airports",
+				"name": "parliament",
+				"ID": 0,
 				"constraints": [
 					{
-						"attribute": "state",
-						"value": "HI",
+						"attribute": "name",
+						"value": "Geert",
 						"dataType": "string",
-						"matchType": "exact"
+						"matchType": "contains",
+						"inID": -1,
+						"inType": ""
 					}
 				]
+			},
+			{
+				"name": "parties",
+				"ID": 1,
+				"constraints": []
+			},
+			{
+				"name": "resolutions",
+				"ID": 2,
+				"constraints": []
 			}
 		],
-		"relations": [],
+		"relations": [
+			{
+				"ID": 0,
+				"name": "member_of",
+				"depth": {
+					"min": 1,
+					"max": 1
+				},
+				"fromType": "entity",
+				"fromID": 0,
+				"toType": "entity",
+				"toID": 1,
+				"constraints": []
+			},
+			{
+				"ID": 1,
+				"name": "submits",
+				"depth": {
+					"min": 1,
+					"max": 1
+				},
+				"fromType": "entity",
+				"fromID": 0,
+				"toType": "entity",
+				"toID": 2,
+				"constraints": []
+			}
+		],
+		"groupBys": [],
+		"machineLearning": [],
 		"limit": 5000
-	}`)
+	}
+	`)
 
-	// Unmarshall the incoming message into an IncomingJSONQuery object
-	var JSONQuery entitycypher.IncomingQueryJSON
+	var JSONQuery entity.IncomingQueryJSON
 	json.Unmarshal(query, &JSONQuery)
 
-	convertedResult, err := service.ConvertQuery(&JSONQuery)
+	s := NewService()
+	cypher, err := s.ConvertQuery(&JSONQuery)
+	if err != nil {
+		fmt.Println(err)
+	}
 
-	// Assert that there is no error
-	assert.NoError(t, err)
+	answer := `MATCH p0 = (e0:parliament)-[:member_of*1..1]-(e1:parties)
+	WHERE  e0.name CONTAINS "%Geert%" 
+UNWIND relationships(p0) as r0 
+WITH *
+MATCH p1 = (e0:parliament)-[:submits*1..1]-(e2:resolutions)
+	WHERE  e0.name CONTAINS "%Geert%" 
+UNWIND relationships(p1) as r1 
+WITH *
+UNWIND [r1,e0,e2,r0,e0,e1] AS x 
+RETURN DISTINCT x
+LIMIT 5000;nodelink`
 
-	// Assert that the result and the expected result are the same
-	correctConvertedResult := `MATCH (n0:airports) WHERE n0.state = "HI" RETURN n0`
-	cleanedResult := strings.ReplaceAll(*convertedResult, "\n", "")
-	cleanedResult = strings.ReplaceAll(cleanedResult, "\t", "")
-	assert.Equal(t, correctConvertedResult, cleanedResult)
-}
+	fmt.Println(*cypher)
+
+	trimmedCypher := strings.Replace(*cypher, "\n", "", -1)
+	trimmedCypher = strings.Replace(trimmedCypher, "\t", "", -1)
 
-func TestRelationWithConstraint(t *testing.T) {
-	// Setup for test
-	// Create query conversion service
-	service := NewService()
+	trimmedAnswer := strings.Replace(answer, "\n", "", -1)
+	trimmedAnswer = strings.Replace(trimmedAnswer, "\t", "", -1)
 
+	fmt.Println(*cypher)
+	assert.Equal(t, trimmedAnswer, trimmedCypher)
+
+}
+
+// This one does not really have dependencies, the order doesn't matter; maybe sort on numbers between equal dependencies?
+func TestLargeQueryChain(t *testing.T) {
 	query := []byte(`{
+		"databaseName": "TweedeKamer",
 		"return": {
 			"entities": [
-				0
+				0,
+				1,
+				2,
+				3,
+				4
 			],
 			"relations": [
-				0
+				0,
+				1,
+				2,
+				3
 			]
 		},
 		"entities": [
 			{
-				"type": "airports",
+				"name": "parliament",
+				"ID": 0,
+				"constraints": [
+					{
+						"attribute": "name",
+						"value": "A",
+						"dataType": "string",
+						"matchType": "contains",
+						"inID": -1,
+						"inType": ""
+					}
+				]
+			},
+			{
+				"name": "parties",
+				"ID": 1,
+				"constraints": [
+					{
+						"attribute": "seats",
+						"value": "10",
+						"dataType": "int",
+						"matchType": "LT",
+						"inID": -1,
+						"inType": ""
+					}
+				]
+			},
+			{
+				"name": "resolutions",
+				"ID": 2,
+				"constraints": [
+					{
+						"attribute": "date",
+						"value": "mei",
+						"dataType": "string",
+						"matchType": "contains",
+						"inID": -1,
+						"inType": ""
+					}
+				]
+			},
+			{
+				"name": "parliament",
+				"ID": 3,
+				"constraints": []
+			},
+			{
+				"name": "parties",
+				"ID": 4,
 				"constraints": [
 					{
-						"attribute": "state",
-						"value": "HI",
+						"attribute": "name",
+						"value": "Volkspartij voor Vrijheid en Democratie",
 						"dataType": "string",
-						"matchType": "exact"
+						"matchType": "==",
+						"inID": -1,
+						"inType": ""
 					}
 				]
 			}
 		],
 		"relations": [
 			{
-				"type": "flights",
+				"ID": 0,
+				"name": "member_of",
 				"depth": {
 					"min": 1,
 					"max": 1
 				},
-				"entityFrom": 0,
-				"entityTo": -1,
-				"constraints": [
-					{
-						"attribute": "Day",
-						"value": "15",
-						"dataType": "int",
-						"matchType": "EQ"
-					}
-				]
+				"fromType": "entity",
+				"fromID": 0,
+				"toType": "entity",
+				"toID": 1,
+				"constraints": []
+			},
+			{
+				"ID": 1,
+				"name": "submits",
+				"depth": {
+					"min": 1,
+					"max": 1
+				},
+				"fromType": "entity",
+				"fromID": 1,
+				"toType": "entity",
+				"toID": 2,
+				"constraints": []
+			},
+			{
+				"ID": 2,
+				"name": "submits",
+				"depth": {
+					"min": 1,
+					"max": 1
+				},
+				"fromType": "entity",
+				"fromID": 2,
+				"toType": "entity",
+				"toID": 3,
+				"constraints": []
+			},
+			{
+				"ID": 3,
+				"name": "member_of",
+				"depth": {
+					"min": 1,
+					"max": 1
+				},
+				"fromType": "entity",
+				"fromID": 3,
+				"toType": "entity",
+				"toID": 4,
+				"constraints": []
 			}
 		],
+		"groupBys": [],
+		"machineLearning": [],
 		"limit": 5000
-	}`)
+	}
+	
+	`)
 
-	// Unmarshall the incoming message into an IncomingJSONQuery object
-	var JSONQuery entitycypher.IncomingQueryJSON
+	var JSONQuery entity.IncomingQueryJSON
 	json.Unmarshal(query, &JSONQuery)
 
-	convertedResult, err := service.ConvertQuery(&JSONQuery)
-
-	// Assert that there is no error
-	assert.NoError(t, err)
+	s := NewService()
+	cypher, err := s.ConvertQuery(&JSONQuery)
+	if err != nil {
+		fmt.Println(err)
+	}
+
+	answer := `MATCH p0 = (e0:parliament)-[:member_of*1..1]-(e1:parties)
+	WHERE  e0.name CONTAINS "%A%" 
+	AND  e1.seats < 10 
+UNWIND relationships(p0) as r0 
+WITH *
+MATCH p1 = (e1:parties)-[:submits*1..1]-(e2:resolutions)
+	WHERE  e1.seats < 10 
+	AND  e2.date CONTAINS "%mei%" 
+UNWIND relationships(p1) as r1 
+WITH *
+MATCH p2 = (e2:resolutions)-[:submits*1..1]-(e3:parliament)
+	WHERE  e2.date CONTAINS "%mei%" 
+UNWIND relationships(p2) as r2 
+WITH *
+MATCH p3 = (e3:parliament)-[:member_of*1..1]-(e4:parties)
+	WHERE  e4.name = "Volkspartij voor Vrijheid en Democratie" 
+UNWIND relationships(p3) as r3 
+WITH *
+UNWIND [r3,e3,e4,r2,e2,e3,r1,e1,e2,r0,e0,e1] AS x 
+RETURN DISTINCT x
+LIMIT 5000;nodelink`
+
+	trimmedCypher := strings.Replace(*cypher, "\n", "", -1)
+	trimmedCypher = strings.Replace(trimmedCypher, "\t", "", -1)
+
+	trimmedAnswer := strings.Replace(answer, "\n", "", -1)
+	trimmedAnswer = strings.Replace(trimmedAnswer, "\t", "", -1)
+
+	fmt.Println(*cypher)
+	assert.Equal(t, trimmedAnswer, trimmedCypher)
 
-	// Assert that the result and the expected result are the same
-	correctConvertedResult := `MATCH (n0:airports) WHERE n0.state = "HI" MATCH p0 = (n0)-[r0:flights*1..1]->() WHERE r0.Day = 15 RETURN n0,p0;`
-	cleanedResult := strings.ReplaceAll(*convertedResult, "\n", "")
-	cleanedResult = strings.ReplaceAll(cleanedResult, "\t", "")
-	assert.Equal(t, correctConvertedResult, cleanedResult)
 }
 
-// func TestModifierCountEntity(t *testing.T) {
-// 	// Setup for test
-// 	// Create query conversion service
-// 	service := NewService()
-
-// 	query := []byte(`{
-// 		"return": {
-// 			"entities": [
-// 				0
-// 			],
-// 			"relations": []
-// 		},
-// 		"entities": [
-// 			{
-// 				"type": "airports",
-// 				"constraints": [
-// 					{
-// 						"attribute": "state",
-// 						"value": "HI",
-// 						"dataType": "string",
-// 						"matchType": "exact"
-// 					}
-// 				]
-// 			}
-// 		],
-// 		"relations": [],
-// 		"limit": 5000,
-// 		"modifiers": [
-// 			{
-// 				"type": "COUNT",
-// 				"selectedType": "entity",
-// 				"id": 0,
-// 				"attributeIndex": -1
-// 			}
-// 		]
-// 	}`)
-
-// 	// Unmarshall the incoming message into an IncomingJSONQuery object
-// 	var JSONQuery entity.IncomingQueryJSON
-// 	json.Unmarshal(query, &JSONQuery)
-
-// 	convertedResult, err := service.ConvertQuery(&JSONQuery)
-
-// 	// Assert that there is no error
-// 	assert.NoError(t, err)
-
-// 	// Assert that the result and the expected result are the same
-// 	correctConvertedResult := `LET n0 = (FOR x IN airports FILTER x.state == "HI" RETURN x)RETURN LENGTH (n0)`
-// 	cleanedResult := strings.ReplaceAll(*convertedResult, "\n", "")
-// 	cleanedResult = strings.ReplaceAll(cleanedResult, "\t", "")
-// 	assert.Equal(t, correctConvertedResult, cleanedResult)
-// }
-
-// func TestModifierCountEntityAttribute(t *testing.T) {
-// 	// Setup for test
-// 	// Create query conversion service
-// 	service := NewService()
-
-// 	query := []byte(`{
-// 		"return": {
-// 			"entities": [
-// 				0
-// 			],
-// 			"relations": []
-// 		},
-// 		"entities": [
-// 			{
-// 				"type": "airports",
-// 				"constraints": [
-// 					{
-// 						"attribute": "state",
-// 						"value": "HI",
-// 						"dataType": "string",
-// 						"matchType": "exact"
-// 					}
-// 				]
-// 			}
-// 		],
-// 		"relations": [],
-// 		"limit": 5000,
-// 		"modifiers": [
-// 			{
-// 				"type": "SUM",
-// 				"selectedType": "entity",
-// 				"id": 0,
-// 				"attributeIndex": 0
-// 			}
-// 		]
-// 	}`)
-
-// 	// Unmarshall the incoming message into an IncomingJSONQuery object
-// 	var JSONQuery entity.IncomingQueryJSON
-// 	json.Unmarshal(query, &JSONQuery)
-
-// 	convertedResult, err := service.ConvertQuery(&JSONQuery)
-
-// 	// Assert that there is no error
-// 	assert.NoError(t, err)
-
-// 	// Assert that the result and the expected result are the same
-// 	correctConvertedResult := `LET n0 = (FOR x IN airports FILTER x.state == "HI" RETURN x)RETURN SUM (n0[*].state)`
-// 	cleanedResult := strings.ReplaceAll(*convertedResult, "\n", "")
-// 	cleanedResult = strings.ReplaceAll(cleanedResult, "\t", "")
-// 	assert.Equal(t, correctConvertedResult, cleanedResult)
-// }
-
-// func TestModifierCountRelation(t *testing.T) {
-// 	// Setup for test
-// 	// Create query conversion service
-// 	service := NewService()
-
-// 	query := []byte(`{
-// 		"return": {
-// 			"entities": [
-// 				0
-// 			],
-// 			"relations": [
-// 				0
-// 			]
-// 		},
-// 		"entities": [
-// 			{
-// 				"type": "airports",
-// 				"constraints": [
-// 					{
-// 						"attribute": "state",
-// 						"value": "HI",
-// 						"dataType": "string",
-// 						"matchType": "exact"
-// 					}
-// 				]
-// 			}
-// 		],
-// 		"relations": [
-// 			{
-// 				"type": "flights",
-// 				"depth": {
-// 					"min": 1,
-// 					"max": 1
-// 				},
-// 				"entityFrom": 0,
-// 				"entityTo": -1,
-// 				"constraints": [
-// 					{
-// 						"attribute": "Day",
-// 						"value": "15",
-// 						"dataType": "int",
-// 						"matchType": "EQ"
-// 					}
-// 				]
-// 			}
-// 		],
-// 		"limit": 5000,
-// 		"modifiers": [
-// 			{
-// 				"type": "COUNT",
-// 				"selectedType": "relation",
-// 				"id": 0,
-// 				"attributeIndex": -1
-// 			}
-// 		]
-// 	}`)
-
-// 	// Unmarshall the incoming message into an IncomingJSONQuery object
-// 	var JSONQuery entity.IncomingQueryJSON
-// 	json.Unmarshal(query, &JSONQuery)
-
-// 	convertedResult, err := service.ConvertQuery(&JSONQuery)
-
-// 	// Assert that there is no error
-// 	assert.NoError(t, err)
-
-// 	// Assert that the result and the expected result are the same
-// 	correctConvertedResult := `LET n0 = (FOR x IN airports FILTER x.state == "HI" RETURN x)LET r0 = (FOR x IN n0 FOR v, e, p IN 1..1 OUTBOUND x flights OPTIONS { uniqueEdges: "path" }FILTER p.edges[*].Day ALL == 15 RETURN DISTINCT p )RETURN LENGTH (unique(r0[*].edges[**]))`
-// 	cleanedResult := strings.ReplaceAll(*convertedResult, "\n", "")
-// 	cleanedResult = strings.ReplaceAll(cleanedResult, "\t", "")
-// 	assert.Equal(t, correctConvertedResult, cleanedResult)
-// }
-// func TestModifierCountRelationAttribute(t *testing.T) {
-// 	// Setup for test
-// 	// Create query conversion service
-// 	service := NewService()
-
-// 	query := []byte(`{
-// 		"return": {
-// 			"entities": [
-// 				0
-// 			],
-// 			"relations": [
-// 				0
-// 			]
-// 		},
-// 		"entities": [
-// 			{
-// 				"type": "airports",
-// 				"constraints": [
-// 					{
-// 						"attribute": "state",
-// 						"value": "HI",
-// 						"dataType": "string",
-// 						"matchType": "exact"
-// 					}
-// 				]
-// 			}
-// 		],
-// 		"relations": [
-// 			{
-// 				"type": "flights",
-// 				"depth": {
-// 					"min": 1,
-// 					"max": 1
-// 				},
-// 				"entityFrom": 0,
-// 				"entityTo": -1,
-// 				"constraints": [
-// 					{
-// 						"attribute": "Day",
-// 						"value": "15",
-// 						"dataType": "int",
-// 						"matchType": "EQ"
-// 					}
-// 				]
-// 			}
-// 		],
-// 		"limit": 5000,
-// 		"modifiers": [
-// 			{
-// 				"type": "AVG",
-// 				"selectedType": "relation",
-// 				"id": 0,
-// 				"attributeIndex": 0
-// 			}
-// 		]
-// 	}`)
-
-// 	// Unmarshall the incoming message into an IncomingJSONQuery object
-// 	var JSONQuery entity.IncomingQueryJSON
-// 	json.Unmarshal(query, &JSONQuery)
-
-// 	convertedResult, err := service.ConvertQuery(&JSONQuery)
-
-// 	// Assert that there is no error
-// 	assert.NoError(t, err)
-
-// 	// Assert that the result and the expected result are the same
-// 	correctConvertedResult := `LET n0 = (FOR x IN airports FILTER x.state == "HI" RETURN x)LET r0 = (FOR x IN n0 FOR v, e, p IN 1..1 OUTBOUND x flights OPTIONS { uniqueEdges: "path" }FILTER p.edges[*].Day ALL == 15 RETURN DISTINCT p )RETURN AVG (r0[*].edges[**].Day)`
-// 	cleanedResult := strings.ReplaceAll(*convertedResult, "\n", "")
-// 	cleanedResult = strings.ReplaceAll(cleanedResult, "\t", "")
-// 	assert.Equal(t, correctConvertedResult, cleanedResult)
-// }
-
-func TestRelationWithInOutConstraint(t *testing.T) {
-	// Setup for test
-	// Create query conversion service
-	service := NewService()
-
+func TestInStatement(t *testing.T) {
 	query := []byte(`{
-		"return": {
-			"entities": [
-				0,
-				1
-			],
-			"relations": [
-				0
-			]
-		},
+		"databaseName": "Movies3",
 		"entities": [
 			{
-				"type": "airports",
-				"constraints": [
-					{
-						"attribute": "city",
-						"value": "San Francisco",
-						"dataType": "string",
-						"matchType": "exact"
-					}
-				]
+				"id": 0,
+				"name": "Person",
+				"constraints": []
+			},
+			{
+				"id": 1,
+				"name": "Movie",
+				"constraints": []
 			},
 			{
-				"type": "airports",
+				"id": 2,
+				"name": "Person",
 				"constraints": [
 					{
-						"attribute": "state",
-						"value": "HI",
+						"attribute": "bornIn",
+						"value": "",
 						"dataType": "string",
-						"matchType": "exact"
+						"matchType": "",
+						"inID": 0,
+						"inType": "groupBy"
 					}
 				]
 			}
 		],
 		"relations": [
 			{
-				"type": "flights",
+				"id": 0,
+				"name": "ACTED_IN",
 				"depth": {
 					"min": 1,
-					"max": 3
+					"max": 1
 				},
-				"entityFrom": 1,
-				"entityTo": 0,
+				"fromType": "entity",
+				"fromID": 0,
+				"toType": "entity",
+				"toID": 1,
+				"constraints": []
+			}
+		],
+		"groupBys": [
+			{
+				"id": 0,
+				"groupType": "entity",
+				"groupID": [1],
+				"groupAttribute": "imdbRating",
+				"byType": "entity",
+				"byID": [0],
+				"byAttribute": "bornIn",
+				"appliedModifier": "AVG",
+				"relationID": 0,
 				"constraints": [
 					{
-						"attribute": "Day",
-						"value": "15",
+						"attribute": "imdbRating",
+						"value": "7.5",
 						"dataType": "int",
-						"matchType": "EQ"
+						"matchType": "GT",
+						"inID": -1,
+						"inType": ""
 					}
 				]
 			}
 		],
+		"machineLearning": [],
 		"limit": 5000
-	}`)
-
-	// Unmarshall the incoming message into an IncomingJSONQuery object
-	var JSONQuery entitycypher.IncomingQueryJSON
+	}
+	`)
+
+	answer := `MATCH p0 = (e0:Person)-[:ACTED_IN*1..1]-(e1:Movie)
+	UNWIND relationships(p0) as r0 
+	WITH *
+	WITH e0.bornIn AS e0_bornIn, AVG(e1.imdbRating) AS AVG_imdbRating 
+		WHERE  AVG_imdbRating > 7.5 
+	MATCH (e2:Person)
+		WHERE e2.bornIn IN e0_bornIn
+	WITH *
+	UNWIND [e2] AS x 
+	RETURN DISTINCT x
+	LIMIT 5000;nodelink`
+
+	var JSONQuery entity.IncomingQueryJSON
 	json.Unmarshal(query, &JSONQuery)
 
-	convertedResult, err := service.ConvertQuery(&JSONQuery)
+	s := NewService()
+	cypher, err := s.ConvertQuery(&JSONQuery)
+	if err != nil {
+		fmt.Println(err)
+	}
 
-	// Assert that there is no error
-	assert.NoError(t, err)
+	trimmedCypher := strings.Replace(*cypher, "\n", "", -1)
+	trimmedCypher = strings.Replace(trimmedCypher, "\t", "", -1)
 
-	// Assert that the result and the expected result are the same
-	correctConvertedResult := `MATCH (n1:airports) WHERE n1.state = "HI" MATCH p0 = (n1)-[r0:flights*1..3]->(n0) WHERE r0.Day = 15 RETURN n1,n0,p0;`
-	cleanedResult := strings.ReplaceAll(*convertedResult, "\n", "")
-	cleanedResult = strings.ReplaceAll(cleanedResult, "\t", "")
-	assert.Equal(t, correctConvertedResult, cleanedResult)
-}
+	trimmedAnswer := strings.Replace(answer, "\n", "", -1)
+	trimmedAnswer = strings.Replace(trimmedAnswer, "\t", "", -1)
 
-func TestTwoRelations(t *testing.T) {
-	// Setup for test
-	// Create query conversion service
-	service := NewService()
+	fmt.Println(*cypher)
+	assert.Equal(t, trimmedAnswer, trimmedCypher)
 
+}
+func TestMultipleByStatementDisconnected(t *testing.T) {
 	query := []byte(`{
-		"return": {
-			"entities": [
-				0,
-				1,
-				2
-			],
-			"relations": [
-				0,
-				1
-			]
-		},
+		"databaseName": "Movies3",
 		"entities": [
 			{
-				"type": "airports",
+				"id": 0,
+				"name": "Person",
+				"constraints": []
+			},
+			{
+				"id": 1,
+				"name": "Movie",
+				"constraints": []
+			},
+			{
+				"id": 2,
+				"name": "Person",
+				"constraints": []
+			},
+			{
+				"id": 3,
+				"name": "Movie",
+				"constraints": []
+			}
+		],
+		"relations": [
+			{
+				"id": 0,
+				"name": "ACTED_IN",
+				"depth": {
+					"min": 1,
+					"max": 1
+				},
+				"fromType": "entity",
+				"fromID": 0,
+				"toType": "entity",
+				"toID": 1,
+				"constraints": []
+			},
+			{
+				"id": 2,
+				"name": "ACTED_IN",
+				"depth": {
+					"min": 1,
+					"max": 1
+				},
+				"fromType": "entity",
+				"fromID": 2,
+				"toType": "entity",
+				"toID": 3,
+				"constraints": []
+			}
+		],
+		"groupBys": [
+			{
+				"id": 0,
+				"groupType": "entity",
+				"groupID": [1,3],
+				"groupAttribute": "imdbRating",
+				"byType": "entity",
+				"byID": [0,2],
+				"byAttribute": "bornIn",
+				"appliedModifier": "AVG",
+				"relationID": 0,
 				"constraints": [
 					{
-						"attribute": "city",
-						"value": "New York",
-						"dataType": "string",
-						"matchType": "exact"
+						"attribute": "imdbRating",
+						"value": "7.5",
+						"dataType": "int",
+						"matchType": "GT",
+						"inID": -1,
+						"inType": ""
 					}
 				]
+			}
+		],
+		"machineLearning": [],
+		"limit": 5000
+	}
+	`)
+
+	answer := `MATCH p0 = (e0:Person)-[:ACTED_IN*1..1]-(e1:Movie)
+	UNWIND relationships(p0) as r0 
+	WITH *
+	MATCH p1 = (e2:Person)-[:ACTED_IN*1..1]-(e3:Movie)
+	UNWIND relationships(p1) as r2 
+	WITH *
+	UNWIND [e0,e2] AS e02L
+	UNWIND [e1,e3] AS e13L
+	WITH e02L.bornIn AS e02L_bornIn, AVG(e13L.imdbRating) AS AVG_imdbRating 
+		WHERE  AVG_imdbRating > 7.5 
+	RETURN e02L_bornIn, AVG_imdbRating
+	LIMIT 5000;table`
+
+	var JSONQuery entity.IncomingQueryJSON
+	json.Unmarshal(query, &JSONQuery)
+
+	s := NewService()
+	cypher, err := s.ConvertQuery(&JSONQuery)
+	if err != nil {
+		fmt.Println(err)
+	}
+
+	trimmedCypher := strings.Replace(*cypher, "\n", "", -1)
+	trimmedCypher = strings.Replace(trimmedCypher, "\t", "", -1)
+
+	trimmedAnswer := strings.Replace(answer, "\n", "", -1)
+	trimmedAnswer = strings.Replace(trimmedAnswer, "\t", "", -1)
+
+	fmt.Println(*cypher)
+	assert.Equal(t, trimmedAnswer, trimmedCypher)
+
+}
+func TestDoubleInStatement(t *testing.T) {
+	query := []byte(`{
+		"databaseName": "Movies3",
+		"entities": [
+			{
+				"id": 0,
+				"name": "Person",
+				"constraints": []
+			},
+			{
+				"id": 1,
+				"name": "Movie",
+				"constraints": []
 			},
 			{
-				"type": "airports",
+				"id": 2,
+				"name": "Person",
 				"constraints": [
 					{
-						"attribute": "city",
-						"value": "San Francisco",
+						"attribute": "bornIn",
+						"value": "",
 						"dataType": "string",
-						"matchType": "exact"
+						"matchType": "",
+						"inID": 0,
+						"inType": "groupBy"
 					}
 				]
 			},
 			{
-				"type": "airports",
+				"id": 3,
+				"name": "Person",
 				"constraints": [
 					{
-						"attribute": "state",
-						"value": "HI",
+						"attribute": "bornIn",
+						"value": "",
 						"dataType": "string",
-						"matchType": "exact"
+						"matchType": "",
+						"inID": 0,
+						"inType": "groupBy"
 					}
 				]
 			}
 		],
 		"relations": [
 			{
-				"type": "flights",
+				"id": 0,
+				"name": "ACTED_IN",
 				"depth": {
 					"min": 1,
-					"max": 3
+					"max": 1
 				},
-				"entityFrom": 2,
-				"entityTo": 1,
+				"fromType": "entity",
+				"fromID": 0,
+				"toType": "entity",
+				"toID": 1,
+				"constraints": []
+			}
+		],
+		"groupBys": [
+			{
+				"id": 0,
+				"groupType": "entity",
+				"groupID": [1],
+				"groupAttribute": "imdbRating",
+				"byType": "entity",
+				"byID": [0],
+				"byAttribute": "bornIn",
+				"appliedModifier": "AVG",
+				"relationID": 0,
 				"constraints": [
 					{
-						"attribute": "Day",
-						"value": "15",
+						"attribute": "imdbRating",
+						"value": "7.5",
 						"dataType": "int",
-						"matchType": "EQ"
+						"matchType": "GT",
+						"inID": -1,
+						"inType": ""
 					}
 				]
-			},
-			{
-				"type": "flights",
-				"depth": {
-					"min": 1,
-					"max": 1
-				},
-				"entityFrom": 0,
-				"entityTo": -1,
-				"constraints": []
 			}
 		],
+		"machineLearning": [],
 		"limit": 5000
-	}`)
-
-	// Unmarshall the incoming message into an IncomingJSONQuery object
-	var JSONQuery entitycypher.IncomingQueryJSON
+	}
+	`)
+
+	answer := `MATCH p0 = (e0:Person)-[:ACTED_IN*1..1]-(e1:Movie)
+	UNWIND relationships(p0) as r0 
+	WITH *
+	WITH e0.bornIn AS e0_bornIn, AVG(e1.imdbRating) AS AVG_imdbRating 
+		WHERE  AVG_imdbRating > 7.5 
+	MATCH (e2:Person)
+		WHERE e2.bornIn IN e0_bornIn
+	WITH *
+	MATCH (e3:Person)
+		WHERE e3.bornIn IN e0_bornIn
+	WITH *
+	UNWIND [e3] AS x 
+	RETURN DISTINCT x
+	LIMIT 5000;nodelink`
+
+	var JSONQuery entity.IncomingQueryJSON
 	json.Unmarshal(query, &JSONQuery)
 
-	convertedResult, err := service.ConvertQuery(&JSONQuery)
+	s := NewService()
+	cypher, err := s.ConvertQuery(&JSONQuery)
+	if err != nil {
+		fmt.Println(err)
+	}
 
-	// Assert that there is no error
-	assert.NoError(t, err)
+	trimmedCypher := strings.Replace(*cypher, "\n", "", -1)
+	trimmedCypher = strings.Replace(trimmedCypher, "\t", "", -1)
 
-	// Assert that the result and the expected result are the same
-	correctConvertedResult := `MATCH (n2:airports) WHERE n2.state = "HI" MATCH p0 = (n2)-[r0:flights*1..3]->(n1) WHERE r0.Day = 15 RETURN n2,n1,p0; MATCH (n0:airports) WHERE n0.city = "New York" MATCH p0 = (n0)-[r0:flights*1..1]->() RETURN n0,p0;`
-	cleanedResult := strings.ReplaceAll(*convertedResult, "\n", "")
-	cleanedResult = strings.ReplaceAll(cleanedResult, "\t", "")
-	assert.Equal(t, correctConvertedResult, cleanedResult)
-}
+	trimmedAnswer := strings.Replace(answer, "\n", "", -1)
+	trimmedAnswer = strings.Replace(trimmedAnswer, "\t", "", -1)
 
-func TestRelationWithOnlyToNode(t *testing.T) {
-	// Setup for test
-	// Create query conversion service
-	service := NewService()
+	fmt.Println(*cypher)
+	assert.Equal(t, trimmedAnswer, trimmedCypher)
 
+}
+func TestEndOnGroupBy(t *testing.T) {
 	query := []byte(`{
-			"return": {
-				"entities": [
-					0
-				],
-				"relations": [
-					0
-				]
-			},
+		"return": {
 			"entities": [
-				{
-					"type": "airports",
-					"constraints": [
-						{
-							"attribute": "city",
-							"value": "San Francisco",
-							"dataType": "string",
-							"matchType": "exact"
-						}
-					]
-				}
+				11,
+				12
 			],
 			"relations": [
-				{
-					"type": "flights",
-					"depth": {
-						"min": 1,
-						"max": 1
-					},
-					"entityFrom": -1,
-					"entityTo": 0,
-					"constraints": []
-				}
+				10
 			],
-			"limit": 5000
-		}`)
-
-	// Unmarshall the incoming message into an IncomingJSONQuery object
-	var JSONQuery entitycypher.IncomingQueryJSON
+			"groupBys": [
+				1
+			]
+		},
+		"entities": [
+			{
+				"name": "Person",
+				"ID": 11,
+				"constraints": []
+			},
+			{
+				"name": "Movie",
+				"ID": 12,
+				"constraints": []
+			}
+		],
+		"relations": [
+			{
+				"ID": 10,
+				"name": "DIRECTED",
+				"depth": {
+					"min": 1,
+					"max": 1
+				},
+				"fromType": "entity",
+				"fromID": 11,
+				"toType": "entity",
+				"toID": 12,
+				"constraints": []
+			}
+		],
+		"groupBys": [
+			{
+			"id": 0,
+			"groupType": "entity",
+			"groupID": [11],
+			"groupAttribute": "imdbRating",
+			"byType": "entity",
+			"byID": [12],
+			"byAttribute": "bornIn",
+			"appliedModifier": "AVG",
+			"relationID": 10,
+			"constraints": []
+			}				
+		],
+		"machineLearning": [],
+		"limit": 5000,
+		"databaseName": "Movies3"
+	}
+	`)
+
+	answer := `MATCH p0 = (e11:Person)-[:DIRECTED*1..1]-(e12:Movie)
+	UNWIND relationships(p0) as r10 
+	WITH *
+	WITH e12.bornIn AS e12_bornIn, AVG(e11.imdbRating) AS AVG_imdbRating 
+	RETURN e12_bornIn, AVG_imdbRating
+	LIMIT 5000;table`
+
+	var JSONQuery entity.IncomingQueryJSON
 	json.Unmarshal(query, &JSONQuery)
 
-	convertedResult, err := service.ConvertQuery(&JSONQuery)
-
-	// Assert that there is no error
-	assert.NoError(t, err)
+	s := NewService()
+	cypher, err := s.ConvertQuery(&JSONQuery)
+	if err != nil {
+		fmt.Println(err)
+	}
 
-	// Assert that the result and the expected result are the same
-	correctConvertedResult := `MATCH (n0:airports) WHERE n0.city = "San Francisco" MATCH p0 = (n0)-[r0:flights*1..1]->() RETURN n0,p0;`
-	cleanedResult := strings.ReplaceAll(*convertedResult, "\n", "")
-	cleanedResult = strings.ReplaceAll(cleanedResult, "\t", "")
-	assert.Equal(t, correctConvertedResult, cleanedResult)
-}
+	trimmedCypher := strings.Replace(*cypher, "\n", "", -1)
+	trimmedCypher = strings.Replace(trimmedCypher, "\t", "", -1)
 
-func TestTooManyReturnEntities(t *testing.T) {
-	// Setup for test
-	// Create query conversion service
-	service := NewService()
+	trimmedAnswer := strings.Replace(answer, "\n", "", -1)
+	trimmedAnswer = strings.Replace(trimmedAnswer, "\t", "", -1)
 
+	fmt.Println(*cypher)
+	assert.Equal(t, trimmedAnswer, trimmedCypher)
+}
+func TestSimpleQuery(t *testing.T) {
 	query := []byte(`{
-			"return": {
-				"entities": [
-					0,
-					1,
-					2
-				],
-				"relations": [
-					0
-				]
-			},
+		"return": {
 			"entities": [
-				{
-					"type": "airports",
-					"constraints": [
-						{
-							"attribute": "city",
-							"value": "San Francisco",
-							"dataType": "string",
-							"matchType": "exact"
-						}
-					]
-				}
+				11,
+				12
 			],
 			"relations": [
-				{
-					"type": "flights",
-					"depth": {
-						"min": 1,
-						"max": 1
-					},
-					"entityFrom": -1,
-					"entityTo": 0,
-					"constraints": []
-				}
+				10
 			],
-			"limit": 5000
-		}`)
-
-	// Unmarshall the incoming message into an IncomingJSONQuery object
-	var JSONQuery entitycypher.IncomingQueryJSON
+			"groupBys": []
+		},
+		"entities": [
+			{
+				"name": "Person",
+				"ID": 11,
+				"constraints": []
+			},
+			{
+				"name": "Movie",
+				"ID": 12,
+				"constraints": []
+			}
+		],
+		"relations": [
+			{
+				"ID": 10,
+				"name": "DIRECTED",
+				"depth": {
+					"min": 1,
+					"max": 1
+				},
+				"fromType": "entity",
+				"fromID": 11,
+				"toType": "entity",
+				"toID": 12,
+				"constraints": []
+			}
+		],
+		"groupBys": [],
+		"machineLearning": [],
+		"limit": 5000,
+		"databaseName": "Movies3"
+	}
+	`)
+
+	answer := `MATCH p0 = (e11:Person)-[:DIRECTED*1..1]-(e12:Movie)
+	UNWIND relationships(p0) as r10 
+	WITH *
+	UNWIND [r10,e11,e12] AS x 
+	RETURN DISTINCT x
+	LIMIT 5000;nodelink`
+
+	var JSONQuery entity.IncomingQueryJSON
 	json.Unmarshal(query, &JSONQuery)
 
-	_, err := service.ConvertQuery(&JSONQuery)
+	s := NewService()
+	cypher, err := s.ConvertQuery(&JSONQuery)
+	if err != nil {
+		fmt.Println(err)
+	}
 
-	// Assert that there is no error
-	assert.Equal(t, errors.New("non-existing entity referenced in return"), err)
-}
+	trimmedCypher := strings.Replace(*cypher, "\n", "", -1)
+	trimmedCypher = strings.Replace(trimmedCypher, "\t", "", -1)
 
-func TestTooManyReturnRelations(t *testing.T) {
-	// Setup for test
-	// Create query conversion service
-	service := NewService()
+	trimmedAnswer := strings.Replace(answer, "\n", "", -1)
+	trimmedAnswer = strings.Replace(trimmedAnswer, "\t", "", -1)
 
+	fmt.Println(*cypher)
+	assert.Equal(t, trimmedAnswer, trimmedCypher)
+}
+func TestNoRelation(t *testing.T) {
 	query := []byte(`{
-			"return": {
-				"entities": [
-					0
-				],
-				"relations": [
-					0,
-					1,
-					2
-				]
-			},
+		"return": {
 			"entities": [
-				{
-					"type": "airports",
-					"constraints": [
-						{
-							"attribute": "city",
-							"value": "San Francisco",
-							"dataType": "string",
-							"matchType": "exact"
-						}
-					]
-				}
+				11,
+				12
 			],
 			"relations": [
-				{
-					"type": "flights",
-					"depth": {
-						"min": 1,
-						"max": 1
-					},
-					"entityFrom": -1,
-					"entityTo": 0,
-					"constraints": []
-				}
+				10
 			],
-			"limit": 5000
-		}`)
-
-	// Unmarshall the incoming message into an IncomingJSONQuery object
-	var JSONQuery entitycypher.IncomingQueryJSON
+			"groupBys": []
+		},
+		"entities": [
+			{
+				"name": "Person",
+				"ID": 11,
+				"constraints": []
+			},
+			{
+				"name": "Movie",
+				"ID": 12,
+				"constraints": []
+			}
+		],
+		"relations": [],
+		"groupBys": [],
+		"machineLearning": [],
+		"limit": 5000,
+		"databaseName": "Movies3"
+	}
+	`)
+
+	var JSONQuery entity.IncomingQueryJSON
 	json.Unmarshal(query, &JSONQuery)
-
-	_, err := service.ConvertQuery(&JSONQuery)
-
-	// Assert that there is no error
-	assert.Equal(t, errors.New("non-existing relation referenced in return"), err)
+	fmt.Println(JSONQuery)
+	fmt.Println(" ")
+
+	s := NewService()
+	_, err := s.ConvertQuery(&JSONQuery)
+	if err != nil {
+		assert.Equal(t, err.Error(), "Invalid query")
+	} else {
+		// It should error, thus it must not reach this
+		t.Fail()
+	}
 }
 
-func TestNegativeReturnEntities(t *testing.T) {
-	// Setup for test
-	// Create query conversion service
-	service := NewService()
-
+func TestNoEntities(t *testing.T) {
 	query := []byte(`{
-			"return": {
-				"entities": [
-					0,
-					-1
-				],
-				"relations": [
-					0,
-					1,
-					2
-				]
+		"return": {
+			"entities": [
+				11,
+				12
+			],
+			"relations": [
+				10
+			],
+			"groupBys": []
+		},
+		"entities": [],
+		"relations": [
+			{
+			"ID": 10,
+			"name": "DIRECTED",
+			"depth": {
+				"min": 1,
+				"max": 1
 			},
+			"fromType": "entity",
+			"fromID": 11,
+			"toType": "entity",
+			"toID": 12,
+			"constraints": []
+		}
+		],
+		"groupBys": [],
+		"machineLearning": [],
+		"limit": 5000,
+		"databaseName": "Movies3"
+	}
+	`)
+
+	var JSONQuery entity.IncomingQueryJSON
+	json.Unmarshal(query, &JSONQuery)
+	fmt.Println(JSONQuery)
+	fmt.Println(" ")
+
+	s := NewService()
+	_, err := s.ConvertQuery(&JSONQuery)
+	if err != nil {
+		assert.Equal(t, err.Error(), "Invalid query")
+	} else {
+		// It should error, thus it must not reach this
+		t.Fail()
+	}
+}
+func TestTwoRelationsCycle(t *testing.T) {
+	query := []byte(`{
+		"return": {
 			"entities": [
-				{
-					"type": "airports",
-					"constraints": [
-						{
-							"attribute": "city",
-							"value": "San Francisco",
-							"dataType": "string",
-							"matchType": "exact"
-						}
-					]
-				}
+				11,
+				12
 			],
 			"relations": [
-				{
-					"type": "flights",
-					"depth": {
-						"min": 1,
-						"max": 1
-					},
-					"entityFrom": -1,
-					"entityTo": 0,
-					"constraints": []
-				}
+				10
 			],
-			"limit": 5000
-		}`)
-
-	// Unmarshall the incoming message into an IncomingJSONQuery object
-	var JSONQuery entitycypher.IncomingQueryJSON
+			"groupBys": []
+		},
+		"entities": [
+			{
+				"name": "Person",
+				"ID": 11,
+				"constraints": []
+			},
+			{
+				"name": "Movie",
+				"ID": 12,
+				"constraints": []
+			}
+		],
+		"relations": [
+			{
+			"ID": 10,
+			"name": "DIRECTED",
+			"depth": {
+				"min": 1,
+				"max": 1
+			},
+			"fromType": "entity",
+			"fromID": 11,
+			"toType": "entity",
+			"toID": 12,
+			"constraints": []
+		},
+		{
+			"ID": 11,
+			"name": "ACTED_IN",
+			"depth": {
+				"min": 1,
+				"max": 1
+			},
+			"fromType": "entity",
+			"fromID": 12,
+			"toType": "entity",
+			"toID": 11,
+			"constraints": []
+		}
+		],
+		"groupBys": [],
+		"machineLearning": [],
+		"limit": 5000,
+		"databaseName": "Movies3"
+	}
+	`)
+
+	var JSONQuery entity.IncomingQueryJSON
 	json.Unmarshal(query, &JSONQuery)
-
-	_, err := service.ConvertQuery(&JSONQuery)
-
-	// Assert that there is no error
-	assert.Equal(t, errors.New("non-existing entity referenced in return"), err)
+	fmt.Println(JSONQuery)
+	fmt.Println(" ")
+
+	s := NewService()
+	cypher, err := s.ConvertQuery(&JSONQuery)
+	if err != nil {
+		fmt.Println(err)
+		t.Fail()
+		return
+	}
+
+	if cypher == nil {
+		t.Fail()
+		return
+	}
+
+	answer1 := `MATCH p0 = (e11:Person)-[:DIRECTED*1..1]-(e12:Movie)
+	UNWIND relationships(p0) as r10 
+	WITH *
+	MATCH p1 = (e12:Movie)-[:ACTED_IN*1..1]-(e11:Person)
+	UNWIND relationships(p1) as r11 
+	WITH *
+	UNWIND [r11,e12,e11,r10,e11,e12] AS x 
+	RETURN DISTINCT x
+	LIMIT 5000;nodelink`
+
+	answer2 := `MATCH p1 = (e12:Movie)-[:ACTED_IN*1..1]-(e11:Person)
+	UNWIND relationships(p1) as r11 
+	WITH *
+	MATCH p0 = (e11:Person)-[:DIRECTED*1..1]-(e12:Movie)
+	UNWIND relationships(p0) as r10 
+	WITH *
+	UNWIND [r10,e11,e12,r11,e12,e11] AS x 
+	RETURN DISTINCT x
+	LIMIT 5000;nodelink`
+
+	trimmedCypher := strings.Replace(*cypher, "\n", "", -1)
+	trimmedCypher = strings.Replace(trimmedCypher, "\t", "", -1)
+
+	trimmedAnswer1 := strings.Replace(answer1, "\n", "", -1)
+	trimmedAnswer1 = strings.Replace(trimmedAnswer1, "\t", "", -1)
+	trimmedAnswer2 := strings.Replace(answer2, "\n", "", -1)
+	trimmedAnswer2 = strings.Replace(trimmedAnswer2, "\t", "", -1)
+
+	fmt.Println(*cypher)
+	// Both answers are correct
+	if !(trimmedAnswer1 == trimmedCypher || trimmedAnswer2 == trimmedCypher) {
+		t.Fail()
+	}
 }
 
-func TestNoRelationsField(t *testing.T) {
-	// Setup for test
-	// Create query conversion service
-	service := NewService()
-
+func TestCyclePlusDependency(t *testing.T) {
 	query := []byte(`{
-			"return": {
-				"entities": [
-					0
-				]
+		"return": {
+			"entities": [
+				11,
+				12
+			],
+			"relations": [
+				10
+			],
+			"groupBys": []
+		},
+		"entities": [
+			{
+				"name": "Person",
+				"ID": 11,
+				"constraints": []
+			},
+			{
+				"name": "Movie",
+				"ID": 12,
+				"constraints": []
+			},
+			{
+				"name": "Person",
+				"ID": 13,
+				"constraints": []
+			}
+		],
+		"relations": [
+			{
+			"ID": 10,
+			"name": "DIRECTED",
+			"depth": {
+				"min": 1,
+				"max": 1
 			},
+			"fromType": "entity",
+			"fromID": 11,
+			"toType": "entity",
+			"toID": 12,
+			"constraints": []
+		},
+		{
+			"ID": 11,
+			"name": "ACTED_IN",
+			"depth": {
+				"min": 1,
+				"max": 1
+			},
+			"fromType": "entity",
+			"fromID": 12,
+			"toType": "entity",
+			"toID": 11,
+			"constraints": []
+		},
+		{
+			"ID": 12,
+			"name": "ACTED_IN",
+			"depth": {
+				"min": 1,
+				"max": 1
+			},
+			"fromType": "entity",
+			"fromID": 13,
+			"toType": "entity",
+			"toID": 12,
+			"constraints": []
+		}
+		],
+		"groupBys": [],
+		"machineLearning": [],
+		"limit": 5000,
+		"databaseName": "Movies3"
+	}
+	`)
+
+	var JSONQuery entity.IncomingQueryJSON
+	json.Unmarshal(query, &JSONQuery)
+	fmt.Println(JSONQuery)
+	fmt.Println(" ")
+
+	s := NewService()
+	cypher, err := s.ConvertQuery(&JSONQuery)
+	if err != nil {
+		fmt.Println(err)
+		t.Fail()
+		return
+	}
+
+	if cypher == nil {
+		t.Fail()
+		return
+	}
+
+	answer := `MATCH p2 = (e13:Person)-[:ACTED_IN*1..1]-(e12:Movie)
+	UNWIND relationships(p2) as r12 
+	WITH *
+	MATCH p1 = (e12:Movie)-[:ACTED_IN*1..1]-(e11:Person)
+	UNWIND relationships(p1) as r11 
+	WITH *
+	MATCH p0 = (e11:Person)-[:DIRECTED*1..1]-(e12:Movie)
+	UNWIND relationships(p0) as r10 
+	WITH *
+	UNWIND [r10,e11,e12,r11,e12,e11,r12,e13,e12] AS x 
+	RETURN DISTINCT x
+	LIMIT 5000;nodelink`
+
+	trimmedCypher := strings.Replace(*cypher, "\n", "", -1)
+	trimmedCypher = strings.Replace(trimmedCypher, "\t", "", -1)
+
+	trimmedAnswer := strings.Replace(answer, "\n", "", -1)
+	trimmedAnswer = strings.Replace(trimmedAnswer, "\t", "", -1)
+
+	fmt.Println(*cypher)
+	assert.Equal(t, trimmedAnswer, trimmedCypher)
+}
+func TestTripleCycle(t *testing.T) {
+	query := []byte(`{
+		"return": {
 			"entities": [
-				{
-					"type": "airports",
-					"constraints": [
-						{
-							"attribute": "city",
-							"value": "San Francisco",
-							"dataType": "string",
-							"matchType": "exact"
-						}
-					]
-				}
+				11,
+				12
+			],
+			"relations": [
+				10
 			],
-			"limit": 5000
-		}`)
+			"groupBys": []
+		},
+		"entities": [
+			{
+				"name": "Person",
+				"ID": 11,
+				"constraints": []
+			},
+			{
+				"name": "Movie",
+				"ID": 12,
+				"constraints": []
+			},
+			{
+				"name": "Person",
+				"ID": 13,
+				"constraints": []
+			}
+		],
+		"relations": [
+			{
+			"ID": 10,
+			"name": "DIRECTED",
+			"depth": {
+				"min": 1,
+				"max": 1
+			},
+			"fromType": "entity",
+			"fromID": 11,
+			"toType": "entity",
+			"toID": 12,
+			"constraints": []
+		},
+		{
+			"ID": 11,
+			"name": "ACTED_IN",
+			"depth": {
+				"min": 1,
+				"max": 1
+			},
+			"fromType": "entity",
+			"fromID": 12,
+			"toType": "entity",
+			"toID": 13,
+			"constraints": []
+		},
+		{
+			"ID": 12,
+			"name": "ACTED_IN",
+			"depth": {
+				"min": 1,
+				"max": 1
+			},
+			"fromType": "entity",
+			"fromID": 13,
+			"toType": "entity",
+			"toID": 11,
+			"constraints": []
+		}
+		],
+		"groupBys": [],
+		"machineLearning": [],
+		"limit": 5000,
+		"databaseName": "Movies3"
+	}
+	`)
+
+	var JSONQuery entity.IncomingQueryJSON
+	json.Unmarshal(query, &JSONQuery)
 
-	// Unmarshall the incoming message into an IncomingJSONQuery object
-	var JSONQuery entitycypher.IncomingQueryJSON
+	s := NewService()
+	cypher, err := s.ConvertQuery(&JSONQuery)
+	if err != nil {
+		fmt.Println(err)
+		assert.Equal(t, err, errors.New("Cyclic query detected"))
+		return
+	}
+
+	if cypher == nil {
+		t.Fail()
+		return
+	}
+	t.Fail()
+}
+
+func TestMultipleByStatementConnected(t *testing.T) {
+	query := []byte(`{
+		"databaseName": "Movies3",
+		"entities": [
+			{
+				"id": 0,
+				"name": "Person",
+				"constraints": []
+			},
+			{
+				"id": 1,
+				"name": "Movie",
+				"constraints": []
+			},
+			{
+				"id": 2,
+				"name": "Person",
+				"constraints": []
+			}
+		],
+		"relations": [
+			{
+				"id": 0,
+				"name": "ACTED_IN",
+				"depth": {
+					"min": 1,
+					"max": 1
+				},
+				"fromType": "entity",
+				"fromID": 0,
+				"toType": "entity",
+				"toID": 1,
+				"constraints": []
+			},
+			{
+				"id": 2,
+				"name": "ACTED_IN",
+				"depth": {
+					"min": 1,
+					"max": 1
+				},
+				"fromType": "entity",
+				"fromID": 2,
+				"toType": "entity",
+				"toID": 1,
+				"constraints": []
+			}
+		],
+		"groupBys": [
+			{
+				"id": 0,
+				"groupType": "entity",
+				"groupID": [1],
+				"groupAttribute": "imdbRating",
+				"byType": "entity",
+				"byID": [0,2],
+				"byAttribute": "bornIn",
+				"appliedModifier": "AVG",
+				"relationID": 0,
+				"constraints": [
+					{
+						"attribute": "imdbRating",
+						"value": "7.5",
+						"dataType": "int",
+						"matchType": "GT",
+						"inID": -1,
+						"inType": ""
+					}
+				]
+			}
+		],
+		"machineLearning": [],
+		"limit": 5000
+	}
+	`)
+
+	answer := `MATCH p0 = (e0:Person)-[:ACTED_IN*1..1]-(e1:Movie)
+	UNWIND relationships(p0) as r0 
+	WITH *
+	MATCH p1 = (e2:Person)-[:ACTED_IN*1..1]-(e1:Movie)
+	UNWIND relationships(p1) as r2 
+	WITH *
+	UNWIND [e0,e2] AS e02L
+	WITH e02L.bornIn AS e02L_bornIn, AVG(e1.imdbRating) AS AVG_imdbRating 
+		WHERE  AVG_imdbRating > 7.5 
+	RETURN e02L_bornIn, AVG_imdbRating
+	LIMIT 5000;table`
+
+	var JSONQuery entity.IncomingQueryJSON
 	json.Unmarshal(query, &JSONQuery)
 
-	convertedResult, err := service.ConvertQuery(&JSONQuery)
+	s := NewService()
+	cypher, err := s.ConvertQuery(&JSONQuery)
+	if err != nil {
+		fmt.Println(err)
+	}
 
-	// Assert that there is no error
-	assert.NoError(t, err)
+	trimmedCypher := strings.Replace(*cypher, "\n", "", -1)
+	trimmedCypher = strings.Replace(trimmedCypher, "\t", "", -1)
 
-	// Assert that the result and the expected result are the same
-	correctConvertedResult := `MATCH (n0:airports) WHERE n0.city = "San Francisco" RETURN n0`
-	cleanedResult := strings.ReplaceAll(*convertedResult, "\n", "")
-	cleanedResult = strings.ReplaceAll(cleanedResult, "\t", "")
-	assert.Equal(t, correctConvertedResult, cleanedResult)
-}
+	trimmedAnswer := strings.Replace(answer, "\n", "", -1)
+	trimmedAnswer = strings.Replace(trimmedAnswer, "\t", "", -1)
 
-func TestEntityFromLowerThanNegativeOneInRelation(t *testing.T) {
-	// Setup for test
-	// Create query conversion service
-	service := NewService()
+	fmt.Println(*cypher)
+	assert.Equal(t, trimmedAnswer, trimmedCypher)
+
+}
 
+func TestRelationOnGroupBy(t *testing.T) {
 	query := []byte(`{
-			"return": {
-				"entities": [
-					0
-				],
-				"relations": [
-					0
-				]
+		"databaseName": "Movies3",
+		"entities": [
+			{
+				"id": 0,
+				"name": "Person",
+				"constraints": []
 			},
-			"entities": [
-				{
-					"type": "airports",
-					"constraints": [
-						{
-							"attribute": "city",
-							"value": "San Francisco",
-							"dataType": "string",
-							"matchType": "exact"
-						}
-					]
-				}
-			],
-			"relations": [
-				{
-					"type": "flights",
-					"depth": {
-						"min": 1,
-						"max": 1
-					},
-					"entityFrom": -4,
-					"entityTo": 0,
-					"constraints": []
-				}
-			],
-			"limit": 5000
-		}`)
-
-	// Unmarshall the incoming message into an IncomingJSONQuery object
-	var JSONQuery entitycypher.IncomingQueryJSON
+			{
+				"id": 1,
+				"name": "Movie",
+				"constraints": []
+			},
+			{
+				"id": 2,
+				"name": "Person",
+				"constraints": []
+			},
+			{
+				"id": 3,
+				"name": "Movie",
+				"constraints": []
+			}
+		],
+		"relations": [
+			{
+				"id": 0,
+				"name": "ACTED_IN",
+				"depth": {
+					"min": 1,
+					"max": 1
+				},
+				"fromType": "entity",
+				"fromID": 0,
+				"toType": "entity",
+				"toID": 1,
+				"constraints": []
+			},
+			{
+				"id": 1,
+				"name": "ACTED_IN",
+				"depth": {
+					"min": 1,
+					"max": 1
+				},
+				"fromType": "entity",
+				"fromID": 2,
+				"toType": "entity",
+				"toID": 1,
+				"constraints": []
+			},
+			{
+				"id": 2,
+				"name": "ACTED_IN",
+				"depth": {
+					"min": 1,
+					"max": 1
+				},
+				"fromType": "groupBy",
+				"fromID": 0,
+				"toType": "entity",
+				"toID": 3,
+				"constraints": []
+			}
+		],
+		"groupBys": [
+			{
+				"id": 0,
+				"groupType": "entity",
+				"groupID": [1],
+				"groupAttribute": "imdbRating",
+				"byType": "entity",
+				"byID": [0,2],
+				"byAttribute": "bornIn",
+				"appliedModifier": "AVG",
+				"relationID": 0,
+				"constraints": [
+					{
+						"attribute": "imdbRating",
+						"value": "7.5",
+						"dataType": "int",
+						"matchType": "GT",
+						"inID": -1,
+						"inType": ""
+					}
+				]
+			}
+		],
+		"machineLearning": [],
+		"limit": 5000
+	}
+	`)
+
+	answer := `MATCH p0 = (e0:Person)-[:ACTED_IN*1..1]-(e1:Movie)
+	UNWIND relationships(p0) as r0 
+	WITH *
+	MATCH p1 = (e2:Person)-[:ACTED_IN*1..1]-(e1:Movie)
+	UNWIND relationships(p1) as r1 
+	WITH *
+	UNWIND [e0,e2] AS e02L
+	WITH e02L.bornIn AS e02L_bornIn, AVG(e1.imdbRating) AS AVG_imdbRating 
+		WHERE  AVG_imdbRating > 7.5 
+	MATCH p2 = (eg02L:Person)-[:ACTED_IN*1..1]-(e3:Movie)
+		WHERE eg02L.bornIn IN e02L_bornIn 
+	UNWIND relationships(p2) as r2 
+	WITH *
+	UNWIND [r2,eg02L,e3] AS x 
+	RETURN DISTINCT x
+	LIMIT 5000;nodelink`
+
+	answer2 := `MATCH p1 = (e2:Person)-[:ACTED_IN*1..1]-(e1:Movie)
+	UNWIND relationships(p1) as r1 
+	WITH *
+	MATCH p0 = (e0:Person)-[:ACTED_IN*1..1]-(e1:Movie)
+	UNWIND relationships(p0) as r0 
+	WITH *
+	UNWIND [e0,e2] AS e02L
+	WITH e02L.bornIn AS e02L_bornIn, AVG(e1.imdbRating) AS AVG_imdbRating 
+		WHERE  AVG_imdbRating > 7.5 
+	MATCH p2 = (eg02L:Person)-[:ACTED_IN*1..1]-(e3:Movie)
+		WHERE eg02L.bornIn IN e02L_bornIn 
+	UNWIND relationships(p2) as r2 
+	WITH *
+	UNWIND [r2,eg02L,e3] AS x 
+	RETURN DISTINCT x
+	LIMIT 5000;nodelink`
+
+	var JSONQuery entity.IncomingQueryJSON
 	json.Unmarshal(query, &JSONQuery)
 
-	_, err := service.ConvertQuery(&JSONQuery)
+	s := NewService()
+	cypher, err := s.ConvertQuery(&JSONQuery)
+	if err != nil {
+		fmt.Println(err)
+	}
+
+	trimmedCypher := strings.Replace(*cypher, "\n", "", -1)
+	trimmedCypher = strings.Replace(trimmedCypher, "\t", "", -1)
+
+	trimmedAnswer := strings.Replace(answer, "\n", "", -1)
+	trimmedAnswer = strings.Replace(trimmedAnswer, "\t", "", -1)
+	trimmedAnswer2 := strings.Replace(answer2, "\n", "", -1)
+	trimmedAnswer2 = strings.Replace(trimmedAnswer2, "\t", "", -1)
 
-	// Assert that there is no error
-	assert.NoError(t, err)
+	if !(trimmedCypher == trimmedAnswer || trimmedCypher == trimmedAnswer2) {
+		t.Fail()
+	}
 }
diff --git a/cypher/createConstraints.go b/cypher/createConstraints.go
index d88a374..33cd9f0 100644
--- a/cypher/createConstraints.go
+++ b/cypher/createConstraints.go
@@ -3,7 +3,7 @@ package cypher
 import (
 	"fmt"
 
-	"git.science.uu.nl/graphpolaris/query-conversion/entity/entitycypher"
+	"git.science.uu.nl/graphpolaris/query-conversion/entity"
 )
 
 /* createConstraintStatements generates the appropriate amount of constraint lines calling createConstraingBoolExpression
@@ -13,7 +13,7 @@ isRelation is a boolean specifying if this constraint comes from a node or relat
 
 Return: a string containing a FILTER-statement with all the constraints
 */
-func createConstraintStatements(constraints *[]entitycypher.QueryConstraintStruct, name string) *string {
+func createConstraintStatements(constraints *[]entity.QueryConstraintStruct, name string) *string {
 	s := ""
 	if len(*constraints) == 0 {
 		return &s
@@ -22,7 +22,7 @@ func createConstraintStatements(constraints *[]entitycypher.QueryConstraintStruc
 	newLineStatement := "\tWHERE"
 
 	for _, v := range *constraints {
-		s += fmt.Sprintf("%v%v \n", newLineStatement, *createConstraintBoolExpression(&v, name))
+		s += fmt.Sprintf("%v%v \n", newLineStatement, *createConstraintBoolExpression(&v, name, false))
 		newLineStatement = "\tAND"
 	}
 
@@ -38,7 +38,7 @@ isRelation is a boolean specifying if this constraint comes from a node or relat
 
 Return: a string containing an boolean expression of a single constraint
 */
-func createConstraintBoolExpression(constraint *entitycypher.QueryConstraintStruct, name string) *string {
+func createConstraintBoolExpression(constraint *entity.QueryConstraintStruct, name string, customAttribute bool) *string {
 	var (
 		match string
 		value string
@@ -95,7 +95,12 @@ func createConstraintBoolExpression(constraint *entitycypher.QueryConstraintStru
 		}
 	}
 
-	line = fmt.Sprintf("%s %s.%s %s %s", neq, name, constraint.Attribute, match, value)
+	if customAttribute {
+		line = fmt.Sprintf("%s %s %s %s", neq, name, match, value)
+	} else {
+
+		line = fmt.Sprintf("%s %s.%s %s %s", neq, name, constraint.Attribute, match, value)
+	}
 
 	return &line
 }
diff --git a/cypher/hierarchy.go b/cypher/hierarchy.go
new file mode 100644
index 0000000..29bdf1a
--- /dev/null
+++ b/cypher/hierarchy.go
@@ -0,0 +1,247 @@
+package cypher
+
+import (
+	"errors"
+	"log"
+
+	"git.science.uu.nl/graphpolaris/query-conversion/entity"
+)
+
+// createQueryHierarchy finds out what depends on what, then uses topological sort to create a hierarchy
+func createQueryHierarchy(JSONQuery *entity.IncomingQueryJSON) (entity.Query, error) {
+
+	var parts entity.Query
+	IDctr := 0
+
+	// Add relations all to query parts
+	for _, rel := range JSONQuery.Relations {
+
+		part := entity.QueryPart{
+			QType:        "relation",
+			QID:          rel.ID,
+			PartID:       IDctr,
+			Dependencies: make([]int, 0),
+		}
+		parts = append(parts, part)
+
+		IDctr++
+
+	}
+
+	// Add the Groupby's
+	for _, gb := range JSONQuery.GroupBys {
+		part := entity.QueryPart{
+			QType:        "groupBy",
+			QID:          gb.ID,
+			PartID:       IDctr,
+			Dependencies: make([]int, 0),
+		}
+		parts = append(parts, part)
+
+		IDctr++
+
+	}
+
+	// Add the entities, if they have an IN, otherwise they are not important
+	for _, ent := range JSONQuery.Entities {
+
+		skip := true
+		for _, con := range ent.Constraints {
+			if con.InID != -1 {
+				skip = false
+			}
+		}
+
+		if skip {
+			continue
+		}
+
+		part := entity.QueryPart{
+			QType:        "entity",
+			QID:          ent.ID,
+			PartID:       IDctr,
+			Dependencies: make([]int, 0),
+		}
+		parts = append(parts, part)
+
+		IDctr++
+	}
+
+	// Check dependencies in a nice O(n^2)
+	for _, rel := range JSONQuery.Relations {
+		if rel.FromID == -1 {
+			continue
+		}
+
+		// Check the dependencies From - To
+		for _, rela := range JSONQuery.Relations {
+			if rela.ToID == -1 {
+				continue
+			}
+
+			if rel.FromID == rela.ToID && rel.FromType == rela.ToType {
+				part := parts.Find(rel.ID, "relation")
+				part.Dependencies = append(part.Dependencies, parts.Find(rela.ID, "relation").PartID)
+			}
+		}
+
+		if rel.ToID == -1 {
+			continue
+		}
+
+	// Now for connections to group by's it doesn't matter if the GB is attached to the from or the to
+		// The GB always has priority
+		for _, gb := range JSONQuery.GroupBys {
+			if (rel.FromID == gb.ID && rel.FromType == "groupBy") || (rel.ToID == gb.ID && rel.ToType == "groupBy") {
+				part := parts.Find(rel.ID, "relation")
+				gbID := parts.Find(gb.ID, "groupBy").PartID
+				part.Dependencies = append(part.Dependencies, gbID)
+			}
+		}
+	}
+
+	// Same trick for group by's
+	for _, gb := range JSONQuery.GroupBys {
+		for _, rela := range JSONQuery.Relations {
+			// Check if the gb is connected to the relation
+
+			// Check for all by's
+			for _, ID := range gb.ByID {
+				if (ID == rela.ID && gb.ByType == "relation") || // Is the By connected to a relation
+					(ID == rela.FromID && gb.ByType == rela.FromType) || // Is the by connected to an entity connected to the "From" of a relation
+					(ID == rela.ToID && gb.ByType == rela.ToType) { // Is the by connected to an entity connected to the "To" of a relation
+					part := parts.Find(gb.ID, "groupBy")
+					part.Dependencies = append(part.Dependencies, parts.Find(rela.ID, "relation").PartID)
+					continue
+				}
+			}
+
+			// Check for all groups
+			for _, ID := range gb.GroupID {
+
+				if (ID == rela.ID && gb.GroupType == "relation") || // is the Group connected to a relation
+					(ID == rela.FromID && gb.GroupType == rela.FromType) || // Is the group connected to an entity connected to the "From" of a relation
+					(ID == rela.ToID && gb.GroupType == rela.ToType) { // Is the group connected to an entity connected to the "To" of a relation
+					part := parts.Find(gb.ID, "groupBy")
+					part.Dependencies = append(part.Dependencies, parts.Find(rela.ID, "relation").PartID)
+				}
+			}
+		}
+
+	}
+
+	for _, ent := range JSONQuery.Entities {
+		for _, con := range ent.Constraints {
+			if con.InID != -1 {
+				part := parts.Find(ent.ID, "entity") // Should always be groupBy
+				part.Dependencies = append(part.Dependencies, parts.Find(con.InID, con.InType).PartID)
+			}
+		}
+
+	}
+
+	// Here comes a checker for (A)-->(B) and (B)-->(A). This is mitigated partly by ignoring it
+	// Lets call it a small cycle. It wont catch bigger cycles (with 3 nodes for example)
+
+	for _, p := range parts {
+		// We only allow small cycles with relations
+		if p.QType != "relation" {
+			continue
+		}
+
+		for _, dep := range p.Dependencies {
+			other := parts.SelectByID(dep)
+
+			if other.QType != "relation" {
+				continue
+			}
+
+			// Deleting from a slice while looping through it is an easy way to make mistakes, hence the workaround
+			cycle := false
+			toRemove := -1
+
+			for i, otherDep := range other.Dependencies {
+				if otherDep == p.PartID {
+					// Small cycle detected
+
+					cycle = true
+					toRemove = i
+				}
+			}
+
+			// Remove one of the two dependencies, does not really matter which, cypher knits it back together due to the query
+			// using the same ID's, thus making it a cycle again later on.
+			if cycle {
+				log.Println("Cycle detected and removed")
+				if len(other.Dependencies) == 0 {
+					other.Dependencies = make([]int, 0)
+				} else {
+					other.Dependencies[toRemove] = other.Dependencies[len(other.Dependencies)-1]
+					other.Dependencies = other.Dependencies[:len(other.Dependencies)-1]
+				}
+
+			}
+		}
+	}
+
+	// Now we have a directed graph, meaning we can use some topological sort (Kahn's algorithm)
+	var sortedQuery entity.Query
+	incomingEdges := make(map[int]int)
+
+	// Set all to 0
+	for _, p := range parts {
+		incomingEdges[p.PartID] = 0
+	}
+
+	// Count the incoming edges (dependencies)
+	for _, p := range parts {
+		for _, dp := range p.Dependencies {
+			incomingEdges[dp]++
+		}
+	}
+
+	for { // While there exists some part where incomingEdges[part] == 0
+		part := entity.QueryPart{PartID: -1}
+		// Select a node with no incoming edges
+		for ID, edges := range incomingEdges {
+			if edges == 0 {
+				part = *parts.SelectByID(ID)
+			}
+		}
+
+		// Check to see if there are parts without incoming edges left
+		if part.PartID == -1 {
+			break
+		}
+
+		// Remove it from the set
+		incomingEdges[part.PartID] = -1
+		sortedQuery = append(sortedQuery, part)
+
+		// Decrease incoming edges of other parts
+		for _, ID := range part.Dependencies {
+			incomingEdges[ID]--
+		}
+	}
+
+	// Now check for cycles in the graph
+	partRemaining := false
+	for _, edges := range incomingEdges {
+		if edges != -1 {
+			partRemaining = true
+		}
+	}
+
+	if partRemaining {
+		// Somehow there was a cycle in the query,
+		return nil, errors.New("Cyclic query detected")
+	}
+
+	// Reverse the list
+	retQuery := make([]entity.QueryPart, len(sortedQuery))
+	for i := 0; i < len(sortedQuery); i++ {
+		retQuery[i] = sortedQuery[len(sortedQuery)-i-1]
+	}
+
+	return retQuery, nil
+}
diff --git a/entity/entitycypher/queryStructCypher.go b/entity/entitycypher/queryStructCypher.go
deleted file mode 100644
index 6db12d3..0000000
--- a/entity/entitycypher/queryStructCypher.go
+++ /dev/null
@@ -1,62 +0,0 @@
-package entitycypher
-
-// IncomingQueryJSON describes the query coming into the service in JSON format
-type IncomingQueryJSON struct {
-	DatabaseName string
-	Return       QueryReturnStruct
-	Entities     []QueryEntityStruct
-	Relations    []QueryRelationStruct
-	// Limit is for limiting the amount of paths AQL will return in a relation let statement
-	Limit     int
-	Modifiers []QueryModifierStruct
-}
-
-// QueryReturnStruct holds the indices of the entities and relations that need to be returned
-type QueryReturnStruct struct {
-	Entities  []int
-	Relations []int
-	//Modifiers []int
-}
-
-// QueryEntityStruct encapsulates a single entity with its corresponding constraints
-type QueryEntityStruct struct {
-	Type        string
-	Constraints []QueryConstraintStruct
-}
-
-// QueryRelationStruct encapsulates a single relation with its corresponding constraints
-type QueryRelationStruct struct {
-	Type        string
-	EntityFrom  int
-	EntityTo    int
-	Depth       QuerySearchDepthStruct
-	Constraints []QueryConstraintStruct
-}
-
-// QueryModifierStruct encapsulates a single modifier with its corresponding constraints
-type QueryModifierStruct struct {
-	Type           string // SUM COUNT AVG
-	SelectedType   string // node relation
-	SelectedTypeID int    // ID of the enitity or relation
-	AttributeIndex int    // = -1 if its the node or relation, = > -1 if an attribute is selected
-	InType         string
-	InID           int
-}
-
-// QuerySearchDepthStruct holds the range of traversals for the relation
-type QuerySearchDepthStruct struct {
-	Min int
-	Max int
-}
-
-// QueryConstraintStruct holds the information of the constraint
-// Constraint datatypes
-// 	string     MatchTypes: exact/contains/startswith/endswith
-// 	int   MatchTypes: GT/LT/EQ
-// 	bool     MatchTypes: EQ/NEQ
-type QueryConstraintStruct struct {
-	Attribute string
-	Value     string
-	DataType  string
-	MatchType string
-}
diff --git a/entity/queryStruct.go b/entity/queryStruct.go
index b74e4f5..a2232f9 100644
--- a/entity/queryStruct.go
+++ b/entity/queryStruct.go
@@ -2,71 +2,73 @@ package entity
 
 // IncomingQueryJSON describes the query coming into the service in JSON format
 type IncomingQueryJSON struct {
-	DatabaseName    string
-	Return          QueryReturnStruct
-	Entities        []QueryEntityStruct
-	Relations       []QueryRelationStruct
-	GroupBys        []QueryGroupByStruct
-	MachineLearning []QueryMLStruct
+	DatabaseName    string                `json:"databaseName"`
+	Return          QueryReturnStruct     `json:"return"`
+	Entities        []QueryEntityStruct   `json:"entities"`
+	Relations       []QueryRelationStruct `json:"relations"`
+	GroupBys        []QueryGroupByStruct  `json:"groupBys"`
+	MachineLearning []QueryMLStruct       `json:"machineLearning"`
 	// Limit is for limiting the amount of paths AQL will return in a relation let statement
-	Limit     int
+	Limit     int `json:"limit"`
 	Modifiers []QueryModifierStruct
 }
 
 // QueryReturnStruct holds the indices of the entities and relations that need to be returned
 type QueryReturnStruct struct {
-	Entities  []int
-	Relations []int
-	GroupBys  []int
+	Entities  []int `json:"entities"`
+	Relations []int `json:"relations"`
+	GroupBys  []int `json:"groupBys"`
 	//Modifiers []int
 }
 
 // QueryEntityStruct encapsulates a single entity with its corresponding constraints
 type QueryEntityStruct struct {
-	ID          int
-	Name        string
-	Constraints []QueryConstraintStruct
+	ID          int                     `json:"id"`
+	Name        string                  `json:"name"`
+	Constraints []QueryConstraintStruct `json:"constraints"`
 }
 
 // QueryRelationStruct encapsulates a single relation with its corresponding constraints
 type QueryRelationStruct struct {
-	ID          int
-	Name        string
-	FromType    string
-	FromID      int
-	ToType      string
-	ToID        int
-	Depth       QuerySearchDepthStruct
-	Constraints []QueryConstraintStruct
+	ID          int                     `json:"id"`
+	Name        string                  `json:"name"`
+	Depth       QuerySearchDepthStruct  `json:"depth"`
+	FromType    string                  `json:"fromType"`
+	FromID      int                     `json:"fromID"`
+	ToType      string                  `json:"toType"`
+	ToID        int                     `json:"toID"`
+	Constraints []QueryConstraintStruct `json:"constraints"`
 }
 
+// QueryGroupByStruct holds all the info needed to form a group by
 type QueryGroupByStruct struct {
-	ID              int
-	GroupType       string
-	GroupID         int
-	GroupAttribute  string
-	ByType          string
-	ByID            int
-	ByAttribute     string
-	AppliedModifier string
-	RelationID      int
-	Constraints     []QueryConstraintStruct
+	ID              int                     `json:"id"`
+	GroupType       string                  `json:"groupType"`
+	GroupID         []int                   `json:"groupID"`
+	GroupAttribute  string                  `json:"groupAttribute"`
+	ByType          string                  `json:"byType"`
+	ByID            []int                   `json:"byID"`
+	ByAttribute     string                  `json:"byAttribute"`
+	AppliedModifier string                  `json:"appliedModifier"`
+	RelationID      int                     `json:"relationID"`
+	Constraints     []QueryConstraintStruct `json:"constraints"`
 }
 
 // QueryConstraintStruct holds the information of the constraint
 // Constraint datatypes
-// 	string     MatchTypes: exact/contains/startswith/endswith
-// 	int   MatchTypes: GT/LT/EQ
-// 	bool     MatchTypes: EQ/NEQ
+//     string     MatchTypes: exact/contains/startswith/endswith
+//     int   MatchTypes: GT/LT/EQ
+//     bool     MatchTypes: EQ/NEQ
 type QueryConstraintStruct struct {
-	Attribute string
-	Value     string
-	DataType  string
-	MatchType string
-	InID      int
-	InType    string
+	Attribute string `json:"attribute"`
+	Value     string `json:"value"`
+	DataType  string `json:"dataType"`
+	MatchType string `json:"matchType"`
+	InID      int    `json:"inID"`
+	InType    string `json:"inType"`
 }
 
+// QueryMLStruct holds info for machine learning
 type QueryMLStruct struct {
 	Queuename  string
 	Parameters []string
@@ -75,13 +77,75 @@ type QueryMLStruct struct {
 // QueryModifierStruct encapsulates a single modifier with its corresponding constraints
 type QueryModifierStruct struct {
 	Type           string // SUM COUNT AVG
-	SelectedType   string // entity relation
+	SelectedType   string // node relation
 	SelectedTypeID int    // ID of the enitity or relation
 	AttributeIndex int    // = -1 if its the node or relation, = > -1 if an attribute is selected
 }
 
 // QuerySearchDepthStruct holds the range of traversals for the relation
 type QuerySearchDepthStruct struct {
-	Min int
-	Max int
+	Min int `json:"min"`
+	Max int `json:"max"`
+}
+
+// FindE finds the entity with a specified ID in an IncomingQueryJSON struct
+func (JSONQuery IncomingQueryJSON) FindE(qID int) *QueryEntityStruct {
+	for _, part := range JSONQuery.Entities {
+		if part.ID == qID {
+			return &part
+		}
+	}
+	return nil
+}
+
+// FindR finds the relation with a specified ID in an IncomingQueryJSON struct
+func (JSONQuery IncomingQueryJSON) FindR(qID int) *QueryRelationStruct {
+	for _, part := range JSONQuery.Relations {
+		if part.ID == qID {
+			return &part
+		}
+	}
+	return nil
+}
+
+// FindG finds the groupBy with a specified ID in an IncomingQueryJSON struct
+func (JSONQuery IncomingQueryJSON) FindG(qID int) *QueryGroupByStruct {
+	for _, part := range JSONQuery.GroupBys {
+		if part.ID == qID {
+			return &part
+		}
+	}
+	return nil
+}
+
+// QueryPart is a struct containing a part of the query and a list of dependencies on which this part of the query depends
+type QueryPart struct {
+	QType        string     // E.g. whether it is a relation or a groupby
+	QID          int        // ID of said relation/gb
+	PartID       int        // Custom ID used for dependency
+	Dependencies []int      // List of partID's that need to come before
+	NestedPart   *QueryPart // Pointer to another part, used in some cases to avoid cycles
+}
+
+// Query is a list of (possibly unordered) queryparts
+type Query []QueryPart
+
+// Find retrieves a QueryPart based on the query's specifications
+func (q Query) Find(qID int, qType string) *QueryPart {
+	for i := range q {
+		if q[i].QID == qID && q[i].QType == qType {
+			return &q[i]
+		}
+	}
+	return nil
+}
+
+// SelectByID retrieves a QueryPart based on its PartID
+func (q Query) SelectByID(ID int) *QueryPart {
+	for i := range q {
+		if q[i].PartID == ID {
+			return &q[i]
+		}
+	}
+	return nil
 }
diff --git a/entity/queryStructValidator.go b/entity/queryStructValidator.go
index d25f2d8..fdba5fb 100644
--- a/entity/queryStructValidator.go
+++ b/entity/queryStructValidator.go
@@ -90,17 +90,19 @@ func relationFromValid(rel QueryRelationStruct, typeString string, minID int, ma
 }
 
 func groupByGroupValid(groupBy QueryGroupByStruct, typeString string, minID int, maxID int) bool {
-	if groupBy.GroupType == typeString && groupBy.GroupID >= minID && groupBy.GroupID <= maxID {
-		return true
-	}
-	return false
+	// if groupBy.GroupType == typeString && groupBy.GroupID >= minID && groupBy.GroupID <= maxID {
+	// 	return true
+	// }
+	// return false
+	return true
 }
 
 func groupByByValid(groupBy QueryGroupByStruct, typeString string, minID int, maxID int) bool {
-	if groupBy.ByType == typeString && groupBy.ByID >= minID && groupBy.ByID <= maxID {
-		return true
-	}
-	return false
+	// if groupBy.ByType == typeString && groupBy.ByID >= minID && groupBy.ByID <= maxID {
+	// 	return true
+	// }
+	// return false
+	return true
 }
 
 func getMinAndMaxEntityID(entities []QueryEntityStruct) (int, int) {
diff --git a/interface.go b/interface.go
index e4a7500..883c547 100644
--- a/interface.go
+++ b/interface.go
@@ -2,13 +2,9 @@ package query
 
 import (
 	"git.science.uu.nl/graphpolaris/query-conversion/entity"
-	"git.science.uu.nl/graphpolaris/query-conversion/entity/entitycypher"
 )
 
 // A Converter converts an incoming message in our JSON format to a format like AQL or Cypher
 type Converter interface {
 	ConvertQuery(JSONQuery *entity.IncomingQueryJSON) (*string, error)
 }
-type CypherConverterPlaceholder interface {
-	ConvertQuery(JSONQuery *entitycypher.IncomingQueryJSON) (*string, error)
-}
-- 
GitLab