diff --git a/README.md b/README.md
index 7b99f601f6986b9e61f9897f7771bfc2b980f7a4..bed041d3e6c042d7f2d990cadf78ab90155bc5fc 100644
--- a/README.md
+++ b/README.md
@@ -3,7 +3,7 @@ This is the query-conversion package. It holds the code that translates the JSON
 
 ## Creating a new converter
 ```go
-import "git.science.uu.nl/datastrophe/query-conversion"
+import "git.science.uu.nl/graphpolaris/query-conversion"
 
 queryservice := cypher.NewService()
 ```
@@ -17,7 +17,7 @@ query, err := queryservice.ConvertQuery(JSONquery)
 
 ## Creating a mock converter
 ```go
-import "git.science.uu.nl/datastrophe/query-conversion"
+import "git.science.uu.nl/graphpolaris/query-conversion"
 
 mockService := NewMockService()
 ```
\ No newline at end of file
diff --git a/aql/convertQuery.go b/aql/convertQuery.go
index bc15ba79fec8dcc1ee18f6b4d4a915cd9ed6fab0..c9adaa57559e9b09bf9abefd4838c482fcb195b8 100644
--- a/aql/convertQuery.go
+++ b/aql/convertQuery.go
@@ -9,7 +9,7 @@ import (
 	"errors"
 	"fmt"
 
-	"git.science.uu.nl/datastrophe/query-conversion/entity"
+	"git.science.uu.nl/graphpolaris/query-conversion/entity"
 )
 
 // Version 1.13
diff --git a/aql/convertQueryBenchmark_test.go b/aql/convertQueryBenchmark_test.go
index 2506fe36b12162bc8e9d2b9f577be8896d3ee83a..6e87714a9f1dcfd74f291885ee83d319091b8276 100644
--- a/aql/convertQueryBenchmark_test.go
+++ b/aql/convertQueryBenchmark_test.go
@@ -9,7 +9,7 @@ import (
 	"encoding/json"
 	"testing"
 
-	"git.science.uu.nl/datastrophe/query-conversion/entity"
+	"git.science.uu.nl/graphpolaris/query-conversion/entity"
 )
 
 /*
diff --git a/aql/convertQuery_test.go b/aql/convertQuery_test.go
index f083a12123332994a623938178fdd1c973e02887..25b4358aa14054ed7b88ec968faea3b562b09e07 100644
--- a/aql/convertQuery_test.go
+++ b/aql/convertQuery_test.go
@@ -11,7 +11,7 @@ import (
 	"strings"
 	"testing"
 
-	"git.science.uu.nl/datastrophe/query-conversion/entity"
+	"git.science.uu.nl/graphpolaris/query-conversion/entity"
 	"github.com/stretchr/testify/assert"
 )
 
diff --git a/aql/createConstraints.go b/aql/createConstraints.go
index 0a6c8ed08fca24bbe593323320a6b7b505fcb605..0481bd94cbd9cc15935664b45289007c2e343ffb 100644
--- a/aql/createConstraints.go
+++ b/aql/createConstraints.go
@@ -3,7 +3,7 @@ package aql
 import (
 	"fmt"
 
-	"git.science.uu.nl/datastrophe/query-conversion/entity"
+	"git.science.uu.nl/graphpolaris/query-conversion/entity"
 )
 
 /* createConstraintStatements generates the appropriate amount of constraint lines calling createConstraingBoolExpression
diff --git a/aql/debug.test b/aql/debug.test
deleted file mode 100644
index 934cd9a89adf48e864e89ee0528ad2681abfaa01..0000000000000000000000000000000000000000
Binary files a/aql/debug.test and /dev/null differ
diff --git a/aql/mockConvertQuery.go b/aql/mockConvertQuery.go
index 5f82c691a8cc7477ca6faecb09e7c644f684d295..8a23e4c8a1896fdab0475a361da09a3f6716e4b6 100644
--- a/aql/mockConvertQuery.go
+++ b/aql/mockConvertQuery.go
@@ -8,7 +8,7 @@ package aql
 import (
 	"errors"
 
-	"git.science.uu.nl/datastrophe/query-conversion/entity"
+	"git.science.uu.nl/graphpolaris/query-conversion/entity"
 )
 
 /*
diff --git a/cypher/clustering.go b/cypher/clustering.go
new file mode 100644
index 0000000000000000000000000000000000000000..0ca211451714f05dc3809a4add1ef6b15efa9953
--- /dev/null
+++ b/cypher/clustering.go
@@ -0,0 +1,263 @@
+package cypher
+
+import (
+	"errors"
+	"fmt"
+
+	"git.science.uu.nl/graphpolaris/query-conversion/entity"
+)
+
+// checkForQueryCluster will detect (and separate?) if there are multiple queries in the query panel and will try to separate the queries.
+// Maybe also delete floating pills that have no connection (but that is a different function)
+func checkForQueryCluster(JSONQuery *entity.IncomingQueryJSON) (*entity.IncomingQueryJSON, *entity.IncomingQueryJSON, bool) {
+
+	// cluster is a set for all pills (entities e0 e1 e2, relations r0 .., group by g0 ..)
+	cluster := make(map[string]bool)
+
+	if len(JSONQuery.Relations) > 0 {
+		rel := fmt.Sprintf("r%v", JSONQuery.Relations[0].ID)
+		cluster[rel] = true
+
+		if JSONQuery.Relations[0].ToID != -1 {
+
+			// Take the first letter: entities with ID 0 -> e0
+			to := fmt.Sprintf("%v%v", string(JSONQuery.Relations[0].ToType[0]), JSONQuery.Relations[0].ToID)
+			cluster[to] = true
+		}
+
+		if JSONQuery.Relations[0].FromID != -1 {
+			from := fmt.Sprintf("%v%v", string(JSONQuery.Relations[0].FromType[0]), JSONQuery.Relations[0].FromID)
+			cluster[from] = true
+		}
+
+	} else if len(JSONQuery.GroupBys) > 0 {
+		gb := fmt.Sprintf("g%v", JSONQuery.GroupBys[0].ID)
+		cluster[gb] = true
+
+		group := fmt.Sprintf("%v%v", string(JSONQuery.GroupBys[0].GroupType[0]), JSONQuery.GroupBys[0].GroupID)
+		cluster[group] = true
+
+		by := fmt.Sprintf("%v%v", string(JSONQuery.GroupBys[0].ByType[0]), JSONQuery.GroupBys[0].ByID)
+		cluster[by] = true
+
+	} else {
+		// If there is no relation or groupby then there is no query cluster atm
+		// Needs to change when the summary pill is introduced
+		return nil, nil, false
+	}
+
+	for i := 0; i < 100; i++ {
+		stop := true
+
+		// Iteratively check to see if something is connected to the cluster
+		// It should have skips for when something has already been added to the cluster, but due to complex connections (like an IN or groupby attached to a relation)
+		// It is easier to just try everything every time (and it's computationally insignificant)
+		// The loop stops when nothing was added for a round
+
+		for _, rel := range JSONQuery.Relations {
+
+			rela := fmt.Sprintf("r%v", rel.ID)
+
+			partOfCluster := false
+			// Now comes the check to see if one of its endpoints is in the cluster, meaning everything is in the cluster
+			if rel.ToID != -1 {
+				to := fmt.Sprintf("%v%v", string(rel.ToType[0]), rel.ToID)
+
+				if cluster[to] {
+					partOfCluster = true
+				}
+			}
+
+			if rel.FromID != -1 {
+				from := fmt.Sprintf("%v%v", string(rel.FromType[0]), rel.FromID)
+
+				if cluster[from] {
+					partOfCluster = true
+				}
+			}
+
+			if partOfCluster {
+				if rel.ToID != -1 {
+					to := fmt.Sprintf("%v%v", string(rel.ToType[0]), rel.ToID)
+					cluster[to] = true
+				}
+
+				if rel.FromID != -1 {
+					from := fmt.Sprintf("%v%v", string(rel.FromType[0]), rel.FromID)
+					cluster[from] = true
+				}
+
+				cluster[rela] = true
+				stop = false
+			}
+		}
+
+		// Check to see if an entity is connected to the cluster via an 'IN'
+		for _, ent := range JSONQuery.Entities {
+			self := fmt.Sprintf("e%v", ent.ID)
+
+			for _, con := range ent.Constraints {
+				if con.InID != -1 {
+					in := fmt.Sprintf("%v%v", string(con.InType[0]), con.InID)
+
+					if cluster[in] {
+						cluster[self] = true
+						stop = false
+					}
+				}
+			}
+		}
+
+		// Now the same for Group by's
+		for _, gb := range JSONQuery.GroupBys {
+			gby := fmt.Sprintf("g%v", gb.ID)
+
+			// It should have been checked that the connections of the group by are valid, since a group by must have all connections filled (in contrary of a relation)
+
+			group := fmt.Sprintf("%v%v", string(gb.GroupType[0]), gb.GroupID)
+			by := fmt.Sprintf("%v%v", string(gb.ByType[0]), gb.ByID)
+
+			if cluster[group] || cluster[by] {
+				cluster[gby] = true
+				cluster[group] = true
+				cluster[by] = true
+				stop = false
+			}
+
+		}
+
+		if stop {
+			// No new entities were added to the cluster, thus it is finished
+			break
+		}
+	}
+
+	// Now walk through the JSON and divide it into the cluster and rest
+	restJSON := entity.IncomingQueryJSON{DatabaseName: JSONQuery.DatabaseName, Limit: JSONQuery.Limit}
+	clusterJSON := entity.IncomingQueryJSON{DatabaseName: JSONQuery.DatabaseName, Limit: JSONQuery.Limit}
+	isRest := false
+
+	// Loop through entities
+	for _, ent := range JSONQuery.Entities {
+		name := fmt.Sprintf("e%v", ent.ID)
+
+		if cluster[name] {
+			clusterJSON.Entities = append(clusterJSON.Entities, ent)
+		} else {
+			restJSON.Entities = append(restJSON.Entities, ent)
+			isRest = true
+		}
+	}
+
+	// Loop through relations
+	for _, rel := range JSONQuery.Relations {
+		name := fmt.Sprintf("r%v", rel.ID)
+
+		if cluster[name] {
+			clusterJSON.Relations = append(clusterJSON.Relations, rel)
+		} else {
+			restJSON.Relations = append(restJSON.Relations, rel)
+			isRest = true
+		}
+	}
+
+	// Loop through groupby's
+	for _, gb := range JSONQuery.GroupBys {
+		name := fmt.Sprintf("g%v", gb.ID)
+
+		if cluster[name] {
+			clusterJSON.GroupBys = append(clusterJSON.GroupBys, gb)
+		} else {
+			restJSON.GroupBys = append(restJSON.GroupBys, gb)
+			isRest = true
+		}
+	}
+
+	return &clusterJSON, &restJSON, isRest
+}
+
+// checkNoDeadEnds checks to see if all from's and to's exist
+func checkNoDeadEnds(JSONQuery *entity.IncomingQueryJSON) (bool, error) {
+
+	// Check for all the connections of a relation
+	for _, rel := range JSONQuery.Relations {
+		if rel.FromID != -1 {
+			if rel.FromType == "entity" {
+				ent := JSONQuery.FindE(rel.FromID)
+				if ent == nil {
+					return false, errors.New("Invalid query")
+				}
+			} else if rel.FromType == "groupBy" {
+				gb := JSONQuery.FindG(rel.FromID)
+				if gb == nil {
+					return false, errors.New("Invalid query")
+				}
+			}
+		}
+
+		if rel.ToID != -1 {
+			if rel.ToType == "entity" {
+				ent := JSONQuery.FindE(rel.ToID)
+				if ent == nil {
+					return false, errors.New("Invalid query")
+				}
+			} else if rel.ToType == "groupBy" {
+				gb := JSONQuery.FindG(rel.ToID)
+				if gb == nil {
+					return false, errors.New("Invalid query")
+				}
+			}
+		}
+	}
+
+	// Check for all the connections of a group by
+	for _, gb := range JSONQuery.GroupBys {
+		if gb.GroupType == "entity" {
+			ent := JSONQuery.FindE(gb.GroupID)
+			if ent == nil {
+				return false, errors.New("Invalid query")
+			}
+		}
+
+		if gb.GroupType == "relation" {
+		rel := JSONQuery.FindR(gb.GroupID)
+			if rel == nil {
+				return false, errors.New("Invalid query")
+			}
+		}
+
+		if gb.ByType == "entity" {
+			ent := JSONQuery.FindE(gb.ByID)
+			if ent == nil {
+				return false, errors.New("Invalid query")
+			}
+		}
+
+		if gb.ByType == "relation" {
+		rel := JSONQuery.FindR(gb.ByID)
+			if rel == nil {
+				return false, errors.New("Invalid query")
+			}
+		}
+	}
+
+	// Check all the connections of IN-statements
+	for _, ent := range JSONQuery.Entities {
+		if len(ent.Constraints) == 0 {
+			continue
+		}
+
+		for _, cons := range ent.Constraints {
+			if cons.InID == -1 {
+				continue
+			}
+
+			gb := JSONQuery.FindG(cons.InID)
+			if gb == nil {
+				return false, errors.New("Invalid query")
+			}
+		}
+	}
+
+	return true, nil
+}
diff --git a/cypher/convertQuery.go b/cypher/convertQuery.go
index 3a89ec88c6b66c76c5fe716180db9247d8ac8715..dfdd414daaf845d7734e0ca1361f5452309ad4dd 100644
--- a/cypher/convertQuery.go
+++ b/cypher/convertQuery.go
@@ -1,243 +1,658 @@
-/*
-This program has been developed by students from the bachelor Computer Science at Utrecht University within the Software Project course.
-© Copyright Utrecht University (Department of Information and Computing Sciences)
-*/
-
 package cypher
 
 import (
 	"errors"
 	"fmt"
+	"log"
 	"strings"
 
-	"git.science.uu.nl/datastrophe/query-conversion/entity"
+	"git.science.uu.nl/graphpolaris/query-conversion/entity"
 )
 
-/*
-ConvertQuery converts an IncomingQueryJSON object into AQL
-	JSONQuery: *entity.IncomingQueryJSON, the query to be converted to AQL
-	Returns: (*string, error), the AQL query and a possible error
-*/
-func (s *Service) ConvertQuery(JSONQuery *entity.IncomingQueryJSON) (*string, error) {
+// ConvertQuery takes the json from the visual query builder and converts it into Cypher
+func (s *Service) ConvertQuery(totalJSONQuery *entity.IncomingQueryJSON) (*string, error) {
+	var finalCypher *string
 
-	// Check to make sure all indexes exist
-	// How many entities are there
-	numEntities := len(JSONQuery.Entities) - 1
-	// How many relations there are
-	numRelations := len(JSONQuery.Relations) - 1
+	queryJSON := totalJSONQuery
 
-	// Make sure no entity should be returned that is outside the range of that list
-	for _, e := range JSONQuery.Return.Entities {
-		// If this entity references an entity that is outside the range
-		if e > numEntities || e < 0 {
-			return nil, errors.New("non-existing entity referenced in return")
-		}
+	// If you want to query the other cluster as well, remove the underscores
+	query, _, _ := checkForQueryCluster(queryJSON)
+
+	if query == nil {
+		return nil, errors.New("Invalid query")
 	}
 
-	// Make sure that no relation mentions a non-existing entity
-	for _, r := range JSONQuery.Relations {
-		if r.EntityFrom > numEntities || r.EntityTo > numEntities {
-			return nil, errors.New("non-exisiting entity referenced in relation")
-		}
+	ok, err := checkNoDeadEnds(query)
+	if !ok {
+		return nil, err
 	}
 
-	// Make sure no non-existing relation is tried to be returned
-	for _, r := range JSONQuery.Return.Relations {
-		if r > numRelations || r < 0 {
-			return nil, errors.New("non-existing relation referenced in return")
-		}
+	finalCypher, err = createCypher(query)
+	if err != nil {
+		return nil, err
 	}
 
-	result := createQuery(JSONQuery)
-	return result, nil
+	return finalCypher, nil
 }
 
-/*
-sliceContains checks if a slice contains the input
-	s: []int, the slice to check
-	e: int, what you're checking for
-	Return: bool, true if it contains 'e'
-*/
-func sliceContains(s []int, e int) bool {
-	for _, a := range s {
-		if a == e {
-			return true
-		}
+// createCypher translates a cluster of nodes (query) to Cypher
+func createCypher(JSONQuery *entity.IncomingQueryJSON) (*string, error) {
+
+	// create the hierarchy from the cluster
+	hierarchy, err := createQueryHierarchy(JSONQuery)
+	if err != nil {
+		return nil, err
 	}
-	return false
-}
 
-/*TrimSuffix trims the final character of a string */
-func TrimSuffix(s, suffix string) string {
-	if strings.HasSuffix(s, suffix) {
-		s = s[:len(s)-len(suffix)]
+	// translate it to cypher in the right order, using the hierarchy
+	cypher, err := formQuery(JSONQuery, hierarchy)
+	if err != nil {
+		return nil, errors.New("Creation of query Cypher failed")
 	}
-	return s
+
+	// create the return statement
+	returnStatement, err := createReturnStatement(JSONQuery, hierarchy)
+	if err != nil {
+		return nil, errors.New("Creation of return Cypher failed")
+	}
+
+	finalCypher := *cypher + *returnStatement
+
+	return &finalCypher, nil
 }
 
-/*
-createQuery generates a query based on the json file provided
-	JSONQuery: *entity.IncomingQueryJSON, jsonQuery is a parsedJSON struct holding all the data needed to form a query
-	Return: *string, a string containing the corresponding AQL query and an error
-*/
-func createQuery(JSONQuery *entity.IncomingQueryJSON) *string {
-	// Note: Case #4, where there is an edge only query (without any entity), is not supported by frontend
-
-	// If a modifier is used, disable the limit
-	if len(JSONQuery.Modifiers) > 0 {
-		JSONQuery.Limit = -1
-	}
-
-	var (
-		relationsToReturn []string
-		nodesToReturn     []string
-		nodeUnion         string
-		relationUnion     string
-		queryList         [][][]int
-		entityList        []int
-		ret               string
-	)
-
-	for i, relation := range JSONQuery.Relations {
-		var contains bool
-		contains = false
-		for j := range queryList {
-			if sliceContains(queryList[j][0], relation.EntityFrom) || sliceContains(queryList[j][0], relation.EntityTo) {
-				if !sliceContains(queryList[j][0], relation.EntityFrom) {
-					queryList[j][0] = append(queryList[j][0], relation.EntityFrom)
-					entityList = append(entityList, relation.EntityFrom)
+// createReturnStatement creates the final return statement
+func createReturnStatement(JSONQuery *entity.IncomingQueryJSON, parts entity.Query) (*string, error) {
+
+	var retStatement string
+	var retType string // This is a marker attached to the end, for ease of parsing in the executor
+
+	// First check to see if the return is a table (due to a groupby at the end) or if it is nodelink data
+	numOfParts := len(parts)
+	if numOfParts == 0 {
+		return nil, errors.New("No parts found in return statement")
+	}
+
+	if parts[numOfParts-1].QType == "groupBy" {
+		// Return is a table
+		groupBy := JSONQuery.FindG(parts[numOfParts-1].QID)
+
+		gName := fmt.Sprintf("%v_%v", groupBy.AppliedModifier, groupBy.GroupAttribute)
+		by := fmt.Sprintf("%v%v.%v", string(groupBy.ByType[0]), groupBy.ByID, groupBy.ByAttribute)
+		byName := strings.Replace(by, ".", "_", 1)
+
+		retStatement = fmt.Sprintf("RETURN %v, %v", byName, gName)
+		retType = ";table"
+	} else {
+		// Return is nodelink
+		// Loop through the parts of the query from back to front
+		retStatement = "RETURN "
+		lineStart := ""
+		for i := numOfParts - 1; i >= 0; i-- {
+			part := parts[i]
+			if part.QType == "relation" {
+				rel := JSONQuery.FindR(part.QID)
+				retStatement += fmt.Sprintf("%v r%v", lineStart, rel.ID)
+				lineStart = ","
+
+				if rel.FromID != -1 {
+					if rel.FromType == "entity" {
+
+						retStatement += fmt.Sprintf("%v e%v", lineStart, rel.FromID)
+					} else {
+						id := JSONQuery.FindG(rel.FromID).ByID
+						retStatement += fmt.Sprintf("%v eg%v", lineStart, id)
+					}
 				}
-				if !sliceContains(queryList[j][0], relation.EntityTo) {
-					queryList[j][0] = append(queryList[j][0], relation.EntityTo)
-					entityList = append(entityList, relation.EntityTo)
+
+				if rel.ToID != -1 {
+					if rel.ToType == "entity" {
+
+						retStatement += fmt.Sprintf("%v e%v", lineStart, rel.ToID)
+					} else {
+						id := JSONQuery.FindG(rel.ToID).ByID
+						retStatement += fmt.Sprintf("%v eg%v", lineStart, id)
+					}
 				}
-				queryList[j][1] = append(queryList[j][1], i)
-				contains = true
+			} else if part.QType == "entity" {
+				retStatement += fmt.Sprintf("%v e%v", lineStart, part.QID)
+				break
+
+				// Probably ends with a break, since a single entity is always connected via an IN to a groupby? (maybe not in case of ONLY having an entity as the entire query)
+			} else {
+				// Then it is a groupby which must not be returned, thus the returns are done.
+				break
 			}
 		}
-		if !contains {
-			queryList = append(queryList, [][]int{{relation.EntityFrom, relation.EntityTo}, {i}})
+
+		retType = ";nodelink"
+	}
+
+	retStatement = retStatement + "\n" + fmt.Sprintf("LIMIT %v", JSONQuery.Limit) + retType
+
+	return &retStatement, nil
+}
+
+// createQueryHierarchy finds out what depends on what, then uses topological sort to create a hierarchy
+func createQueryHierarchy(JSONQuery *entity.IncomingQueryJSON) (entity.Query, error) {
+
+	var parts entity.Query
+	IDctr := 0
+
+	// Add relations all to query parts
+	for _, rel := range JSONQuery.Relations {
+
+		part := entity.QueryPart{
+			QType:        "relation",
+			QID:          rel.ID,
+			PartID:       IDctr,
+			Dependencies: make([]int, 0),
 		}
+		parts = append(parts, part)
+
+		IDctr++
+
 	}
 
-	for i := range queryList {
-		//reset variables for the next query
-		nodeUnion = ""
-		relationUnion = ""
-		relationsToReturn = []string{}
-		for j, relationID := range queryList[i][1] {
-			relationName := fmt.Sprintf("r%v", j)
-			relation := JSONQuery.Relations[relationID]
-			pathName := fmt.Sprintf("p%v", j)
-			relationsToReturn = append(relationsToReturn, pathName)
-			if relation.EntityFrom >= 0 {
-				// if there is a from-node
-				// create the let for this node
-				fromName := fmt.Sprintf("n%v", relation.EntityFrom)
+	// Add the Groupby's
+	for _, gb := range JSONQuery.GroupBys {
+		part := entity.QueryPart{
+			QType:        "groupBy",
+			QID:          gb.ID,
+			PartID:       IDctr,
+			Dependencies: make([]int, 0),
+		}
+		parts = append(parts, part)
 
-				ret += *createNodeMatch(&JSONQuery.Entities[relation.EntityFrom], &fromName)
+		IDctr++
 
-				ret += *createRelationMatch(&relation, relationName, pathName, &JSONQuery.Entities, JSONQuery.Limit, true)
-			} else if relation.EntityTo >= 0 {
-				// if there is only a to-node
-				toName := fmt.Sprintf("n%v", relation.EntityTo)
+	}
 
-				ret += *createNodeMatch(&JSONQuery.Entities[relation.EntityTo], &toName)
+	// Add the entities, if they have an IN, otherwise they are not important
+	for _, ent := range JSONQuery.Entities {
 
-				ret += *createRelationMatch(&relation, relationName, pathName, &JSONQuery.Entities, JSONQuery.Limit, false)
-				// Add this relation to the list
-			} else {
-				fmt.Println("Relation-only queries are currently not supported")
+		skip := true
+		for _, con := range ent.Constraints {
+			if con.InID != -1 {
+				skip = false
+			}
+		}
+
+		if skip {
+			continue
+		}
+
+		part := entity.QueryPart{
+			QType:        "entity",
+			QID:          ent.ID,
+			PartID:       IDctr,
+			Dependencies: make([]int, 0),
+		}
+		parts = append(parts, part)
+
+		IDctr++
+	}
+
+	// Check dependencies in a nice O(n^2)
+	for _, rel := range JSONQuery.Relations {
+		if rel.FromID == -1 {
+			continue
+		}
+
+		// Check the dependencies From - To
+		for _, rela := range JSONQuery.Relations {
+			if rela.ToID == -1 {
 				continue
 			}
+
+			if rel.FromID == rela.ToID && rel.FromType == rela.ToType {
+				part := parts.Find(rel.ID, "relation")
+				part.Dependencies = append(part.Dependencies, parts.Find(rela.ID, "relation").PartID)
+			}
+		}
+
+		if rel.ToID == -1 {
+			continue
+		}
+
+		// Now for connections to group by's it doesnt matter if the GB is attached to the from or the to
+		// The GB always has priority
+		for _, gb := range JSONQuery.GroupBys {
+			if (rel.FromID == gb.ID && rel.FromType == "groupBy") || (rel.ToID == gb.ID && rel.ToType == "groupBy") {
+				part := parts.Find(rel.ID, "relation")
+				gbID := parts.Find(gb.ID, "groupBy").PartID
+				part.Dependencies = append(part.Dependencies, gbID)
+			}
 		}
+	}
 
-		// Create UNION statements that create unique lists of all the nodes and relations
+	// Same trick for group by's
+	for _, gb := range JSONQuery.GroupBys {
+		for _, rela := range JSONQuery.Relations {
+			// Check if the gb is connected to the relation
+			if (gb.ByID == rela.ID && gb.ByType == "relation") || // Is the By connected to a relation
+				(gb.GroupID == rela.ID && gb.GroupType == "relation") || // is the Group connected to a relation
+				(gb.ByID == rela.FromID && gb.ByType == rela.FromType) || // Is the by connected to an entity connected to the "From" of a relation
+				(gb.ByID == rela.ToID && gb.ByType == rela.ToType) || // Is the by connected to an entity connected to the "To" of a relation
+				(gb.GroupID == rela.FromID && gb.GroupType == rela.FromType) || // Is the group connected to an entity connected to the "From" of a relation
+				(gb.GroupID == rela.ToID && gb.GroupType == rela.ToType) { // Is the group connected to an entity connected to the "To" of a relation
+				part := parts.Find(gb.ID, "groupBy")
+				part.Dependencies = append(part.Dependencies, parts.Find(rela.ID, "relation").PartID)
+			}
+		}
 
-		// Thus removing all duplicates
-		nodeUnion = "RETURN "
+		// Not sure if this is even possible, but hey who knows
+		// Check to see if the gb is connected to another gb
+		for _, grb := range JSONQuery.GroupBys {
+			if gb.ID == grb.ID {
+				continue
+			}
 
-		for _, entityID := range queryList[i][0] {
-			if sliceContains(JSONQuery.Return.Entities, entityID) {
-				nodeUnion += fmt.Sprintf("n%v,", entityID)
+			if (gb.GroupID == grb.ID && gb.GroupType == "groupBy") || (gb.ByID == grb.ID && gb.ByType == "groupBy") {
+				part := parts.Find(gb.ID, "groupBy")
+				part.Dependencies = append(part.Dependencies, parts.Find(grb.ID, "groupBy").PartID)
 			}
 		}
+	}
 
-		for _, relation := range relationsToReturn {
-			relationUnion += fmt.Sprintf("%v,", relation)
+	for _, ent := range JSONQuery.Entities {
+		for _, con := range ent.Constraints {
+			if con.InID != -1 {
+				part := parts.Find(ent.ID, "entity") // Should always be groupBy
+				part.Dependencies = append(part.Dependencies, parts.Find(con.InID, con.InType).PartID)
+			}
 		}
 
-		relationUnion = TrimSuffix(relationUnion, ",")
-		// hier zat een newline
-		ret += nodeUnion + relationUnion + "; "
 	}
 
-	nodeSet := make(map[int]bool)
-	for _, relation := range JSONQuery.Relations {
-		nodeSet[relation.EntityFrom] = true
-		nodeSet[relation.EntityTo] = true
+	// Here comes a checker for (A)-->(B) and (B)-->(A). This is mitigated partly by ignoring it
+	// Lets call it a small cycle. It wont catch bigger cycles (with 3 nodes for example)
+
+	for _, p := range parts {
+		// We only allow small cycles with relations
+		if p.QType != "relation" {
+			continue
+		}
+
+		for _, dep := range p.Dependencies {
+			other := parts.SelectByID(dep)
+
+			if other.QType != "relation" {
+				continue
+			}
+
+			// Deleting from a slice while looping through it is an easy way to make mistakes, hence the workaround
+			cycle := false
+			toRemove := -1
+
+			for i, otherDep := range other.Dependencies {
+				if otherDep == p.PartID {
+					// Small cycle detected
+
+					cycle = true
+					toRemove = i
+				}
+			}
+
+			// Remove one of the two dependencies, does not really matter which, cypher knits it back together due to the query
+			// using the same ID's, thus making it a cycle again later on.
+			if cycle {
+				log.Println("Cycle detected and removed")
+				if len(other.Dependencies) == 0 {
+					other.Dependencies = make([]int, 0)
+				} else {
+					other.Dependencies[toRemove] = other.Dependencies[len(other.Dependencies)-1]
+					other.Dependencies = other.Dependencies[:len(other.Dependencies)-1]
+				}
+
+			}
+		}
+	}
+
+	// Now we have a directed graph, meaning we can use some topological sort (Kahn's algorithm)
+	var sortedQuery entity.Query
+	incomingEdges := make(map[int]int)
+
+	// Set all to 0
+	for _, p := range parts {
+		incomingEdges[p.PartID] = 0
 	}
 
-	// Check if the entities to return are already returned
-	for _, entityIndex := range JSONQuery.Return.Entities {
-		if !nodeSet[entityIndex] {
-			// If not, return this node
-			name := fmt.Sprintf("n%v", entityIndex)
-			ret += *createNodeMatch(&JSONQuery.Entities[entityIndex], &name)
-			// Add this node to the list
-			nodesToReturn = append(nodesToReturn, name)
-			ret += fmt.Sprintf("RETURN %v", name)
+	// Count the incoming edges (dependencies)
+	for _, p := range parts {
+		for _, dp := range p.Dependencies {
+			incomingEdges[dp]++
 		}
 	}
 
-	ret = TrimSuffix(ret, " ")
-	return &ret
+	for { // While there is a someone where incomingEdges[someone] == 0
+		part := entity.QueryPart{PartID: -1}
+		// Select a node with no incoming edges
+		for ID, edges := range incomingEdges {
+			if edges == 0 {
+				part = *parts.SelectByID(ID)
+			}
+		}
+
+		// Check to see if there are parts without incoming edges left
+		if part.PartID == -1 {
+			break
+		}
+
+		// Remove it from the set
+		incomingEdges[part.PartID] = -1
+		sortedQuery = append(sortedQuery, part)
+
+		// Decrease incoming edges of other parts
+		for _, ID := range part.Dependencies {
+			incomingEdges[ID]--
+		}
+	}
+
+	// Now check for cycles in the graph
+	partRemaining := false
+	for _, edges := range incomingEdges {
+		if edges != -1 {
+			partRemaining = true
+		}
+	}
+
+	if partRemaining {
+		// Somehow there was a cycle in the query,
+		return nil, errors.New("Cyclic query detected")
+	}
+
+	// Reverse the list
+	retQuery := make([]entity.QueryPart, len(sortedQuery))
+	for i := 0; i < len(sortedQuery); i++ {
+		retQuery[i] = sortedQuery[len(sortedQuery)-i-1]
+	}
+
+	return retQuery, nil
 }
 
-/*
-createNodeLet generates a 'LET' statement for a node related query
-	node: *entity.QueryEntityStruct, node is an entityStruct containing the information of a single node,
-	name: *string, is the autogenerated name of the node consisting of "n" + the index of the node
-	Return: *string, a string containing a single LET-statement in AQL
-*/
-func createNodeMatch(node *entity.QueryEntityStruct, name *string) *string {
-	// hier zat een newline
-	header := fmt.Sprintf("MATCH (%v:%v) ", *name, node.Type)
-	constraints := *createConstraintStatements(&node.Constraints, *name)
-	ret := header + constraints
-	return &ret
+// formQuery uses the hierarchy to create cypher for each part of the query in the right order
+func formQuery(JSONQuery *entity.IncomingQueryJSON, hierarchy entity.Query) (*string, error) {
+
+	// Traverse through the hierarchy and for every entry create a part like:
+	// Match p0 = (l:Lorem)-[:Ipsum*1..1]-(d:Dolor)
+	// Constraints on l and d
+	// Unwind relationships(p0) as r0
+	// Constraints on r0
+	// With *
+
+	totalQuery := ""
+
+	for _, entry := range hierarchy {
+		var cypher *string
+		var err error
+
+		switch entry.QType {
+		case "relation":
+			cypher, err = createRelationCypher(JSONQuery, entry)
+			if err != nil {
+				return nil, err
+			}
+			break
+		case "groupBy":
+			cypher, err = createGroupByCypher(JSONQuery, entry)
+			if err != nil {
+				return nil, err
+			}
+
+			break
+		case "entity":
+			// This would be in case of an IN or if there was only 1 entity in the query builder
+			cypher, err = createInCypher(JSONQuery, entry)
+			if err != nil {
+				return nil, err
+			}
+
+			break
+		default:
+			// Should never be reached
+			return nil, errors.New("Invalid query pill type detected")
+		}
+
+		totalQuery += *cypher
+	}
+
+	return &totalQuery, nil
+}
+
+// createInCypher creates the cypher for an entity with an IN-clause
+func createInCypher(JSONQuery *entity.IncomingQueryJSON, part entity.QueryPart) (*string, error) {
+	ent := JSONQuery.FindE(part.QID)
+	eName := fmt.Sprintf("e%v", ent.ID)
+
+	match := fmt.Sprintf("MATCH (%v:%v)\n", eName, ent.Name)
+	eConstraints := ""
+	newLineStatement := "\tWHERE"
+
+	// Find the IN
+	for _, con := range ent.Constraints {
+		if con.InID != -1 {
+			gby := JSONQuery.FindG(con.InID) // Because this could only be on a groupby
+			byName := fmt.Sprintf("%v%v", string(gby.ByType[0]), gby.ByID)
+			eConstraints += fmt.Sprintf("%v %v.%v IN %v_%v\n", newLineStatement, eName, con.Attribute, byName, gby.ByAttribute)
+			newLineStatement = "\tAND"
+		}
+	}
+
+	// Attach other constraints (if any)
+	for _, v := range ent.Constraints {
+		if v.InID != -1 {
+			continue
+		}
+		eConstraints += fmt.Sprintf("%v %v \n", newLineStatement, *createConstraintBoolExpression(&v, eName, false))
+	}
+
+	with := "WITH *\n"
+	retStatement := match + eConstraints + with
+	return &retStatement, nil
+
 }
 
-/*
-createRelationLetWithFromEntity generates a 'LET' statement for relations with an 'EntityFrom' property and optionally an 'EntitiyTo' property
-	relation: *entity.QueryRelationStruct, relation is a relation struct containing the information of a single relation,
-	relationName: string, is the name of the relation, is the autogenerated name of the node consisting of "r" + the index of the relation,
-	pathName: string, is the path of the name,
-	entities: *[]entity.QueryEntityStruct, is a list of entityStructs that are needed to form the relation LET-statement
-	limit: int, the limit for the number of nodes to return
-	outbound: bool, checks if the relation is inbound or outbound
-	Return: *string, a string containing a single LET-statement in AQL
-*/
-func createRelationMatch(relation *entity.QueryRelationStruct, relationName string, pathName string, entities *[]entity.QueryEntityStruct, limit int, outbound bool) *string {
-	relationReturn := ""
-	var relationBounds int
-	if outbound {
-		relationReturn = fmt.Sprintf("MATCH %v = (n%v)-[%v:%v*%v..%v]->(", pathName, relation.EntityFrom, relationName, relation.Type, relation.Depth.Min, relation.Depth.Max)
-		relationBounds = relation.EntityTo
+// createRelationCypher takes the json and a query part, finds the necessary entities and converts it into cypher
+func createRelationCypher(JSONQuery *entity.IncomingQueryJSON, part entity.QueryPart) (*string, error) {
+
+	rel := JSONQuery.FindR(part.QID)
+
+	if (rel.FromID == -1) && (rel.ToID == -1) {
+		// Now there is only a relation, which we do not allow
+		return nil, errors.New("Relation only queries are not supported")
+	}
+
+	var match, eConstraints, unwind, rConstraints string
+
+	// There is some duplicate code here below that could be omitted with extra if-statements, but that is something to do
+	// for a later time. Since this way it is easier to understand the flow of the code
+	// Removing the duplicate code here, probably more than triples the if-statements and is a puzzle for a later time (TODO)
+	if rel.ToID == -1 {
+		// There is no To, only a From
+		var eName string
+		var ent *entity.QueryEntityStruct
+
+		if rel.FromType == "entity" {
+
+			ent = JSONQuery.FindE(rel.FromID)
+			eName = fmt.Sprintf("e%v", ent.ID)
+
+		} else if rel.FromType == "groupBy" {
+			gb := JSONQuery.FindG(rel.FromID)
+			if gb.ByType == "relation" {
+				return nil, errors.New("Invalid query: cannot connect a relation to a group by that groups by another relation")
+			}
+
+			ent = JSONQuery.FindE(gb.ByID)
+			// This is a sort of dummy variable, since it is not directly visible in the query, but it is definitely needed
+			eName = fmt.Sprintf("e%v", ent.ID)
+		} else {
+			// Should never be reachable
+			return nil, errors.New("Invalid connection type to relation")
+		}
+
+		match = fmt.Sprintf("MATCH p%v = (%v:%v)-[:%v*%v..%v]-()\n", part.PartID, eName, ent.Name, rel.Name, rel.Depth.Min, rel.Depth.Max)
+
+		eConstraints = ""
+		newLineStatement := "\tWHERE"
+		for _, v := range ent.Constraints {
+			eConstraints += fmt.Sprintf("%v %v \n", newLineStatement, *createConstraintBoolExpression(&v, eName, false))
+			newLineStatement = "\tAND"
+		}
+
+		// Add an IN clause, connecting the relation to the output of the groupby
+		if rel.FromType == "groupBy" {
+			gb := JSONQuery.FindG(rel.FromID)
+			inConstraint := fmt.Sprintf("%v %v.%v IN e%v_%v \n", newLineStatement, eName, gb.ByAttribute, gb.ByID, gb.ByAttribute)
+			eConstraints += inConstraint
+		}
+
+	} else if rel.FromID == -1 {
+		var eName string
+		var ent *entity.QueryEntityStruct
+
+		if rel.ToType == "entity" {
+			ent = JSONQuery.FindE(rel.ToID)
+			eName = fmt.Sprintf("e%v", ent.ID)
+
+		} else if rel.ToType == "groupBy" {
+			gb := JSONQuery.FindG(rel.ToID)
+			if gb.ByType == "relation" {
+				return nil, errors.New("Invalid query: cannot connect a relation to a group by that groups by another relation")
+			}
+
+			ent = JSONQuery.FindE(gb.ByID)
+			// This is a sort of dummy variable, since it is not directly visible in the query, but it is definitely needed
+			eName = fmt.Sprintf("e%v", ent.ID)
+		} else {
+			// Should never be reachable
+			return nil, errors.New("Invalid connection type to relation")
+		}
+
+		match = fmt.Sprintf("MATCH p%v = ()-[:%v*%v..%v]-(%v:%v)\n", part.PartID, rel.Name, rel.Depth.Min, rel.Depth.Max, eName, ent.Name)
+
+		eConstraints = ""
+		newLineStatement := "\tWHERE"
+		for _, v := range ent.Constraints {
+			eConstraints += fmt.Sprintf("%v %v \n", newLineStatement, *createConstraintBoolExpression(&v, eName, false))
+			newLineStatement = "\tAND"
+		}
+
+		// Add an IN clause, connecting the relation to the output of the groupby
+		if rel.ToType == "groupBy" {
+			gb := JSONQuery.FindG(rel.ToID)
+			inConstraint := fmt.Sprintf("%v %v.%v IN e%v_%v \n", newLineStatement, eName, gb.ByAttribute, gb.ByID, gb.ByAttribute)
+			eConstraints += inConstraint
+		}
 
 	} else {
-		relationReturn = fmt.Sprintf("MATCH %v = (n%v)-[%v:%v*%v..%v]->(", pathName, relation.EntityTo, relationName, relation.Type, relation.Depth.Min, relation.Depth.Max)
-		relationBounds = relation.EntityFrom
+		var eTName string
+		var entFrom *entity.QueryEntityStruct
+		var eFName string
+		var entTo *entity.QueryEntityStruct
+
+		// Check of what type the To is
+		if rel.ToType == "entity" {
+			entTo = JSONQuery.FindE(rel.ToID)
+			eTName = fmt.Sprintf("e%v", entTo.ID)
+
+		} else if rel.ToType == "groupBy" {
+			gb := JSONQuery.FindG(rel.ToID)
+			if gb.ByType == "relation" {
+				return nil, errors.New("Invalid query: cannot connect a relation to a group by that groups by another relation")
+			}
+
+			entTo = JSONQuery.FindE(gb.ByID)
+			// this is a sort of dummy variable, since it is not directly visible in the query, but it is definitely needed
+			eTName = fmt.Sprintf("e%v", entTo.ID)
+		} else {
+			// Should never be reachable
+			return nil, errors.New("Invalid connection type to relation")
+		}
+
+		// Check of what type the From is
+		if rel.FromType == "entity" {
+
+			entFrom = JSONQuery.FindE(rel.FromID)
+			eFName = fmt.Sprintf("e%v", entFrom.ID)
+
+		} else if rel.FromType == "groupBy" {
+			gb := JSONQuery.FindG(rel.FromID)
+			if gb.ByType == "relation" {
+				return nil, errors.New("Invalid query: cannot connect a relation to a group by that groups by another relation")
+			}
+
+			entFrom = JSONQuery.FindE(gb.ByID)
+			// This is a sort of dummy variable, since it is not directly visible in the query, but it is definitely needed
+			eFName = fmt.Sprintf("eg%v", entFrom.ID)
+		} else {
+			// Should never be reachable
+			return nil, errors.New("Invalid connection type to relation")
+		}
+
+		match = fmt.Sprintf("MATCH p%v = (%v:%v)-[:%v*%v..%v]-(%v:%v)\n", part.PartID, eFName, entFrom.Name, rel.Name, rel.Depth.Min, rel.Depth.Max, eTName, entTo.Name)
+
+		eConstraints = ""
+		newLineStatement := "\tWHERE"
+		for _, v := range entFrom.Constraints {
+			eConstraints += fmt.Sprintf("%v %v \n", newLineStatement, *createConstraintBoolExpression(&v, eFName, false))
+			newLineStatement = "\tAND"
+		}
+		for _, v := range entTo.Constraints {
+			eConstraints += fmt.Sprintf("%v %v \n", newLineStatement, *createConstraintBoolExpression(&v, eTName, false))
+			newLineStatement = "\tAND"
+		}
+
+		// Add an IN clause, connecting the relation to the output of the groupby
+		if rel.ToType == "groupBy" {
+			gb := JSONQuery.FindG(rel.ToID)
+			inConstraint := fmt.Sprintf("%v %v.%v IN e%v_%v \n", newLineStatement, eTName, gb.ByAttribute, gb.ByID, gb.ByAttribute)
+			eConstraints += inConstraint
+			newLineStatement = "\tAND"
+		}
+
+		if rel.FromType == "groupBy" {
+			gb := JSONQuery.FindG(rel.FromID)
+			inConstraint := fmt.Sprintf("%v %v.%v IN %v_%v \n", newLineStatement, eFName, gb.ByAttribute, strings.Replace(eFName, "g", "", 1), gb.ByAttribute)
+			eConstraints += inConstraint
+		}
 	}
 
-	if relationBounds != -1 {
-		relationReturn += fmt.Sprintf("n%v", relationBounds)
+	rName := fmt.Sprintf("r%v", part.QID)
+	unwind = fmt.Sprintf("UNWIND relationships(p%v) as %v \nWITH *\n", part.PartID, rName)
+
+	rConstraints = ""
+	newLineStatement := "\tWHERE"
+	for _, v := range rel.Constraints {
+		rConstraints += fmt.Sprintf("%v %v \n", newLineStatement, *createConstraintBoolExpression(&v, rName, false))
+		newLineStatement = "\tAND"
 	}
-	relationReturn += ")"
 
-	constraintReturn := *createConstraintStatements(&relation.Constraints, relationName)
-	// hier zat een newline
-	ret := relationReturn + " " + constraintReturn
+	retString := match + eConstraints + unwind + rConstraints
+	return &retString, nil
+
+}
+
+// createGroupByCypher takes the json and a query part, finds the group by and converts it into cypher
+func createGroupByCypher(JSONQuery *entity.IncomingQueryJSON, part entity.QueryPart) (*string, error) {
+	groupBy := JSONQuery.FindG(part.QID)
+
+	gName := fmt.Sprintf("%v_%v", groupBy.AppliedModifier, groupBy.GroupAttribute)
+	by := fmt.Sprintf("%v%v.%v", string(groupBy.ByType[0]), groupBy.ByID, groupBy.ByAttribute)
+	byName := strings.Replace(by, ".", "_", 1)
+	group := fmt.Sprintf("%v%v.%v", string(groupBy.GroupType[0]), groupBy.GroupID, groupBy.GroupAttribute)
+
+	// If you do not use a *, then everything needs to be aliased
+	with := fmt.Sprintf("WITH %v AS %v, %v(%v) AS %v \n", by, byName, groupBy.AppliedModifier, group, gName)
+
+	gConstraints := ""
+	newLineStatement := "\tWHERE"
+	for _, v := range groupBy.Constraints {
+		gConstraints += fmt.Sprintf("%v %v \n", newLineStatement, *createConstraintBoolExpression(&v, gName, true))
+		newLineStatement = "\tAND"
+	}
 
-	return &ret
+	retString := with + gConstraints
+	return &retString, nil
 }
diff --git a/cypher/convertQueryBenchmark_test.go b/cypher/convertQueryBenchmark_test.go
index 3794ab92a7434af21c8fcc34fb640a6d1d789f3a..d1a539722629a52899d790051b19932f2ab836d7 100644
--- a/cypher/convertQueryBenchmark_test.go
+++ b/cypher/convertQueryBenchmark_test.go
@@ -4,7 +4,7 @@ import (
 	"encoding/json"
 	"testing"
 
-	"git.science.uu.nl/datastrophe/query-conversion/entity"
+	"git.science.uu.nl/graphpolaris/query-conversion/entity"
 )
 
 func BenchmarkConvertEmptyQuery(b *testing.B) {
diff --git a/cypher/convertQuery_test.go b/cypher/convertQuery_test.go
index c07a06556a6e16f92d52ffae50e7de21d0d75a70..01de62f85542165dca4cb36e07a39fc27999c787 100644
--- a/cypher/convertQuery_test.go
+++ b/cypher/convertQuery_test.go
@@ -3,892 +3,1131 @@ package cypher
 import (
 	"encoding/json"
 	"errors"
+	"fmt"
 	"strings"
 	"testing"
 
-	"git.science.uu.nl/datastrophe/query-conversion/entity"
+	"git.science.uu.nl/graphpolaris/query-conversion/entity"
 	"github.com/stretchr/testify/assert"
 )
 
-func TestEmptyQueryConversion(t *testing.T) {
-	// Setup for test
-	// Create query conversion service
-	service := NewService()
+// All these tests test the entire flow
 
+func TestGroupBy(t *testing.T) {
 	query := []byte(`{
+		"databaseName": "Movies3",
 		"return": {
-			"entities": [],
-			"relations": []
+			"entities": [
+				0,
+				1,
+				2
+			],
+			"relations": [
+				0,
+				1
+			],
+			"groupBys": [
+				0
+			]
 		},
-		"entities": [],
-		"relations": [],
+		"entities": [
+			{
+				"id": 0,
+				"name": "Person",
+				"constraints": [
+				{
+					"attribute": "name",
+					"value": "Raymond Campbell",
+					"dataType": "string",
+					"matchType": "NEQ",
+					"inID": -1,
+					"inType": ""
+				}
+				]
+			},
+			{
+				"id": 1,
+				"name": "Movie",
+				"constraints": []
+			},
+			{
+				"id": 2,
+				"name": "Genre",
+				"constraints": []
+			}
+		],
+		"relations": [
+			{
+				"id": 0,
+				"name": "DIRECTED",
+				"depth": {
+					"min": 1,
+					"max": 1
+				},
+				"fromType": "entity",
+				"fromID": 0,
+				"toType": "entity",
+				"toID": 1,
+				"constraints": []
+			},
+			{
+				"id": 1,
+				"name": "IN_GENRE",
+				"depth": {
+					"min": 1,
+					"max": 1
+				},
+				"fromType": "groupBy",
+				"fromID": 0,
+				"toType": "entity",
+				"toID": 2,
+				"constraints": []
+			}
+		],
+		"groupBys": [
+			{
+				"id": 0,
+				"groupType": "entity",
+				"groupID": 0,
+				"groupAttribute": "bornIn",
+				"byType": "entity",
+				"byID": 1,
+				"byAttribute": "imdbId",
+				"appliedModifier": "AVG",
+				"relationID": 0,
+				"constraints": []
+			}
+		],
+		"machineLearning": [],
 		"limit": 5000
 	}`)
 
-	// Unmarshall the incoming message into an IncomingJSONQuery object
 	var JSONQuery entity.IncomingQueryJSON
 	json.Unmarshal(query, &JSONQuery)
 
-	convertedResult, err := service.ConvertQuery(&JSONQuery)
-
-	// Assert that there is no error
-	assert.NoError(t, err)
+	s := NewService()
+	cypher, err := s.ConvertQuery(&JSONQuery)
+	if err != nil {
+		fmt.Println(err)
+	}
+
+	answer := `MATCH p0 = (e0:Person)-[:DIRECTED*1..1]-(e1:Movie)
+		WHERE  e0.name <> "Raymond Campbell" 
+	UNWIND relationships(p0) as r0 
+	WITH *
+	WITH e1.imdbId AS e1_imdbId, AVG(e0.bornIn) AS AVG_bornIn 
+	MATCH p1 = (eg1:Movie)-[:IN_GENRE*1..1]-(e2:Genre)
+		WHERE eg1.imdbId IN e1_imdbId 
+	UNWIND relationships(p1) as r1 
+	WITH *
+	RETURN  r1, eg1, e2
+	LIMIT 5000;nodelink`
+
+	trimmedCypher := strings.Replace(*cypher, "\n", "", -1)
+	trimmedCypher = strings.Replace(trimmedCypher, "\t", "", -1)
+
+	trimmedAnswer := strings.Replace(answer, "\n", "", -1)
+	trimmedAnswer = strings.Replace(trimmedAnswer, "\t", "", -1)
+
+	fmt.Println(*cypher)
+	assert.Equal(t, trimmedAnswer, trimmedCypher)
 
-	// Assert that the result and the expected result are the same
-	correctConvertedResult := ``
-	assert.Equal(t, correctConvertedResult, *convertedResult)
 }
-
-func TestEntityOneAttributeQuery(t *testing.T) {
-	// Setup for test
-	// Create query conversion service
-	service := NewService()
-
+func TestSmallChain(t *testing.T) {
 	query := []byte(`{
+		"databaseName": "TweedeKamer",
 		"return": {
 			"entities": [
-				0
+				0,
+				1,
+				2
 			],
-			"relations": []
+			"relations": [
+				0,
+				1
+			]
 		},
 		"entities": [
 			{
-				"type": "airports",
+				"name": "parliament",
+				"ID": 0,
 				"constraints": [
 					{
-						"attribute": "state",
-						"value": "HI",
+						"attribute": "name",
+						"value": "Geert",
 						"dataType": "string",
-						"matchType": "exact"
+						"matchType": "contains",
+						"inID": -1,
+						"inType": ""
 					}
 				]
+			},
+			{
+				"name": "parties",
+				"ID": 1,
+				"constraints": []
+			},
+			{
+				"name": "resolutions",
+				"ID": 2,
+				"constraints": []
 			}
 		],
-		"relations": [],
+		"relations": [
+			{
+				"ID": 0,
+				"name": "member_of",
+				"depth": {
+					"min": 1,
+					"max": 1
+				},
+				"fromType": "entity",
+				"fromID": 0,
+				"toType": "entity",
+				"toID": 1,
+				"constraints": []
+			},
+			{
+				"ID": 1,
+				"name": "submits",
+				"depth": {
+					"min": 1,
+					"max": 1
+				},
+				"fromType": "entity",
+				"fromID": 0,
+				"toType": "entity",
+				"toID": 2,
+				"constraints": []
+			}
+		],
+		"groupBys": [],
+		"machineLearning": [],
 		"limit": 5000
-	}`)
+	}
+	`)
 
-	// Unmarshall the incoming message into an IncomingJSONQuery object
 	var JSONQuery entity.IncomingQueryJSON
 	json.Unmarshal(query, &JSONQuery)
 
-	convertedResult, err := service.ConvertQuery(&JSONQuery)
+	s := NewService()
+	cypher, err := s.ConvertQuery(&JSONQuery)
+	if err != nil {
+		fmt.Println(err)
+	}
 
-	// Assert that there is no error
-	assert.NoError(t, err)
+	answer := `MATCH p0 = (e0:parliament)-[:member_of*1..1]-(e1:parties)
+		WHERE  e0.name CONTAINS "%Geert%" 
+	UNWIND relationships(p0) as r0 
+	WITH *
+	MATCH p1 = (e0:parliament)-[:submits*1..1]-(e2:resolutions)
+		WHERE  e0.name CONTAINS "%Geert%" 
+	UNWIND relationships(p1) as r1 
+	WITH *
+	RETURN  r1, e0, e2, r0, e0, e1
+	LIMIT 5000;nodelink`
 
-	// Assert that the result and the expected result are the same
-	correctConvertedResult := `MATCH (n0:airports)WHERE n0.state = "HI" RETURN n0`
-	cleanedResult := strings.ReplaceAll(*convertedResult, "\n", "")
-	cleanedResult = strings.ReplaceAll(cleanedResult, "\t", "")
-	assert.Equal(t, correctConvertedResult, cleanedResult)
-}
+	fmt.Println(*cypher)
+
+	trimmedCypher := strings.Replace(*cypher, "\n", "", -1)
+	trimmedCypher = strings.Replace(trimmedCypher, "\t", "", -1)
+
+	trimmedAnswer := strings.Replace(answer, "\n", "", -1)
+	trimmedAnswer = strings.Replace(trimmedAnswer, "\t", "", -1)
 
-func TestRelationWithConstraint(t *testing.T) {
-	// Setup for test
-	// Create query conversion service
-	service := NewService()
+	fmt.Println(*cypher)
+	assert.Equal(t, trimmedAnswer, trimmedCypher)
 
+}
+
+// This one does not really have dependencies; the order doesn't matter. Maybe sort on numbers between equal dependencies?
+func TestLargeQueryChain(t *testing.T) {
 	query := []byte(`{
+		"databaseName": "TweedeKamer",
 		"return": {
 			"entities": [
-				0
+				0,
+				1,
+				2,
+				3,
+				4
 			],
 			"relations": [
-				0
+				0,
+				1,
+				2,
+				3
 			]
 		},
 		"entities": [
 			{
-				"type": "airports",
+				"name": "parliament",
+				"ID": 0,
 				"constraints": [
 					{
-						"attribute": "state",
-						"value": "HI",
+						"attribute": "name",
+						"value": "A",
 						"dataType": "string",
-						"matchType": "exact"
+						"matchType": "contains",
+						"inID": -1,
+						"inType": ""
+					}
+				]
+			},
+			{
+				"name": "parties",
+				"ID": 1,
+				"constraints": [
+					{
+						"attribute": "seats",
+						"value": "10",
+						"dataType": "int",
+						"matchType": "LT",
+						"inID": -1,
+						"inType": ""
+					}
+				]
+			},
+			{
+				"name": "resolutions",
+				"ID": 2,
+				"constraints": [
+					{
+						"attribute": "date",
+						"value": "mei",
+						"dataType": "string",
+						"matchType": "contains",
+						"inID": -1,
+						"inType": ""
+					}
+				]
+			},
+			{
+				"name": "parliament",
+				"ID": 3,
+				"constraints": []
+			},
+			{
+				"name": "parties",
+				"ID": 4,
+				"constraints": [
+					{
+						"attribute": "name",
+						"value": "Volkspartij voor Vrijheid en Democratie",
+						"dataType": "string",
+						"matchType": "==",
+						"inID": -1,
+						"inType": ""
 					}
 				]
 			}
 		],
 		"relations": [
 			{
-				"type": "flights",
+				"ID": 0,
+				"name": "member_of",
 				"depth": {
 					"min": 1,
 					"max": 1
 				},
-				"entityFrom": 0,
-				"entityTo": -1,
-				"constraints": [
-					{
-						"attribute": "Day",
-						"value": "15",
-						"dataType": "int",
-						"matchType": "EQ"
-					}
-				]
+				"fromType": "entity",
+				"fromID": 0,
+				"toType": "entity",
+				"toID": 1,
+				"constraints": []
+			},
+			{
+				"ID": 1,
+				"name": "submits",
+				"depth": {
+					"min": 1,
+					"max": 1
+				},
+				"fromType": "entity",
+				"fromID": 1,
+				"toType": "entity",
+				"toID": 2,
+				"constraints": []
+			},
+			{
+				"ID": 2,
+				"name": "submits",
+				"depth": {
+					"min": 1,
+					"max": 1
+				},
+				"fromType": "entity",
+				"fromID": 2,
+				"toType": "entity",
+				"toID": 3,
+				"constraints": []
+			},
+			{
+				"ID": 3,
+				"name": "member_of",
+				"depth": {
+					"min": 1,
+					"max": 1
+				},
+				"fromType": "entity",
+				"fromID": 3,
+				"toType": "entity",
+				"toID": 4,
+				"constraints": []
 			}
 		],
+		"groupBys": [],
+		"machineLearning": [],
 		"limit": 5000
-	}`)
+	}
+	
+	`)
 
-	// Unmarshall the incoming message into an IncomingJSONQuery object
 	var JSONQuery entity.IncomingQueryJSON
 	json.Unmarshal(query, &JSONQuery)
 
-	convertedResult, err := service.ConvertQuery(&JSONQuery)
-
-	// Assert that there is no error
-	assert.NoError(t, err)
+	s := NewService()
+	cypher, err := s.ConvertQuery(&JSONQuery)
+	if err != nil {
+		fmt.Println(err)
+	}
+
+	answer := `MATCH p0 = (e0:parliament)-[:member_of*1..1]-(e1:parties)
+		WHERE  e0.name CONTAINS "%A%" 
+		AND  e1.seats < 10 
+	UNWIND relationships(p0) as r0 
+	WITH *
+	MATCH p1 = (e1:parties)-[:submits*1..1]-(e2:resolutions)
+		WHERE  e1.seats < 10 
+		AND  e2.date CONTAINS "%mei%" 
+	UNWIND relationships(p1) as r1 
+	WITH *
+	MATCH p2 = (e2:resolutions)-[:submits*1..1]-(e3:parliament)
+		WHERE  e2.date CONTAINS "%mei%" 
+	UNWIND relationships(p2) as r2 
+	WITH *
+	MATCH p3 = (e3:parliament)-[:member_of*1..1]-(e4:parties)
+		WHERE  e4.name = "Volkspartij voor Vrijheid en Democratie" 
+	UNWIND relationships(p3) as r3 
+	WITH *
+	RETURN  r3, e3, e4, r2, e2, e3, r1, e1, e2, r0, e0, e1
+	LIMIT 5000;nodelink`
+
+	trimmedCypher := strings.Replace(*cypher, "\n", "", -1)
+	trimmedCypher = strings.Replace(trimmedCypher, "\t", "", -1)
+
+	trimmedAnswer := strings.Replace(answer, "\n", "", -1)
+	trimmedAnswer = strings.Replace(trimmedAnswer, "\t", "", -1)
+
+	fmt.Println(*cypher)
+	assert.Equal(t, trimmedAnswer, trimmedCypher)
 
-	// Assert that the result and the expected result are the same
-	correctConvertedResult := `MATCH (n0:airports)WHERE n0.state = "HI" MATCH p0 = (n0)-[r0:flights*1..1]->()WHERE r0.Day = 15 RETURN n0,p0;`
-	cleanedResult := strings.ReplaceAll(*convertedResult, "\n", "")
-	cleanedResult = strings.ReplaceAll(cleanedResult, "\t", "")
-	assert.Equal(t, correctConvertedResult, cleanedResult)
 }
 
-// func TestModifierCountEntity(t *testing.T) {
-// 	// Setup for test
-// 	// Create query conversion service
-// 	service := NewService()
-
-// 	query := []byte(`{
-// 		"return": {
-// 			"entities": [
-// 				0
-// 			],
-// 			"relations": []
-// 		},
-// 		"entities": [
-// 			{
-// 				"type": "airports",
-// 				"constraints": [
-// 					{
-// 						"attribute": "state",
-// 						"value": "HI",
-// 						"dataType": "string",
-// 						"matchType": "exact"
-// 					}
-// 				]
-// 			}
-// 		],
-// 		"relations": [],
-// 		"limit": 5000,
-// 		"modifiers": [
-// 			{
-// 				"type": "COUNT",
-// 				"selectedType": "entity",
-// 				"id": 0,
-// 				"attributeIndex": -1
-// 			}
-// 		]
-// 	}`)
-
-// 	// Unmarshall the incoming message into an IncomingJSONQuery object
-// 	var JSONQuery entity.IncomingQueryJSON
-// 	json.Unmarshal(query, &JSONQuery)
-
-// 	convertedResult, err := service.ConvertQuery(&JSONQuery)
-
-// 	// Assert that there is no error
-// 	assert.NoError(t, err)
-
-// 	// Assert that the result and the expected result are the same
-// 	correctConvertedResult := `LET n0 = (FOR x IN airports FILTER x.state == "HI" RETURN x)RETURN LENGTH (n0)`
-// 	cleanedResult := strings.ReplaceAll(*convertedResult, "\n", "")
-// 	cleanedResult = strings.ReplaceAll(cleanedResult, "\t", "")
-// 	assert.Equal(t, correctConvertedResult, cleanedResult)
-// }
-
-// func TestModifierCountEntityAttribute(t *testing.T) {
-// 	// Setup for test
-// 	// Create query conversion service
-// 	service := NewService()
-
-// 	query := []byte(`{
-// 		"return": {
-// 			"entities": [
-// 				0
-// 			],
-// 			"relations": []
-// 		},
-// 		"entities": [
-// 			{
-// 				"type": "airports",
-// 				"constraints": [
-// 					{
-// 						"attribute": "state",
-// 						"value": "HI",
-// 						"dataType": "string",
-// 						"matchType": "exact"
-// 					}
-// 				]
-// 			}
-// 		],
-// 		"relations": [],
-// 		"limit": 5000,
-// 		"modifiers": [
-// 			{
-// 				"type": "SUM",
-// 				"selectedType": "entity",
-// 				"id": 0,
-// 				"attributeIndex": 0
-// 			}
-// 		]
-// 	}`)
-
-// 	// Unmarshall the incoming message into an IncomingJSONQuery object
-// 	var JSONQuery entity.IncomingQueryJSON
-// 	json.Unmarshal(query, &JSONQuery)
-
-// 	convertedResult, err := service.ConvertQuery(&JSONQuery)
-
-// 	// Assert that there is no error
-// 	assert.NoError(t, err)
-
-// 	// Assert that the result and the expected result are the same
-// 	correctConvertedResult := `LET n0 = (FOR x IN airports FILTER x.state == "HI" RETURN x)RETURN SUM (n0[*].state)`
-// 	cleanedResult := strings.ReplaceAll(*convertedResult, "\n", "")
-// 	cleanedResult = strings.ReplaceAll(cleanedResult, "\t", "")
-// 	assert.Equal(t, correctConvertedResult, cleanedResult)
-// }
-
-// func TestModifierCountRelation(t *testing.T) {
-// 	// Setup for test
-// 	// Create query conversion service
-// 	service := NewService()
-
-// 	query := []byte(`{
-// 		"return": {
-// 			"entities": [
-// 				0
-// 			],
-// 			"relations": [
-// 				0
-// 			]
-// 		},
-// 		"entities": [
-// 			{
-// 				"type": "airports",
-// 				"constraints": [
-// 					{
-// 						"attribute": "state",
-// 						"value": "HI",
-// 						"dataType": "string",
-// 						"matchType": "exact"
-// 					}
-// 				]
-// 			}
-// 		],
-// 		"relations": [
-// 			{
-// 				"type": "flights",
-// 				"depth": {
-// 					"min": 1,
-// 					"max": 1
-// 				},
-// 				"entityFrom": 0,
-// 				"entityTo": -1,
-// 				"constraints": [
-// 					{
-// 						"attribute": "Day",
-// 						"value": "15",
-// 						"dataType": "int",
-// 						"matchType": "EQ"
-// 					}
-// 				]
-// 			}
-// 		],
-// 		"limit": 5000,
-// 		"modifiers": [
-// 			{
-// 				"type": "COUNT",
-// 				"selectedType": "relation",
-// 				"id": 0,
-// 				"attributeIndex": -1
-// 			}
-// 		]
-// 	}`)
-
-// 	// Unmarshall the incoming message into an IncomingJSONQuery object
-// 	var JSONQuery entity.IncomingQueryJSON
-// 	json.Unmarshal(query, &JSONQuery)
-
-// 	convertedResult, err := service.ConvertQuery(&JSONQuery)
-
-// 	// Assert that there is no error
-// 	assert.NoError(t, err)
-
-// 	// Assert that the result and the expected result are the same
-// 	correctConvertedResult := `LET n0 = (FOR x IN airports FILTER x.state == "HI" RETURN x)LET r0 = (FOR x IN n0 FOR v, e, p IN 1..1 OUTBOUND x flights OPTIONS { uniqueEdges: "path" }FILTER p.edges[*].Day ALL == 15 RETURN DISTINCT p )RETURN LENGTH (unique(r0[*].edges[**]))`
-// 	cleanedResult := strings.ReplaceAll(*convertedResult, "\n", "")
-// 	cleanedResult = strings.ReplaceAll(cleanedResult, "\t", "")
-// 	assert.Equal(t, correctConvertedResult, cleanedResult)
-// }
-// func TestModifierCountRelationAttribute(t *testing.T) {
-// 	// Setup for test
-// 	// Create query conversion service
-// 	service := NewService()
-
-// 	query := []byte(`{
-// 		"return": {
-// 			"entities": [
-// 				0
-// 			],
-// 			"relations": [
-// 				0
-// 			]
-// 		},
-// 		"entities": [
-// 			{
-// 				"type": "airports",
-// 				"constraints": [
-// 					{
-// 						"attribute": "state",
-// 						"value": "HI",
-// 						"dataType": "string",
-// 						"matchType": "exact"
-// 					}
-// 				]
-// 			}
-// 		],
-// 		"relations": [
-// 			{
-// 				"type": "flights",
-// 				"depth": {
-// 					"min": 1,
-// 					"max": 1
-// 				},
-// 				"entityFrom": 0,
-// 				"entityTo": -1,
-// 				"constraints": [
-// 					{
-// 						"attribute": "Day",
-// 						"value": "15",
-// 						"dataType": "int",
-// 						"matchType": "EQ"
-// 					}
-// 				]
-// 			}
-// 		],
-// 		"limit": 5000,
-// 		"modifiers": [
-// 			{
-// 				"type": "AVG",
-// 				"selectedType": "relation",
-// 				"id": 0,
-// 				"attributeIndex": 0
-// 			}
-// 		]
-// 	}`)
-
-// 	// Unmarshall the incoming message into an IncomingJSONQuery object
-// 	var JSONQuery entity.IncomingQueryJSON
-// 	json.Unmarshal(query, &JSONQuery)
-
-// 	convertedResult, err := service.ConvertQuery(&JSONQuery)
-
-// 	// Assert that there is no error
-// 	assert.NoError(t, err)
-
-// 	// Assert that the result and the expected result are the same
-// 	correctConvertedResult := `LET n0 = (FOR x IN airports FILTER x.state == "HI" RETURN x)LET r0 = (FOR x IN n0 FOR v, e, p IN 1..1 OUTBOUND x flights OPTIONS { uniqueEdges: "path" }FILTER p.edges[*].Day ALL == 15 RETURN DISTINCT p )RETURN AVG (r0[*].edges[**].Day)`
-// 	cleanedResult := strings.ReplaceAll(*convertedResult, "\n", "")
-// 	cleanedResult = strings.ReplaceAll(cleanedResult, "\t", "")
-// 	assert.Equal(t, correctConvertedResult, cleanedResult)
-// }
-
-func TestRelationWithInOutConstraint(t *testing.T) {
-	// Setup for test
-	// Create query conversion service
-	service := NewService()
-
+func TestInStatement(t *testing.T) {
 	query := []byte(`{
-		"return": {
-			"entities": [
-				0,
-				1
-			],
-			"relations": [
-				0
-			]
-		},
+		"databaseName": "Movies3",
 		"entities": [
 			{
-				"type": "airports",
-				"constraints": [
-					{
-						"attribute": "city",
-						"value": "San Francisco",
-						"dataType": "string",
-						"matchType": "exact"
-					}
-				]
+				"id": 0,
+				"name": "Person",
+				"constraints": []
 			},
 			{
-				"type": "airports",
+				"id": 1,
+				"name": "Movie",
+				"constraints": []
+			},
+			{
+				"id": 2,
+				"name": "Person",
 				"constraints": [
 					{
-						"attribute": "state",
-						"value": "HI",
+						"attribute": "bornIn",
+						"value": "",
 						"dataType": "string",
-						"matchType": "exact"
+						"matchType": "",
+						"inID": 0,
+						"inType": "groupBy"
 					}
 				]
 			}
 		],
 		"relations": [
 			{
-				"type": "flights",
+				"id": 0,
+				"name": "ACTED_IN",
 				"depth": {
 					"min": 1,
-					"max": 3
+					"max": 1
 				},
-				"entityFrom": 1,
-				"entityTo": 0,
+				"fromType": "entity",
+				"fromID": 0,
+				"toType": "entity",
+				"toID": 1,
+				"constraints": []
+			}
+		],
+		"groupBys": [
+			{
+				"id": 0,
+				"groupType": "entity",
+				"groupID": 1,
+				"groupAttribute": "imdbRating",
+				"byType": "entity",
+				"byID": 0,
+				"byAttribute": "bornIn",
+				"appliedModifier": "AVG",
+				"relationID": 0,
 				"constraints": [
 					{
-						"attribute": "Day",
-						"value": "15",
+						"attribute": "imdbRating",
+						"value": "7.5",
 						"dataType": "int",
-						"matchType": "EQ"
+						"matchType": "GT",
+						"inID": -1,
+						"inType": ""
 					}
 				]
 			}
 		],
+		"machineLearning": [],
 		"limit": 5000
-	}`)
+	}
+	`)
+
+	answer := `MATCH p0 = (e0:Person)-[:ACTED_IN*1..1]-(e1:Movie)
+	UNWIND relationships(p0) as r0 
+	WITH *
+	WITH e0.bornIn AS e0_bornIn, AVG(e1.imdbRating) AS AVG_imdbRating 
+		WHERE  AVG_imdbRating > 7.5 
+	MATCH (e2:Person)
+		WHERE e2.bornIn IN e0_bornIn
+	WITH *
+	RETURN  e2
+	LIMIT 5000;nodelink`
 
-	// Unmarshall the incoming message into an IncomingJSONQuery object
 	var JSONQuery entity.IncomingQueryJSON
 	json.Unmarshal(query, &JSONQuery)
 
-	convertedResult, err := service.ConvertQuery(&JSONQuery)
+	s := NewService()
+	cypher, err := s.ConvertQuery(&JSONQuery)
+	if err != nil {
+		fmt.Println(err)
+	}
 
-	// Assert that there is no error
-	assert.NoError(t, err)
+	trimmedCypher := strings.Replace(*cypher, "\n", "", -1)
+	trimmedCypher = strings.Replace(trimmedCypher, "\t", "", -1)
 
-	// Assert that the result and the expected result are the same
-	correctConvertedResult := `MATCH (n1:airports)WHERE n1.state = "HI" MATCH p0 = (n1)-[r0:flights*1..3]->(n0)WHERE r0.Day = 15 RETURN n1,n0,p0;`
-	cleanedResult := strings.ReplaceAll(*convertedResult, "\n", "")
-	cleanedResult = strings.ReplaceAll(cleanedResult, "\t", "")
-	assert.Equal(t, correctConvertedResult, cleanedResult)
-}
+	trimmedAnswer := strings.Replace(answer, "\n", "", -1)
+	trimmedAnswer = strings.Replace(trimmedAnswer, "\t", "", -1)
 
-func TestTwoRelations(t *testing.T) {
-	// Setup for test
-	// Create query conversion service
-	service := NewService()
+	fmt.Println(*cypher)
+	assert.Equal(t, trimmedAnswer, trimmedCypher)
 
+}
+func TestDoubleInStatement(t *testing.T) {
 	query := []byte(`{
-		"return": {
-			"entities": [
-				0,
-				1,
-				2
-			],
-			"relations": [
-				0,
-				1
-			]
-		},
+		"databaseName": "Movies3",
 		"entities": [
 			{
-				"type": "airports",
-				"constraints": [
-					{
-						"attribute": "city",
-						"value": "New York",
-						"dataType": "string",
-						"matchType": "exact"
-					}
-				]
+				"id": 0,
+				"name": "Person",
+				"constraints": []
 			},
 			{
-				"type": "airports",
+				"id": 1,
+				"name": "Movie",
+				"constraints": []
+			},
+			{
+				"id": 2,
+				"name": "Person",
 				"constraints": [
 					{
-						"attribute": "city",
-						"value": "San Francisco",
+						"attribute": "bornIn",
+						"value": "",
 						"dataType": "string",
-						"matchType": "exact"
+						"matchType": "",
+						"inID": 0,
+						"inType": "groupBy"
 					}
 				]
 			},
 			{
-				"type": "airports",
+				"id": 3,
+				"name": "Person",
 				"constraints": [
 					{
-						"attribute": "state",
-						"value": "HI",
+						"attribute": "bornIn",
+						"value": "",
 						"dataType": "string",
-						"matchType": "exact"
+						"matchType": "",
+						"inID": 0,
+						"inType": "groupBy"
 					}
 				]
 			}
 		],
 		"relations": [
 			{
-				"type": "flights",
+				"id": 0,
+				"name": "ACTED_IN",
 				"depth": {
 					"min": 1,
-					"max": 3
+					"max": 1
 				},
-				"entityFrom": 2,
-				"entityTo": 1,
+				"fromType": "entity",
+				"fromID": 0,
+				"toType": "entity",
+				"toID": 1,
+				"constraints": []
+			}
+		],
+		"groupBys": [
+			{
+				"id": 0,
+				"groupType": "entity",
+				"groupID": 1,
+				"groupAttribute": "imdbRating",
+				"byType": "entity",
+				"byID": 0,
+				"byAttribute": "bornIn",
+				"appliedModifier": "AVG",
+				"relationID": 0,
 				"constraints": [
 					{
-						"attribute": "Day",
-						"value": "15",
+						"attribute": "imdbRating",
+						"value": "7.5",
 						"dataType": "int",
-						"matchType": "EQ"
+						"matchType": "GT",
+						"inID": -1,
+						"inType": ""
 					}
 				]
-			},
-			{
-				"type": "flights",
-				"depth": {
-					"min": 1,
-					"max": 1
-				},
-				"entityFrom": 0,
-				"entityTo": -1,
-				"constraints": []
 			}
 		],
+		"machineLearning": [],
 		"limit": 5000
-	}`)
+	}
+	`)
+
+	answer := `MATCH p0 = (e0:Person)-[:ACTED_IN*1..1]-(e1:Movie)
+	UNWIND relationships(p0) as r0 
+	WITH *
+	WITH e0.bornIn AS e0_bornIn, AVG(e1.imdbRating) AS AVG_imdbRating 
+		WHERE  AVG_imdbRating > 7.5 
+	MATCH (e2:Person)
+		WHERE e2.bornIn IN e0_bornIn
+	WITH *
+	MATCH (e3:Person)
+		WHERE e3.bornIn IN e0_bornIn
+	WITH *
+	RETURN  e3
+	LIMIT 5000;nodelink`
 
-	// Unmarshall the incoming message into an IncomingJSONQuery object
 	var JSONQuery entity.IncomingQueryJSON
 	json.Unmarshal(query, &JSONQuery)
 
-	convertedResult, err := service.ConvertQuery(&JSONQuery)
+	s := NewService()
+	cypher, err := s.ConvertQuery(&JSONQuery)
+	if err != nil {
+		fmt.Println(err)
+	}
 
-	// Assert that there is no error
-	assert.NoError(t, err)
+	trimmedCypher := strings.Replace(*cypher, "\n", "", -1)
+	trimmedCypher = strings.Replace(trimmedCypher, "\t", "", -1)
 
-	// Assert that the result and the expected result are the same
-	correctConvertedResult := `MATCH (n2:airports)WHERE n2.state = "HI" MATCH p0 = (n2)-[r0:flights*1..3]->(n1)WHERE r0.Day = 15 RETURN n2,n1,p0;MATCH (n0:airports)WHERE n0.city = "New York" MATCH p0 = (n0)-[r0:flights*1..1]->()RETURN n0,p0;`
-	cleanedResult := strings.ReplaceAll(*convertedResult, "\n", "")
-	cleanedResult = strings.ReplaceAll(cleanedResult, "\t", "")
-	assert.Equal(t, correctConvertedResult, cleanedResult)
-}
+	trimmedAnswer := strings.Replace(answer, "\n", "", -1)
+	trimmedAnswer = strings.Replace(trimmedAnswer, "\t", "", -1)
 
-func TestRelationWithOnlyToNode(t *testing.T) {
-	// Setup for test
-	// Create query conversion service
-	service := NewService()
+	fmt.Println(*cypher)
+	assert.Equal(t, trimmedAnswer, trimmedCypher)
 
+}
+func TestSimpleQuery(t *testing.T) {
 	query := []byte(`{
-			"return": {
-				"entities": [
-					0
-				],
-				"relations": [
-					0
-				]
-			},
+		"return": {
 			"entities": [
-				{
-					"type": "airports",
-					"constraints": [
-						{
-							"attribute": "city",
-							"value": "San Francisco",
-							"dataType": "string",
-							"matchType": "exact"
-						}
-					]
-				}
+				11,
+				12
 			],
 			"relations": [
-				{
-					"type": "flights",
-					"depth": {
-						"min": 1,
-						"max": 1
-					},
-					"entityFrom": -1,
-					"entityTo": 0,
-					"constraints": []
-				}
+				10
 			],
-			"limit": 5000
-		}`)
+			"groupBys": []
+		},
+		"entities": [
+			{
+				"name": "Person",
+				"ID": 11,
+				"constraints": []
+			},
+			{
+				"name": "Movie",
+				"ID": 12,
+				"constraints": []
+			}
+		],
+		"relations": [
+			{
+				"ID": 10,
+				"name": "DIRECTED",
+				"depth": {
+					"min": 1,
+					"max": 1
+				},
+				"fromType": "entity",
+				"fromID": 11,
+				"toType": "entity",
+				"toID": 12,
+				"constraints": []
+			}
+		],
+		"groupBys": [],
+		"machineLearning": [],
+		"limit": 5000,
+		"databaseName": "Movies3"
+	}
+	`)
+
+	answer := `MATCH p0 = (e11:Person)-[:DIRECTED*1..1]-(e12:Movie)
+	UNWIND relationships(p0) as r10 
+	WITH *
+	RETURN  r10, e11, e12
+	LIMIT 5000;nodelink`
 
-	// Unmarshall the incoming message into an IncomingJSONQuery object
 	var JSONQuery entity.IncomingQueryJSON
 	json.Unmarshal(query, &JSONQuery)
 
-	convertedResult, err := service.ConvertQuery(&JSONQuery)
+	s := NewService()
+	cypher, err := s.ConvertQuery(&JSONQuery)
+	if err != nil {
+		fmt.Println(err)
+	}
 
-	// Assert that there is no error
-	assert.NoError(t, err)
+	trimmedCypher := strings.Replace(*cypher, "\n", "", -1)
+	trimmedCypher = strings.Replace(trimmedCypher, "\t", "", -1)
 
-	// Assert that the result and the expected result are the same
-	correctConvertedResult := `MATCH (n0:airports)WHERE n0.city = "San Francisco" MATCH p0 = (n0)-[r0:flights*1..1]->()RETURN n0,p0;`
-	cleanedResult := strings.ReplaceAll(*convertedResult, "\n", "")
-	cleanedResult = strings.ReplaceAll(cleanedResult, "\t", "")
-	assert.Equal(t, correctConvertedResult, cleanedResult)
-}
-
-func TestTooManyReturnEntities(t *testing.T) {
-	// Setup for test
-	// Create query conversion service
-	service := NewService()
+	trimmedAnswer := strings.Replace(answer, "\n", "", -1)
+	trimmedAnswer = strings.Replace(trimmedAnswer, "\t", "", -1)
 
+	fmt.Println(*cypher)
+	assert.Equal(t, trimmedAnswer, trimmedCypher)
+}
+func TestNoRelation(t *testing.T) {
 	query := []byte(`{
-			"return": {
-				"entities": [
-					0,
-					1,
-					2
-				],
-				"relations": [
-					0
-				]
-			},
+		"return": {
 			"entities": [
-				{
-					"type": "airports",
-					"constraints": [
-						{
-							"attribute": "city",
-							"value": "San Francisco",
-							"dataType": "string",
-							"matchType": "exact"
-						}
-					]
-				}
+				11,
+				12
 			],
 			"relations": [
-				{
-					"type": "flights",
-					"depth": {
-						"min": 1,
-						"max": 1
-					},
-					"entityFrom": -1,
-					"entityTo": 0,
-					"constraints": []
-				}
+				10
 			],
-			"limit": 5000
-		}`)
+			"groupBys": []
+		},
+		"entities": [
+			{
+				"name": "Person",
+				"ID": 11,
+				"constraints": []
+			},
+			{
+				"name": "Movie",
+				"ID": 12,
+				"constraints": []
+			}
+		],
+		"relations": [],
+		"groupBys": [],
+		"machineLearning": [],
+		"limit": 5000,
+		"databaseName": "Movies3"
+	}
+	`)
 
-	// Unmarshall the incoming message into an IncomingJSONQuery object
 	var JSONQuery entity.IncomingQueryJSON
 	json.Unmarshal(query, &JSONQuery)
-
-	_, err := service.ConvertQuery(&JSONQuery)
-
-	// Assert that there is no error
-	assert.Equal(t, errors.New("non-existing entity referenced in return"), err)
+	fmt.Println(JSONQuery)
+	fmt.Println(" ")
+
+	s := NewService()
+	_, err := s.ConvertQuery(&JSONQuery)
+	if err != nil {
+		assert.Equal(t, "Invalid query", err.Error())
+	} else {
+		// It should error, thus it must not reach this
+		t.Fail()
+	}
 }
 
-func TestTooManyReturnRelations(t *testing.T) {
-	// Setup for test
-	// Create query conversion service
-	service := NewService()
-
+func TestNoEntities(t *testing.T) {
 	query := []byte(`{
-			"return": {
-				"entities": [
-					0
-				],
-				"relations": [
-					0,
-					1,
-					2
-				]
-			},
+		"return": {
 			"entities": [
-				{
-					"type": "airports",
-					"constraints": [
-						{
-							"attribute": "city",
-							"value": "San Francisco",
-							"dataType": "string",
-							"matchType": "exact"
-						}
-					]
-				}
+				11,
+				12
 			],
 			"relations": [
-				{
-					"type": "flights",
-					"depth": {
-						"min": 1,
-						"max": 1
-					},
-					"entityFrom": -1,
-					"entityTo": 0,
-					"constraints": []
-				}
+				10
 			],
-			"limit": 5000
-		}`)
+			"groupBys": []
+		},
+		"entities": [],
+		"relations": [
+			{
+			"ID": 10,
+			"name": "DIRECTED",
+			"depth": {
+				"min": 1,
+				"max": 1
+			},
+			"fromType": "entity",
+			"fromID": 11,
+			"toType": "entity",
+			"toID": 12,
+			"constraints": []
+		}
+		],
+		"groupBys": [],
+		"machineLearning": [],
+		"limit": 5000,
+		"databaseName": "Movies3"
+	}
+	`)
 
-	// Unmarshall the incoming message into an IncomingJSONQuery object
 	var JSONQuery entity.IncomingQueryJSON
 	json.Unmarshal(query, &JSONQuery)
-
-	_, err := service.ConvertQuery(&JSONQuery)
-
-	// Assert that there is no error
-	assert.Equal(t, errors.New("non-existing relation referenced in return"), err)
+	fmt.Println(JSONQuery)
+	fmt.Println(" ")
+
+	s := NewService()
+	_, err := s.ConvertQuery(&JSONQuery)
+	if err != nil {
+		assert.Equal(t, "Invalid query", err.Error())
+	} else {
+		// It should error, thus it must not reach this
+		t.Fail()
+	}
 }
-
-func TestNegativeReturnEntities(t *testing.T) {
-	// Setup for test
-	// Create query conversion service
-	service := NewService()
-
+func TestTwoRelationsCycle(t *testing.T) {
 	query := []byte(`{
-			"return": {
-				"entities": [
-					0,
-					-1
-				],
-				"relations": [
-					0,
-					1,
-					2
-				]
-			},
+		"return": {
 			"entities": [
-				{
-					"type": "airports",
-					"constraints": [
-						{
-							"attribute": "city",
-							"value": "San Francisco",
-							"dataType": "string",
-							"matchType": "exact"
-						}
-					]
-				}
+				11,
+				12
 			],
 			"relations": [
-				{
-					"type": "flights",
-					"depth": {
-						"min": 1,
-						"max": 1
-					},
-					"entityFrom": -1,
-					"entityTo": 0,
-					"constraints": []
-				}
+				10
 			],
-			"limit": 5000
-		}`)
+			"groupBys": []
+		},
+		"entities": [
+			{
+				"name": "Person",
+				"ID": 11,
+				"constraints": []
+			},
+			{
+				"name": "Movie",
+				"ID": 12,
+				"constraints": []
+			}
+		],
+		"relations": [
+			{
+			"ID": 10,
+			"name": "DIRECTED",
+			"depth": {
+				"min": 1,
+				"max": 1
+			},
+			"fromType": "entity",
+			"fromID": 11,
+			"toType": "entity",
+			"toID": 12,
+			"constraints": []
+		},
+		{
+			"ID": 11,
+			"name": "ACTED_IN",
+			"depth": {
+				"min": 1,
+				"max": 1
+			},
+			"fromType": "entity",
+			"fromID": 12,
+			"toType": "entity",
+			"toID": 11,
+			"constraints": []
+		}
+		],
+		"groupBys": [],
+		"machineLearning": [],
+		"limit": 5000,
+		"databaseName": "Movies3"
+	}
+	`)
 
-	// Unmarshall the incoming message into an IncomingJSONQuery object
 	var JSONQuery entity.IncomingQueryJSON
 	json.Unmarshal(query, &JSONQuery)
-
-	_, err := service.ConvertQuery(&JSONQuery)
-
-	// Assert that there is no error
-	assert.Equal(t, errors.New("non-existing entity referenced in return"), err)
+	fmt.Println(JSONQuery)
+	fmt.Println(" ")
+
+	s := NewService()
+	cypher, err := s.ConvertQuery(&JSONQuery)
+	if err != nil {
+		fmt.Println(err)
+		t.Fail()
+		return
+	}
+
+	if cypher == nil {
+		t.Fail()
+		return
+	}
+
+	answer1 := `MATCH p0 = (e11:Person)-[:DIRECTED*1..1]-(e12:Movie)
+	UNWIND relationships(p0) as r10 
+	WITH *
+	MATCH p1 = (e11:Person)-[:DIRECTED*1..1]-(e12:Movie)
+	UNWIND relationships(p1) as r10 
+	WITH *
+	RETURN  r10, e11, e12, r10, e11, e12
+	LIMIT 5000;nodelink`
+
+	answer2 := `MATCH p1 = (e12:Movie)-[:ACTED_IN*1..1]-(e11:Person)
+	UNWIND relationships(p1) as r11 
+	WITH *
+	MATCH p0 = (e11:Person)-[:DIRECTED*1..1]-(e12:Movie)
+	UNWIND relationships(p0) as r10 
+	WITH *
+	RETURN  r10, e11, e12, r11, e12, e11
+	LIMIT 5000;nodelink`
+
+	trimmedCypher := strings.Replace(*cypher, "\n", "", -1)
+	trimmedCypher = strings.Replace(trimmedCypher, "\t", "", -1)
+
+	trimmedAnswer1 := strings.Replace(answer1, "\n", "", -1)
+	trimmedAnswer1 = strings.Replace(trimmedAnswer1, "\t", "", -1)
+	trimmedAnswer2 := strings.Replace(answer2, "\n", "", -1)
+	trimmedAnswer2 = strings.Replace(trimmedAnswer2, "\t", "", -1)
+
+	fmt.Println(*cypher)
+	// Both answers are correct
+	if !(trimmedAnswer1 == trimmedCypher || trimmedAnswer2 == trimmedCypher) {
+		t.Fail()
+	}
 }
 
-func TestNoRelationsField(t *testing.T) {
-	// Setup for test
-	// Create query conversion service
-	service := NewService()
-
+func TestCyclePlusDependency(t *testing.T) {
 	query := []byte(`{
-			"return": {
-				"entities": [
-					0
-				]
-			},
+		"return": {
 			"entities": [
-				{
-					"type": "airports",
-					"constraints": [
-						{
-							"attribute": "city",
-							"value": "San Francisco",
-							"dataType": "string",
-							"matchType": "exact"
-						}
-					]
-				}
+				11,
+				12
 			],
-			"limit": 5000
-		}`)
+			"relations": [
+				10
+			],
+			"groupBys": []
+		},
+		"entities": [
+			{
+				"name": "Person",
+				"ID": 11,
+				"constraints": []
+			},
+			{
+				"name": "Movie",
+				"ID": 12,
+				"constraints": []
+			},
+			{
+				"name": "Person",
+				"ID": 13,
+				"constraints": []
+			}
+		],
+		"relations": [
+			{
+			"ID": 10,
+			"name": "DIRECTED",
+			"depth": {
+				"min": 1,
+				"max": 1
+			},
+			"fromType": "entity",
+			"fromID": 11,
+			"toType": "entity",
+			"toID": 12,
+			"constraints": []
+		},
+		{
+			"ID": 11,
+			"name": "ACTED_IN",
+			"depth": {
+				"min": 1,
+				"max": 1
+			},
+			"fromType": "entity",
+			"fromID": 12,
+			"toType": "entity",
+			"toID": 11,
+			"constraints": []
+		},
+		{
+			"ID": 12,
+			"name": "ACTED_IN",
+			"depth": {
+				"min": 1,
+				"max": 1
+			},
+			"fromType": "entity",
+			"fromID": 13,
+			"toType": "entity",
+			"toID": 12,
+			"constraints": []
+		}
+		],
+		"groupBys": [],
+		"machineLearning": [],
+		"limit": 5000,
+		"databaseName": "Movies3"
+	}
+	`)
 
-	// Unmarshall the incoming message into an IncomingJSONQuery object
 	var JSONQuery entity.IncomingQueryJSON
 	json.Unmarshal(query, &JSONQuery)
-
-	convertedResult, err := service.ConvertQuery(&JSONQuery)
-
-	// Assert that there is no error
-	assert.NoError(t, err)
-
-	// Assert that the result and the expected result are the same
-	correctConvertedResult := `MATCH (n0:airports)WHERE n0.city = "San Francisco" RETURN n0`
-	cleanedResult := strings.ReplaceAll(*convertedResult, "\n", "")
-	cleanedResult = strings.ReplaceAll(cleanedResult, "\t", "")
-	assert.Equal(t, correctConvertedResult, cleanedResult)
+	fmt.Println(JSONQuery)
+	fmt.Println(" ")
+
+	s := NewService()
+	cypher, err := s.ConvertQuery(&JSONQuery)
+	if err != nil {
+		fmt.Println(err)
+		t.Fail()
+		return
+	}
+
+	if cypher == nil {
+		t.Fail()
+		return
+	}
+
+	answer := `MATCH p2 = (e13:Person)-[:ACTED_IN*1..1]-(e12:Movie)
+	UNWIND relationships(p2) as r12 
+	WITH *
+	MATCH p1 = (e12:Movie)-[:ACTED_IN*1..1]-(e11:Person)
+	UNWIND relationships(p1) as r11 
+	WITH *
+	MATCH p0 = (e11:Person)-[:DIRECTED*1..1]-(e12:Movie)
+	UNWIND relationships(p0) as r10 
+	WITH *
+	RETURN  r10, e11, e12, r11, e12, e11, r12, e13, e12
+	LIMIT 5000;nodelink`
+
+	trimmedCypher := strings.Replace(*cypher, "\n", "", -1)
+	trimmedCypher = strings.Replace(trimmedCypher, "\t", "", -1)
+
+	trimmedAnswer := strings.Replace(answer, "\n", "", -1)
+	trimmedAnswer = strings.Replace(trimmedAnswer, "\t", "", -1)
+
+	fmt.Println(*cypher)
+	assert.Equal(t, trimmedAnswer, trimmedCypher)
 }
-
-func TestEntityFromLowerThanNegativeOneInRelation(t *testing.T) {
-	// Setup for test
-	// Create query conversion service
-	service := NewService()
-
+func TestTripleCycle(t *testing.T) {
 	query := []byte(`{
-			"return": {
-				"entities": [
-					0
-				],
-				"relations": [
-					0
-				]
-			},
+		"return": {
 			"entities": [
-				{
-					"type": "airports",
-					"constraints": [
-						{
-							"attribute": "city",
-							"value": "San Francisco",
-							"dataType": "string",
-							"matchType": "exact"
-						}
-					]
-				}
+				11,
+				12
 			],
 			"relations": [
-				{
-					"type": "flights",
-					"depth": {
-						"min": 1,
-						"max": 1
-					},
-					"entityFrom": -4,
-					"entityTo": 0,
-					"constraints": []
-				}
+				10
 			],
-			"limit": 5000
-		}`)
+			"groupBys": []
+		},
+		"entities": [
+			{
+				"name": "Person",
+				"ID": 11,
+				"constraints": []
+			},
+			{
+				"name": "Movie",
+				"ID": 12,
+				"constraints": []
+			},
+			{
+				"name": "Person",
+				"ID": 13,
+				"constraints": []
+			}
+		],
+		"relations": [
+			{
+			"ID": 10,
+			"name": "DIRECTED",
+			"depth": {
+				"min": 1,
+				"max": 1
+			},
+			"fromType": "entity",
+			"fromID": 11,
+			"toType": "entity",
+			"toID": 12,
+			"constraints": []
+		},
+		{
+			"ID": 11,
+			"name": "ACTED_IN",
+			"depth": {
+				"min": 1,
+				"max": 1
+			},
+			"fromType": "entity",
+			"fromID": 12,
+			"toType": "entity",
+			"toID": 13,
+			"constraints": []
+		},
+		{
+			"ID": 12,
+			"name": "ACTED_IN",
+			"depth": {
+				"min": 1,
+				"max": 1
+			},
+			"fromType": "entity",
+			"fromID": 13,
+			"toType": "entity",
+			"toID": 11,
+			"constraints": []
+		}
+		],
+		"groupBys": [],
+		"machineLearning": [],
+		"limit": 5000,
+		"databaseName": "Movies3"
+	}
+	`)
 
-	// Unmarshall the incoming message into an IncomingJSONQuery object
 	var JSONQuery entity.IncomingQueryJSON
 	json.Unmarshal(query, &JSONQuery)
 
-	_, err := service.ConvertQuery(&JSONQuery)
-
-	// Assert that there is no error
-	assert.NoError(t, err)
+	s := NewService()
+	cypher, err := s.ConvertQuery(&JSONQuery)
+	if err != nil {
+		fmt.Println(err)
+		assert.Equal(t, errors.New("Cyclic query detected"), err)
+		return
+	}
+
+	if cypher == nil {
+		t.Fail()
+		return
+	}
+	t.Fail()
 }
diff --git a/cypher/createConstraints.go b/cypher/createConstraints.go
index b24d895f7c9afbccd519bc4c6ec3c9fecd89de9a..33cd9f086176dd33b70f21ede8b794b7638526f2 100644
--- a/cypher/createConstraints.go
+++ b/cypher/createConstraints.go
@@ -3,7 +3,7 @@ package cypher
 import (
 	"fmt"
 
-	"git.science.uu.nl/datastrophe/query-conversion/entity"
+	"git.science.uu.nl/graphpolaris/query-conversion/entity"
 )
 
 /* createConstraintStatements generates the appropriate amount of constraint lines calling createConstraingBoolExpression
@@ -22,7 +22,7 @@ func createConstraintStatements(constraints *[]entity.QueryConstraintStruct, nam
 	newLineStatement := "\tWHERE"
 
 	for _, v := range *constraints {
-		s += fmt.Sprintf("%v%v \n", newLineStatement, *createConstraintBoolExpression(&v, name))
+		s += fmt.Sprintf("%v%v \n", newLineStatement, *createConstraintBoolExpression(&v, name, false))
 		newLineStatement = "\tAND"
 	}
 
@@ -38,7 +38,7 @@ isRelation is a boolean specifying if this constraint comes from a node or relat
 
 Return: a string containing an boolean expression of a single constraint
 */
-func createConstraintBoolExpression(constraint *entity.QueryConstraintStruct, name string) *string {
+func createConstraintBoolExpression(constraint *entity.QueryConstraintStruct, name string, customAttribute bool) *string {
 	var (
 		match string
 		value string
@@ -95,7 +95,12 @@ func createConstraintBoolExpression(constraint *entity.QueryConstraintStruct, na
 		}
 	}
 
-	line = fmt.Sprintf("%s %s.%s %s %s", neq, name, constraint.Attribute, match, value)
+	if customAttribute {
+		line = fmt.Sprintf("%s %s %s %s", neq, name, match, value)
+	} else {
+
+		line = fmt.Sprintf("%s %s.%s %s %s", neq, name, constraint.Attribute, match, value)
+	}
 
 	return &line
 }
diff --git a/entity/queryStruct.go b/entity/queryStruct.go
index 7b6c2760ac2ad7c0b1a002c516559011a85cdd5d..ea069ee5e3d53c2e4b056ebca724dbf607a49933 100644
--- a/entity/queryStruct.go
+++ b/entity/queryStruct.go
@@ -2,35 +2,76 @@ package entity
 
 // IncomingQueryJSON describes the query coming into the service in JSON format
 type IncomingQueryJSON struct {
-	DatabaseName string
-	Return       QueryReturnStruct
-	Entities     []QueryEntityStruct
-	Relations    []QueryRelationStruct
+	DatabaseName    string                `json:"databaseName"`
+	Return          QueryReturnStruct     `json:"return"`
+	Entities        []QueryEntityStruct   `json:"entities"`
+	Relations       []QueryRelationStruct `json:"relations"`
+	GroupBys        []QueryGroupByStruct  `json:"groupBys"`
+	MachineLearning []QueryMLStruct       `json:"machineLearning"`
 	// Limit is for limiting the amount of paths AQL will return in a relation let statement
-	Limit     int
-	Modifiers []QueryModifierStruct
+	Limit int `json:"limit"`
+	//Modifiers []QueryModifierStruct
 }
 
 // QueryReturnStruct holds the indices of the entities and relations that need to be returned
 type QueryReturnStruct struct {
-	Entities  []int
-	Relations []int
+	Entities  []int `json:"entities"`
+	Relations []int `json:"relations"`
+	GroupBys  []int `json:"groupBys"`
 	//Modifiers []int
 }
 
 // QueryEntityStruct encapsulates a single entity with its corresponding constraints
 type QueryEntityStruct struct {
-	Type        string
-	Constraints []QueryConstraintStruct
+	ID          int                     `json:"id"`
+	Name        string                  `json:"name"`
+	Constraints []QueryConstraintStruct `json:"constraints"`
 }
 
 // QueryRelationStruct encapsulates a single relation with its corresponding constraints
 type QueryRelationStruct struct {
-	Type        string
-	EntityFrom  int
-	EntityTo    int
-	Depth       QuerySearchDepthStruct
-	Constraints []QueryConstraintStruct
+	ID          int                     `json:"id"`
+	Name        string                  `json:"name"`
+	Depth       QuerySearchDepthStruct  `json:"depth"`
+	FromType    string                  `json:"fromType"`
+	FromID      int                     `json:"fromID"`
+	ToType      string                  `json:"toType"`
+	ToID        int                     `json:"toID"`
+	Constraints []QueryConstraintStruct `json:"constraints"`
+}
+
+// QueryGroupByStruct holds all the info needed to form a group by
+type QueryGroupByStruct struct {
+	ID              int                     `json:"id"`
+	GroupType       string                  `json:"groupType"`
+	GroupID         int                     `json:"groupID"`
+	GroupAttribute  string                  `json:"groupAttribute"`
+	ByType          string                  `json:"byType"`
+	ByID            int                     `json:"byID"`
+	ByAttribute     string                  `json:"byAttribute"`
+	AppliedModifier string                  `json:"appliedModifier"`
+	RelationID      int                     `json:"relationID"`
+	Constraints     []QueryConstraintStruct `json:"constraints"`
+}
+
+// QueryConstraintStruct holds the information of the constraint
+// Constraint datatypes
+//     string     MatchTypes: exact/contains/startswith/endswith
+//     int   MatchTypes: GT/LT/EQ
+//     bool     MatchTypes: EQ/NEQ
+type QueryConstraintStruct struct {
+	Attribute string `json:"attribute"`
+	Value     string `json:"value"`
+	DataType  string `json:"dataType"`
+	MatchType string `json:"matchType"`
+	InID      int    `json:"inID"`
+	InType    string `json:"inType"`
+}
+
+// QueryMLStruct holds info for machinelearning
+type QueryMLStruct struct {
+	Queuename  string
+	Parameters []string
 }
 
 // QueryModifierStruct encapsulates a single modifier with its corresponding constraints
@@ -43,18 +84,68 @@ type QueryModifierStruct struct {
 
 // QuerySearchDepthStruct holds the range of traversals for the relation
 type QuerySearchDepthStruct struct {
-	Min int
-	Max int
+	Min int `json:"min"`
+	Max int `json:"max"`
 }
 
-// QueryConstraintStruct holds the information of the constraint
-// Constraint datatypes
-// 	string     MatchTypes: exact/contains/startswith/endswith
-// 	int   MatchTypes: GT/LT/EQ
-// 	bool     MatchTypes: EQ/NEQ
-type QueryConstraintStruct struct {
-	Attribute string
-	Value     string
-	DataType  string
-	MatchType string
+// FindE finds the entity with a specified ID in an IncomingQueryJSON struct
+func (JSONQuery IncomingQueryJSON) FindE(qID int) *QueryEntityStruct {
+	for _, part := range JSONQuery.Entities {
+		if part.ID == qID {
+			return &part
+		}
+	}
+	return nil
+}
+
+// FindR finds the relation with a specified ID in an IncomingQueryJSON struct
+func (JSONQuery IncomingQueryJSON) FindR(qID int) *QueryRelationStruct {
+	for _, part := range JSONQuery.Relations {
+		if part.ID == qID {
+			return &part
+		}
+	}
+	return nil
+}
+
+// FindG finds the groupBy with a specified ID in an IncomingQueryJSON struct
+func (JSONQuery IncomingQueryJSON) FindG(qID int) *QueryGroupByStruct {
+	for _, part := range JSONQuery.GroupBys {
+		if part.ID == qID {
+			return &part
+		}
+	}
+	return nil
+}
+
+// QueryPart is a struct containing a part of the query and a list of dependencies on which this part of the query depends
+type QueryPart struct {
+	QType        string     // E.g. whether it is a relation or a groupBy
+	QID          int        // ID of said relation/gb
+	PartID       int        // Custom ID used for dependency
+	Dependencies []int      // List of partID's that need to come before
+	NestedPart   *QueryPart // Pointer to another part, used in some cases to avoid cycles
+}
+
+// Query is a list of (possibly unordered) queryparts
+type Query []QueryPart
+
+// Find retrieves a QueryPart based on the query's specifications
+func (q Query) Find(qID int, qType string) *QueryPart {
+	for i := range q {
+		if q[i].QID == qID && q[i].QType == qType {
+			return &q[i]
+		}
+	}
+	return nil
+}
+
+// SelectByID retrieves a QueryPart based on its PartID
+func (q Query) SelectByID(ID int) *QueryPart {
+	for i := range q {
+		if q[i].PartID == ID {
+			return &q[i]
+		}
+	}
+	return nil
 }
diff --git a/go.mod b/go.mod
index 928390fa583a1088a3fb71b1c6010eeb7bbcce37..affb71a91296de6da4964e8a97049d3012de5d1f 100644
--- a/go.mod
+++ b/go.mod
@@ -1,4 +1,4 @@
-module git.science.uu.nl/datastrophe/query-conversion
+module git.science.uu.nl/graphpolaris/query-conversion
 
 go 1.16
 
diff --git a/interface.go b/interface.go
index 3010a43207c4b0e676295e2cb7ac6bcec46a8512..2d7a76a82e7c27fc1ed327185b936ff33e38d4c8 100644
--- a/interface.go
+++ b/interface.go
@@ -1,6 +1,6 @@
 package query
 
-import "git.science.uu.nl/datastrophe/query-conversion/entity"
+import "git.science.uu.nl/graphpolaris/query-conversion/entity"
 
 // A Converter converts an incoming message in our JSON format to a format like AQL or Cypher
 type Converter interface {
diff --git a/main/main.go b/main/main.go
index 204395ac15bd59e116505e1e9185e193bee4f0f0..82d24ee2943c9a682d2e6751d946286bb4dcf54e 100644
--- a/main/main.go
+++ b/main/main.go
@@ -9,8 +9,8 @@ import (
 	"encoding/json"
 	"log"
 
-	"git.science.uu.nl/datastrophe/query-conversion/aql"
-	"git.science.uu.nl/datastrophe/query-conversion/entity"
+	"git.science.uu.nl/graphpolaris/query-conversion/aql"
+	"git.science.uu.nl/graphpolaris/query-conversion/entity"
 )
 
 /*
diff --git a/main/node_modules/.yarn-integrity b/main/node_modules/.yarn-integrity
deleted file mode 100644
index 1a3aded6105fb67caa747e15081dd1543f4cb74b..0000000000000000000000000000000000000000
--- a/main/node_modules/.yarn-integrity
+++ /dev/null
@@ -1,10 +0,0 @@
-{
-  "systemParams": "win32-x64-72",
-  "modulesFolders": [],
-  "flags": [],
-  "linkedModules": [],
-  "topLevelPatterns": [],
-  "lockfileEntries": {},
-  "files": [],
-  "artifacts": {}
-}
\ No newline at end of file
diff --git a/main/yarn.lock b/main/yarn.lock
deleted file mode 100644
index fb57ccd13afbd082ad82051c2ffebef4840661ec..0000000000000000000000000000000000000000
--- a/main/yarn.lock
+++ /dev/null
@@ -1,4 +0,0 @@
-# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY.
-# yarn lockfile v1
-
-
diff --git a/unmarshalJSON.go b/unmarshalJSON.go
index 0cac26fd2b8ccd58672e491338654ad54f0cb9a2..3be9df409acd5bcb5f56ba21c3a8dd5510f05ab4 100644
--- a/unmarshalJSON.go
+++ b/unmarshalJSON.go
@@ -3,7 +3,7 @@ package query
 import (
 	"encoding/json"
 
-	"git.science.uu.nl/datastrophe/query-conversion/entity"
+	"git.science.uu.nl/graphpolaris/query-conversion/entity"
 )
 
 /*