# centrality.py
##
# This program has been developed by students from the bachelor Computer Science at Utrecht University within the Software Project course.
# © Copyright Utrecht University (Department of Information and Computing Sciences)
##

#!/usr/bin/env python
import os
import sys
import inspect
import networkx as nx
import json

# Importing in Python is rather complicated when attempting to adhere to a clean code architecture.
# These few lines set the import paths so Docker can actually build the images without having to resort to copy-pasting.
currentdir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
parentdir = os.path.dirname(currentdir)
sys.path.insert(0, parentdir)
from MLBasicFunctions import addNodeMetaData, buildGraph
from MLRepositoryInterface import MLServerInterface

# We often compare against this specific string, so we clearly define it once to prevent any typos.
# Python has no real constants, so we simply give it a very obvious name that implies it is not supposed to be changed.
# NOTE: the actual queue name was missing from this snippet; "centrality" is an assumed placeholder value.
CONST_MLQUEUENAME = "centrality"

##
# MLServer implements the MLServerInterface interface
##
class MLServer(MLServerInterface):

    ##
    # __init__ initialises the MLServer with the proper parameters
    #   self: Self@MLServer, the MLServer implementation
    #   Return: None, return nothing
    ##
    def __init__(self):
        # Fill in the parameters for communication with the algorithm here
        self.parameters = {}

        self.this = {}
        self.this[CONST_MLQUEUENAME] = self.parameters

    ##
    # decodeMessage builds a NetworkX graph based on the incoming query data
    #   self: Self@MLServer, the MLServer implementation
    #   incomingQueryData: Any, the incoming query data in JSON format
    #   Return: Graph, the NetworkX graph
    ##
    def decodeMessage(self, incomingQueryData):
        graph = buildGraph(incomingQueryData)
        return graph

    ##
    # handleJSON takes an incoming message and applies the machine learning algorithm and data transformations
    #   self: Self@MLServer, the MLServer implementation 
    #   body: Any, the body of the incoming RabbitMQ message
    #   Return: str, a formatted JSON string of the query result after the application of a machine learning algorithm
    ##
    def handleJSON(self, body):
        # Decode the incoming RabbitMQ message
        incomingQueryData = json.loads(body.decode())
        # Log the queryID and type
        print(incomingQueryData["queryID"])
        print(incomingQueryData["type"])
        # Decode the incoming query data into a NetworkX graph
        G = self.decodeMessage(incomingQueryData)
        # This is where the specific algorithm is actually called
        # NOTE: the exact centrality measure was not included in this snippet; degree centrality is used here as an assumed example
        mlresult = nx.degree_centrality(G)
        # The centrality results are added to the nodes of the query data
        result = addNodeMetaData(incomingQueryData, mlresult)
        # Transforms the result (a dictionary) into JSON
        return json.dumps(result, indent=4)
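

##
# Usage sketch (not part of the original file): a minimal example of how handleJSON could be
# hooked up as a RabbitMQ consumer callback using the pika client. The host name "rabbitmq",
# the reply handling, and the use of CONST_MLQUEUENAME as the queue name are assumptions;
# the actual wiring is expected to live behind MLServerInterface.
##
if __name__ == "__main__":
    import pika

    server = MLServer()

    # Connect to the broker and make sure the queue exists (host is an assumption)
    connection = pika.BlockingConnection(pika.ConnectionParameters(host="rabbitmq"))
    channel = connection.channel()
    channel.queue_declare(queue=CONST_MLQUEUENAME)

    def onMessage(ch, method, properties, body):
        # handleJSON returns the query result as a formatted JSON string
        response = server.handleJSON(body)
        # Publish the result back to the reply queue if the sender provided one
        if properties.reply_to is not None:
            ch.basic_publish(exchange="", routing_key=properties.reply_to, body=response)

    channel.basic_consume(queue=CONST_MLQUEUENAME, on_message_callback=onMessage, auto_ack=True)
    channel.start_consuming()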