Commit aea9dde7 authored by Yuncong Yu

Update backend for TP's data in IAV

- Update the backend for TP's data;
- Change the time format from datetime to seconds (see the sketch below);
- Add unit tests for some backend functions;
- Apply the IAV color scheme in the frontend.
parent 291d8bec
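A minimal sketch of the datetime-to-seconds change mentioned in the commit message, assuming the time index arrives as pandas datetimes. The actual conversion inside preprocessing.read_egr_data() is not shown in this diff; the function name and values below are illustrative only.

# Illustrative only: convert a datetime index to seconds relative to the first sample.
# The real conversion in preprocessing.read_egr_data() is not part of this diff.
import numpy as np
import pandas as pd

def datetime_index_to_seconds(index: pd.DatetimeIndex) -> np.ndarray:
    """Return the index as float seconds since its first timestamp."""
    return (index - index[0]).total_seconds().to_numpy()

idx = pd.date_range("2021-01-01", periods=4, freq="6min")
print(datetime_index_to_seconds(idx))  # [   0.  360.  720. 1080.]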
@@ -41,8 +41,8 @@
}
.correct-selected {
background-color: #4caf50
background-color: #91c60e /*#4caf50*/
}
.neutral-selected { background-color: #ffa300
.neutral-selected { background-color: #ffc600 /*#ffa300*/
}
.incorrect-selected { background-color: #f44336 }
.incorrect-selected { background-color: #ce0037 /*#f44336*/ }
@@ -84,7 +84,7 @@ mat-tab-group {
.channel_header {
height: 18px;
background-color: lightblue;
background-color: #14639e /*lightblue*/;
border-bottom: 1px solid lightgray;
color: white;
text-align: center;
......
@@ -108,7 +108,7 @@ path {
}
.channel_header {
background-color: lightblue;
background-color: #14639e /*lightblue*/;
border-bottom: 1px solid lightgray;
color: white;
text-align: center;
......
@@ -147,11 +147,11 @@ export class OverviewWindowComponent implements OnInit {
scaleX: {
zooming: true,
'min-value': channel.index[0],
step: "6minute",
transform: {
type: "date",
all: "%m/%d/%Y<br>%h:%i"
},
// step: "6minute",
// transform: {
// type: "date",
// all: "%m/%d/%Y<br>%h:%i"
// },
tick: {
visible: index === 0,
},
@@ -249,11 +249,11 @@ export class OverviewWindowComponent implements OnInit {
scaleX: {
zooming: true,
'min-value': channel.index[0],
step: "6minute",
transform: {
type: "date",
all: "%m/%d/%Y<br>%h:%i"
},
// step: "6minute",
// transform: {
// type: "date",
// all: "%m/%d/%Y<br>%h:%i"
// },
tick: {
visible: index === 0,
},
......
@@ -5,11 +5,11 @@
.train-button:hover {
background-color: white;
color: black;
border: 1px solid #4CAF50;
border: 3px solid #14639e /*#4CAF50*/;
}
.train-button {
background-color: #4CAF50;
background-color: #14639e /*#4CAF50*/;
color: white;
}
@@ -21,7 +21,7 @@
}
button {
background-color: #4CAF50;
background-color: #14639e /*#4CAF50*/;
border: none;
color: white;
padding: 10px 30px;
@@ -29,6 +29,7 @@ button {
text-decoration: none;
display: inline-block;
font-size: 16px;
font-weight: bold;
margin: 4px 2px;
transition-duration: 0.4s;
cursor: pointer;
......
from flask import Flask, request
import logging
import os
from pathlib import Path
from time import time, perf_counter
from typing import Any, Dict, List
import numpy as np
from flask_cors import CORS
from time import time
import orjson
import os.path
import pseudo
from flask import Flask, request
from flask_cors import CORS
import preprocessing
import pseudo
data_path = 'data/processed-data.npy'
# Config
path_preprocessed_data_npy = 'data/processed-data.npy'
reload = False
logging.basicConfig(level=logging.INFO)
app = Flask(__name__)
CORS(app)
@app.route('/', methods=['GET'])
def index():
return "hi"
"""
Returns raw data
Output: [{
index: 1d array [x]
values: 1d array [x]
}]
"""
@app.route('/read-data', methods=['GET'])
def read_data():
t0 = time()
response = preprocessing.read_weather_data()
"""
Load raw data.
Returns
-------
response : List[Dict[str, np.ndarray]]
Loaded data with the following interface
{
index: 1d array [x]
values: 1d array [x]
name: str
}[]
"""
logging.info('Loading data ...')
time_start = perf_counter()
# response = preprocessing.read_weather_data()
response = preprocessing.read_egr_data()
response = orjson.dumps(response)
print('Data read: ' + str(time()-t0))
logging.info(f'Completed loading data in {perf_counter() - time_start:.2f} second(s).')
return response
"""
Creates windows
@app.route('/create-windows', methods=['POST'])
def create_windows():
"""
Creates windows to transform the local pattern search problem into a time series indexing problem.
Input: {
Returns
-------
'1'
Use dynamic input from the request with the interface
{
parameters: {
windowsize: int
}
}
}
Output: '1'
"""
@app.route('/create-windows', methods=['POST'])
def create_windows():
t0 = time()
if (not os.path.isfile(data_path)):
"""
logging.info('Creating windows ...')
time_start = perf_counter()
if not Path(path_preprocessed_data_npy).is_file():
raw_data = request.json
window_size = int(raw_data['parameters']["windowsize"])
preprocessing.create_eeg_windows(window_size, 5)
print('Windows created: ' + str(time()-t0))
return '1'
# preprocessing.create_eeg_windows(window_size, 5)
preprocessing.create_egr_windows(window_size)
logging.info(f'Completed creating windows in {perf_counter() - time_start:.2f} second(s).')
"""
Does first iteration of LSH and returns a bunch of useful information
return '1'
Input: {
query: 2d array [d][t]
}
Output: {
@app.route('/initialize', methods=['POST'])
def initialize():
"""
Conduct the initial LSH.
Returns
-------
response : Dict[str, Any]
Response with the interface
{
hash_functions: 3d array [k][l][d]
candidates: 3d array [k][l][i]
distances: 3d array [k][l][i]
@@ -79,57 +111,73 @@ Output: {
}
samples: 1d array
parameters: 1d array
}
"""
@app.route('/initialize', methods=['POST'])
def initialize():
t0 = time()
}
Use dynamic input from the request with the interface
{
query: 2d array [d][t]
}
"""
logging.info("Starting the initial LSH ...")
time_start = perf_counter()
# Read windows
raw_data = orjson.loads(request.data)
data = np.load(data_path)
data = np.swapaxes(data, 1, 2)
data_windowized = np.load(path_preprocessed_data_npy)
data_windowized = np.swapaxes(data_windowized, 1, 2) # Use a column vector for each channel
# Read the query
query = raw_data["query"]
query = np.swapaxes(query, 0, 1)
# parameters = np.load('parameters.npy')
lsh_data = pseudo.lsh(data, query)
# Run the initial LSH
logging.info("Executing the initial LSH ...")
lsh_data = pseudo.lsh(data_windowized, query)
response = orjson.dumps(lsh_data)
print('LSH done: ' + str(time()-t0))
logging.info(f'Completed the initial LSH in {perf_counter() - time_start:.2f} second(s).')
return response
"""
Calculates LSH parameters based on the dataset
Input: {
@app.route('/get-lsh-parameters', methods=['POST'])
def get_lsh_parameters():
"""
Calculates LSH parameters based on the dataset
Input: {
windowsize: int
}
}
Output: {
Output: {
parameters: 1d array [3]
}
"""
@app.route('/get-lsh-parameters', methods=['POST'])
def get_lsh_parameters():
}
"""
t0 = time()
raw_data = orjson.loads(request.data)
window_size = raw_data["windowsize"]
data = np.load(data_path)
data = np.load(path_preprocessed_data_npy)
data = np.swapaxes(data, 1, 2)
parameters = pseudo.get_lsh_parameters(data, window_size)
response = orjson.dumps(parameters)
print('Parameter calculation done: ' + str(time()-t0))
print('Parameter calculation done: ' + str(time() - t0))
return response
"""
Does LSH and returns a bunch of useful information
@app.route('/update', methods=['POST'])
def update():
"""
Does LSH and returns a bunch of useful information
Input: {
Input: {
query: 2d array [d][t]
}
}
Output: {
Output: {
hash_functions: 3d array [k][l][d]
candidates: 3d array [k][l][i]
distances: 3d array [k][l][i]
@@ -142,13 +190,11 @@ Output: {
bucket: 1d array
}
samples: 1d array
}
"""
@app.route('/update', methods=['POST'])
def update():
}
"""
t0 = time()
raw_data = orjson.loads(request.data)
data = np.load(data_path)
data = np.load(path_preprocessed_data_npy)
data = np.swapaxes(data, 1, 2)
query = raw_data["query"]
query = np.swapaxes(query, 0, 1)
@@ -158,30 +204,30 @@ def update():
lsh_data = pseudo.lsh(data, query, parameters=parameters, weights=weights)
response = orjson.dumps(lsh_data)
print('LSH done: ' + str(time()-t0))
print('LSH done: ' + str(time() - t0))
return response
"""
Calculates new weights for LSH algorithm
@app.route('/weights', methods=['POST'])
def weights():
"""
Calculates new weights for the LSH algorithm
Input: {
Input: {
labels: 1d array [?]
hash_functions: 2d array [?][d]
query: 2d array [d][t]
weights: 1d array [d]
}
}
Output: 1d array [d]
"""
@app.route('/weights', methods=['POST'])
def weights():
Output: 1d array [d]
"""
raw_data = orjson.loads(request.data)
labels = raw_data["labels"]
hash_functions = raw_data["hash_functions"]
query = raw_data["query"]
old_weights = raw_data["weights"]
data = np.load(data_path)
data = np.load(path_preprocessed_data_npy)
new_weights = pseudo.weights(data, query, old_weights, labels, hash_functions)
@@ -189,79 +235,85 @@ def weights():
return response
"""
Calculates query based on given indices
@app.route('/query', methods=['POST'])
def query():
"""
Calculate the query based on the given indices.
Input: {
Input: {
indices: 1d array [?]
}
}
Output: 2d array [d][t]
"""
logging.info("Preparing the query ...")
time_start = perf_counter()
Output: 2d array [d][t]
"""
@app.route('/query', methods=['POST'])
def query():
t0 = time()
raw_data = orjson.loads(request.data)
print(raw_data)
# print(raw_data)
start_index = raw_data['start_index']
query_size = raw_data['query_size']
window_indices = raw_data['indices']
if start_index is not None:
preprocessing.create_weather_windows(query_size)
# preprocessing.create_weather_windows(query_size)
preprocessing.create_egr_windows(query_size)
window_indices = int(start_index)
data = np.load(data_path)
response = pseudo.query(data, window_indices)
data_windowized = np.load(path_preprocessed_data_npy)
response = pseudo.query(data_windowized, window_indices)
response = orjson.dumps(response)
print("Query done: " + str(time() - t0))
logging.info(f"Completed preparing the query with {time() - time_start}.")
return response
"""
Returns values of windows on given indices
@app.route('/window', methods=['POST'])
def window():
"""
Returns the values of the windows at the given indices
Input: {
Input: {
indices: 1d array [x]
}
}
Output: 3d array [x][d][t]
"""
@app.route('/window', methods=['POST'])
def window():
Output: 3d array [x][d][t]
"""
t0 = time()
raw_data = orjson.loads(request.data)
indices = raw_data['indices']
output = np.load(data_path)[indices]
output = np.load(path_preprocessed_data_npy)[indices]
response = orjson.dumps(output.tolist())
print("Window(s) done: " + str(time() - t0))
return response
"""
Returns additional information on given table
@app.route('/table-info', methods=['POST'])
def table_info():
"""
Returns additional information on the given table
Input: {
Input: {
table: 2d array [x][?]
}
}
Output: {
Output: {
prototypes: {
average: 1d array [t]
max: 1d array [t]
min: 1d array [t]
}
distances: 2d array [x][x]
}
"""
@app.route('/table-info', methods=['POST'])
def table_info():
}
"""
t0 = time()
raw_data = orjson.loads(request.data)
table = raw_data['table']
data = np.load(data_path)
data = np.load(path_preprocessed_data_npy)
response = pseudo.table_info(data, table)
......
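As a usage sketch of the updated endpoints above: the calls below assume the Flask dev server is running on localhost:5000; only the endpoint paths and field names come from app.py, while the host, port, and payload values are assumptions for illustration.

# Hypothetical client calls against the backend above; host/port and payload values are assumed.
import requests

BASE = "http://localhost:5000"

# Create windows over the preprocessed data (the window size is an example value).
requests.post(f"{BASE}/create-windows", json={"parameters": {"windowsize": 120}})

# Build a query from a start index, then run the initial LSH on it.
query = requests.post(f"{BASE}/query",
                      json={"start_index": 0, "query_size": 120, "indices": None}).json()
lsh_data = requests.post(f"{BASE}/initialize", json={"query": query}).json()
print(lsh_data.keys())  # hash_functions, candidates, distances, ...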
import numpy as np
import pandas as pd
import logging
import os.path
from pathlib import Path
from typing import Union
# from libs import bigwig
# import bbi
import dask.dataframe as dd
import os.path
from sklearn import preprocessing
import numpy as np
import pandas as pd
import tables
from sklearn.preprocessing import minmax_scale
data_path = 'data/processed-data.npy'
logging.basicConfig(level=logging.INFO)
data_path = "data/processed-data.npy"
def read_data():
size = bbi.chromsizes('test.bigWig')['chr1']
size = bbi.chromsizes("test.bigWig")["chr1"]
bins = 100000
data = bigwig.get('data/test.bigWig', 'chr1', 0, size, bins)
data = bigwig.get("data/test.bigWig", "chr1", 0, size, bins)
print(data.shape)
response = [
{
"index": list(range(0, size, int(size/(bins)))),
"values": data.tolist()
},
{
"index": list(range(0, size, int(size / (bins)))),
"values": data.tolist()
},
{
"index": list(range(0, size, int(size / (bins)))),
"values": data.tolist()
}
{"index": list(range(0, size, int(size / (bins)))), "values": data.tolist()},
{"index": list(range(0, size, int(size / (bins)))), "values": data.tolist()},
{"index": list(range(0, size, int(size / (bins)))), "values": data.tolist()},
]
return response
def create_peax_windows_12kb(window_size):
data = bigwig.chunk(
'test.bigWig',
12000,
int(12000 / window_size),
int(12000 / 6),
['chr1'],
verbose=True,
)
data = bigwig.chunk("test.bigWig", 12000, int(12000 / window_size), int(12000 / 6), ["chr1"], verbose=True,)
data = np.reshape(data, (len(data), 1, len(data[0])))
np.save(data_path, data)
return '1'
return "1"
def create_peax_windows_12kb_mts(window_size):
data = bigwig.chunk(
'test.bigWig',
12000,
int(12000 / window_size),
int(12000 / 6),
['chr1'],
verbose=True,
)
data = bigwig.chunk("test.bigWig", 12000, int(12000 / window_size), int(12000 / 6), ["chr1"], verbose=True,)
data = np.reshape(data, (len(data), 1, len(data[0])))
data2 = np.copy(data)
np.random.shuffle(data2)
@@ -61,65 +46,64 @@ def create_peax_windows_12kb_mts(window_size):
data = np.concatenate((data, data2), axis=1)
data = np.concatenate((data, data3), axis=1)
np.save(data_path, data)
return '1'
return "1"
def read_eeg_data(nr_of_channels):
response = []
datafile = 'data/21.csv'
datafile = "data/21.csv"
data = pd.read_csv(datafile, header=None)
npdata = np.array(data, dtype="float32")
del data
for i in range(4, 4+ nr_of_channels):
response.append(
{
'index': list(range(0, len(npdata), 100)),
'values': npdata[::100,i].tolist()
}
)
for i in range(4, 4 + nr_of_channels):
response.append({"index": list(range(0, len(npdata), 100)), "values": npdata[::100, i].tolist()})
print(npdata.shape)
return response
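The window arrays used by the backend have the shape [x][d][t] (windows x channels x samples per window). Below is a minimal sliding-window sketch producing that shape, as an illustration only; it is not the repository's create_egr_windows() implementation, and make_windows and the example sizes are hypothetical.

# Illustrative sliding-window construction; not the actual create_egr_windows() from this repo.
import numpy as np

def make_windows(channels: np.ndarray, window_size: int, step: int) -> np.ndarray:
    """channels: 2d array [d][n] -> windows: 3d array [x][d][window_size]."""
    d, n = channels.shape
    starts = range(0, n - window_size + 1, step)
    return np.stack([channels[:, s:s + window_size] for s in starts])

windows = make_windows(np.random.rand(3, 1000), window_size=120, step=120)
print(windows.shape)  # (8, 3, 120)
# np.save("data/processed-data.npy", windows) would match the path used in app.py.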