-
Notifications
You must be signed in to change notification settings - Fork 0
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Merge remote-tracking branch 'refs/remotes/origin/main' into release/…
…sk-learn1 # Conflicts: # .idea/workspace.xml # Dockerfile # source/handlers/biclustering.py # source/handlers/clustering.py # source/handlers/ensemble.py # source/handlers/nearestneighbours.py # source/handlers/neuralnetwork.py # source/handlers/pipeline.py # source/handlers/regression.py # source/handlers/svm.py # source/handlers/tree.py # source/handlers/xgboost.py # source/helpers/json_to_predreq.py
- Loading branch information
Showing
15 changed files
with
821 additions
and
806 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,40 @@ | ||
# Workflow: build the Docker image and push it to Docker Hub whenever one of
# the listed release branches is updated.
name: Publish Docker image

on:
  push:
    branches: [
      "release/sk-learn0.20",
      "release/sk-learn0.22",
      "release/sk-learn0.23",
      "release/sk-learn0.24",
      "release/sk-learn1",
    ]

jobs:
  push_to_registry:
    name: Push Docker image to Docker Hub
    runs-on: ubuntu-latest
    steps:
      - name: Check out the repo
        uses: actions/checkout@v4

      # Credentials come from repository secrets; never hard-code them here.
      - name: Log in to Docker Hub
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKER_USERNAME }}
          password: ${{ secrets.DOCKER_PASSWORD }}

      # Derives image tags/labels (e.g. branch name) for the push step below.
      - name: Extract metadata (tags, labels) for Docker
        id: meta
        uses: docker/metadata-action@v5
        with:
          images: upcintua/legacy-generic-python

      - name: Build and push Docker image
        uses: docker/build-push-action@v5
        with:
          context: .
          file: ./Dockerfile
          push: true
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}
Large diffs are not rendered by default.
Oops, something went wrong.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -1,16 +1,18 @@ | ||
FROM python:3.7

RUN pip install --upgrade pip
RUN pip install fastapi uvicorn
RUN pip install tornado==4.2
RUN pip install numpy
# RUN pip install xgboost==1.3.3
# NOTE: only one scikit-learn pin — installing 0.20.0 first and then 0.20.4
# (as a previous revision did) just wastes a layer; the later pin wins.
RUN pip install scikit-learn==0.20.4
RUN pip install pandas
RUN pip install xgboost

# Expose the ports we're interested in
EXPOSE 8002

# Ship the application under /generic-python — the path the CMD below runs.
# (A merge had left a second, dead copy under /app plus a shadowed CMD;
# only the last CMD in a Dockerfile takes effect, so that copy was unused.)
ADD source /generic-python/source
ADD application.py /generic-python/application.py

CMD ["python", "/generic-python/application.py"]
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -1,2 +1,5 @@ | ||
# generic-python | ||
|
||
The old Python inference repository, which is no longer in active use.
|
||
Some images are still built from this repository to support old models.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -1,59 +1,60 @@ | ||
# from tornado import httpserver | ||
# from tornado import gen | ||
# from tornado.ioloop import IOLoop | ||
# import tornado.web | ||
# from tornado.escape import json_decode, json_encode | ||
# from ..entities.prediction_request import PredictionRequest | ||
# from ..entities.dataset import Dataset | ||
# from ..entities.dataentry import DataEntry | ||
# from ..helpers import model_decoder, json_to_predreq | ||
# from ..helpers import doa_calc | ||
# import numpy as np | ||
# | ||
# | ||
# class BiclusteringModelHandler(tornado.web.RequestHandler): | ||
# # @tornado.asynchronous | ||
# # @gen.engine | ||
# def post(self): | ||
# # print(self.request.body) | ||
# json_request = json_decode(self.request.body) | ||
# pred_request = PredictionRequest(json_request['dataset'], json_request['rawModel'], json_request['additionalInfo']) | ||
# predFeatures = pred_request.additionalInfo['predictedFeatures'] | ||
# rawModel = pred_request.rawModel[0] | ||
# model = model_decoder.decode(rawModel) | ||
# dataEntryAll = json_to_predreq.decode(self.request) | ||
# doaM = [] | ||
# try: | ||
# doaM = json_request['doaMatrix'] | ||
# except KeyError: | ||
# pass | ||
# a = None | ||
# if type(doaM).__name__ != 'NoneType' and len(doaM) > 0: | ||
# doaMnp = np.asarray(doaM) | ||
# a = doa_calc.calc_doa(doaMnp, dataEntryAll) | ||
# predictions = model.predict(dataEntryAll) | ||
# preds = [] | ||
# j = 0 | ||
# for i in list(predFeatures.values()): | ||
# for pred in predictions: | ||
# if np.issubdtype(type(predictions[j]), int): | ||
# fPred = {i: int(predictions[j])} | ||
# if a is not None: | ||
# for key, value in a[j].items(): | ||
# fPred[key] = value | ||
# preds.append(fPred) | ||
# if np.issubdtype(type(predictions[j]), float): | ||
# fPred = {i: float(predictions[j])} | ||
# if a is not None: | ||
# for key, value in a[j].items(): | ||
# fPred[key] = value | ||
# preds.append(fPred) | ||
# if np.issubdtype(type(predictions[j]), str): | ||
# fPred = {i: predictions[j]} | ||
# if a is not None: | ||
# for key, value in a[j].items(): | ||
# fPred[key] = value | ||
# preds.append(fPred) | ||
# j += 1 | ||
# finalAll = {"predictions": preds} | ||
# self.write(json_encode(finalAll)) | ||
from tornado import httpserver | ||
from tornado import gen | ||
from tornado.ioloop import IOLoop | ||
import tornado.web | ||
from tornado.escape import json_decode, json_encode | ||
from ..entities.prediction_request import PredictionRequest | ||
from ..entities.dataset import Dataset | ||
from ..entities.dataentry import DataEntry | ||
from ..helpers import model_decoder, json_to_predreq | ||
from ..helpers import doa_calc | ||
import numpy as np | ||
|
||
|
||
class BiclusteringModelHandler(tornado.web.RequestHandler):
    """Score a pickled biclustering model against a JSON prediction request.

    POST body (JSON) is expected to contain 'dataset', 'rawModel' and
    'additionalInfo' (with a 'predictedFeatures' mapping), and optionally a
    'doaMatrix' used for domain-of-applicability scoring.  The response is
    a JSON object of the form {"predictions": [...]}.
    """

    # @tornado.asynchronous
    # @gen.engine
    def post(self):
        json_request = json_decode(self.request.body)
        pred_request = PredictionRequest(
            json_request['dataset'],
            json_request['rawModel'],
            json_request['additionalInfo'],
        )
        predFeatures = pred_request.additionalInfo['predictedFeatures']
        # The serialized model travels in rawModel[0]; decode it back
        # into a fitted estimator.
        model = model_decoder.decode(pred_request.rawModel[0])
        dataEntryAll = json_to_predreq.decode(self.request)

        # Optional domain-of-applicability matrix; absent key means no DOA.
        doaM = json_request.get('doaMatrix', [])
        a = None
        if doaM is not None and len(doaM) > 0:
            a = doa_calc.calc_doa(np.asarray(doaM), dataEntryAll)

        predictions = model.predict(dataEntryAll)
        preds = []
        j = 0  # shared index across the feature loop below
        for i in list(predFeatures.values()):
            for _ in predictions:
                if j >= len(predictions):
                    # Guard: with more than one predicted feature the shared
                    # index would run past the end of `predictions` and the
                    # original code raised IndexError here.
                    break
                value = predictions[j]
                # Coerce numpy scalars to plain JSON-serializable types.
                if np.issubdtype(type(value), int):
                    fPred = {i: int(value)}
                elif np.issubdtype(type(value), float):
                    fPred = {i: float(value)}
                elif np.issubdtype(type(value), str):
                    fPred = {i: value}
                else:
                    # Unsupported dtype: skip it, as the original code did.
                    j += 1
                    continue
                if a is not None:
                    # Attach the DOA entries computed for this row.
                    for key, val in a[j].items():
                        fPred[key] = val
                preds.append(fPred)
                j += 1

        finalAll = {"predictions": preds}
        self.set_header("Content-Type", "application/json")
        self.write(json_encode(finalAll))
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -1,59 +1,60 @@ | ||
# from tornado import httpserver | ||
# from tornado import gen | ||
# from tornado.ioloop import IOLoop | ||
# import tornado.web | ||
# from tornado.escape import json_decode, json_encode | ||
# from ..entities.prediction_request import PredictionRequest | ||
# from ..entities.dataset import Dataset | ||
# from ..entities.dataentry import DataEntry | ||
# from ..helpers import model_decoder, json_to_predreq, doa_calc | ||
# | ||
# import numpy as np | ||
# | ||
# | ||
# class ClusteringModelHandler(tornado.web.RequestHandler): | ||
# # @tornado.asynchronous | ||
# # @gen.engine | ||
# def post(self): | ||
# # print(self.request.body) | ||
# json_request = json_decode(self.request.body) | ||
# pred_request = PredictionRequest(json_request['dataset'], json_request['rawModel'], json_request['additionalInfo']) | ||
# predFeatures = pred_request.additionalInfo['predictedFeatures'] | ||
# rawModel = pred_request.rawModel[0] | ||
# model = model_decoder.decode(rawModel) | ||
# dataEntryAll = json_to_predreq.decode(self.request) | ||
# doaM = [] | ||
# try: | ||
# doaM = json_request['doaMatrix'] | ||
# except KeyError: | ||
# pass | ||
# a = None | ||
# if type(doaM).__name__ != 'NoneType' and len(doaM) > 0: | ||
# doaMnp = np.asarray(doaM) | ||
# a = doa_calc.calc_doa(doaMnp, dataEntryAll) | ||
# predictions = model.predict(dataEntryAll) | ||
# preds = [] | ||
# j = 0 | ||
# for i in list(predFeatures.values()): | ||
# for pred in predictions: | ||
# if np.issubdtype(type(predictions[j]), int): | ||
# fPred = {i: int(predictions[j])} | ||
# if a is not None: | ||
# for key, value in a[j].items(): | ||
# fPred[key] = value | ||
# preds.append(fPred) | ||
# if np.issubdtype(type(predictions[j]), float): | ||
# fPred = {i: float(predictions[j])} | ||
# if a is not None: | ||
# for key, value in a[j].items(): | ||
# fPred[key] = value | ||
# preds.append(fPred) | ||
# if np.issubdtype(type(predictions[j]), str): | ||
# fPred = {i: predictions[j]} | ||
# if a is not None: | ||
# for key, value in a[j].items(): | ||
# fPred[key] = value | ||
# preds.append(fPred) | ||
# j += 1 | ||
# finalAll = {"predictions": preds} | ||
# self.write(json_encode(finalAll)) | ||
from tornado import httpserver | ||
from tornado import gen | ||
from tornado.ioloop import IOLoop | ||
import tornado.web | ||
from tornado.escape import json_decode, json_encode | ||
from ..entities.prediction_request import PredictionRequest | ||
from ..entities.dataset import Dataset | ||
from ..entities.dataentry import DataEntry | ||
from ..helpers import model_decoder, json_to_predreq, doa_calc | ||
|
||
import numpy as np | ||
|
||
|
||
class ClusteringModelHandler(tornado.web.RequestHandler):
    """Score a pickled clustering model against a JSON prediction request.

    POST body (JSON) is expected to contain 'dataset', 'rawModel' and
    'additionalInfo' (with a 'predictedFeatures' mapping), and optionally a
    'doaMatrix' used for domain-of-applicability scoring.  The response is
    a JSON object of the form {"predictions": [...]}.
    """

    # @tornado.asynchronous
    # @gen.engine
    def post(self):
        json_request = json_decode(self.request.body)
        pred_request = PredictionRequest(
            json_request['dataset'],
            json_request['rawModel'],
            json_request['additionalInfo'],
        )
        predFeatures = pred_request.additionalInfo['predictedFeatures']
        # The serialized model travels in rawModel[0]; decode it back
        # into a fitted estimator.
        model = model_decoder.decode(pred_request.rawModel[0])
        dataEntryAll = json_to_predreq.decode(self.request)

        # Optional domain-of-applicability matrix; absent key means no DOA.
        doaM = json_request.get('doaMatrix', [])
        a = None
        if doaM is not None and len(doaM) > 0:
            a = doa_calc.calc_doa(np.asarray(doaM), dataEntryAll)

        predictions = model.predict(dataEntryAll)
        preds = []
        j = 0  # shared index across the feature loop below
        for i in list(predFeatures.values()):
            for _ in predictions:
                if j >= len(predictions):
                    # Guard: with more than one predicted feature the shared
                    # index would run past the end of `predictions` and the
                    # original code raised IndexError here.
                    break
                value = predictions[j]
                # Coerce numpy scalars to plain JSON-serializable types.
                if np.issubdtype(type(value), int):
                    fPred = {i: int(value)}
                elif np.issubdtype(type(value), float):
                    fPred = {i: float(value)}
                elif np.issubdtype(type(value), str):
                    fPred = {i: value}
                else:
                    # Unsupported dtype: skip it, as the original code did.
                    j += 1
                    continue
                if a is not None:
                    # Attach the DOA entries computed for this row.
                    for key, val in a[j].items():
                        fPred[key] = val
                preds.append(fPred)
                j += 1

        finalAll = {"predictions": preds}
        self.set_header("Content-Type", "application/json")
        self.write(json_encode(finalAll))
Oops, something went wrong.