Linux vmi2545633.contaboserver.net 6.1.0-32-amd64 #1 SMP PREEMPT_DYNAMIC Debian 6.1.129-1 (2025-03-06) x86_64
Apache/2.4.62 (Debian)
Server IP : 127.0.0.1 & Your IP : 127.0.0.1
Domains :
Can't Read [ /etc/named.conf ]
User : www-data
Terminal
Auto Root
Create File
Create Folder
Localroot Suggester
Backdoor Destroyer
Readme
/
var /
python /
ai_endpoint /
Delete
Unzip
Name
Size
Permission
Date
Action
__pycache__
[ DIR ]
drwxr-xr-x
2025-04-08 17:06
hub
[ DIR ]
drwxr-xr-x
2023-02-18 16:09
ai_endpoint.py
6.09
KB
-rwxr-xr-x
2023-02-15 12:44
Save
Rename
# coding: utf-8
# pip install numpy scikit-learn tensorflow tensorflow_hub
# pip install flask flask-rest-jsonapi waitress
# waitress-serve --port 9999 ai-endpoint:app
"""Flask service wrapping Google's Universal Sentence Encoder v4.

On first start the model archive is downloaded from Google Storage and
unpacked into a local cache directory; later starts load straight from
the cache.  Two JSON POST endpoints are exposed:

* ``/compareSimilarities`` — rank target texts against source texts by
  cosine similarity of their embeddings.
* ``/getEmbeddings`` — return the raw embedding vectors for a list of
  strings.
"""
import os
import json

import numpy as np
from flask import Flask, request
from werkzeug.middleware.proxy_fix import ProxyFix
from sklearn.metrics.pairwise import cosine_similarity
import tensorflow_hub as hub

# Local cache directory for the downloaded model.
# (Originally downloaded from https://tfhub.dev/google/universal-sentence-encoder/4)
modulePath = r"./hub/universal-sentence-encoder_4/"


def extractTar(source, destination):
    """Extract the ``.tar.gz`` archive at *source* into *destination*.

    BUGFIX: the archive handle is now closed via a context manager even
    if extraction raises (the original leaked it on error).
    NOTE(review): ``extractall`` on an archive is vulnerable to path
    traversal for untrusted archives; the only caller fetches from a
    fixed Google Storage URL, so the risk is accepted here.
    """
    import tarfile
    with tarfile.open(source) as archive:
        archive.extractall(destination)


if not os.path.exists(modulePath):
    os.makedirs(modulePath)

# A marker file named "downloaded" records that the model archive has
# already been fetched and unpacked, so restarts skip the download.
if not os.path.exists(modulePath + "downloaded"):
    import urllib.request
    downloadedFile = modulePath + "module.tar.gz"
    print("Downloading model data, please wait.")
    urllib.request.urlretrieve(
        "https://storage.googleapis.com/tfhub-modules/google/universal-sentence-encoder/4.tar.gz",
        downloadedFile)
    extractTar(downloadedFile, modulePath)
    os.remove(downloadedFile)
    with open(modulePath + "downloaded", "w") as file:
        file.write("downloaded")

model = hub.load(modulePath)


# ---------------------------------------------------------------------------
# model processing
# ---------------------------------------------------------------------------

def processData(data):
    """Rank every target text against every source text by similarity.

    *data* must already have passed ``validateData``.  Optional keys:
    ``count`` (max matches per source, clamped to [1, len(targets)]) and
    ``minScore`` (similarity threshold, clamped to [-1, 1]).

    Returns the standard envelope from ``getResult(0)`` with a
    ``results`` list added: one entry per source, each carrying a
    ``matchings`` list of the best-scoring targets.  On any internal
    failure returns ``getResult(3)``.
    """
    try:
        targetEmbeddings = model([x["text"] for x in data["targets"]]).numpy()
        sourceEmbeddings = model([x["text"] for x in data["sources"]]).numpy()

        # Clamp the requested match count to [1, len(targets)].
        count = data.get("count", 1)
        if isinstance(count, float):
            count = int(count)
        count = max(1, min(count, len(data["targets"])))

        # Clamp the similarity threshold to [-1, 1]; the default of -1
        # disables filtering entirely.
        minScore = max(-1.0, min(1.0, float(data.get("minScore", -1))))

        results = []
        for resultIndex, sourceVector in enumerate(sourceEmbeddings):
            cosineResults = cosine_similarity([sourceVector], targetEmbeddings)
            # argpartition selects the top-`count` indices in O(n); the
            # slice below is unordered, so we sort the survivors after
            # applying the score threshold.
            ind = np.argpartition(cosineResults[0], -count)[-count:]
            sortedResults = np.array(
                [[x, cosineResults[0, x]] for x in ind
                 if cosineResults[0, x] >= minScore])

            innerResults = []
            # BUGFIX: when every candidate fell below minScore the
            # original indexed `[:, 1]` on an empty 1-D array, raised,
            # and answered code 3 instead of an empty matchings list.
            if sortedResults.size:
                order = sortedResults[:, 1].argsort()[::-1]  # best score first
                for innerIndex, (j, s) in enumerate(sortedResults[order]):
                    j = j.astype(np.int64)
                    innerResults.append({
                        **data["targets"][j],
                        "orderIndex": innerIndex,
                        "itemIndex": j,
                        "text": data["targets"][j]["text"],
                        "score": s,
                    })

            results.append({
                **data["sources"][resultIndex],
                "orderIndex": resultIndex,
                "text": data["sources"][resultIndex]["text"],
                "matchings": innerResults,
            })

        returnData = getResult(0)
        returnData["results"] = results
        return returnData
    except Exception:  # BUGFIX: was a bare `except:` swallowing SystemExit etc.
        return getResult(3)


def getEmbeddings(data):
    """Return the raw embedding vectors for a list of strings.

    *data* must already have passed ``validateData2``.  Returns the
    standard envelope with a ``results`` ndarray added (serialized by
    ``NpEncoder``); on failure returns ``getResult(3)``.
    """
    try:
        embeddings = model(data).numpy()
        returnData = getResult(0)
        returnData["results"] = embeddings
        return returnData
    except Exception:
        # BUGFIX: the original `except err:` referenced an undefined
        # name, so the handler itself raised NameError instead of
        # producing the code-3 envelope.
        return getResult(3)


# ---------------------------------------------------------------------------
# http helpers
# ---------------------------------------------------------------------------

class NpEncoder(json.JSONEncoder):
    """JSON encoder that understands numpy scalar and array types."""

    def default(self, obj):
        if isinstance(obj, np.integer):
            return int(obj)
        if isinstance(obj, np.floating):
            return float(obj)
        if isinstance(obj, np.ndarray):
            return obj.tolist()
        return super().default(obj)


# Status-code -> message table for the standard response envelope.
_RESULT_MESSAGES = {
    0: "successful",  # BUGFIX: was misspelled "successfull"
    1: "invalid data",
    2: "failed",
    3: "unexpected error",
    4: "unsupported app type",
}


def getResult(code):
    """Map a status *code* to the standard ``{code, message}`` envelope.

    Unknown codes collapse to ``{"code": 5, "message": "undefined"}``.
    """
    if code in _RESULT_MESSAGES:
        return {"code": code, "message": _RESULT_MESSAGES[code]}
    return {"code": 5, "message": "undefined"}


def validateData(data):
    """Spot-check the ``/compareSimilarities`` payload shape.

    Requires non-empty ``sources`` and ``targets`` lists of dicts with a
    string ``text`` key; optional ``count``/``minScore`` must be numeric.
    NOTE(review): like the original, only the FIRST element of each list
    is inspected; malformed later elements surface as code 3 at
    processing time.  ``type(...)`` (not isinstance) is used on purpose
    so that booleans are still rejected for count/minScore.
    """
    if "count" in data and type(data["count"]) not in (int, float):
        return False
    if "minScore" in data and type(data["minScore"]) not in (int, float):
        return False
    for key in ("sources", "targets"):
        items = data.get(key)
        if (not isinstance(items, list) or len(items) <= 0
                or type(items[0]) != dict
                or "text" not in items[0]
                or type(items[0]["text"]) != str):
            return False
    return True


def validateData2(data):
    """Spot-check the ``/getEmbeddings`` payload: non-empty list of str.

    NOTE(review): only the first element's type is inspected, matching
    the original behavior.
    """
    if type(data) != list:
        return False
    if len(data) == 0 or type(data[0]) != str:
        return False
    return True


# ---------------------------------------------------------------------------
# http app
# ---------------------------------------------------------------------------

def createApp():
    """Build the Flask app (behind a single trusted reverse proxy)."""
    app = Flask(__name__)
    app.wsgi_app = ProxyFix(app.wsgi_app, x_for=1, x_proto=1, x_host=1,
                            x_prefix=1)

    @app.route('/')
    def homePage():
        return json.dumps({**getResult(0), "status": "working"})

    @app.route('/compareSimilarities', methods=['POST'])
    def appCompareSimilarities():
        if request.headers.get('Content-Type') != 'application/json':
            return json.dumps(getResult(4))
        if not validateData(request.json):
            # CONSISTENCY: error branches now serialize like the success
            # branch (the original returned bare dicts here, giving a
            # different response body type than the happy path).
            return json.dumps(getResult(1))
        return json.dumps(processData(request.json), cls=NpEncoder)

    @app.route('/getEmbeddings', methods=['POST'])
    def appGetEmbeddings():
        if request.headers.get('Content-Type') != 'application/json':
            return json.dumps(getResult(4))
        if not validateData2(request.json):
            return json.dumps(getResult(1))
        return json.dumps(getEmbeddings(request.json), cls=NpEncoder)

    return app


if __name__ == '__main__':
    app = createApp()
    app.run()