# Hugging Face Spaces status banner (copy/paste residue, not code):
# Spaces: Paused
# --- Imports ----------------------------------------------------------------
from flask import Flask, request, render_template, send_file, jsonify
import os
from transformers import AutoTokenizer, AutoModel
import anvil.server
import pathlib
import textwrap
import google.generativeai as genai
import import_ipynb  # enables importing .ipynb modules; must precede `from library import ...`
from library import call_gpt, call_gemini
from background_service import BackgroundTaskService

# --- Module-level setup -----------------------------------------------------
# NOTE(review): hard-coded Anvil uplink key — should be read from an env var.
anvil.server.connect('PLMOIU5VCGGUOJH2XORIBWV3-ZXZVFLWX7QFIIAF4')

app = Flask(__name__)

# Content rendered on the landing page: the list of published server functions.
MESSAGED = {
    'title': 'API Server',
    'messageL': ['published server functions:', 'encode(text)',
                 'call_gemini(text,key)', 'call_gpt(text,key,model)',
                 'task_id<=launch(func_name,*args)', 'poll(task_id)'],
}

# SPECTER model used by the encode endpoints (CLS-token embeddings).
tokenizer = AutoTokenizer.from_pretrained('allenai/specter')
encoder = AutoModel.from_pretrained('allenai/specter')

# GOOGLE_API_KEY=os.getenv('GOOGLE_API_KEY')
# genai.configure(api_key=GOOGLE_API_KEY)

# Background task runner with the two LLM helpers registered by name.
service = BackgroundTaskService(max_tasks=10)
service.register(call_gpt)
service.register(call_gemini)
def launch(func_name, *args):
    """Start *func_name* in the background service and return its task id."""
    global service
    new_task_id = service.launch_task(func_name, *args)
    print(f"Task launched with ID: {new_task_id}")
    return new_task_id
def poll(task_id):
    """Return a finished task's result, or 'In Progress' / 'No such task'.

    Finished results are removed from the service's result table; values that
    are not plain JSON-ish types are stringified before being returned.
    """
    global service
    result = service.get_result(task_id)
    # Guard clauses for the two sentinel states.
    if result == 'No such task':
        return str(result)
    if result == 'In Progress':
        return str(result)
    # Task finished: consume the stored result before handing it back.
    del service.results[task_id]
    if isinstance(result, (int, float, str, list, dict, tuple)):
        return result
    # Opaque objects (e.g. exceptions) are logged and stringified.
    print(str(result))
    return str(result)
# @anvil.server.callable
# def call_gemini(text):
#     model = genai.GenerativeModel('gemini-pro')
#     response = model.generate_content(text)
#     return response.text
def encode_anvil(text):
    """Embed *text* with SPECTER and return the CLS embedding as nested lists."""
    tokens = tokenizer(text, padding=True, truncation=True,
                       return_tensors="pt", max_length=512)
    model_out = encoder(**tokens)
    # CLS token (position 0) of the last hidden state is the document vector.
    cls_vectors = model_out.last_hidden_state[:, 0, :]
    return cls_vectors.detach().numpy().tolist()
def reset_service():
    """Discard the current BackgroundTaskService and build a fresh one.

    Re-registers the two LLM helpers so launch()/poll() keep working.
    Fix: the original declared `global call_gpt, call_gemini` as well, but
    those names are imported and only read here — only `service` is rebound.
    """
    global service
    service = BackgroundTaskService(max_tasks=10)
    service.register(call_gpt)
    service.register(call_gemini)
def print_results_table():
    """Return the service's raw task-id -> result mapping."""
    # Reading a module-level global needs no `global` declaration.
    return service.results
def encode():
    """Flask view: embed request text with SPECTER.

    Accepts GET ?text=... or POST JSON {"text": ...}.
    Returns jsonify({'embedding': [[...]]}) on success, -1 on missing text.

    Fix: `text` was previously unbound (UnboundLocalError) when a POST body
    lacked a 'text' key or the method was neither GET nor POST; it is now
    initialized to None so the validation branch handles those cases.
    """
    print(request)
    text = None
    if request.method == 'GET':
        text = request.args.get('text')
    elif request.method == 'POST':
        data = request.get_json()
        if 'text' in data:
            text = data["text"]
    if text == '' or text is None:
        # NOTE(review): a bare int is not a valid Flask response object;
        # kept as-is to preserve the existing interface.
        return -1
    inputs = tokenizer(text, padding=True, truncation=True,
                       return_tensors="pt", max_length=512)
    result = encoder(**inputs)
    # CLS-token embedding, converted to plain Python lists for JSON transport.
    embeddings = result.last_hidden_state[:, 0, :]
    emb_array = embeddings.detach().numpy()
    embedding = emb_array.tolist()
    return jsonify({'embedding': embedding})
def return_file(filename):
    """Serve a file from ./data, refusing path-traversal attempts.

    Security fix: *filename* was concatenated directly onto './data/', so a
    request like '../app.py' could read files outside the data directory.
    The resolved path must now stay inside ./data.
    """
    base = os.path.abspath('./data')
    target = os.path.abspath(os.path.join(base, filename))
    if not target.startswith(base + os.sep):
        return 'INVALID', 400
    return send_file(target)
def run_script():
    """Flask view: run a shell command from the request and return its stdout.

    SECURITY(review): this executes arbitrary caller-supplied input via
    os.system — remote code execution by design. It must only ever be
    reachable by trusted callers.
    """
    script = ''
    # print(request.method)
    print(request)
    if request.method == 'GET':
        script = request.args.get('script')
        print('I am in get')
    elif request.method == 'POST':
        print('I am in post')
        data = request.get_json()
        if 'script' in data:
            script = data['script']
    if script == '' or script is None:
        return 'INVALID'
    # Stdout is redirected to a scratch file, then read back as the response.
    os.system(script + ' > ./out.txt')
    with open('./out.txt', 'r') as f:
        output = f.read()
    return output
def home():
    """Render the landing page listing the published API functions."""
    return render_template('home.html', messageD=MESSAGED)
if __name__ == '__main__':
    # Listen on all interfaces; 7860 is the conventional HF Spaces port.
    app.run(host="0.0.0.0", port=7860)