Update tests.py
tests.py
CHANGED
@@ -9,17 +9,16 @@ import openpyxl
 import shutil
 from google import genai
 import pexpect
-
-stderr=""
+
 client = genai.Client(api_key="AIzaSyDtP05TyoIy9j0uPL7_wLEhgQEE75AZQSc")
 source_dir = "/app/uploads/temp"
 destination_dir = "/app/code_interpreter"
 files_list=[]
 downloaded_files=[]
-
-
-
-
+
+from openai import OpenAI
+clienty = OpenAI(base_url="https://akiko19191-backend.hf.space/")
+
 mcp = FastMCP("code_sandbox")
 data={}
 result=""
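For reference, the openai SDK refuses to construct a client when no API key is supplied and OPENAI_API_KEY is not set in the environment, so the new clienty line relies on that variable being present. A minimal sketch with an explicit placeholder key, assuming the Space ignores its value:

    from openai import OpenAI

    # Sketch only: placeholder key so construction never depends on the environment.
    clienty = OpenAI(
        base_url="https://akiko19191-backend.hf.space/",
        api_key="not-needed",  # assumption: the proxy does not validate the key
    )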
@@ -94,19 +93,6 @@ TOKEN = "5182224145:AAEjkSlPqV-Q3rH8A9X8HfCDYYEQ44v_qy0"
 chat_id = "5075390513"
 from requests_futures.sessions import FuturesSession
 session = FuturesSession()
-from subprocess import Popen, PIPE
-import shlex
-from threading import Timer
-def run2(cmd, timeout_sec,forever_cmd):
-    global stdout
-    global stderr
-    proc = Popen(shlex.split(cmd), stdout=PIPE, stderr=PIPE,cwd="/app/code_interpreter/")
-    timer = Timer(timeout_sec, proc.kill)
-    try:
-        timer.start()
-        stdout, stderr = proc.communicate()
-    finally:
-        timer.cancel()
 
 def run(cmd, timeout_sec,forever_cmd):
     global Parent
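The deleted run2 helper enforced its timeout by arming a threading.Timer that calls proc.kill(); the standard library provides the same behaviour directly through subprocess.run's timeout argument. A sketch of an equivalent helper (run_with_timeout is hypothetical, not part of this commit):

    import shlex
    import subprocess

    def run_with_timeout(cmd, timeout_sec, cwd="/app/code_interpreter/"):
        # subprocess.run kills the child and raises TimeoutExpired once the deadline passes.
        try:
            proc = subprocess.run(shlex.split(cmd), capture_output=True, text=True,
                                  cwd=cwd, timeout=timeout_sec)
            return proc.stdout, proc.stderr
        except subprocess.TimeoutExpired as exc:
            # Partial output captured before the kill may be missing, so coerce defensively.
            return str(exc.stdout or ""), str(exc.stderr or "")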
@@ -207,14 +193,20 @@ def create_code_files(filename: str, code) -> dict:
     global destination_dir
     transfer_files()
     transfer_files2()
-    if
-
-
+    if not os.path.exists(os.path.join(destination_dir, filename)):
+
+        if isinstance(code, dict):
+            with open(os.path.join(destination_dir, filename), 'w', encoding='utf-8') as f:
+                json.dump(code, f, ensure_ascii=False, indent=4)
+        else:
+            f = open(os.path.join(destination_dir, filename), "w")
+            f.write(str(code))
+            f.close()
+        return {"info":"The referenced code files were created successfully."}
+
     else:
-
-
-        f.close()
-    return {"info":"task completed. The referenced code files were created successfully. "}
+        return {"info":"The referenced code files already exist. Please rename the file or delete the existing one."}
+
 
 @mcp.tool()
 def run_code(python_packages:str,filename: str, code: str,start_cmd:str,forever_cmd:str) -> dict:
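The non-dict branch of the new code still pairs a bare open() with an explicit close(), so an exception in f.write() would leak the handle. A sketch of the same branch logic under one context manager (write_code_file is a hypothetical helper, not in the commit):

    import json
    import os

    def write_code_file(destination_dir, filename, code):
        path = os.path.join(destination_dir, filename)
        with open(path, "w", encoding="utf-8") as f:
            if isinstance(code, dict):
                json.dump(code, f, ensure_ascii=False, indent=4)
            else:
                f.write(str(code))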
@@ -305,8 +297,8 @@ def run_shell_command(cmd:str,forever_cmd:str) -> dict:
     the `forever_cmd` parameter.
     Important Environment Notes:
     - The execution environment is **Alpine Linux**. Commands should be
-      compatible
-    - `sudo`
+      compatible.
+    - `sudo` commands are restricted for security reasons. Hence commands which require elevated privileges, like `apk add`, CANNOT be executed. Instead, try to use `pip install` or `npm install` commands.
     - Standard bash features like `&&`, `||`, pipes (`|`), etc., are supported.
     - When installing python packages, add the argument --break-system-packages to the pip install command.
     - The following npm packages are preinstalled: express, ejs, chart.js. Any additional packages can be installed with the npm install command.
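Per the updated environment notes, a package-install command passed to run_shell_command would look roughly like the call below. The values are hypothetical; in particular, the accepted forever_cmd strings are not documented in this hunk:

    run_shell_command(
        cmd="pip install requests --break-system-packages && npm install axios",
        forever_cmd="false",  # assumption: "false" runs to completion rather than as a long-lived process
    )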
@@ -325,12 +317,9 @@ def run_shell_command(cmd:str,forever_cmd:str) -> dict:
         - ``'output'`` (str): The captured standard output (stdout) and potentially
           standard error (stderr) from the command.
     """
-    global stderr
-    global stdout
     transfer_files()
     transfer_files2()
-
-    output = str(stdout)+str(stderr)
+    output=run(cmd, 300,forever_cmd)
     return {"output":output}
 
 
@@ -390,7 +379,7 @@ def read_excel_file(filename) -> dict:
     return excel_data_dict
 @mcp.tool()
 def scrape_websites(url_list:list,query:str) -> list:
-    """
+    """Scrapes specific website content. query is the question you want to ask about the content of the website, e.g. query: Give .pptx links in the website, Summarise the content in very great detail, etc. Note: max urls in url_list is 3."""
 
     conn = http.client.HTTPSConnection("scrapeninja.p.rapidapi.com")
 
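A hypothetical invocation matching the new docstring (at most three URLs per call; the URLs below are placeholders):

    scrape_websites(
        url_list=["https://example.com/docs", "https://example.com/faq"],
        query="Give .pptx links in the website",
    )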
@@ -400,22 +389,25 @@ def scrape_websites(url_list:list,query:str) -> list:
         'x-rapidapi-host': "scrapeninja.p.rapidapi.com",
         'Content-Type': "application/json"
     }
-    Output=
+    Output=""
+    content=""
     for urls in url_list:
         payload = {"url" :urls}
         payload=json.dumps(payload)
         conn.request("POST", "/scrape", payload, headers)
         res = conn.getresponse()
         data = res.read()
-        content=str(data.decode("utf-8"))
+        content=content+str(data.decode("utf-8"))
 
-
-
-
-
-    ]
-
-
+    # Only thing llama 4 is good for.
+    response = clienty.chat.completions.create(
+        model="meta-llama/Llama-4-Maverick-17B-128E-Instruct-FP8",
+        messages=[
+            {"role": "user", "content": f"{query} [CONTENT]:{content}"}
+        ],stream=True
+    )
+    for chunk in response:
+        Output = Output +str(chunk.choices[0].delta.content)
 
     return {"website_content":Output}
 
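In the OpenAI streaming format the role-only and final chunks carry delta.content = None, so the str() coercion above appends the literal text "None" to Output. A sketch of the same loop with a guard, assuming the same response object:

    Output = ""
    for chunk in response:
        delta = chunk.choices[0].delta.content
        if delta:  # skip None/empty deltas so "None" never reaches the result
            Output += delta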