from smolagents import CodeAgent, DuckDuckGoSearchTool, HfApiModel, load_tool, tool
import datetime
import requests
import pytz
import yaml
from tools.final_answer import FinalAnswerTool
from Gradio_UI import GradioUI
import sys
import urllib.parse
import time
import re
import argparse

# Below is an example of a tool that does nothing. Amaze us with your creativity!
@tool
def my_custom_tool(arg1: str, arg2: int) -> str:  # it's important to specify the return type
    # Keep this format for the description / args / args description, but feel free to modify the tool
    """A tool that does nothing yet
    Args:
        arg1: the first argument
        arg2: the second argument
    """
    return "What magic will you build?"

@tool
def get_current_time_in_timezone(timezone: str) -> str:
    """A tool that fetches the current local time in a specified timezone.
    Args:
        timezone: A string representing a valid timezone (e.g., 'America/New_York').
    """
    try:
        # Create timezone object
        tz = pytz.timezone(timezone)
        # Get current time in that timezone
        local_time = datetime.datetime.now(tz).strftime("%Y-%m-%d %H:%M:%S")
        return f"The current local time in {timezone} is: {local_time}"
    except Exception as e:
        return f"Error fetching time for timezone '{timezone}': {str(e)}"

@tool
def run_blast(arguments_string: str) -> str:
    """Submits a BLAST job to NCBI and retrieves the results.
    Parses arguments from a string input. Accepts a sequence directly as input, not a file.
    Args:
        arguments_string: A string containing the BLAST program, database, and query sequence,
            formatted as "program database query_sequence",
            for example: "blastp nr ATGCGTAGCTAGCTAG...".
            The query sequence must be provided directly within the string.
    Returns:
        BLAST results in text format on success, or an error message string on failure.
    """
    # Use maxsplit=2 so that the query sequence remains intact even if it contains spaces.
    tokens = arguments_string.split(maxsplit=2)
    if len(tokens) < 3:
        return "Error: Invalid arguments. Usage: program database query_sequence"
    program, database, query_sequence = tokens
    # Remove all whitespace from the query sequence so it is one contiguous string.
    query_sequence = "".join(query_sequence.split())

    # Adjust program parameters for specific cases.
    if program.lower() == "megablast":
        program = "blastn&MEGABLAST=on"
    elif program.lower() == "rpsblast":
        program = "blastp&SERVICE=rpsblast"

    api_url = 'https://blast.ncbi.nlm.nih.gov/blast/Blast.cgi'
    payload = {
        'CMD': 'Put',
        'PROGRAM': program,
        'DATABASE': database,
        'QUERY': query_sequence,
        'FORMAT_TYPE': 'Text',
        'HITLIST_SIZE': 5,   # Return only the top 5 hits
        'DESCRIPTIONS': 5,   # Print only 5 descriptions
        'ALIGNMENTS': 5
    }
    try:
        response = requests.post(api_url, data=payload)
        response.raise_for_status()
    except requests.exceptions.RequestException as e:
        return f"Error submitting request: {e}"
    response_content = response.text

    # Use regex patterns to capture the request ID (RID) and the estimated run time (RTOE).
    rid_match = re.search(r"RID\s*=\s*(\S+)", response_content)
    rtoe_match = re.search(r"RTOE\s*=\s*(\d+)", response_content)
    if rid_match and rtoe_match:
        rid = rid_match.group(1).strip()
        try:
            rtoe = int(rtoe_match.group(1).strip())
        except ValueError:
            return "Error: Invalid RTOE value received from BLAST response."
    else:
        return "Error: Could not parse RID or RTOE from BLAST response.\nResponse content:\n" + response_content

    # Initial wait based on the estimated time.
    time.sleep(rtoe)

    # Poll for results with a maximum number of attempts to avoid an infinite loop.
    max_polls = 60  # e.g., 60 polls * 5 seconds = 300 seconds maximum wait time
    poll_attempts = 0
    while poll_attempts < max_polls:
        time.sleep(5)
        poll_attempts += 1
        poll_url = f"{api_url}?CMD=Get&FORMAT_OBJECT=SearchInfo&RID={rid}"
        try:
            poll_response = requests.get(poll_url)
            poll_response.raise_for_status()
        except requests.exceptions.RequestException as e:
            return f"Error polling for results: {e}"
        status_content = poll_response.text
        if "Status=WAITING" in status_content:
            continue
        elif "Status=FAILED" in status_content:
            return f"Search {rid} failed; please report to blast-help@ncbi.nlm.nih.gov."
        elif "Status=UNKNOWN" in status_content:
            return f"Search {rid} expired."
        elif "Status=READY" in status_content:
            if "ThereAreHits=yes" in status_content:
                break  # Ready to fetch the results
            else:
                return "No hits found."
        else:
            return "Unknown error during polling.\nStatus response content:\n" + status_content
    else:
        # The while loop exhausted max_polls without a break.
        return "Error polling for results: timed out."

    # Retrieve and return the BLAST results.
    result_url = f"{api_url}?CMD=Get&FORMAT_TYPE=Text&RID={rid}"
    try:
        result_response = requests.get(result_url)
        result_response.raise_for_status()
    except requests.exceptions.RequestException as e:
        return f"Error retrieving results: {e}"
    return result_response.text

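# Optional quick smoke test for run_blast outside the agent (a sketch: the short protein
# sequence below is only an illustrative placeholder, and a real NCBI search can take
# several minutes to return):
#
#     print(run_blast("blastp nr MKTAYIAKQRQISFVKSHFSRQLEERLGLIEVQ"))
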
final_answer = FinalAnswerTool()

# If the agent does not answer, the model may be overloaded; use another model or the following
# Hugging Face endpoint, which also serves Qwen2.5 Coder:
# model_id='https://pflgm2locj2t89co.us-east-1.aws.endpoints.huggingface.cloud'
model = HfApiModel(
    max_tokens=2096,
    temperature=0.5,
    model_id="Qwen/Qwen2.5-Coder-32B-Instruct",  # it is possible that this model may be overloaded
    custom_role_conversions=None,
)

# Import tool from Hub
image_generation_tool = load_tool("agents-course/text-to-image", trust_remote_code=True)

with open("prompts.yaml", 'r') as stream:
    prompt_templates = yaml.safe_load(stream)

agent = CodeAgent(
    model=model,
    tools=[DuckDuckGoSearchTool(), run_blast, image_generation_tool, final_answer],  # add your tools here (don't remove final_answer)
    max_steps=6,
    verbosity_level=1,
    grammar=None,
    planning_interval=None,
    name=None,
    description=None,
    prompt_templates=prompt_templates,
)

GradioUI(agent).launch()
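
# To try the agent without the Gradio UI, you could swap the launch() call above for a direct
# agent.run(...) call (a sketch; the prompt below is just an illustration):
#
#     print(agent.run("What is the current local time in America/New_York?"))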