Commit
·
ad1b7ba
1
Parent(s):
f67cbed
trial
Browse files
app.py
CHANGED
|
@@ -6,7 +6,6 @@ title = """<h1 align="center">Random Prompt Generator</h1>
|
|
| 6 |
<p><center>
|
| 7 |
<a href="https://x.com/gokayfem" target="_blank">[X gokaygokay]</a>
|
| 8 |
<a href="https://github.com/gokayfem" target="_blank">[Github gokayfem]</a>
|
| 9 |
-
<a href="https://github.com/dagthomas/comfyui_dagthomas" target="_blank">[comfyui_dagthomas]</a>
|
| 10 |
<p align="center">Generate random prompts using powerful LLMs from Hugging Face, Groq, and SambaNova.</p>
|
| 11 |
</center></p>
|
| 12 |
"""
|
|
@@ -67,21 +66,42 @@ def create_interface():
|
|
| 67 |
generate_button = gr.Button("Generate Random Prompt with LLM")
|
| 68 |
text_output = gr.Textbox(label="LLM Generated Text", lines=10, show_copy_button=True)
|
| 69 |
|
| 70 |
-
#
|
| 71 |
def update_model_choices(provider):
|
| 72 |
provider_models = {
|
| 73 |
-
"Hugging Face": [
|
| 74 |
-
|
| 75 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 76 |
}
|
| 77 |
models = provider_models.get(provider, [])
|
| 78 |
-
return gr.Dropdown
|
| 79 |
|
| 80 |
def update_api_key_visibility(provider):
|
| 81 |
return gr.update(visible=False) # No API key required for selected providers
|
| 82 |
|
| 83 |
-
llm_provider.change(
|
| 84 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 85 |
|
| 86 |
# **Unified Function to Generate Prompt and Text**
|
| 87 |
def generate_random_prompt_with_llm(custom_input, prompt_type, long_talk, compress, compression_level, custom_base_prompt, provider, api_key, model_selected):
|
|
|
|
| 6 |
<p><center>
|
| 7 |
<a href="https://x.com/gokayfem" target="_blank">[X gokaygokay]</a>
|
| 8 |
<a href="https://github.com/gokayfem" target="_blank">[Github gokayfem]</a>
|
|
|
|
| 9 |
<p align="center">Generate random prompts using powerful LLMs from Hugging Face, Groq, and SambaNova.</p>
|
| 10 |
</center></p>
|
| 11 |
"""
|
|
|
|
| 66 |
generate_button = gr.Button("Generate Random Prompt with LLM")
|
| 67 |
text_output = gr.Textbox(label="LLM Generated Text", lines=10, show_copy_button=True)
|
| 68 |
|
| 69 |
+
# Updated Models based on provider
|
| 70 |
def update_model_choices(provider):
|
| 71 |
provider_models = {
|
| 72 |
+
"Hugging Face": [
|
| 73 |
+
"Qwen/Qwen2.5-72B-Instruct",
|
| 74 |
+
"meta-llama/Meta-Llama-3.1-70B-Instruct",
|
| 75 |
+
"mistralai/Mixtral-8x7B-Instruct-v0.1",
|
| 76 |
+
"mistralai/Mistral-7B-Instruct-v0.3"
|
| 77 |
+
],
|
| 78 |
+
"Groq": [
|
| 79 |
+
"llama-3.1-70b-versatile",
|
| 80 |
+
"mixtral-8x7b-32768",
|
| 81 |
+
"gemma2-9b-it"
|
| 82 |
+
],
|
| 83 |
+
"SambaNova": [
|
| 84 |
+
"Meta-Llama-3.1-70B-Instruct",
|
| 85 |
+
"Meta-Llama-3.1-405B-Instruct",
|
| 86 |
+
"Meta-Llama-3.1-8B-Instruct"
|
| 87 |
+
],
|
| 88 |
}
|
| 89 |
models = provider_models.get(provider, [])
|
| 90 |
+
return gr.Dropdown(choices=models, value=models[0] if models else "")
|
| 91 |
|
| 92 |
def update_api_key_visibility(provider):
    """Hide the API-key textbox regardless of the selected *provider*.

    Args:
        provider: Selected provider name; currently unused because none
            of the offered providers require a user-supplied key here.

    Returns:
        A gr.update(...) dict that sets the target component invisible.
    """
    return gr.update(visible=False)  # No API key required for selected providers
|
| 94 |
|
| 95 |
# Wire the provider selector: changing the provider refreshes the model
# dropdown with that provider's model list...
llm_provider.change(
    update_model_choices,
    inputs=[llm_provider],
    outputs=[model]
)
# ...and toggles (currently: always hides) the API-key field. Two separate
# .change() registrations keep each handler's inputs/outputs independent.
llm_provider.change(
    update_api_key_visibility,
    inputs=[llm_provider],
    outputs=[api_key]
)
|
| 105 |
|
| 106 |
# **Unified Function to Generate Prompt and Text**
|
| 107 |
def generate_random_prompt_with_llm(custom_input, prompt_type, long_talk, compress, compression_level, custom_base_prompt, provider, api_key, model_selected):
|