Spaces:
Running
Running
add command-r
Browse files
- app.py +2 -2
- model_metadata.jsonl +11 -6
app.py
CHANGED
|
@@ -925,7 +925,7 @@ with gr.Blocks(js=clickable_links_js) as app:
|
|
| 925 |
|
| 926 |
def guardrail_check_se_relevance(user_input):
|
| 927 |
"""
|
| 928 |
-
Use gpt-oss-20b to check if the user input is SE-related.
|
| 929 |
Return True if it is SE-related, otherwise False.
|
| 930 |
"""
|
| 931 |
# Example instructions for classification — adjust to your needs
|
|
@@ -942,7 +942,7 @@ with gr.Blocks(js=clickable_links_js) as app:
|
|
| 942 |
try:
|
| 943 |
# Make the chat completion call
|
| 944 |
response = openai_client.chat.completions.create(
|
| 945 |
-
model="gpt-oss-20b", messages=[system_message, user_message]
|
| 946 |
)
|
| 947 |
classification = response.choices[0].message.content.strip().lower()
|
| 948 |
# Check if the LLM responded with 'Yes'
|
|
|
|
| 925 |
|
| 926 |
def guardrail_check_se_relevance(user_input):
|
| 927 |
"""
|
| 928 |
+
Use gpt-oss-safeguard-20b to check if the user input is SE-related.
|
| 929 |
Return True if it is SE-related, otherwise False.
|
| 930 |
"""
|
| 931 |
# Example instructions for classification — adjust to your needs
|
|
|
|
| 942 |
try:
|
| 943 |
# Make the chat completion call
|
| 944 |
response = openai_client.chat.completions.create(
|
| 945 |
+
model="gpt-oss-safeguard-20b", messages=[system_message, user_message]
|
| 946 |
)
|
| 947 |
classification = response.choices[0].message.content.strip().lower()
|
| 948 |
# Check if the LLM responded with 'Yes'
|
model_metadata.jsonl
CHANGED
|
@@ -9,6 +9,8 @@
|
|
| 9 |
{"model_name": "claude-opus-4-1-20250805", "context_window": 200000, "link": "https://www.anthropic.com/news/claude-opus-4-1"}
|
| 10 |
{"model_name": "claude-sonnet-4-5-20250929", "context_window": 200000, "link": "https://www.anthropic.com/news/claude-sonnet-4-5"}
|
| 11 |
{"model_name": "claude-haiku-4-5-20251001", "context_window": 200000, "link": "https://www.anthropic.com/news/claude-haiku-4-5"}
|
|
|
|
|
|
|
| 12 |
{"model_name": "doubao-1-5-pro-256k-250115", "context_window": 256000, "link": "https://seed.bytedance.com"}
|
| 13 |
{"model_name": "doubao-1-5-thinking-pro-250415", "context_window": 256000, "link": "https://seed.bytedance.com"}
|
| 14 |
{"model_name": "doubao-seed-1-6-250615", "context_window": 256000, "link": "https://seed.bytedance.com"}
|
|
@@ -23,16 +25,17 @@
|
|
| 23 |
{"model_name": "gemma-3-27b-it", "context_window": 128000, "link": "https://ai.google.dev/gemma"}
|
| 24 |
{"model_name": "gpt-3.5-turbo", "context_window": 16000, "link": "https://openai.com"}
|
| 25 |
{"model_name": "gpt-4-turbo", "context_window": 128000, "link": "https://openai.com/index/gpt-4"}
|
| 26 |
-
{"model_name": "gpt-4o", "context_window": 128000, "link": "https://openai.com"}
|
| 27 |
-
{"model_name": "gpt-4o-mini", "context_window": 128000, "link": "https://openai.com"}
|
| 28 |
{"model_name": "gpt-4.1", "context_window": 1000000, "link": "https://openai.com/index/gpt-4-1"}
|
| 29 |
{"model_name": "gpt-4.1-mini", "context_window": 1000000, "link": "https://openai.com/index/gpt-4-1"}
|
| 30 |
{"model_name": "gpt-5", "context_window": 400000, "link": "https://openai.com/gpt-5"}
|
| 31 |
{"model_name": "gpt-5-chat-latest", "context_window": 400000, "link": "https://openai.com/gpt-5"}
|
| 32 |
{"model_name": "gpt-5-mini", "context_window": 400000, "link": "https://openai.com/gpt-5"}
|
| 33 |
{"model_name": "gpt-5-nano", "context_window": 400000, "link": "https://openai.com/gpt-5"}
|
| 34 |
-
{"model_name": "gpt-oss-120b", "context_window": 128000, "link": "https://openai.com"}
|
| 35 |
-
{"model_name": "gpt-oss-20b", "context_window": 128000, "link": "https://openai.com"}
|
|
|
|
| 36 |
{"model_name": "grok-3-fast-beta", "context_window": 1000000, "link": "https://x.ai/news/grok-3"}
|
| 37 |
{"model_name": "grok-3-beta", "context_window": 1000000, "link": "https://x.ai/news/grok-3"}
|
| 38 |
{"model_name": "grok-3-mini-fast-beta", "context_window": 1000000, "link": "https://x.ai/news/grok-3"}
|
|
@@ -47,6 +50,8 @@
|
|
| 47 |
{"model_name": "o3", "context_window": 200000, "link": "https://openai.com/index/introducing-o3-and-o4-mini"}
|
| 48 |
{"model_name": "o3-mini", "context_window": 200000, "link": "https://openai.com/index/introducing-o3-and-o4-mini"}
|
| 49 |
{"model_name": "o4-mini", "context_window": 200000, "link": "https://openai.com/index/introducing-o3-and-o4-mini"}
|
| 50 |
-
{"model_name": "qwen3-… [removed line truncated in page extraction; full original entry not recoverable]
|
|
|
|
| 51 |
{"model_name": "qwen3-32b", "context_window": 32768, "link": "https://qwen-3.com"}
|
| 52 |
-
{"model_name": "qwen3-… [removed line truncated in page extraction; full original entry not recoverable]
|
|
|
|
|
|
| 9 |
{"model_name": "claude-opus-4-1-20250805", "context_window": 200000, "link": "https://www.anthropic.com/news/claude-opus-4-1"}
|
| 10 |
{"model_name": "claude-sonnet-4-5-20250929", "context_window": 200000, "link": "https://www.anthropic.com/news/claude-sonnet-4-5"}
|
| 11 |
{"model_name": "claude-haiku-4-5-20251001", "context_window": 200000, "link": "https://www.anthropic.com/news/claude-haiku-4-5"}
|
| 12 |
+
{"model_name": "command-r", "context_window": 128000, "link": "https://cohere.com/command-r"}
|
| 13 |
+
{"model_name": "command-r-plus", "context_window": 128000, "link": "https://cohere.com/command-r"}
|
| 14 |
{"model_name": "doubao-1-5-pro-256k-250115", "context_window": 256000, "link": "https://seed.bytedance.com"}
|
| 15 |
{"model_name": "doubao-1-5-thinking-pro-250415", "context_window": 256000, "link": "https://seed.bytedance.com"}
|
| 16 |
{"model_name": "doubao-seed-1-6-250615", "context_window": 256000, "link": "https://seed.bytedance.com"}
|
|
|
|
| 25 |
{"model_name": "gemma-3-27b-it", "context_window": 128000, "link": "https://ai.google.dev/gemma"}
|
| 26 |
{"model_name": "gpt-3.5-turbo", "context_window": 16000, "link": "https://openai.com"}
|
| 27 |
{"model_name": "gpt-4-turbo", "context_window": 128000, "link": "https://openai.com/index/gpt-4"}
|
| 28 |
+
{"model_name": "gpt-4o", "context_window": 128000, "link": "https://openai.com/index/hello-gpt-4o"}
|
| 29 |
+
{"model_name": "gpt-4o-mini", "context_window": 128000, "link": "https://openai.com/index/gpt-4o-mini-advancing-cost-efficient-intelligence"}
|
| 30 |
{"model_name": "gpt-4.1", "context_window": 1000000, "link": "https://openai.com/index/gpt-4-1"}
|
| 31 |
{"model_name": "gpt-4.1-mini", "context_window": 1000000, "link": "https://openai.com/index/gpt-4-1"}
|
| 32 |
{"model_name": "gpt-5", "context_window": 400000, "link": "https://openai.com/gpt-5"}
|
| 33 |
{"model_name": "gpt-5-chat-latest", "context_window": 400000, "link": "https://openai.com/gpt-5"}
|
| 34 |
{"model_name": "gpt-5-mini", "context_window": 400000, "link": "https://openai.com/gpt-5"}
|
| 35 |
{"model_name": "gpt-5-nano", "context_window": 400000, "link": "https://openai.com/gpt-5"}
|
| 36 |
+
{"model_name": "gpt-oss-120b", "context_window": 128000, "link": "https://openai.com/index/introducing-gpt-oss"}
|
| 37 |
+
{"model_name": "gpt-oss-20b", "context_window": 131072, "link": "https://openai.com/index/introducing-gpt-oss"}
|
| 38 |
+
{"model_name": "gpt-oss-safeguard-20b", "context_window": 128000, "link": "https://openai.com/index/introducing-gpt-oss-safeguard"}
|
| 39 |
{"model_name": "grok-3-fast-beta", "context_window": 1000000, "link": "https://x.ai/news/grok-3"}
|
| 40 |
{"model_name": "grok-3-beta", "context_window": 1000000, "link": "https://x.ai/news/grok-3"}
|
| 41 |
{"model_name": "grok-3-mini-fast-beta", "context_window": 1000000, "link": "https://x.ai/news/grok-3"}
|
|
|
|
| 50 |
{"model_name": "o3", "context_window": 200000, "link": "https://openai.com/index/introducing-o3-and-o4-mini"}
|
| 51 |
{"model_name": "o3-mini", "context_window": 200000, "link": "https://openai.com/index/introducing-o3-and-o4-mini"}
|
| 52 |
{"model_name": "o4-mini", "context_window": 200000, "link": "https://openai.com/index/introducing-o3-and-o4-mini"}
|
| 53 |
+
{"model_name": "qwen3-8b", "context_window": 32768, "link": "https://qwen-3.com"}
|
| 54 |
+
{"model_name": "qwen3-14b", "context_window": 32768, "link": "https://qwen-3.com"}
|
| 55 |
{"model_name": "qwen3-32b", "context_window": 32768, "link": "https://qwen-3.com"}
|
| 56 |
+
{"model_name": "qwen3-coder-plus", "context_window": 128000, "link": "https://qwen-3.com"}
|
| 57 |
+
{"model_name": "qwen-max", "context_window": 262144, "link": "https://qwen-3.com"}
|