:wrench: update model listing to avoid unintentionally listing larger models.
app.py CHANGED
@@ -38,10 +38,30 @@ logo = open('./logo.svg').read()
 with open('./model-cache.json', 'r') as f_model_cache:
     model_cache = json.load(f_model_cache)
 
+
+model_class_filter = {
+    "mistral-v02-7b-std-lc": True,
+    "llama3-8b-8k": True,
+    "llama2-solar-10b7-4k": True,
+    "mistral-nemo-12b-lc": True,
+    "llama2-13b-4k": True,
+    "llama3-15b-8k": True,
+
+    "qwen2-32b-lc":False,
+    "llama3-70b-8k":False,
+    "qwen2-72b-lc":False,
+    "mixtral-8x22b-lc":False,
+    "llama3-405b-lc":False,
+}
+
 def build_model_choices():
     all_choices = []
     for model_class in model_cache:
-        if model_class in
+        if model_class not in model_class_filter:
+            print(f"Warning: new model class {model_class}. Treating as blacklisted")
+            continue
+
+        if not model_class_filter[model_class]:
             continue
         all_choices += [ (f"{model_id} ({model_class})", model_id) for model_id in model_cache[model_class] ]
 
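For context, the new model_class_filter is an explicit allow/deny map over the top-level keys of model-cache.json: classes mapped to True stay in the dropdown, classes mapped to False are skipped, and classes missing from the map are skipped with a warning so newly cached (and possibly large) model classes are never listed by accident. Below is a minimal sketch of the resulting behaviour with a made-up cache; the real model-cache.json contents are not shown in this diff, and the function's return statement is an assumption since the hunk ends before it.

    # Hypothetical cache contents; the diff only shows that model-cache.json
    # maps a model-class name to a list of model ids.
    model_cache = {
        "llama3-8b-8k": ["example-org/small-8b-model"],
        "llama3-70b-8k": ["example-org/big-70b-model"],
        "new-model-class": ["example-org/unreviewed-model"],
    }

    model_class_filter = {
        "llama3-8b-8k": True,    # small enough to list
        "llama3-70b-8k": False,  # deliberately hidden
    }

    def build_model_choices():
        all_choices = []
        for model_class in model_cache:
            if model_class not in model_class_filter:
                # unknown classes default to "not listed" until reviewed
                print(f"Warning: new model class {model_class}. Treating as blacklisted")
                continue
            if not model_class_filter[model_class]:
                continue
            all_choices += [(f"{model_id} ({model_class})", model_id)
                            for model_id in model_cache[model_class]]
        return all_choices  # assumed; not visible in the hunk

    print(build_model_choices())
    # -> [('example-org/small-8b-model (llama3-8b-8k)', 'example-org/small-8b-model')]

Treating unknown classes as blacklisted is the fail-safe choice here: adding a new entry to model-cache.json cannot silently expose a larger model, since someone has to opt it in via model_class_filter first.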