add taiwan tinyllama
app.py CHANGED
@@ -37,6 +37,11 @@ st.markdown("""
 REQUIRED_SPACE_BYTES = 5 * 1024 ** 3  # 5 GB

 MODELS = {
+    "Taiwan-tinyllama-v1.0-chat (Q8_0)": {
+        "repo_id": "NapYang/DavidLanz-Taiwan-tinyllama-v1.0-chat.GGUF",
+        "filename": "Taiwan-tinyllama-v1.0-chat-Q8_0.gguf",
+        "description": "Taiwan-tinyllama-v1.0-chat (Q8_0)"
+    },
     "Llama-3.2-Taiwan-3B-Instruct (Q4_K_M)": {
         "repo_id": "itlwas/Llama-3.2-Taiwan-3B-Instruct-Q4_K_M-GGUF",
         "filename": "llama-3.2-taiwan-3b-instruct-q4_k_m.gguf",
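
For context, the sketch below shows one way a Space might consume an entry like the one this commit adds: check free disk space against REQUIRED_SPACE_BYTES, download the GGUF file from the Hub with huggingface_hub, and load it with llama-cpp-python. The load_model helper and the llama_cpp loading step are assumptions for illustration, not code taken from this app.py.

import shutil

from huggingface_hub import hf_hub_download
from llama_cpp import Llama

REQUIRED_SPACE_BYTES = 5 * 1024 ** 3  # 5 GB, as in app.py

# The entry added by this commit.
MODELS = {
    "Taiwan-tinyllama-v1.0-chat (Q8_0)": {
        "repo_id": "NapYang/DavidLanz-Taiwan-tinyllama-v1.0-chat.GGUF",
        "filename": "Taiwan-tinyllama-v1.0-chat-Q8_0.gguf",
        "description": "Taiwan-tinyllama-v1.0-chat (Q8_0)",
    },
}

def load_model(name: str) -> Llama:
    """Hypothetical helper: check disk space, download the GGUF, load it."""
    spec = MODELS[name]

    # Refuse to download if less than REQUIRED_SPACE_BYTES is free on disk.
    free = shutil.disk_usage(".").free
    if free < REQUIRED_SPACE_BYTES:
        raise RuntimeError(f"Need {REQUIRED_SPACE_BYTES} bytes free, only {free} available")

    # hf_hub_download caches the file locally and returns its path.
    model_path = hf_hub_download(repo_id=spec["repo_id"], filename=spec["filename"])

    # Load the quantized model with llama-cpp-python (assumed runtime for GGUF files).
    return Llama(model_path=model_path, n_ctx=2048)

if __name__ == "__main__":
    llm = load_model("Taiwan-tinyllama-v1.0-chat (Q8_0)")
    out = llm.create_chat_completion(
        messages=[{"role": "user", "content": "你好"}],
        max_tokens=64,
    )
    print(out["choices"][0]["message"]["content"])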