Update app.py
app.py
CHANGED
@@ -8,13 +8,18 @@ from llama_cpp import Llama
 from huggingface_hub import hf_hub_download
 
 app = FastAPI()
-
+"""
 llm = Llama(
     model_path=hf_hub_download(
         repo_id="TheBloke/Mistral-7B-v0.1-GGUF",
         filename="mistral-7b-v0.1.Q4_K_M.gguf"),
     n_ctx=2048,
 )
+"""
+llm = Llama(
+    model_path="./TheBloke/Mistral-7B-v0.1-GGUF/mistral-7b-v0.1.Q4_K_M.gguf",
+    n_ctx=2048,
+)
 
 @app.get("/")
 async def generate_text():
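
The commit comments out the hf_hub_download() call and points Llama at a hard-coded local path, so the GGUF file is expected to already exist inside the Space at ./TheBloke/Mistral-7B-v0.1-GGUF/. The diff is cut off before the body of generate_text(); the snippet below is only a minimal sketch of how that endpoint might call the loaded model with llama-cpp-python. The prompt string, max_tokens value, and response shape are illustrative assumptions, not part of the commit.

# Hypothetical sketch, not the actual endpoint body from this commit.
from fastapi import FastAPI
from llama_cpp import Llama

app = FastAPI()

# Matches the new code in the diff: the GGUF file must already be present
# at this local path (for example, fetched into the Space at build time).
llm = Llama(
    model_path="./TheBloke/Mistral-7B-v0.1-GGUF/mistral-7b-v0.1.Q4_K_M.gguf",
    n_ctx=2048,
)

@app.get("/")
async def generate_text():
    # Llama objects are callable; the generated text is returned as a dict
    # under choices[0]["text"]. Prompt and max_tokens are assumed values.
    output = llm("Q: Name the planets in the solar system. A:", max_tokens=64)
    return {"text": output["choices"][0]["text"]}

If the model file is not committed to the repository, it could be fetched once ahead of time, e.g. with huggingface-cli download or hf_hub_download(..., local_dir=...), so that the hard-coded path above resolves when the Space starts.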