model updated model_id = "TheBloke/Mistral-7B-Instruct-v0.2-GGUF"
app.py
CHANGED
@@ -54,7 +54,7 @@ def initialize_agent(
     checkpointer = MemorySaver()

     # Load local Hugging Face model
-    hf_model_id = "
+    hf_model_id = model_id = "TheBloke/Mistral-7B-Instruct-v0.2-GGUF"
     tokenizer = AutoTokenizer.from_pretrained(hf_model_id)
     raw_model = AutoModelForCausalLM.from_pretrained(hf_model_id, device_map="auto")

@@ -63,7 +63,7 @@ def initialize_agent(
         model=raw_model,
         tokenizer=tokenizer,
         max_new_tokens=512,
-        temperature=temperature
+        temperature=temperature,
         top_p=top_p,
         return_full_text=False,
     )
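
For context, the patched block assembles a local text-generation pipeline around the new model id. Below is a minimal, self-contained sketch of how that block might read, assuming transformers >= 4.41 (the first release with GGUF loading support); the specific .gguf filename and the concrete temperature/top_p values are illustrative assumptions, not part of the commit. GGUF-only repos such as TheBloke/Mistral-7B-Instruct-v0.2-GGUF typically ship no config.json, so from_pretrained generally needs an explicit gguf_file argument rather than the two-argument call shown in the diff.

```python
# Sketch only: assumed GGUF filename and sampling values; the app passes its own
# `temperature` and `top_p` arguments into initialize_agent().
from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline

hf_model_id = "TheBloke/Mistral-7B-Instruct-v0.2-GGUF"
gguf_file = "mistral-7b-instruct-v0.2.Q4_K_M.gguf"  # assumed file in the repo

# Load tokenizer and weights from the GGUF file (requires transformers >= 4.41).
tokenizer = AutoTokenizer.from_pretrained(hf_model_id, gguf_file=gguf_file)
raw_model = AutoModelForCausalLM.from_pretrained(
    hf_model_id, gguf_file=gguf_file, device_map="auto"
)

# Text-generation pipeline mirroring the parameters in the diff.
generator = pipeline(
    "text-generation",
    model=raw_model,
    tokenizer=tokenizer,
    max_new_tokens=512,
    temperature=0.7,   # app would pass temperature=temperature
    top_p=0.95,        # app would pass top_p=top_p
    return_full_text=False,
)

print(generator("Explain GGUF in one sentence.")[0]["generated_text"])
```

If full-precision weights are acceptable, the non-GGUF repo mistralai/Mistral-7B-Instruct-v0.2 should load with the original two-argument from_pretrained call, without any gguf_file parameter.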