Clean up tool use snippet
README.md CHANGED
````diff
@@ -159,18 +159,19 @@ def get_current_weather(location: str, format: str):
 conversation = [{"role": "user", "content": "What's the weather like in Paris?"}]
 tools = [get_current_weather]
 
-# format and tokenize the tool use prompt 
-tool_use_prompt = tokenizer.apply_chat_template(
+
+# format and tokenize the tool use prompt
+inputs = tokenizer.apply_chat_template(
     conversation,
     tools=tools,
-    tokenize=False,
     add_generation_prompt=True,
+    return_dict=True,
+    return_tensors="pt",
 )
 
-inputs = tokenizer(tool_use_prompt, return_tensors="pt")
-
 model = AutoModelForCausalLM.from_pretrained(model_id, torch_dtype=torch.bfloat16, device_map="auto")
 
+inputs.to(model.device)
 outputs = model.generate(**inputs, max_new_tokens=1000)
 print(tokenizer.decode(outputs[0], skip_special_tokens=True))
 ```
````
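
For reference, below is a sketch of the full snippet as it reads after this change, which replaces the two-step render-then-tokenize flow (`tokenize=False` plus a separate `tokenizer(tool_use_prompt, ...)` call) with a single `apply_chat_template(..., return_dict=True, return_tensors="pt")` call and moves the resulting tensors to the model's device before generation. Only the hunk above is verbatim; the imports, the `model_id` placeholder, the tokenizer loading, and the body and docstring of `get_current_weather` are assumptions added here so the sketch is self-contained (the hunk header shows only the function's signature).

```python
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

# Placeholder: the README defines the actual checkpoint id earlier in the section.
model_id = "<model-repo-id>"

tokenizer = AutoTokenizer.from_pretrained(model_id)

def get_current_weather(location: str, format: str):
    """
    Get the current weather.

    Args:
        location: The city and state, e.g. San Francisco, CA
        format: The temperature unit to use (choices: ["celsius", "fahrenheit"])
    """
    # Assumed stub: the README only shows the signature; transformers builds the
    # tool schema from the type hints and this docstring.
    pass

conversation = [{"role": "user", "content": "What's the weather like in Paris?"}]
tools = [get_current_weather]

# format and tokenize the tool use prompt
inputs = tokenizer.apply_chat_template(
    conversation,
    tools=tools,
    add_generation_prompt=True,
    return_dict=True,      # return a BatchEncoding with input_ids and attention_mask
    return_tensors="pt",
)

model = AutoModelForCausalLM.from_pretrained(model_id, torch_dtype=torch.bfloat16, device_map="auto")

inputs.to(model.device)  # BatchEncoding.to() moves the tensors to the model's device
outputs = model.generate(**inputs, max_new_tokens=1000)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))
```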