Spaces: Running on Zero
switch to 2b model
app.py CHANGED
@@ -2,8 +2,8 @@ import gradio as gr
 import os
 from transformers import AutoTokenizer, AutoModelForCausalLM, TextStreamer
 token = os.environ["HF_TOKEN"]
-tokenizer = AutoTokenizer.from_pretrained("google/gemma-
-model = AutoModelForCausalLM.from_pretrained("google/gemma-
+tokenizer = AutoTokenizer.from_pretrained("google/gemma-2b",token=token)
+model = AutoModelForCausalLM.from_pretrained("google/gemma-2b",token=token)
 streamer = TextStreamer(tokenizer,skip_prompt=True)


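For reference, the sketch below shows how the objects configured in this commit (tokenizer, model, streamer) are typically wired into a Gradio app to produce a streamed completion. The chat() helper, the generation settings, and the gr.Interface wiring are illustrative assumptions and are not part of this diff.

# Minimal sketch, assuming a simple text-in/text-out interface.
# Only the tokenizer/model/streamer setup mirrors the commit above;
# everything else is an illustrative assumption.
import os

import gradio as gr
import torch
from transformers import AutoTokenizer, AutoModelForCausalLM, TextStreamer

token = os.environ["HF_TOKEN"]
tokenizer = AutoTokenizer.from_pretrained("google/gemma-2b", token=token)
model = AutoModelForCausalLM.from_pretrained("google/gemma-2b", token=token)
streamer = TextStreamer(tokenizer, skip_prompt=True)  # prints new tokens to stdout as they are generated

def chat(prompt: str) -> str:
    # Tokenize the prompt and generate a bounded completion.
    inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
    with torch.no_grad():
        output_ids = model.generate(**inputs, streamer=streamer, max_new_tokens=128)
    # Return only the newly generated text, not the echoed prompt.
    new_tokens = output_ids[0][inputs["input_ids"].shape[-1]:]
    return tokenizer.decode(new_tokens, skip_special_tokens=True)

gr.Interface(fn=chat, inputs="text", outputs="text").launch()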