Update app.py
Browse files
app.py
CHANGED
|
@@ -1,4 +1,5 @@
|
|
| 1 |
import gradio as gr
|
|
|
|
| 2 |
from transformers import AutoModelForCausalLM, AutoTokenizer
|
| 3 |
from hf_olmo import * # registers the Auto* classes
|
| 4 |
|
|
@@ -10,7 +11,7 @@ Join us : πTeamTonicπ is always making cool demos! Join our active builder
|
|
| 10 |
|
| 11 |
model_name = "allenai/OLMo-7B"
|
| 12 |
tokenizer = AutoTokenizer.from_pretrained(model_name)
|
| 13 |
-
model = AutoModelForCausalLM.from_pretrained(model_name, torch_dtype=
|
| 14 |
# model = AutoModelForCausalLM.from_pretrained(model_name)
|
| 15 |
|
| 16 |
def generate_text(prompt, max_new_tokens, top_k, top_p, do_sample):
|
|
|
|
| 1 |
import gradio as gr
|
| 2 |
+
import torch
|
| 3 |
from transformers import AutoModelForCausalLM, AutoTokenizer
|
| 4 |
from hf_olmo import * # registers the Auto* classes
|
| 5 |
|
|
|
|
| 11 |
|
| 12 |
model_name = "allenai/OLMo-7B"
|
| 13 |
tokenizer = AutoTokenizer.from_pretrained(model_name)
|
| 14 |
+
model = AutoModelForCausalLM.from_pretrained(model_name, torch_dtype=torch.float16, load_in_8bit=True)
|
| 15 |
# model = AutoModelForCausalLM.from_pretrained(model_name)
|
| 16 |
|
| 17 |
def generate_text(prompt, max_new_tokens, top_k, top_p, do_sample):
|