Update src/translate/Translate.py
src/translate/Translate.py  CHANGED
@@ -79,7 +79,7 @@ def gemma_direct(requestValue: str, model: str = 'Gargaz/gemma-2b-romanian-bette
     # limit max_new_tokens to 150% of the requestValue
     max_new_tokens = int(len(requestValue) + len(requestValue) * 0.5)
     max_new_tokens = max_new_tokens if max_new_tokens % 2 == 0 else max_new_tokens + 1
-    messages = [{"role": "user", "content": f"Translate this text to Romanian: {requestValue}"}
+    messages = [{"role": "user", "content": f"Translate this text to Romanian: {requestValue}"}]
     tokenizer = AutoTokenizer.from_pretrained("Gargaz/gemma-2b-romanian-better")
     model = AutoModelForCausalLM.from_pretrained("Gargaz/gemma-2b-romanian-better")
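The net change adds the missing closing bracket on the messages list literal. With the bracket missing, Python treats the following assignment as a continuation of the open list expression, so the file fails to parse with a SyntaxError. Below is a minimal sketch of how the corrected hunk could fit into gemma_direct(); only the lines that appear in the diff come from the source, while the return type annotation, the apply_chat_template() call, the generate() call, and the decoding step are assumptions about the rest of the function, not the actual implementation.

    from transformers import AutoModelForCausalLM, AutoTokenizer

    def gemma_direct(requestValue: str, model: str = 'Gargaz/gemma-2b-romanian-better') -> str:
        # limit max_new_tokens to 150% of the requestValue, rounded up to an even number
        max_new_tokens = int(len(requestValue) + len(requestValue) * 0.5)
        max_new_tokens = max_new_tokens if max_new_tokens % 2 == 0 else max_new_tokens + 1

        # the fixed line: the list literal is now closed with "]"
        messages = [{"role": "user", "content": f"Translate this text to Romanian: {requestValue}"}]
        tokenizer = AutoTokenizer.from_pretrained("Gargaz/gemma-2b-romanian-better")
        # as in the diff, this rebinds the "model" parameter to the loaded model object
        model = AutoModelForCausalLM.from_pretrained("Gargaz/gemma-2b-romanian-better")

        # assumed continuation (not shown in the diff): render the chat messages with the
        # model's chat template, generate, and decode only the newly generated tokens
        input_ids = tokenizer.apply_chat_template(
            messages, add_generation_prompt=True, return_tensors="pt"
        )
        output_ids = model.generate(input_ids, max_new_tokens=max_new_tokens)
        return tokenizer.decode(output_ids[0][input_ids.shape[-1]:], skip_special_tokens=True)

Note that capping max_new_tokens at roughly 150% of the input's character length, then rounding up to an even number, is a heuristic carried over from the diff itself; it bounds generation cost but is not a guarantee that the Romanian translation fits within that budget.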