import gradio as gr
from transformers import AutoTokenizer
from deep_translator import GoogleTranslator
# Tokenizers for the two models: text is encoded with token1 and decoded with token2.
token1 = AutoTokenizer.from_pretrained("allenai/Llama-3.1-Tulu-3-8B", use_fast=True)
token2 = AutoTokenizer.from_pretrained("burgasdotpro/Sebushka-llama-3.1-8B", use_fast=True)
def ndps(text):
    # Translate the input into Russian.
    translator = GoogleTranslator(source="auto", target="ru")
    out = translator.translate(text=text)
    # Encode the Russian text with token1 and decode the resulting ids with token2.
    ids = token1.encode(out, add_special_tokens=True)
    decoded = token2.decode(ids, skip_special_tokens=True)
    # Translate out of Russian; a target language cannot be auto-detected, so English
    # is assumed here as the intermediate language (the original used target="auto").
    translator = GoogleTranslator(source="ru", target="en")
    out = translator.translate(text=decoded)
    # Second encode/decode round-trip through the two tokenizers.
    ids = token1.encode(out, add_special_tokens=True)
    decoded = token2.decode(ids, skip_special_tokens=True)
    # Translate the result back into Russian and return it.
    translator = GoogleTranslator(source="auto", target="ru")
    return translator.translate(text=decoded)
demo = gr.Interface(fn=ndps, inputs="text", outputs="text")
demo.launch()
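
# A minimal sketch of exercising ndps directly (e.g. from a REPL after the function
# is defined), without going through the Gradio UI; the sample string is arbitrary
# and purely illustrative.
# print(ndps("Hello, how are you today?"))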