c0sta committed
Commit 6725681 · 1 Parent(s): bb866a9

Update app.py

Files changed (1): app.py (+4 -1)
app.py CHANGED
@@ -1,16 +1,19 @@
 import gradio as gr
 
 gr.load("models/ai-forever/ruGPT-3.5-13B").launch()
-
+print('launch')
 def greet(request):
+    print('greet')
     encoded_input = tokenizer(request, return_tensors='pt', \
         add_special_tokens=False).to('cuda:0')
+    print('encoded_input')
     output = model.generate(
         **encoded_input,
         num_beams=2,
         do_sample=True,
         max_new_tokens=100
     )
+    print('output')
     return tokenizer.decode(output[0], skip_special_tokens=True)
 
 iface = gr.Interface(fn=greet, inputs="text", outputs="text")
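For context: as committed, app.py calls gr.load("models/ai-forever/ruGPT-3.5-13B").launch() at the top, which serves Gradio's auto-generated demo for the hosted model, while the greet() function below it references a tokenizer and a model that the file never defines and builds an iface that is never launched. The added print() calls appear to be tracing which of these steps actually executes. The snippet below is a minimal sketch of what the local-inference path might look like if the model and tokenizer were loaded explicitly; the AutoTokenizer/AutoModelForCausalLM loading, the MODEL_ID constant, the float16 cast, and launching iface instead of gr.load(...) are illustrative assumptions, not part of the commit.

# Hypothetical rewrite of app.py -- a sketch only, assuming the goal is to run
# ai-forever/ruGPT-3.5-13B locally through greet() rather than via gr.load().
import gradio as gr
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

MODEL_ID = "ai-forever/ruGPT-3.5-13B"

# Load the tokenizer and model that the committed greet() implicitly relies on.
tokenizer = AutoTokenizer.from_pretrained(MODEL_ID)
model = AutoModelForCausalLM.from_pretrained(MODEL_ID, torch_dtype=torch.float16)
model = model.to('cuda:0')  # assumes a GPU large enough for the 13B weights

def greet(request):
    # Tokenize the prompt and move it to the same device as the model.
    encoded_input = tokenizer(
        request, return_tensors='pt', add_special_tokens=False
    ).to('cuda:0')
    # Same generation settings as the committed code.
    output = model.generate(
        **encoded_input,
        num_beams=2,
        do_sample=True,
        max_new_tokens=100,
    )
    return tokenizer.decode(output[0], skip_special_tokens=True)

iface = gr.Interface(fn=greet, inputs="text", outputs="text")
iface.launch()  # launch the custom interface instead of gr.load(...).launch()

Alternatively, keeping only the committed gr.load("models/ai-forever/ruGPT-3.5-13B").launch() line serves the hosted model through Hugging Face's inference backend, in which case greet(), tokenizer, and model are not needed at all.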