AkashDataScience committed
Commit 1a5b783 · 1 Parent(s): d0c796a
Files changed (1)
  1. app.py +1 -1
app.py CHANGED
@@ -26,7 +26,7 @@ tokenizer.padding_side = "right"
 pipe = pipeline("text-generation", model=model, tokenizer=tokenizer)
 
 def infer(message, history):
-    prompt = pipe.tokenizer.apply_chat_template([{"role": "user", "content": prompt}], tokenize=False, add_generation_prompt=True)
+    prompt = pipe.tokenizer.apply_chat_template([{"role": "user", "content": message}], tokenize=False, add_generation_prompt=True)
     outputs = pipe(prompt, max_new_tokens=256, do_sample=True, num_beams=1, temperature=0.3, top_k=50, top_p=0.95, max_time= 180)
     return outputs[0]['generated_text'][len(prompt):].strip()
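The change replaces the undefined `prompt` reference inside `apply_chat_template` with `message`, so the chat prompt is built from the user's incoming message instead of raising a NameError on every call. Below is a minimal sketch of how the corrected `infer` function typically fits into such an app, assuming a Gradio `ChatInterface` front end and a placeholder chat model checkpoint; neither of those appears in this diff and both are illustrative assumptions.

import gradio as gr
from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline

# Placeholder checkpoint with a chat template; the real model is outside this diff.
model_id = "TinyLlama/TinyLlama-1.1B-Chat-v1.0"
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(model_id)
tokenizer.padding_side = "right"

pipe = pipeline("text-generation", model=model, tokenizer=tokenizer)

def infer(message, history):
    # The fix: build the prompt from the incoming user `message`, not the
    # not-yet-assigned `prompt` variable the old line referenced.
    prompt = pipe.tokenizer.apply_chat_template(
        [{"role": "user", "content": message}],
        tokenize=False,
        add_generation_prompt=True,
    )
    outputs = pipe(prompt, max_new_tokens=256, do_sample=True, num_beams=1,
                   temperature=0.3, top_k=50, top_p=0.95, max_time=180)
    # Drop the echoed prompt so only the newly generated reply is returned.
    return outputs[0]["generated_text"][len(prompt):].strip()

if __name__ == "__main__":
    # Assumption: the Space serves infer through a Gradio chat UI, which calls
    # the function with (message, history) exactly as defined above.
    gr.ChatInterface(fn=infer).launch()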