Fix red highlighting
app.py CHANGED
@@ -172,12 +172,12 @@ def diffusion_chat(question, eot_weight, max_it, sharpness, noise_clipping, use_
         print('Generating output')
         generated_tokens, confidences = generate_diffusion_text(current_tokens, answer_start)
         current_tokens = generated_tokens
-
+        just_noised_indices = []
         decoded_ids = current_tokens[answer_start:]
         decoded_tokens = tokenizer.convert_ids_to_tokens(decoded_ids)
         filtered_tokens = [tok for tok in decoded_tokens if tokenizer.convert_tokens_to_ids(tok) != eot_token_id]
         filtered_prev_tokens = [tok for tok in prev_decoded_tokens if tokenizer.convert_tokens_to_ids(tok) != eot_token_id] if prev_decoded_tokens else []
-
+
         if filtered_prev_tokens:
             highlighted = []
             for i, tok in enumerate(decoded_tokens):
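For context, the change initializes just_noised_indices to an empty list before the decoding and highlighting block runs, so that block always sees a defined, per-iteration list of re-noised positions. Below is a minimal sketch of how a highlighting step could consume that list; only the names decoded_tokens, just_noised_indices, tokenizer, and eot_token_id come from the diff, while the function name, HTML styling, and overall structure are assumptions for illustration, not the Space's actual implementation.

import html

def render_highlighted(decoded_tokens, just_noised_indices, tokenizer, eot_token_id):
    # Hypothetical helper: wrap tokens whose positions were just re-noised
    # in a red span, skipping end-of-text tokens as the diff's filtering does.
    pieces = []
    for i, tok in enumerate(decoded_tokens):
        if tokenizer.convert_tokens_to_ids(tok) == eot_token_id:
            continue  # drop end-of-text tokens from the display
        text = html.escape(tokenizer.convert_tokens_to_string([tok]))
        if i in just_noised_indices:
            pieces.append(f'<span style="color:red">{text}</span>')
        else:
            pieces.append(text)
    return " ".join(pieces)

Resetting just_noised_indices at the start of each iteration plausibly means a step where nothing was re-noised renders with no red tokens, rather than reusing a stale or undefined list, which is consistent with the commit message "Fix red highlighting".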