PythonNut committed
Commit 079652c
1 Parent(s): cafb8f5

Add new 128k model!

Files changed (2)
  1. README.md +1 -0
  2. assets/index-DEbmRw68.js +1 -0
README.md CHANGED
@@ -14,6 +14,7 @@ models:
  - UW/OLMo2-8B-SuperBPE-t160k
  - UW/OLMo2-8B-SuperBPE-t80k
  - UW/OLMo2-8B-BPE
+ - alisawuffles/superbpe-tokenizer-128k
 ---
 
 Link to source code: https://github.com/xenova/transformers.js/tree/main/examples/tokenizer-playground.
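The id added to the models: frontmatter list doubles as the Hugging Face repo the playground fetches tokenizer files from. As a rough sketch only (assuming the AutoTokenizer flow used by the linked transformers.js tokenizer-playground example; the sample sentence is illustrative), the new tokenizer could be loaded and exercised like this:

// Rough sketch, not part of this commit: load the newly listed SuperBPE 128k
// tokenizer with transformers.js, assuming the same AutoTokenizer path as the
// linked tokenizer-playground example.
import { AutoTokenizer } from "@xenova/transformers";

const tokenizer = await AutoTokenizer.from_pretrained(
  "alisawuffles/superbpe-tokenizer-128k"
);

// Encode an illustrative sentence to token ids, then decode it back to text.
const ids = tokenizer.encode("The tokenizer playground now includes SuperBPE 128k.");
console.log(ids.length, ids);
console.log(tokenizer.decode(ids));

In the playground UI, the matching entry added to Ql in the bundled asset below appears to supply the human-readable label shown for this repo id.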
assets/index-DEbmRw68.js CHANGED
@@ -7139,6 +7139,7 @@ function jd({
 const Ql = Object.freeze({
  "UW/OLMo2-8B-SuperBPE-t180k": "SuperBPE 200k (t=180k, best on evals)",
  "UW/OLMo2-8B-SuperBPE-t80k": "SuperBPE 200k (t=80k, most efficient)",
+ "alisawuffles/superbpe-tokenizer-128k": "SuperBPE 128k (new recommended!)",
 "UW/OLMo2-8B-BPE": "BPE 200k (baseline)",
 "Xenova/gpt-4o": "GPT-4o",
 "Xenova/llama-3-tokenizer": "Llama 3",