Molbap (HF Staff) committed
Commit 3ff5fdf · 1 Parent(s): 81d2057

large update

Files changed (40)
  1. app/.astro/astro/content.d.ts +1 -6
  2. app/dist/_astro/index.DT_nyxPT.css +0 -1
  3. app/dist/_astro/index.DT_nyxPT.css.gz +0 -3
  4. app/dist/_astro/index.beJ178IL.css +1 -0
  5. dist/static/popular_models_barplot.png → app/dist/_astro/index.beJ178IL.css.gz +2 -2
  6. app/dist/index.html +67 -53
  7. app/dist/index.html.gz +2 -2
  8. app/package-lock.json +20 -12
  9. app/package.json +2 -2
  10. app/src/components/HtmlEmbed.astro +14 -1
  11. app/src/content/article.mdx +72 -44
  12. app/src/content/embeds/banner.html +1 -1
  13. app/src/content/new_article.mdx +633 -0
  14. app/src/styles/global.css +29 -0
  15. dist/distill.bundle.js +0 -0
  16. dist/distill.bundle.js.map +0 -0
  17. dist/fragments/attention-visualizer.html +0 -45
  18. dist/fragments/d3-graph.html +0 -12
  19. dist/fragments/dependency-graph.html +0 -6
  20. dist/fragments/glm-compare.html +0 -149
  21. dist/fragments/loc-growth.html +0 -6
  22. dist/fragments/memory-profiler.html +0 -16
  23. dist/fragments/model-timeline.html +0 -6
  24. dist/fragments/model-visualisation.html +0 -0
  25. dist/fragments/terminal.html +0 -43
  26. dist/fragments/tp-plan.html +0 -24
  27. dist/fragments/warmup_demo.html +0 -398
  28. dist/hf-logo.svg +0 -1
  29. dist/index.html +0 -0
  30. dist/main.bundle.js +0 -2028
  31. dist/main.bundle.js.map +0 -0
  32. dist/static/Bloatedness_visualizer.png +0 -3
  33. dist/static/Jaccard_similarity_plot.png +0 -3
  34. dist/static/d3_dependency_graph.html +0 -1902
  35. dist/static/fast_image_processors.png +0 -3
  36. dist/static/graph_modular_related_models.png +0 -3
  37. dist/static/hf-logo.svg +0 -1
  38. dist/static/model_debugger.png +0 -3
  39. dist/static/modular_candidates.png +0 -3
  40. dist/style.css +0 -741
app/.astro/astro/content.d.ts CHANGED
@@ -155,12 +155,7 @@ declare module 'astro:content' {
 	};
 
 	type DataEntryMap = {
-		"assets": Record<string, {
-			id: string;
-			collection: "assets";
-			data: any;
-		}>;
-		"embeds": Record<string, {
+		"embeds": Record<string, {
 			id: string;
 			collection: "embeds";
 			data: any;
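
For context, the generated DataEntryMap after this change would look roughly as follows. This is a sketch reconstructed from the diff above, not the full generated file; the enclosing `declare module 'astro:content'` block and any other collection maps are assumed from Astro's generated typings.

	// After this commit the "assets" data collection is gone from the
	// generated typings; only the "embeds" collection remains.
	type DataEntryMap = {
		"embeds": Record<string, {
			id: string;            // entry id, e.g. "banner" (hypothetical example)
			collection: "embeds";  // discriminant tying the entry to its collection
			data: any;             // untyped payload for loose data collections
		}>;
	};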
app/dist/_astro/index.DT_nyxPT.css DELETED
@@ -1 +0,0 @@
- [deleted: one line of minified, generated CSS: the bundled stylesheet covering embed/figure styling, hero and footer layout, KaTeX font faces, light/dark theme variables, and code-block styles; full contents omitted]
center;background-size:12px;cursor:pointer;transition:border-color .2s ease,box-shadow .2s ease;-webkit-appearance:none;-moz-appearance:none;appearance:none}@supports (color: color-mix(in lch,red,blue)){select{border:1px solid color-mix(in srgb,var(--primary-color) 50%,var(--border-color))}}select:hover,select:focus,select:active{border-color:#de90ca;border-color:var(--primary-color)}select:focus{outline:none;border-color:#de90ca;border-color:var(--primary-color);box-shadow:0 0 0 2px #de90ca1a}@supports (color: lab(from red l 1 1% / calc(alpha + .1))){select:focus{box-shadow:0 0 0 2px rgba(from var(--primary-color) r g b / .1)}}select:disabled{opacity:.6;cursor:not-allowed;background-color:#fafafa;background-color:var(--surface-bg)}[data-theme=dark] select{background-image:url("data:image/svg+xml;charset=utf-8,%3Csvg xmlns='http://www.w3.org/2000/svg' width='12' height='12' viewBox='0 0 12 12'%3E%3Cpath fill='%23bbb' d='M6 8.825L1.175 4 2.35 2.825 6 6.475 9.65 2.825 10.825 4z'/%3E%3C/svg%3E")}input[type=checkbox]{-webkit-appearance:none;-moz-appearance:none;appearance:none;width:16px;height:16px;border:2px solid rgba(0,0,0,.1);border:2px solid var(--border-color);border-radius:3px;background-color:#fff;background-color:var(--page-bg);cursor:pointer;position:relative;transition:all .2s ease;margin-right:12px;margin-right:var(--spacing-2)}input[type=checkbox]:hover{border-color:#de90ca;border-color:var(--primary-color)}input[type=checkbox]:focus{outline:none;border-color:#de90ca;border-color:var(--primary-color);box-shadow:0 0 0 2px #de90ca1a}@supports (color: lab(from red l 1 1% / calc(alpha + .1))){input[type=checkbox]:focus{box-shadow:0 0 0 2px rgba(from var(--primary-color) r g b / .1)}}input[type=checkbox]:checked{background-color:#de90ca;background-color:var(--primary-color);border-color:#de90ca;border-color:var(--primary-color)}input[type=checkbox]:checked:before{content:"";position:absolute;top:1px;left:4px;width:4px;height:8px;border:solid #ffffff;border:solid var(--on-primary);border-width:0 2px 2px 0;transform:rotate(45deg)}input[type=checkbox]:disabled{opacity:.6;cursor:not-allowed}input[type=radio]{-webkit-appearance:none;-moz-appearance:none;appearance:none;width:16px;height:16px;border:2px solid rgba(0,0,0,.1);border:2px solid var(--border-color);border-radius:50%;background-color:#fff;background-color:var(--page-bg);cursor:pointer;position:relative;transition:all .2s ease;margin-right:12px;margin-right:var(--spacing-2)}input[type=radio]:hover{border-color:#de90ca;border-color:var(--primary-color)}input[type=radio]:focus{outline:none;border-color:#de90ca;border-color:var(--primary-color);box-shadow:0 0 0 2px #de90ca1a}@supports (color: lab(from red l 1 1% / calc(alpha + .1))){input[type=radio]:focus{box-shadow:0 0 0 2px rgba(from var(--primary-color) r g b / .1)}}input[type=radio]:checked{border-color:#de90ca;border-color:var(--primary-color)}input[type=radio]:checked:before{content:"";position:absolute;top:2px;left:2px;width:8px;height:8px;border-radius:50%;background-color:#de90ca;background-color:var(--primary-color)}input[type=radio]:disabled{opacity:.6;cursor:not-allowed}input[type=text],input[type=email],input[type=password],input[type=number],input[type=url],input[type=search],textarea{-webkit-appearance:none;-moz-appearance:none;appearance:none;background-color:#fff;background-color:var(--page-bg);border:1px solid rgba(0,0,0,.1);border:1px solid var(--border-color);border-radius:6px;border-radius:var(--button-radius);padding:8px 12px;padding:var(--button-padding-y) 
var(--button-padding-x);font-family:Source Sans Pro,ui-sans-serif,system-ui,-apple-system,Segoe UI,Roboto,Ubuntu,Cantarell,Noto Sans,sans-serif,"Apple Color Emoji","Segoe UI Emoji",Segoe UI Symbol,"Noto Color Emoji";font-family:var(--default-font-family);font-size:14px;font-size:var(--button-font-size);color:#000000d9;color:var(--text-color);transition:border-color .2s ease,box-shadow .2s ease;width:100%}input[type=text]:hover,input[type=email]:hover,input[type=password]:hover,input[type=number]:hover,input[type=url]:hover,input[type=search]:hover,textarea:hover{border-color:#de90ca;border-color:var(--primary-color)}input[type=text]:focus,input[type=email]:focus,input[type=password]:focus,input[type=number]:focus,input[type=url]:focus,input[type=search]:focus,textarea:focus{outline:none;border-color:#de90ca;border-color:var(--primary-color);box-shadow:0 0 0 2px #de90ca1a}@supports (color: lab(from red l 1 1% / calc(alpha + .1))){input[type=text]:focus,input[type=email]:focus,input[type=password]:focus,input[type=number]:focus,input[type=url]:focus,input[type=search]:focus,textarea:focus{box-shadow:0 0 0 2px rgba(from var(--primary-color) r g b / .1)}}input[type=text]:disabled,input[type=email]:disabled,input[type=password]:disabled,input[type=number]:disabled,input[type=url]:disabled,input[type=search]:disabled,textarea:disabled{opacity:.6;cursor:not-allowed;background-color:#fafafa;background-color:var(--surface-bg)}label{display:flex;align-items:center;font-size:14px;font-size:var(--button-font-size);color:#000000d9;color:var(--text-color);cursor:pointer;margin-bottom:0;line-height:1.4;-webkit-user-select:none;-moz-user-select:none;user-select:none}.form-group{margin-bottom:24px;margin-bottom:var(--spacing-4);display:flex;align-items:center;gap:12px;gap:var(--spacing-2)}.form-group label{margin-bottom:0}.form-group.vertical{flex-direction:column;align-items:flex-start}.form-group.vertical label{margin-bottom:8px;margin-bottom:var(--spacing-1)}.form-inline{display:flex;align-items:center;gap:12px;gap:var(--spacing-2);margin-bottom:16px;margin-bottom:var(--spacing-3)}.form-inline label{margin-bottom:0}div[style*="display: flex"] label,div[class*=flex] label,.trackio-controls label,.scale-controls label,.theme-selector label{margin-bottom:0!important;align-self:center}.tenet-list{margin:3rem 0}.tenet-list ol{counter-reset:tenet-counter -1;list-style:none;padding-left:0;display:grid;grid-template-columns:1fr;grid-gap:2.5rem;gap:2.5rem;max-width:900px;margin:0 auto}.tenet-list li.tenet{counter-increment:tenet-counter;background:linear-gradient(135deg,#fff,#f8f9fa);border:2px solid #e2e8f0;border-radius:16px;padding:2rem 2rem 2rem 4rem;margin:0;position:relative;box-shadow:0 12px 35px #0000001f;transition:all .3s ease;cursor:pointer}.tenet-list li.tenet:hover{transform:translateY(-8px) scale(1.02);box-shadow:0 20px 50px #00000040;border-color:#007bff80;background:linear-gradient(135deg,#fff,#f0f8ff)}.tenet-list li.tenet:nth-child(1):before{background:linear-gradient(135deg,#667eea,#764ba2)}.tenet-list li.tenet:nth-child(2):before{background:linear-gradient(135deg,#f093fb,#f5576c)}.tenet-list li.tenet:nth-child(3):before{background:linear-gradient(135deg,#4facfe,#00f2fe)}.tenet-list li.tenet:nth-child(4):before{background:linear-gradient(135deg,#43e97b,#38f9d7)}.tenet-list li.tenet:nth-child(5):before{background:linear-gradient(135deg,#fa709a,#fee140)}.tenet-list li.tenet:nth-child(6):before{background:linear-gradient(135deg,#a8edea,#fed6e3)}.tenet-list 
li.tenet:nth-child(7):before{background:linear-gradient(135deg,#ff9a9e,#fecfef)}.tenet-list li.tenet:nth-child(8):before{background:linear-gradient(135deg,#a18cd1,#fbc2eb)}.tenet-list li.tenet:nth-child(9):before{background:linear-gradient(135deg,#ffecd2,#fcb69f)}.tenet-list li.tenet:before{content:counter(tenet-counter);position:absolute;top:-12px;left:-12px;color:#fff;width:48px;height:48px;border-radius:50%;display:flex;align-items:center;justify-content:center;font-size:1.2em;font-weight:700;box-shadow:0 4px 12px #00000026;border:3px solid white}.tenet-list li.tenet strong{color:#1a202c;font-size:1.1em;display:block;margin-bottom:.5rem}.tenet-list li.tenet em{color:#4a5568;font-size:.95em;font-style:italic;display:block;margin-top:.75rem;padding:1rem;background:#00000008;border-radius:8px;border-left:3px solid #e2e8f0}.tenet-list li.tenet p{color:#2d3748;line-height:1.6;margin:.5rem 0}@keyframes pulse-glow{0%{box-shadow:0 4px 12px #00000026}50%{box-shadow:0 4px 20px #00000040}to{box-shadow:0 4px 12px #00000026}}.tenet-list li.tenet:hover:before{animation:pulse-glow 2s ease-in-out infinite}[data-theme=dark] .tenet-list li.tenet{background:linear-gradient(135deg,#1a202c,#2d3748);border-color:#4a5568}[data-theme=dark] .tenet-list li.tenet:hover{background:linear-gradient(135deg,#2d3748,#374151);border-color:#667eea80}[data-theme=dark] .tenet-list li.tenet strong{color:#e2e8f0}[data-theme=dark] .tenet-list li.tenet p{color:#cbd5e0}[data-theme=dark] .tenet-list li.tenet em{color:#a0aec0;background:#ffffff0d;border-left-color:#4a5568}@media (max-width: 768px){.tenet-list li.tenet{padding:1.5rem}}.crumbs{background:linear-gradient(135deg,#f0f4ff,#e6eeff);border-left:5px solid #667eea;padding:1.25rem 1.75rem;margin:2.5rem 0;border-radius:0 8px 8px 0;box-shadow:0 2px 8px #667eea1f;font-size:.95em;line-height:1.6;color:#4a5568}.crumbs strong{color:#667eea;font-weight:700}.crumbs code{background:#667eea1a;padding:.15em .4em;border-radius:3px;font-size:.9em;color:#4c51bf}.crumbs a{color:#667eea;font-weight:500}[data-theme=dark] .crumbs{background:linear-gradient(135deg,#1e293b,#334155);border-left-color:#818cf8;color:#cbd5e0}[data-theme=dark] .crumbs strong{color:#a5b4fc}[data-theme=dark] .crumbs code{background:#818cf833;color:#c7d2fe}[data-theme=dark] .crumbs a{color:#a5b4fc}main a[href^="http://"],main a[href^="https://"]{background:linear-gradient(135deg,#e3f2fd,#bbdefb);color:#1565c0;-webkit-text-decoration:none;text-decoration:none;padding:.15em .5em;border-radius:12px;border:1px solid #90caf9;display:inline-block;transition:all .3s ease;font-weight:500;box-shadow:0 1px 3px #1565c026}main a[href^="http://"]:hover,main a[href^="https://"]:hover{background:linear-gradient(135deg,#2196f3,#1976d2);color:#fff;border-color:#1565c0;transform:translateY(-1px);box-shadow:0 4px 12px #1565c04d}main a[href^="http://"]:active,main a[href^="https://"]:active{transform:translateY(0);box-shadow:0 1px 3px #1565c033}a[href^="#source-of-truth"],a[href^="#one-model-one-file"],a[href^="#code-is-product"],a[href^="#standardize-dont-abstract"],a[href^="#do-repeat-yourself"],a[href^="#minimal-user-api"],a[href^="#backwards-compatibility"],a[href^="#consistent-public-surface"],a[href^="#modular"]{position:relative;color:#667eea;font-weight:600;-webkit-text-decoration:underline;text-decoration:underline;text-decoration-color:#667eea4d;transition:all .3s 
ease}a[href^="#source-of-truth"]:hover,a[href^="#one-model-one-file"]:hover,a[href^="#code-is-product"]:hover,a[href^="#standardize-dont-abstract"]:hover,a[href^="#do-repeat-yourself"]:hover,a[href^="#minimal-user-api"]:hover,a[href^="#backwards-compatibility"]:hover,a[href^="#consistent-public-surface"]:hover,a[href^="#modular"]:hover{color:#4c51bf;text-decoration-color:#4c51bf;background:#667eea1a;padding:2px 4px;border-radius:4px}a[href^="#source-of-truth"]:after{content:"Model implementations should be reliable, reproducible, and faithful to original performances."}a[href^="#one-model-one-file"]:after{content:"All inference and training core logic visible, top‑to‑bottom, in a single file."}a[href^="#code-is-product"]:after{content:"Optimize for reading, diffing, and tweaking. Code quality matters as much as functionality."}a[href^="#standardize-dont-abstract"]:after{content:"Model-specific logic belongs in the model file, not hidden behind abstractions."}a[href^="#do-repeat-yourself"]:after{content:"Strategic duplication can improve readability and maintainability when done thoughtfully."}a[href^="#minimal-user-api"]:after{content:"Config, model, preprocessing; from_pretrained, save_pretrained, push_to_hub. Least amount of codepaths."}a[href^="#backwards-compatibility"]:after{content:"Any artifact once on the hub must remain loadable. Breaking changes are unacceptable."}a[href^="#consistent-public-surface"]:after{content:"Uniform naming, signatures, and conventions across all models for predictability."}a[href^="#modular"]:after{content:"Architecture components shared via modular system, removing boilerplate while keeping expanded files visible."}a[href^="#source-of-truth"]:after,a[href^="#one-model-one-file"]:after,a[href^="#code-is-product"]:after,a[href^="#standardize-dont-abstract"]:after,a[href^="#do-repeat-yourself"]:after,a[href^="#minimal-user-api"]:after,a[href^="#backwards-compatibility"]:after,a[href^="#consistent-public-surface"]:after,a[href^="#modular"]:after{position:absolute;bottom:100%;left:50%;transform:translate(-50%);background:#1a202c;color:#fff;padding:.75rem 1rem;border-radius:8px;font-size:.85em;font-weight:400;white-space:normal;width:300px;line-height:1.4;z-index:1001;opacity:0;visibility:hidden;transition:opacity .3s ease,visibility .3s ease;pointer-events:none;box-shadow:0 4px 12px #0003;margin-bottom:.5rem}a[href^="#source-of-truth"]:hover:after,a[href^="#one-model-one-file"]:hover:after,a[href^="#code-is-product"]:hover:after,a[href^="#standardize-dont-abstract"]:hover:after,a[href^="#do-repeat-yourself"]:hover:after,a[href^="#minimal-user-api"]:hover:after,a[href^="#backwards-compatibility"]:hover:after,a[href^="#consistent-public-surface"]:hover:after,a[href^="#modular"]:hover:after{opacity:1;visibility:visible}[data-theme=dark] main a[href^="http://"],[data-theme=dark] main a[href^="https://"]{background:linear-gradient(135deg,#1e3a5f,#2563eb);color:#bfdbfe;border-color:#3b82f6}[data-theme=dark] main a[href^="http://"]:hover,[data-theme=dark] main a[href^="https://"]:hover{background:linear-gradient(135deg,#2563eb,#1d4ed8);color:#fff;border-color:#60a5fa}[data-theme=dark] a[href^="#source-of-truth"]:after,[data-theme=dark] a[href^="#one-model-one-file"]:after,[data-theme=dark] a[href^="#code-is-product"]:after,[data-theme=dark] a[href^="#standardize-dont-abstract"]:after,[data-theme=dark] a[href^="#do-repeat-yourself"]:after,[data-theme=dark] a[href^="#minimal-user-api"]:after,[data-theme=dark] a[href^="#backwards-compatibility"]:after,[data-theme=dark] 
a[href^="#consistent-public-surface"]:after,[data-theme=dark] a[href^="#modular"]:after{background:#2d3748;color:#e2e8f0}[data-theme=dark] a[href^="#source-of-truth"],[data-theme=dark] a[href^="#one-model-one-file"],[data-theme=dark] a[href^="#code-is-product"],[data-theme=dark] a[href^="#standardize-dont-abstract"],[data-theme=dark] a[href^="#do-repeat-yourself"],[data-theme=dark] a[href^="#minimal-user-api"],[data-theme=dark] a[href^="#backwards-compatibility"],[data-theme=dark] a[href^="#consistent-public-surface"],[data-theme=dark] a[href^="#modular"]{color:#a5b4fc;text-decoration-color:#a5b4fc4d}[data-theme=dark] a[href^="#source-of-truth"]:hover,[data-theme=dark] a[href^="#one-model-one-file"]:hover,[data-theme=dark] a[href^="#code-is-product"]:hover,[data-theme=dark] a[href^="#standardize-dont-abstract"]:hover,[data-theme=dark] a[href^="#do-repeat-yourself"]:hover,[data-theme=dark] a[href^="#minimal-user-api"]:hover,[data-theme=dark] a[href^="#backwards-compatibility"]:hover,[data-theme=dark] a[href^="#consistent-public-surface"]:hover,[data-theme=dark] a[href^="#modular"]:hover{color:#c7d2fe;background:#a5b4fc26}.demo-wide,.demo-full-width{display:flex;flex-direction:column;align-items:center;justify-content:center;width:100%;min-height:150px;color:#0009;color:var(--muted-color);font-size:12px;border:2px dashed rgba(0,0,0,.1);border:2px dashed var(--border-color);border-radius:8px;background:#fafafa;background:var(--surface-bg);margin-bottom:24px;margin-bottom:var(--block-spacing-y)}.mermaid{background:none!important;margin-bottom:24px!important;margin-bottom:var(--block-spacing-y)!important}
 
 
app/dist/_astro/index.DT_nyxPT.css.gz DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:901dc26b4550bd52a86541b54b41556f334baca1d30afb11e8a4d456e2a8e0af
- size 18334
app/dist/_astro/index.beJ178IL.css ADDED
@@ -0,0 +1 @@
+ @import"https://fonts.googleapis.com/css2?family=Source+Sans+Pro:ital,wght@0,200..900;1,200..900&display=swap";.html-embed{margin:0 0 var(--block-spacing-y);z-index:var(--z-elevated);position:relative;width:min(1100px,100vw - var(--content-padding-x) * 2);margin-left:50%;transform:translate(-50%)}.html-embed__title{text-align:left;font-weight:600;font-size:.95rem;color:var(--text-color);margin:0;padding:0;padding-bottom:var(--spacing-1);position:relative;display:block;width:100%;background:var(--page-bg);z-index:var(--z-elevated)}.html-embed__card{background:var(--code-bg);border:1px solid var(--border-color);border-radius:10px;padding:12px;z-index:calc(var(--z-elevated) + 1);position:relative}.html-embed__card.is-frameless{background:transparent;border-color:transparent;padding:0}.html-embed__desc{text-align:left;font-size:.9rem;color:var(--muted-color);margin:0;padding:0;padding-top:var(--spacing-1);position:relative;z-index:var(--z-elevated);display:block;width:100%;background:var(--page-bg)}.html-embed__card svg text{fill:var(--text-color)}.html-embed__card label{color:var(--text-color)}.plotly-graph-div{width:100%;min-height:320px}@media (max-width: 768px){.plotly-graph-div{min-height:260px}}[id^=plot-]{display:flex;flex-direction:column;align-items:center;gap:15px}.plotly_caption{font-style:italic;margin-top:10px}.plotly_controls{display:flex;flex-wrap:wrap;justify-content:center;gap:30px}.plotly_input_container{display:flex;align-items:center;flex-direction:column;gap:10px}.plotly_input_container>select{padding:2px 4px;line-height:1.5em;text-align:center;border-radius:4px;font-size:12px;background-color:var(--neutral-200);outline:none;border:1px solid var(--neutral-300)}.plotly_slider{display:flex;align-items:center;gap:10px}.plotly_slider>input[type=range]{-webkit-appearance:none;-moz-appearance:none;appearance:none;height:2px;background:var(--neutral-400);border-radius:5px;outline:none}.plotly_slider>input[type=range]::-webkit-slider-thumb{-webkit-appearance:none;width:18px;height:18px;border-radius:50%;background:var(--primary-color);cursor:pointer}.plotly_slider>input[type=range]::-moz-range-thumb{width:18px;height:18px;border-radius:50%;background:var(--primary-color);cursor:pointer}.plotly_slider>span{font-size:14px;line-height:1.6em;min-width:16px}[data-theme=dark] .html-embed__card:not(.is-frameless){background:#12151b;border-color:#ffffff26}[data-theme=dark] .html-embed__card .xaxislayer-above text,[data-theme=dark] .html-embed__card .yaxislayer-above text,[data-theme=dark] .html-embed__card .infolayer text,[data-theme=dark] .html-embed__card .legend text,[data-theme=dark] .html-embed__card .annotation text,[data-theme=dark] .html-embed__card .colorbar text,[data-theme=dark] .html-embed__card .hoverlayer text{fill:#fff!important}[data-theme=dark] .html-embed__card .xaxislayer-above path,[data-theme=dark] .html-embed__card .yaxislayer-above path,[data-theme=dark] .html-embed__card .xlines-above,[data-theme=dark] .html-embed__card .ylines-above{stroke:#ffffff59!important}[data-theme=dark] .html-embed__card .gridlayer path{stroke:#ffffff26!important}[data-theme=dark] .html-embed__card .legend rect.bg{fill:#00000040!important;stroke:#fff3!important}[data-theme=dark] .html-embed__card .hoverlayer .bg{fill:#000c!important;stroke:#fff3!important}[data-theme=dark] .html-embed__card .colorbar .cbbg{fill:#00000040!important;stroke:#fff3!important}.force-light-mode{filter:invert(0);--csstools-color-scheme--light: 
initial;color-scheme:light;background:#fff;padding:20px;border-radius:10px}[data-theme=dark] .force-light-mode .html-embed__card{background:#fff!important;border-color:#ddd!important}[data-theme=dark] .force-light-mode *{color:#333!important}@media (max-width: 1024px){.html-embed{width:100%;margin-left:0;transform:none}}@media print{.html-embed,.html-embed__card{max-width:100%!important;width:100%!important;margin-left:0!important;margin-right:0!important}.html-embed__card{padding:6px}.html-embed__card.is-frameless{padding:0}.html-embed__card svg,.html-embed__card canvas,.html-embed__card img{max-width:100%!important;height:auto!important}.html-embed__card>div[id^=frag-]{width:100%!important}}@media print{.html-embed,.html-embed__card{-moz-column-break-inside:avoid;break-inside:avoid;page-break-inside:avoid}.html-embed,.html-embed__card{max-width:100%!important;width:100%!important}.html-embed__card{padding:6px}.html-embed__card.is-frameless{padding:0}.html-embed__card svg,.html-embed__card canvas,.html-embed__card img,.html-embed__card video,.html-embed__card iframe{max-width:100%!important;height:auto!important}.html-embed__card>div[id^=frag-]{width:100%!important;max-width:100%!important}.html-embed .d3-galaxy{width:100%!important;max-width:980px!important;margin-left:auto!important;margin-right:auto!important}}.hero[data-astro-cid-bbe6dxrz]{width:100%;padding:0;text-align:center}.hero-title[data-astro-cid-bbe6dxrz]{font-size:max(28px,min(4vw,48px));font-weight:800;line-height:1.1;max-width:100%;margin:auto}.hero-banner[data-astro-cid-bbe6dxrz]{max-width:980px;margin:0 auto}.hero-desc[data-astro-cid-bbe6dxrz]{color:var(--muted-color);font-style:italic;margin:0 0 16px}.meta[data-astro-cid-bbe6dxrz]{border-top:1px solid var(--border-color);border-bottom:1px solid var(--border-color);padding:1rem 0;font-size:.9rem}.meta-container[data-astro-cid-bbe6dxrz]{max-width:760px;display:flex;flex-direction:row;justify-content:space-between;margin:0 auto;padding:0 var(--content-padding-x);gap:8px}.meta-container[data-astro-cid-bbe6dxrz] a[data-astro-cid-bbe6dxrz]:not(.button){color:var(--primary-color);-webkit-text-decoration:underline;text-decoration:underline;text-underline-offset:2px;text-decoration-thickness:.06em;text-decoration-color:var(--link-underline);transition:text-decoration-color .15s ease-in-out}.meta-container[data-astro-cid-bbe6dxrz] a[data-astro-cid-bbe6dxrz]:hover{text-decoration-color:var(--link-underline-hover)}.meta-container[data-astro-cid-bbe6dxrz] a[data-astro-cid-bbe6dxrz].button,.meta-container[data-astro-cid-bbe6dxrz] .button[data-astro-cid-bbe6dxrz]{-webkit-text-decoration:none;text-decoration:none}.meta-container-cell[data-astro-cid-bbe6dxrz]{display:flex;flex-direction:column;gap:8px;max-width:250px}.meta-container-cell[data-astro-cid-bbe6dxrz] h3[data-astro-cid-bbe6dxrz]{margin:0;font-size:12px;font-weight:400;color:var(--muted-color);text-transform:uppercase;letter-spacing:.02em}.meta-container-cell[data-astro-cid-bbe6dxrz] p[data-astro-cid-bbe6dxrz]{margin:0}.authors[data-astro-cid-bbe6dxrz]{margin:0;list-style-type:none;padding-left:0;display:flex;flex-wrap:wrap}.authors[data-astro-cid-bbe6dxrz] li[data-astro-cid-bbe6dxrz]{white-space:nowrap;margin-right:4px}.affiliations[data-astro-cid-bbe6dxrz]{margin:0;padding-left:1.25em}.affiliations[data-astro-cid-bbe6dxrz] li[data-astro-cid-bbe6dxrz]{margin:0}header[data-astro-cid-bbe6dxrz].meta .meta-container[data-astro-cid-bbe6dxrz]{flex-wrap:wrap;row-gap:12px}@media (max-width: 
768px){.meta-container-cell--affiliations[data-astro-cid-bbe6dxrz],.meta-container-cell--pdf[data-astro-cid-bbe6dxrz]{text-align:right}}@media print{.meta-container-cell--pdf[data-astro-cid-bbe6dxrz]{display:none!important}}.footer{contain:layout style;font-size:.8em;line-height:1.7em;margin-top:60px;margin-bottom:0;border-top:1px solid rgba(0,0,0,.1);color:#00000080}.footer-inner{max-width:1280px;margin:0 auto;padding:60px 16px 48px;display:grid;grid-template-columns:220px minmax(0,680px) 260px;grid-gap:32px;gap:32px;align-items:start}.citation-block,.references-block,.reuse-block,.doi-block{display:contents}.citation-block>h3,.references-block>h3,.reuse-block>h3,.doi-block>h3{grid-column:1;font-size:15px;margin:0;text-align:right;padding-right:30px}.citation-block>:not(h3),.references-block>:not(h3),.reuse-block>:not(h3),.doi-block>:not(h3){grid-column:2}.citation-block h3{margin:0 0 8px}.citation-block h4{margin:16px 0 8px;font-size:14px;text-transform:uppercase;color:var(--muted-color)}.citation-block p,.reuse-block p,.doi-block p,.footnotes ol,.footnotes ol p,.references{margin-top:0}.citation{font-family:ui-monospace,SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier New,monospace;font-size:11px;line-height:15px;border-left:1px solid rgba(0,0,0,.1);border:1px solid rgba(0,0,0,.1);background:#00000005;padding:10px 18px;border-radius:3px;color:#969696;overflow:hidden;margin-top:-12px;white-space:pre-wrap;word-wrap:break-word}.citation a{color:#0009;-webkit-text-decoration:underline;text-decoration:underline}.citation.short{margin-top:-4px}.references-block h3{margin:0}.references-block ol{padding:0 0 0 15px}@media (min-width: 768px){.references-block ol{padding:0 0 0 30px;margin-left:-30px}}.references-block li{margin-bottom:1em}.references-block a{color:var(--text-color)}[data-theme=dark] .footer{border-top-color:#ffffff26;color:#c8c8c8cc}[data-theme=dark] .citation{background:#ffffff0a;border-color:#ffffff26;color:#c8c8c8}[data-theme=dark] .citation a{color:#ffffffbf}.footer a{color:var(--primary-color);border-bottom:1px solid var(--link-underline);-webkit-text-decoration:none;text-decoration:none}.footer a:hover{color:var(--primary-color-hover);border-bottom-color:var(--link-underline-hover)}[data-theme=dark] .footer a{color:var(--primary-color)}#theme-toggle[data-astro-cid-x3pjskd3]{display:inline-flex;align-items:center;gap:8px;border:none;background:transparent;padding:6px 10px;border-radius:8px;cursor:pointer;color:var(--text-color)!important}#theme-toggle[data-astro-cid-x3pjskd3] .icon[data-astro-cid-x3pjskd3].dark,[data-astro-cid-x3pjskd3][data-theme=dark] #theme-toggle[data-astro-cid-x3pjskd3] .icon[data-astro-cid-x3pjskd3].light{display:none}[data-astro-cid-x3pjskd3][data-theme=dark] #theme-toggle[data-astro-cid-x3pjskd3] .icon[data-astro-cid-x3pjskd3].dark{display:inline}#theme-toggle[data-astro-cid-x3pjskd3] .icon[data-astro-cid-x3pjskd3]{filter:none!important}.table-of-contents{position:sticky;top:32px;margin-top:12px}.table-of-contents nav{border-left:1px solid var(--border-color);padding-left:16px;font-size:13px}.table-of-contents .title{font-weight:600;font-size:14px;margin-bottom:8px}.table-of-contents nav ul{margin:0 0 6px;padding-left:1em}.table-of-contents nav li{list-style:none;margin:.25em 0}.table-of-contents nav a,.table-of-contents nav a:link,.table-of-contents nav a:visited{color:var(--text-color);-webkit-text-decoration:none;text-decoration:none;border-bottom:none}.table-of-contents nav>ul>li>a{font-weight:700}.table-of-contents nav 
a:hover{-webkit-text-decoration:underline solid var(--muted-color);text-decoration:underline solid var(--muted-color)}.table-of-contents nav a.active{-webkit-text-decoration:underline;text-decoration:underline}.table-of-contents-mobile{display:none;margin:8px 0 16px}.table-of-contents-mobile>summary{cursor:pointer;list-style:none;padding:var(--spacing-3) var(--spacing-4);border:1px solid var(--border-color);border-radius:8px;color:var(--text-color);font-weight:600;position:relative}.table-of-contents-mobile[open]>summary{border-bottom-left-radius:0;border-bottom-right-radius:0}.table-of-contents-mobile>summary:after{content:"";position:absolute;right:var(--spacing-4);top:50%;width:8px;height:8px;border-right:2px solid currentColor;border-bottom:2px solid currentColor;transform:translateY(-70%) rotate(45deg);transition:transform .15s ease;opacity:.7}.table-of-contents-mobile[open]>summary:after{transform:translateY(-30%) rotate(-135deg)}.table-of-contents-mobile nav{border-left:none;padding:10px 12px;font-size:14px;border:1px solid var(--border-color);border-top:none;border-bottom-left-radius:8px;border-bottom-right-radius:8px}.table-of-contents-mobile nav ul{margin:0 0 6px;padding-left:1em}.table-of-contents-mobile nav li{list-style:none;margin:.25em 0}.table-of-contents-mobile nav a,.table-of-contents-mobile nav a:link,.table-of-contents-mobile nav a:visited{color:var(--text-color);-webkit-text-decoration:none;text-decoration:none;border-bottom:none}.table-of-contents-mobile nav>ul>li>a{font-weight:700}.table-of-contents-mobile nav a:hover{-webkit-text-decoration:underline solid var(--muted-color);text-decoration:underline solid var(--muted-color)}.table-of-contents-mobile nav a.active{-webkit-text-decoration:underline;text-decoration:underline}@font-face{font-family:KaTeX_AMS;font-style:normal;font-weight:400;src:url(/_astro/KaTeX_AMS-Regular.BQhdFMY1.woff2) format("woff2"),url(/_astro/KaTeX_AMS-Regular.DMm9YOAa.woff) format("woff"),url(/_astro/KaTeX_AMS-Regular.DRggAlZN.ttf) format("truetype")}@font-face{font-family:KaTeX_Caligraphic;font-style:normal;font-weight:700;src:url(/_astro/KaTeX_Caligraphic-Bold.Dq_IR9rO.woff2) format("woff2"),url(/_astro/KaTeX_Caligraphic-Bold.BEiXGLvX.woff) format("woff"),url(/_astro/KaTeX_Caligraphic-Bold.ATXxdsX0.ttf) format("truetype")}@font-face{font-family:KaTeX_Caligraphic;font-style:normal;font-weight:400;src:url(/_astro/KaTeX_Caligraphic-Regular.Di6jR-x-.woff2) format("woff2"),url(/_astro/KaTeX_Caligraphic-Regular.CTRA-rTL.woff) format("woff"),url(/_astro/KaTeX_Caligraphic-Regular.wX97UBjC.ttf) format("truetype")}@font-face{font-family:KaTeX_Fraktur;font-style:normal;font-weight:700;src:url(/_astro/KaTeX_Fraktur-Bold.CL6g_b3V.woff2) format("woff2"),url(/_astro/KaTeX_Fraktur-Bold.BsDP51OF.woff) format("woff"),url(/_astro/KaTeX_Fraktur-Bold.BdnERNNW.ttf) format("truetype")}@font-face{font-family:KaTeX_Fraktur;font-style:normal;font-weight:400;src:url(/_astro/KaTeX_Fraktur-Regular.CTYiF6lA.woff2) format("woff2"),url(/_astro/KaTeX_Fraktur-Regular.Dxdc4cR9.woff) format("woff"),url(/_astro/KaTeX_Fraktur-Regular.CB_wures.ttf) format("truetype")}@font-face{font-family:KaTeX_Main;font-style:normal;font-weight:700;src:url(/_astro/KaTeX_Main-Bold.Cx986IdX.woff2) format("woff2"),url(/_astro/KaTeX_Main-Bold.Jm3AIy58.woff) format("woff"),url(/_astro/KaTeX_Main-Bold.waoOVXN0.ttf) format("truetype")}@font-face{font-family:KaTeX_Main;font-style:italic;font-weight:700;src:url(/_astro/KaTeX_Main-BoldItalic.DxDJ3AOS.woff2) 
format("woff2"),url(/_astro/KaTeX_Main-BoldItalic.SpSLRI95.woff) format("woff"),url(/_astro/KaTeX_Main-BoldItalic.DzxPMmG6.ttf) format("truetype")}@font-face{font-family:KaTeX_Main;font-style:italic;font-weight:400;src:url(/_astro/KaTeX_Main-Italic.NWA7e6Wa.woff2) format("woff2"),url(/_astro/KaTeX_Main-Italic.BMLOBm91.woff) format("woff"),url(/_astro/KaTeX_Main-Italic.3WenGoN9.ttf) format("truetype")}@font-face{font-family:KaTeX_Main;font-style:normal;font-weight:400;src:url(/_astro/KaTeX_Main-Regular.B22Nviop.woff2) format("woff2"),url(/_astro/KaTeX_Main-Regular.Dr94JaBh.woff) format("woff"),url(/_astro/KaTeX_Main-Regular.ypZvNtVU.ttf) format("truetype")}@font-face{font-family:KaTeX_Math;font-style:italic;font-weight:700;src:url(/_astro/KaTeX_Math-BoldItalic.CZnvNsCZ.woff2) format("woff2"),url(/_astro/KaTeX_Math-BoldItalic.iY-2wyZ7.woff) format("woff"),url(/_astro/KaTeX_Math-BoldItalic.B3XSjfu4.ttf) format("truetype")}@font-face{font-family:KaTeX_Math;font-style:italic;font-weight:400;src:url(/_astro/KaTeX_Math-Italic.t53AETM-.woff2) format("woff2"),url(/_astro/KaTeX_Math-Italic.DA0__PXp.woff) format("woff"),url(/_astro/KaTeX_Math-Italic.flOr_0UB.ttf) format("truetype")}@font-face{font-family:KaTeX_SansSerif;font-style:normal;font-weight:700;src:url(/_astro/KaTeX_SansSerif-Bold.D1sUS0GD.woff2) format("woff2"),url(/_astro/KaTeX_SansSerif-Bold.DbIhKOiC.woff) format("woff"),url(/_astro/KaTeX_SansSerif-Bold.CFMepnvq.ttf) format("truetype")}@font-face{font-family:KaTeX_SansSerif;font-style:italic;font-weight:400;src:url(/_astro/KaTeX_SansSerif-Italic.C3H0VqGB.woff2) format("woff2"),url(/_astro/KaTeX_SansSerif-Italic.DN2j7dab.woff) format("woff"),url(/_astro/KaTeX_SansSerif-Italic.YYjJ1zSn.ttf) format("truetype")}@font-face{font-family:KaTeX_SansSerif;font-style:normal;font-weight:400;src:url(/_astro/KaTeX_SansSerif-Regular.DDBCnlJ7.woff2) format("woff2"),url(/_astro/KaTeX_SansSerif-Regular.CS6fqUqJ.woff) format("woff"),url(/_astro/KaTeX_SansSerif-Regular.BNo7hRIc.ttf) format("truetype")}@font-face{font-family:KaTeX_Script;font-style:normal;font-weight:400;src:url(/_astro/KaTeX_Script-Regular.D3wIWfF6.woff2) format("woff2"),url(/_astro/KaTeX_Script-Regular.D5yQViql.woff) format("woff"),url(/_astro/KaTeX_Script-Regular.C5JkGWo-.ttf) format("truetype")}@font-face{font-family:KaTeX_Size1;font-style:normal;font-weight:400;src:url(/_astro/KaTeX_Size1-Regular.mCD8mA8B.woff2) format("woff2"),url(/_astro/KaTeX_Size1-Regular.C195tn64.woff) format("woff"),url(/_astro/KaTeX_Size1-Regular.Dbsnue_I.ttf) format("truetype")}@font-face{font-family:KaTeX_Size2;font-style:normal;font-weight:400;src:url(/_astro/KaTeX_Size2-Regular.Dy4dx90m.woff2) format("woff2"),url(/_astro/KaTeX_Size2-Regular.oD1tc_U0.woff) format("woff"),url(/_astro/KaTeX_Size2-Regular.B7gKUWhC.ttf) 
format("truetype")}@font-face{font-family:KaTeX_Size3;font-style:normal;font-weight:400;src:url(data:font/woff2;base64,d09GMgABAAAAAA4oAA4AAAAAHbQAAA3TAAEAAAAAAAAAAAAAAAAAAAAAAAAAAAAABmAAgRQIDgmcDBEICo1oijYBNgIkA14LMgAEIAWJAAeBHAyBHBvbGiMRdnO0IkRRkiYDgr9KsJ1NUAf2kILNxgUmgqIgq1P89vcbIcmsQbRps3vCcXdYOKSWEPEKgZgQkprQQsxIXUgq0DqpGKmIvrgkeVGtEQD9DzAO29fM9jYhxZEsL2FeURH2JN4MIcTdO049NCVdxQ/w9NrSYFEBKTDKpLKfNkCGDc1RwjZLQcm3vqJ2UW9Xfa3tgAHz6ivp6vgC2yD4/6352ndnN0X0TL7seypkjZlMsjmZnf0Mm5Q+JykRWQBKCVCVPbARPXWyQtb5VgLB6Biq7/Uixcj2WGqdI8tGSgkuRG+t910GKP2D7AQH0DB9FMDW/obJZ8giFI3Wg8Cvevz0M+5m0rTh7XDBlvo9Y4vm13EXmfttwI4mBo1EG15fxJhUiCLbiiyCf/ZA6MFAhg3pGIZGdGIVjtPn6UcMk9A/UUr9PhoNsCENw1APAq0gpH73e+M+0ueyHbabc3vkbcdtzcf/fiy+NxQEjf9ud/ELBHAXJ0nk4z+MXH2Ev/kWyV4k7SkvpPc9Qr38F6RPWnM9cN6DJ0AdD1BhtgABtmoRoFCvPsBAumNm6soZG2Gk5GyVTo2sJncSyp0jQTYoR6WDvTwaaEcHsxHfvuWhHA3a6bN7twRKtcGok6NsCi7jYRrM2jExsUFMxMQYuJbMhuWNOumEJy9hi29Dmg5zMp/A5+hhPG19j1vBrq8JTLr8ki5VLPmG/PynJHVul440bxg5xuymHUFPBshC+nA9I1FmwbRBTNHAcik3Oae0cxKoI3MOriM42UrPe51nsaGxJ+WfXubAsP84aabUlQSJ1IiE0iPETLUU4CATgfXSCSpuRFRmCGbO+wSpAnzaeaCYW1VNEysRtuXCEL1kUFUbbtMv3Tilt/1c11jt3Q5bbMa84cpWipp8Elw3MZhOHsOlwwVUQM3lAR35JiFQbaYCRnMF2lxAWoOg2gyoIV4PouX8HytNIfLhqpJtXB4vjiViUI8IJ7bkC4ikkQvKksnOTKICwnqWSZ9YS5f0WCxmpgjbIq7EJcM4aI2nmhLNY2JIUgOjXZFWBHb+x5oh6cwb0Tv1ackHdKi0I9OO2wE9aogIOn540CCCziyhN+IaejtgAONKznHlHyutPrHGwCx9S6B8kfS4Mfi4Eyv7OU730bT1SCBjt834cXsf43zVjPUqqJjgrjeGnBxSG4aYAKFuVbeCfkDIjAqMb6yLNIbCuvXhMH2/+k2vkNpkORhR59N1CkzoOENvneIosjYmuTxlhUzaGEJQ/iWqx4dmwpmKjrwTiTGTCVozNAYqk/zXOndWxuWSmJkQpJw3pK5KX6QrLt5LATMqpmPAQhkhK6PUjzHUn7E0gHE0kPE0iKkolgkUx9SZmVAdDgpffdyJKg3k7VmzYGCwVXGz/tXmkOIp+vcWs+EMuhhvN0h9uhfzWJziBQmCREGSIFmQIkgVpAnSBRmC//6hkLZwaVhwxlrJSOdqlFtOYxlau9F2QN5Y98xmIAsiM1HVp2VFX+DHHGg6Ecjh3vmqtidX3qHI2qycTk/iwxSt5UzTmEP92ZBnEWTk4Mx8Mpl78ZDokxg/KWb+Q0QkvdKVmq3TMW+RXEgrsziSAfNXFMhDc60N5N9jQzjfO0kBKpUZl0ZmwJ41j/B9Hz6wmRaJB84niNmQrzp9eSlQCDDzazGDdVi3P36VZQ+Jy4f9UBNp+3zTjqI4abaFAm+GShVaXlsGdF3FYzZcDI6cori4kMxUECl9IjJZpzkvitAoxKue+90pDMvcKRxLl53TmOKCmV/xRolNKSqqUxc6LStOETmFOiLZZptlZepcKiAzteG8PEdpnQpbOMNcMsR4RR2Bs0cKFEvSmIjAFcnarqwUL4lDhHmnVkwu1IwshbiCcgvOheZuYyOteufZZwlcTlLgnZ3o/WcYdzZHW/WGaqaVfmTZ1aWCceJjkbZqsfbkOtcFlUZM/jy+hXHDbaUobWqqXaeWobbLO99yG5N3U4wxco0rQGGcOLASFMXeJoham8M+/x6O2WywK2l4HGbq1CoUyC/IZikQhdq3SiuNrvAEj0AVu9x2x3lp/xWzahaxidezFVtdcb5uEnzyl0ZmYiuKI0exvCd4Xc9CV1KB0db00z92wDPde0kukbvZIWN6jUWFTmPIC/Y4UPCm8UfDTFZpZNon1qLFTkBhxzB+FjQRA2Q/YRJT8pQigslMaUpFyAG8TMlXigiqmAZX4xgijKjRlGpLE0GdplRfCaJo0JQaSxNBk6ZmMzcya0FmrcisDdn0Q3HI2sWSppYigmlM1XT/kLQZSNpMJG0WkjYbSZuDpM1F0uYhFc1HxU4m1QJjDK6iL0S5uSj5rgXc3RejEigtcRBtqYPQsiTskmO5vosV+q4VGIKbOkDg0jtRrq+Em1YloaTFar3EGr1EUC8R0kus1Uus00usL97ABr2BjXoDm/QGNhuWtMVBKOwg/i78lT7hBsAvDmwHc/ao3vmUbBmhjeYySZNWvGkfZAgISDSaDo1SVpzGDsAEkF8B+gEapViUoZgUWXcRIGFZNm6gWbAKk0bp0k1MHG9fLYtV4iS2SmLEQFARzRcnf9PUS0LVn05/J9MiRRBU3v2IrvW974v4N00L7ZMk0wXP1409CHo/an8zTRHD3eSJ6m8D4YMkZNl3M79sqeuAsr/m3f+8/yl7A50aiAEJgeBeMWzu7ui9UfUBCe2TIqZIoOd/3/udRBOQidQZUERzb2/VwZN1H/Sju82ew2H2Wfr6qvfVf3hqwDvAIpkQVFy4B9Pe9e4/XvPeceu7h3dvO56iJPf0+A6cqA2ip18ER+iFgggiuOkvj24bby0N9j2UHIkgqIt+sVgfodC4YghLSMjSZbH0VR/6dMDrYJeKHilKTemt6v6kvzvn3/RrdWtr0GoN/xL+Sex/cPYLUpepx9cz/D46UPU5KXgAQa+NDps1v6J3xP1i2HtaDB0M9aX2deA7SYff//+gUCovMmIK/qfsFcOk+4Y5ZN97XlG6zebqtMbKgeRFi51vnxTQYBUik2rS/Cn6PC8ADR8FGxsRPB82dzfND90gIcshOcYUkfjherBz53odpm6TP8txlwOZ71xmfHHOvq053qFF/MRlS3jP0ELudrf2OeN8DHvp6ZceLe8qKYvWz/7yp0u4dKPfli3CYq0O13Ih71mylJ80tOi10On8wi+F4+LWgDPeJ30msSQt9/vkmHq9/Lvo2b461mP801v3W4xTcs6CbvF9UDdrSt+A8OUbpSh55qAUFXWznBBfdeJ8a4d7ugT5tvxUza3h9m4H7ptTqiG4z0g5dc0X29OcGlhpGFMpQo9y
tTS+NViZpNdvU4kWx+LKxNY10kQ1yqGXrhe4/1nvP7E+nd5A92TtaRplbHSqoIdOqtRWti+fkB5/n1+/VvCmz12pG1kpQWsfi1ftlBobm0bpngs16CHkbIwdLnParxtTV3QYRlfJ0KFskH7pdN/YDn+yRuSd7sNH3aO0DYPggk6uWuXrfOc+fa3VTxFVvKaNxHsiHmsXyCLIE5yuOeN3/Jdf8HBL/5M6shjyhxHx9BjB1O0+4NLOnjLLSxwO7ukN4jMbOIcD879KLSi6Pk61Oqm2377n8079PXEEQ7cy7OKEC9nbpet118fxweTafpt69x/Bt8UqGzNQt7aelpc44dn5cqhwf71+qKp/Zf/+a0zcizOUWpl/iBcSXip0pplkatCchoH5c5aUM8I7/dWxAej8WicPL1URFZ9BDJelUwEwTkGqUhgSlydVes95YdXvhh9Gfz/aeFWvgVb4tuLbcv4+wLdutVZv/cUonwBD/6eDlE0aSiKK/uoH3+J1wDE/jMVqY2ysGufN84oIXB0sPzy8ollX/LegY74DgJXJR57sn+VGza0x3DnuIgABFM15LmajjjsNlYj+JEZGbuRYcAMOWxFkPN2w6Wd46xo4gVWQR/X4lyI/R6K/YK0110GzudPRW7Y+UOBGTfNNzHeYT0fiH0taunBpq9HEW8OKSaBGj21L0MqenEmNRWBAWDWAk4CpNoEZJ2tTaPFgbQYj8HxtFilErs3BTRwT8uO1NXQaWfIotchmPkAF5mMBAliEmZiOGVgCG9LgRzpscMAOOwowlT3JhusdazXGSC/hxR3UlmWVwWHpOIKheqONvjyhSiTHIkVUco5bnji8m//zL7PKaT1Vl5I6UE609f+gkr6MZKVyKc7zJRmCahLsdlyA5fdQkRSan9LgnnLEyGSkaKJCJog0wAgvepWBt80+1yKln1bMVtCljfNWDueKLsWwaEbBSfSPTEmVRsUcYYMnEjcjeyCZzBXK9E9BYBXLKjOSpUDR+nEV3TFSUdQaz+ot98QxgXwx0GQ+EEUAKB2qZPkQQ0GqFD8UPFMqyaCHM24BZmSGic9EYMagKizOw9Hz50DMrDLrqqLkTAhplMictiCAx5S3BIUQdeJeLnBy2CNtMfz6cV4u8XKoFZQesbf9YZiIERiHjaNodDW6LgcirX/mPnJIkBGDUpTBhSa0EIr38D5hCIszhCM8URGBqImoWjpvpt1ebu/v3Gl3qJfMnNM+9V+kiRFyROTPHQWOcs1dNW94/ukKMPZBvDi55i5CttdeJz84DLngLqjcdwEZ87bFFR8CIG35OAkDVN6VRDZ7aq67NteYqZ2lpT8oYB2CytoBd6VuAx4WgiAsnuj3WohG+LugzXiQRDeM3XYXlULv4dp5VFYC) format("woff2"),url(/_astro/KaTeX_Size3-Regular.CTq5MqoE.woff) format("woff"),url(/_astro/KaTeX_Size3-Regular.DgpXs0kz.ttf) format("truetype")}@font-face{font-family:KaTeX_Size4;font-style:normal;font-weight:400;src:url(/_astro/KaTeX_Size4-Regular.Dl5lxZxV.woff2) format("woff2"),url(/_astro/KaTeX_Size4-Regular.BF-4gkZK.woff) format("woff"),url(/_astro/KaTeX_Size4-Regular.DWFBv043.ttf) format("truetype")}@font-face{font-family:KaTeX_Typewriter;font-style:normal;font-weight:400;src:url(/_astro/KaTeX_Typewriter-Regular.CO6r4hn1.woff2) format("woff2"),url(/_astro/KaTeX_Typewriter-Regular.C0xS9mPB.woff) format("woff"),url(/_astro/KaTeX_Typewriter-Regular.D3Ib7_Hf.ttf) format("truetype")}.katex{font: 1.21em KaTeX_Main,Times New Roman,serif;line-height:1.2;text-indent:0;text-rendering:auto}.katex *{-ms-high-contrast-adjust:none!important;border-color:currentColor}.katex .katex-version:after{content:"0.16.22"}.katex .katex-mathml{clip:rect(1px,1px,1px,1px);border:0;height:1px;overflow:hidden;padding:0;position:absolute;width:1px}.katex .katex-html>.newline{display:block}.katex .base{position:relative;white-space:nowrap;width:-moz-min-content;width:min-content}.katex .base,.katex .strut{display:inline-block}.katex .textbf{font-weight:700}.katex .textit{font-style:italic}.katex .textrm{font-family:KaTeX_Main}.katex .textsf{font-family:KaTeX_SansSerif}.katex .texttt{font-family:KaTeX_Typewriter}.katex .mathnormal{font-family:KaTeX_Math;font-style:italic}.katex .mathit{font-family:KaTeX_Main;font-style:italic}.katex .mathrm{font-style:normal}.katex .mathbf{font-family:KaTeX_Main;font-weight:700}.katex .boldsymbol{font-family:KaTeX_Math;font-style:italic;font-weight:700}.katex .amsrm,.katex .mathbb,.katex .textbb{font-family:KaTeX_AMS}.katex .mathcal{font-family:KaTeX_Caligraphic}.katex .mathfrak,.katex .textfrak{font-family:KaTeX_Fraktur}.katex .mathboldfrak,.katex .textboldfrak{font-family:KaTeX_Fraktur;font-weight:700}.katex .mathtt{font-family:KaTeX_Typewriter}.katex .mathscr,.katex .textscr{font-family:KaTeX_Script}.katex .mathsf,.katex .textsf{font-family:KaTeX_SansSerif}.katex .mathboldsf,.katex 
.textboldsf{font-family:KaTeX_SansSerif;font-weight:700}.katex .mathitsf,.katex .mathsfit,.katex .textitsf{font-family:KaTeX_SansSerif;font-style:italic}.katex .mainrm{font-family:KaTeX_Main;font-style:normal}.katex .vlist-t{border-collapse:collapse;display:inline-table;table-layout:fixed}.katex .vlist-r{display:table-row}.katex .vlist{display:table-cell;position:relative;vertical-align:bottom}.katex .vlist>span{display:block;height:0;position:relative}.katex .vlist>span>span{display:inline-block}.katex .vlist>span>.pstrut{overflow:hidden;width:0}.katex .vlist-t2{margin-right:-2px}.katex .vlist-s{display:table-cell;font-size:1px;min-width:2px;vertical-align:bottom;width:2px}.katex .vbox{align-items:baseline;display:inline-flex;flex-direction:column}.katex .hbox{width:100%}.katex .hbox,.katex .thinbox{display:inline-flex;flex-direction:row}.katex .thinbox{max-width:0;width:0}.katex .msupsub{text-align:left}.katex .mfrac>span>span{text-align:center}.katex .mfrac .frac-line{border-bottom-style:solid;display:inline-block;width:100%}.katex .hdashline,.katex .hline,.katex .mfrac .frac-line,.katex .overline .overline-line,.katex .rule,.katex .underline .underline-line{min-height:1px}.katex .mspace{display:inline-block}.katex .clap,.katex .llap,.katex .rlap{position:relative;width:0}.katex .clap>.inner,.katex .llap>.inner,.katex .rlap>.inner{position:absolute}.katex .clap>.fix,.katex .llap>.fix,.katex .rlap>.fix{display:inline-block}.katex .llap>.inner{right:0}.katex .clap>.inner,.katex .rlap>.inner{left:0}.katex .clap>.inner>span{margin-left:-50%;margin-right:50%}.katex .rule{border:0 solid;display:inline-block;position:relative}.katex .hline,.katex .overline .overline-line,.katex .underline .underline-line{border-bottom-style:solid;display:inline-block;width:100%}.katex .hdashline{border-bottom-style:dashed;display:inline-block;width:100%}.katex .sqrt>.root{margin-left:.2777777778em;margin-right:-.5555555556em}.katex .fontsize-ensurer.reset-size1.size1,.katex .sizing.reset-size1.size1{font-size:1em}.katex .fontsize-ensurer.reset-size1.size2,.katex .sizing.reset-size1.size2{font-size:1.2em}.katex .fontsize-ensurer.reset-size1.size3,.katex .sizing.reset-size1.size3{font-size:1.4em}.katex .fontsize-ensurer.reset-size1.size4,.katex .sizing.reset-size1.size4{font-size:1.6em}.katex .fontsize-ensurer.reset-size1.size5,.katex .sizing.reset-size1.size5{font-size:1.8em}.katex .fontsize-ensurer.reset-size1.size6,.katex .sizing.reset-size1.size6{font-size:2em}.katex .fontsize-ensurer.reset-size1.size7,.katex .sizing.reset-size1.size7{font-size:2.4em}.katex .fontsize-ensurer.reset-size1.size8,.katex .sizing.reset-size1.size8{font-size:2.88em}.katex .fontsize-ensurer.reset-size1.size9,.katex .sizing.reset-size1.size9{font-size:3.456em}.katex .fontsize-ensurer.reset-size1.size10,.katex .sizing.reset-size1.size10{font-size:4.148em}.katex .fontsize-ensurer.reset-size1.size11,.katex .sizing.reset-size1.size11{font-size:4.976em}.katex .fontsize-ensurer.reset-size2.size1,.katex .sizing.reset-size2.size1{font-size:.8333333333em}.katex .fontsize-ensurer.reset-size2.size2,.katex .sizing.reset-size2.size2{font-size:1em}.katex .fontsize-ensurer.reset-size2.size3,.katex .sizing.reset-size2.size3{font-size:1.1666666667em}.katex .fontsize-ensurer.reset-size2.size4,.katex .sizing.reset-size2.size4{font-size:1.3333333333em}.katex .fontsize-ensurer.reset-size2.size5,.katex .sizing.reset-size2.size5{font-size:1.5em}.katex .fontsize-ensurer.reset-size2.size6,.katex .sizing.reset-size2.size6{font-size:1.6666666667em}.katex 
.fontsize-ensurer.reset-size2.size7,.katex .sizing.reset-size2.size7{font-size:2em}.katex .fontsize-ensurer.reset-size2.size8,.katex .sizing.reset-size2.size8{font-size:2.4em}.katex .fontsize-ensurer.reset-size2.size9,.katex .sizing.reset-size2.size9{font-size:2.88em}.katex .fontsize-ensurer.reset-size2.size10,.katex .sizing.reset-size2.size10{font-size:3.4566666667em}.katex .fontsize-ensurer.reset-size2.size11,.katex .sizing.reset-size2.size11{font-size:4.1466666667em}.katex .fontsize-ensurer.reset-size3.size1,.katex .sizing.reset-size3.size1{font-size:.7142857143em}.katex .fontsize-ensurer.reset-size3.size2,.katex .sizing.reset-size3.size2{font-size:.8571428571em}.katex .fontsize-ensurer.reset-size3.size3,.katex .sizing.reset-size3.size3{font-size:1em}.katex .fontsize-ensurer.reset-size3.size4,.katex .sizing.reset-size3.size4{font-size:1.1428571429em}.katex .fontsize-ensurer.reset-size3.size5,.katex .sizing.reset-size3.size5{font-size:1.2857142857em}.katex .fontsize-ensurer.reset-size3.size6,.katex .sizing.reset-size3.size6{font-size:1.4285714286em}.katex .fontsize-ensurer.reset-size3.size7,.katex .sizing.reset-size3.size7{font-size:1.7142857143em}.katex .fontsize-ensurer.reset-size3.size8,.katex .sizing.reset-size3.size8{font-size:2.0571428571em}.katex .fontsize-ensurer.reset-size3.size9,.katex .sizing.reset-size3.size9{font-size:2.4685714286em}.katex .fontsize-ensurer.reset-size3.size10,.katex .sizing.reset-size3.size10{font-size:2.9628571429em}.katex .fontsize-ensurer.reset-size3.size11,.katex .sizing.reset-size3.size11{font-size:3.5542857143em}.katex .fontsize-ensurer.reset-size4.size1,.katex .sizing.reset-size4.size1{font-size:.625em}.katex .fontsize-ensurer.reset-size4.size2,.katex .sizing.reset-size4.size2{font-size:.75em}.katex .fontsize-ensurer.reset-size4.size3,.katex .sizing.reset-size4.size3{font-size:.875em}.katex .fontsize-ensurer.reset-size4.size4,.katex .sizing.reset-size4.size4{font-size:1em}.katex .fontsize-ensurer.reset-size4.size5,.katex .sizing.reset-size4.size5{font-size:1.125em}.katex .fontsize-ensurer.reset-size4.size6,.katex .sizing.reset-size4.size6{font-size:1.25em}.katex .fontsize-ensurer.reset-size4.size7,.katex .sizing.reset-size4.size7{font-size:1.5em}.katex .fontsize-ensurer.reset-size4.size8,.katex .sizing.reset-size4.size8{font-size:1.8em}.katex .fontsize-ensurer.reset-size4.size9,.katex .sizing.reset-size4.size9{font-size:2.16em}.katex .fontsize-ensurer.reset-size4.size10,.katex .sizing.reset-size4.size10{font-size:2.5925em}.katex .fontsize-ensurer.reset-size4.size11,.katex .sizing.reset-size4.size11{font-size:3.11em}.katex .fontsize-ensurer.reset-size5.size1,.katex .sizing.reset-size5.size1{font-size:.5555555556em}.katex .fontsize-ensurer.reset-size5.size2,.katex .sizing.reset-size5.size2{font-size:.6666666667em}.katex .fontsize-ensurer.reset-size5.size3,.katex .sizing.reset-size5.size3{font-size:.7777777778em}.katex .fontsize-ensurer.reset-size5.size4,.katex .sizing.reset-size5.size4{font-size:.8888888889em}.katex .fontsize-ensurer.reset-size5.size5,.katex .sizing.reset-size5.size5{font-size:1em}.katex .fontsize-ensurer.reset-size5.size6,.katex .sizing.reset-size5.size6{font-size:1.1111111111em}.katex .fontsize-ensurer.reset-size5.size7,.katex .sizing.reset-size5.size7{font-size:1.3333333333em}.katex .fontsize-ensurer.reset-size5.size8,.katex .sizing.reset-size5.size8{font-size:1.6em}.katex .fontsize-ensurer.reset-size5.size9,.katex .sizing.reset-size5.size9{font-size:1.92em}.katex .fontsize-ensurer.reset-size5.size10,.katex 
.sizing.reset-size5.size10{font-size:2.3044444444em}.katex .fontsize-ensurer.reset-size5.size11,.katex .sizing.reset-size5.size11{font-size:2.7644444444em}.katex .fontsize-ensurer.reset-size6.size1,.katex .sizing.reset-size6.size1{font-size:.5em}.katex .fontsize-ensurer.reset-size6.size2,.katex .sizing.reset-size6.size2{font-size:.6em}.katex .fontsize-ensurer.reset-size6.size3,.katex .sizing.reset-size6.size3{font-size:.7em}.katex .fontsize-ensurer.reset-size6.size4,.katex .sizing.reset-size6.size4{font-size:.8em}.katex .fontsize-ensurer.reset-size6.size5,.katex .sizing.reset-size6.size5{font-size:.9em}.katex .fontsize-ensurer.reset-size6.size6,.katex .sizing.reset-size6.size6{font-size:1em}.katex .fontsize-ensurer.reset-size6.size7,.katex .sizing.reset-size6.size7{font-size:1.2em}.katex .fontsize-ensurer.reset-size6.size8,.katex .sizing.reset-size6.size8{font-size:1.44em}.katex .fontsize-ensurer.reset-size6.size9,.katex .sizing.reset-size6.size9{font-size:1.728em}.katex .fontsize-ensurer.reset-size6.size10,.katex .sizing.reset-size6.size10{font-size:2.074em}.katex .fontsize-ensurer.reset-size6.size11,.katex .sizing.reset-size6.size11{font-size:2.488em}.katex .fontsize-ensurer.reset-size7.size1,.katex .sizing.reset-size7.size1{font-size:.4166666667em}.katex .fontsize-ensurer.reset-size7.size2,.katex .sizing.reset-size7.size2{font-size:.5em}.katex .fontsize-ensurer.reset-size7.size3,.katex .sizing.reset-size7.size3{font-size:.5833333333em}.katex .fontsize-ensurer.reset-size7.size4,.katex .sizing.reset-size7.size4{font-size:.6666666667em}.katex .fontsize-ensurer.reset-size7.size5,.katex .sizing.reset-size7.size5{font-size:.75em}.katex .fontsize-ensurer.reset-size7.size6,.katex .sizing.reset-size7.size6{font-size:.8333333333em}.katex .fontsize-ensurer.reset-size7.size7,.katex .sizing.reset-size7.size7{font-size:1em}.katex .fontsize-ensurer.reset-size7.size8,.katex .sizing.reset-size7.size8{font-size:1.2em}.katex .fontsize-ensurer.reset-size7.size9,.katex .sizing.reset-size7.size9{font-size:1.44em}.katex .fontsize-ensurer.reset-size7.size10,.katex .sizing.reset-size7.size10{font-size:1.7283333333em}.katex .fontsize-ensurer.reset-size7.size11,.katex .sizing.reset-size7.size11{font-size:2.0733333333em}.katex .fontsize-ensurer.reset-size8.size1,.katex .sizing.reset-size8.size1{font-size:.3472222222em}.katex .fontsize-ensurer.reset-size8.size2,.katex .sizing.reset-size8.size2{font-size:.4166666667em}.katex .fontsize-ensurer.reset-size8.size3,.katex .sizing.reset-size8.size3{font-size:.4861111111em}.katex .fontsize-ensurer.reset-size8.size4,.katex .sizing.reset-size8.size4{font-size:.5555555556em}.katex .fontsize-ensurer.reset-size8.size5,.katex .sizing.reset-size8.size5{font-size:.625em}.katex .fontsize-ensurer.reset-size8.size6,.katex .sizing.reset-size8.size6{font-size:.6944444444em}.katex .fontsize-ensurer.reset-size8.size7,.katex .sizing.reset-size8.size7{font-size:.8333333333em}.katex .fontsize-ensurer.reset-size8.size8,.katex .sizing.reset-size8.size8{font-size:1em}.katex .fontsize-ensurer.reset-size8.size9,.katex .sizing.reset-size8.size9{font-size:1.2em}.katex .fontsize-ensurer.reset-size8.size10,.katex .sizing.reset-size8.size10{font-size:1.4402777778em}.katex .fontsize-ensurer.reset-size8.size11,.katex .sizing.reset-size8.size11{font-size:1.7277777778em}.katex .fontsize-ensurer.reset-size9.size1,.katex .sizing.reset-size9.size1{font-size:.2893518519em}.katex .fontsize-ensurer.reset-size9.size2,.katex .sizing.reset-size9.size2{font-size:.3472222222em}.katex 
.fontsize-ensurer.reset-size9.size3,.katex .sizing.reset-size9.size3{font-size:.4050925926em}.katex .fontsize-ensurer.reset-size9.size4,.katex .sizing.reset-size9.size4{font-size:.462962963em}.katex .fontsize-ensurer.reset-size9.size5,.katex .sizing.reset-size9.size5{font-size:.5208333333em}.katex .fontsize-ensurer.reset-size9.size6,.katex .sizing.reset-size9.size6{font-size:.5787037037em}.katex .fontsize-ensurer.reset-size9.size7,.katex .sizing.reset-size9.size7{font-size:.6944444444em}.katex .fontsize-ensurer.reset-size9.size8,.katex .sizing.reset-size9.size8{font-size:.8333333333em}.katex .fontsize-ensurer.reset-size9.size9,.katex .sizing.reset-size9.size9{font-size:1em}.katex .fontsize-ensurer.reset-size9.size10,.katex .sizing.reset-size9.size10{font-size:1.2002314815em}.katex .fontsize-ensurer.reset-size9.size11,.katex .sizing.reset-size9.size11{font-size:1.4398148148em}.katex .fontsize-ensurer.reset-size10.size1,.katex .sizing.reset-size10.size1{font-size:.2410800386em}.katex .fontsize-ensurer.reset-size10.size2,.katex .sizing.reset-size10.size2{font-size:.2892960463em}.katex .fontsize-ensurer.reset-size10.size3,.katex .sizing.reset-size10.size3{font-size:.337512054em}.katex .fontsize-ensurer.reset-size10.size4,.katex .sizing.reset-size10.size4{font-size:.3857280617em}.katex .fontsize-ensurer.reset-size10.size5,.katex .sizing.reset-size10.size5{font-size:.4339440694em}.katex .fontsize-ensurer.reset-size10.size6,.katex .sizing.reset-size10.size6{font-size:.4821600771em}.katex .fontsize-ensurer.reset-size10.size7,.katex .sizing.reset-size10.size7{font-size:.5785920926em}.katex .fontsize-ensurer.reset-size10.size8,.katex .sizing.reset-size10.size8{font-size:.6943105111em}.katex .fontsize-ensurer.reset-size10.size9,.katex .sizing.reset-size10.size9{font-size:.8331726133em}.katex .fontsize-ensurer.reset-size10.size10,.katex .sizing.reset-size10.size10{font-size:1em}.katex .fontsize-ensurer.reset-size10.size11,.katex .sizing.reset-size10.size11{font-size:1.1996142719em}.katex .fontsize-ensurer.reset-size11.size1,.katex .sizing.reset-size11.size1{font-size:.2009646302em}.katex .fontsize-ensurer.reset-size11.size2,.katex .sizing.reset-size11.size2{font-size:.2411575563em}.katex .fontsize-ensurer.reset-size11.size3,.katex .sizing.reset-size11.size3{font-size:.2813504823em}.katex .fontsize-ensurer.reset-size11.size4,.katex .sizing.reset-size11.size4{font-size:.3215434084em}.katex .fontsize-ensurer.reset-size11.size5,.katex .sizing.reset-size11.size5{font-size:.3617363344em}.katex .fontsize-ensurer.reset-size11.size6,.katex .sizing.reset-size11.size6{font-size:.4019292605em}.katex .fontsize-ensurer.reset-size11.size7,.katex .sizing.reset-size11.size7{font-size:.4823151125em}.katex .fontsize-ensurer.reset-size11.size8,.katex .sizing.reset-size11.size8{font-size:.578778135em}.katex .fontsize-ensurer.reset-size11.size9,.katex .sizing.reset-size11.size9{font-size:.6945337621em}.katex .fontsize-ensurer.reset-size11.size10,.katex .sizing.reset-size11.size10{font-size:.8336012862em}.katex .fontsize-ensurer.reset-size11.size11,.katex .sizing.reset-size11.size11{font-size:1em}.katex .delimsizing.size1{font-family:KaTeX_Size1}.katex .delimsizing.size2{font-family:KaTeX_Size2}.katex .delimsizing.size3{font-family:KaTeX_Size3}.katex .delimsizing.size4{font-family:KaTeX_Size4}.katex .delimsizing.mult .delim-size1>span{font-family:KaTeX_Size1}.katex .delimsizing.mult .delim-size4>span{font-family:KaTeX_Size4}.katex .nulldelimiter{display:inline-block;width:.12em}.katex .delimcenter,.katex 
.op-symbol{position:relative}.katex .op-symbol.small-op{font-family:KaTeX_Size1}.katex .op-symbol.large-op{font-family:KaTeX_Size2}.katex .accent>.vlist-t,.katex .op-limits>.vlist-t{text-align:center}.katex .accent .accent-body{position:relative}.katex .accent .accent-body:not(.accent-full){width:0}.katex .overlay{display:block}.katex .mtable .vertical-separator{display:inline-block;min-width:1px}.katex .mtable .arraycolsep{display:inline-block}.katex .mtable .col-align-c>.vlist-t{text-align:center}.katex .mtable .col-align-l>.vlist-t{text-align:left}.katex .mtable .col-align-r>.vlist-t{text-align:right}.katex .svg-align{text-align:left}.katex svg{fill:currentColor;stroke:currentColor;fill-rule:nonzero;fill-opacity:1;stroke-width:1;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1;display:block;height:inherit;position:absolute;width:100%}.katex svg path{stroke:none}.katex img{border-style:none;max-height:none;max-width:none;min-height:0;min-width:0}.katex .stretchy{display:block;overflow:hidden;position:relative;width:100%}.katex .stretchy:after,.katex .stretchy:before{content:""}.katex .hide-tail{overflow:hidden;position:relative;width:100%}.katex .halfarrow-left{left:0;overflow:hidden;position:absolute;width:50.2%}.katex .halfarrow-right{overflow:hidden;position:absolute;right:0;width:50.2%}.katex .brace-left{left:0;overflow:hidden;position:absolute;width:25.1%}.katex .brace-center{left:25%;overflow:hidden;position:absolute;width:50%}.katex .brace-right{overflow:hidden;position:absolute;right:0;width:25.1%}.katex .x-arrow-pad{padding:0 .5em}.katex .cd-arrow-pad{padding:0 .55556em 0 .27778em}.katex .mover,.katex .munder,.katex .x-arrow{text-align:center}.katex .boxpad{padding:0 .3em}.katex .fbox,.katex .fcolorbox{border:.04em solid;box-sizing:border-box}.katex .cancel-pad{padding:0 .2em}.katex .cancel-lap{margin-left:-.2em;margin-right:-.2em}.katex .sout{border-bottom-style:solid;border-bottom-width:.08em}.katex .angl{border-right:.049em solid;border-top:.049em solid;box-sizing:border-box;margin-right:.03889em}.katex .anglpad{padding:0 .03889em}.katex .eqn-num:before{content:"(" counter(katexEqnNo) ")";counter-increment:katexEqnNo}.katex .mml-eqn-num:before{content:"(" counter(mmlEqnNo) ")";counter-increment:mmlEqnNo}.katex .mtr-glue{width:50%}.katex .cd-vert-arrow{display:inline-block;position:relative}.katex .cd-label-left{display:inline-block;position:absolute;right:calc(50% + .3em);text-align:left}.katex .cd-label-right{display:inline-block;left:calc(50% + .3em);position:absolute;text-align:right}.katex-display{display:block;margin:1em 0;text-align:center}.katex-display>.katex{display:block;text-align:center;white-space:nowrap}.katex-display>.katex>.katex-html{display:block;position:relative}.katex-display>.katex>.katex-html>.tag{position:absolute;right:0}.katex-display.leqno>.katex>.katex-html>.tag{left:0;right:auto}.katex-display.fleqn>.katex{padding-left:2em;text-align:left}body{counter-reset:katexEqnNo mmlEqnNo}:root{--neutral-600: rgb(107, 114, 128);--neutral-400: rgb(185, 185, 185);--neutral-300: rgb(228, 228, 228);--neutral-200: rgb(245, 245, 245);--default-font-family: Source Sans Pro, ui-sans-serif, system-ui, -apple-system, Segoe UI, Roboto, Ubuntu, Cantarell, Noto Sans, sans-serif, "Apple Color Emoji", "Segoe UI Emoji", "Segoe UI Symbol", "Noto Color Emoji";--primary-base: rgb(222, 144, 202);--primary-color: var(--primary-base);--primary-color-hover: oklch(from var(--primary-color) calc(l - .05) c 
h);--primary-color-active: oklch(from var(--primary-color) calc(l - .1) c h);--on-primary: #ffffff;--page-bg: #ffffff;--text-color: rgba(0, 0, 0, .85);--transparent-page-contrast: rgba(255, 255, 255, .85);--muted-color: rgba(0, 0, 0, .6);--border-color: rgba(0, 0, 0, .1);--surface-bg: #fafafa;--code-bg: #f6f8fa;--link-underline: var(--primary-color);--link-underline-hover: var(--primary-color-hover);--spacing-1: 8px;--spacing-2: 12px;--spacing-3: 16px;--spacing-4: 24px;--spacing-5: 32px;--spacing-6: 40px;--spacing-7: 48px;--spacing-8: 56px;--spacing-9: 64px;--spacing-10: 72px;--content-padding-x: 16px;--block-spacing-y: var(--spacing-4);--palette-count: 8;--button-radius: 6px;--button-padding-x: 12px;--button-padding-y: 8px;--button-font-size: 14px;--button-icon-padding: 8px;--button-big-padding-x: 16px;--button-big-padding-y: 12px;--button-big-font-size: 16px;--button-big-icon-padding: 12px;--table-border-radius: 8px;--table-header-bg: oklch(from var(--surface-bg) calc(l - .02) c h);--table-row-odd-bg: oklch(from var(--surface-bg) calc(l - .01) c h);--z-base: 0;--z-content: 1;--z-elevated: 10;--z-overlay: 1000;--z-modal: 1100;--z-tooltip: 1200;--axis-color: var(--muted-color);--tick-color: var(--text-color);--grid-color: rgba(0, 0, 0, .08)}[data-theme=dark]{--page-bg: #0f1115;--text-color: rgba(255, 255, 255, .9);--muted-color: rgba(255, 255, 255, .7);--border-color: rgba(255, 255, 255, .15);--surface-bg: #12151b;--code-bg: #12151b;--transparent-page-contrast: rgba(0, 0, 0, .85);--axis-color: var(--muted-color);--tick-color: var(--muted-color);--grid-color: rgba(255, 255, 255, .1);--primary-color-hover: oklch(from var(--primary-color) calc(l - .05) c h);--primary-color-active: oklch(from var(--primary-color) calc(l - .1) c h);--on-primary: #0f1115;--csstools-color-scheme--light: ;color-scheme:dark}html{box-sizing:border-box;background:#fff;background:var(--page-bg);color:#000000d9;color:var(--text-color)}*,*:before,*:after{box-sizing:inherit}body{margin:0;font-family:Source Sans Pro,ui-sans-serif,system-ui,-apple-system,Segoe UI,Roboto,Ubuntu,Cantarell,Noto Sans,sans-serif,"Apple Color Emoji","Segoe UI Emoji",Segoe UI Symbol,"Noto Color Emoji";font-family:var(--default-font-family);background:#fff;background:var(--page-bg);color:#000000d9;color:var(--text-color)}audio{display:block;width:100%}img,picture{max-width:100%;height:auto;display:block;position:relative;z-index:10;z-index:var(--z-elevated)}html{font-size:16px;line-height:1.6}.content-grid main{color:#000000d9;color:var(--text-color)}.content-grid main p{margin:0 0 16px;margin:0 0 var(--spacing-3)}.content-grid main h2{font-weight:600;font-size:max(22px,min(2.6vw,32px));line-height:1.2;margin:72px 0 32px;margin:var(--spacing-10) 0 var(--spacing-5);padding-bottom:12px;padding-bottom:var(--spacing-2);border-bottom:1px solid rgba(0,0,0,.1);border-bottom:1px solid var(--border-color)}.content-grid main h3{font-weight:700;font-size:max(18px,min(2.1vw,22px));line-height:1.25;margin:56px 0 24px;margin:var(--spacing-8) 0 var(--spacing-4)}.content-grid main h4{font-weight:600;text-transform:uppercase;font-size:14px;line-height:1.2;margin:56px 0 24px;margin:var(--spacing-8) 0 var(--spacing-4)}.content-grid main a{color:#de90ca;color:var(--primary-color);-webkit-text-decoration:none;text-decoration:none;background:var(--surface-bg);border-bottom:1px solid rgba(222,144,202,.4)}@supports (color: color-mix(in lch,red,blue)){.content-grid main a{border-bottom:1px solid color-mix(in srgb,var(--primary-color, #007AFF) 
40%,transparent)}}.content-grid main a:hover{color:#ce80ba;color:var(--primary-color-hover);border-bottom:1px solid rgba(222,144,202,.4)}@supports (color: color-mix(in lch,red,blue)){.content-grid main a:hover{border-bottom:1px solid color-mix(in srgb,var(--primary-color, #007AFF) 40%,transparent)}}.content-grid main h2 a,.content-grid main h3 a,.content-grid main h4 a,.content-grid main h5 a,.content-grid main h6 a{color:inherit;border-bottom:none;-webkit-text-decoration:none;text-decoration:none}.content-grid main h2 a:hover,.content-grid main h3 a:hover,.content-grid main h4 a:hover,.content-grid main h5 a:hover,.content-grid main h6 a:hover{color:inherit;border-bottom:none;-webkit-text-decoration:none;text-decoration:none}.content-grid main ul,.content-grid main ol{padding-left:24px;margin:0 0 16px;margin:0 0 var(--spacing-3)}.content-grid main li{margin-bottom:12px;margin-bottom:var(--spacing-2)}.content-grid main li:last-child{margin-bottom:0}.content-grid main blockquote{border-left:2px solid rgba(0,0,0,.1);border-left:2px solid var(--border-color);padding-left:24px;padding-left:var(--spacing-4);font-style:italic;color:#0009;color:var(--muted-color);margin:24px 0;margin:var(--spacing-4) 0}.muted{color:#0009;color:var(--muted-color)}[data-footnote-ref]{margin-left:4px}.content-grid main mark{background-color:#de90ca1a;border:1px solid rgba(222,144,202,.18);color:inherit;padding:4px 6px;border-radius:4px;font-weight:500;box-decoration-break:clone;-webkit-box-decoration-break:clone}@supports (color: color-mix(in lch,red,blue)){.content-grid main mark{background-color:color-mix(in srgb,var(--primary-color, #007AFF) 10%,transparent);border:1px solid color-mix(in srgb,var(--primary-color) 18%,transparent)}}.feature-grid{display:grid;grid-template-columns:repeat(auto-fit,minmax(200px,1fr));grid-gap:12px;gap:12px;margin:46px 0}.feature-card{display:flex;flex-direction:column;padding:16px;border:1px solid rgba(222,144,202,.4);background:#de90ca0d!important;border-radius:8px;-webkit-text-decoration:none;text-decoration:none;color:inherit;transition:all .2s ease}@supports (color: color-mix(in lch,red,blue)){.feature-card{border:1px solid color-mix(in srgb,var(--primary-color) 40%,transparent);background:color-mix(in srgb,var(--primary-color, #007AFF) 05%,transparent)!important}}.feature-card:hover{transform:translateY(-2px);box-shadow:0 2px 8px #00000014}.feature-card strong{font-size:14px;font-weight:600;color:#000000d9;color:var(--text-color);color:#de90ca!important;color:var(--primary-color)!important;margin-bottom:0!important}.feature-card span{font-size:12px;color:#0009;color:var(--muted-color);color:#de90ca!important;color:var(--primary-color)!important;margin-bottom:0!important;opacity:1}.katex .tag{background:none;border:none;opacity:.4}.content-grid{max-width:1280px;margin:40px auto 0;padding:0 16px;padding:0 var(--content-padding-x);display:grid;grid-template-columns:260px minmax(0,680px) 260px;grid-gap:32px;gap:32px;align-items:start}.content-grid>main{max-width:100%;margin:0;padding:0}.content-grid>main>*:first-child{margin-top:0}@media (max-width: 1100px){.content-grid{overflow:hidden;display:block;margin-top:12px;margin-top:var(--spacing-2)}.content-grid{grid-template-columns:1fr}.table-of-contents{position:static;display:none}.table-of-contents-mobile{display:block}.footer-inner{grid-template-columns:1fr;gap:16px}.footer-inner>h3{grid-column:auto;margin-top:16px}.footer-inner{display:block;padding:40px 
16px}}.wide,.full-width{box-sizing:border-box;position:relative;z-index:10;z-index:var(--z-elevated);background-color:var(--background-color)}.wide{width:min(1100px,100vw - 16px * 2);width:min(1100px,100vw - var(--content-padding-x) * 2);margin-left:50%;transform:translate(-50%);padding:16px;padding:var(--content-padding-x);border-radius:6px;border-radius:var(--button-radius);background-color:#fff;background-color:var(--page-bg)}.full-width{width:100vw;margin-left:calc(50% - 50vw);margin-right:calc(50% - 50vw)}@media (max-width: 1100px){.wide,.full-width{width:100%;margin-left:0;margin-right:0;padding:0;transform:none}}#theme-toggle{position:fixed;top:24px;top:calc(var(--spacing-4) + var(--hf-spaces-topbar, 0px));right:16px;right:var(--spacing-3);margin:0;z-index:1000;z-index:var(--z-overlay)}@media (max-width: 640px){header.meta .meta-container{display:flex;flex-wrap:wrap;row-gap:12px;-moz-column-gap:8px;column-gap:8px;max-width:100%;padding:0 24px;padding:0 var(--spacing-4)}header.meta .meta-container .meta-container-cell{flex:1 1 calc(50% - 8px);min-width:0}}@media (max-width: 320px){header.meta .meta-container .meta-container-cell{flex-basis:100%;text-align:center}header.meta .affiliations{list-style-position:inside;padding-left:0;margin-left:0}header.meta .affiliations li{text-align:center}}@media (max-width: 768px){.d3-neural .panel{flex-direction:column}.d3-neural .panel .left{flex:0 0 auto;width:100%}.d3-neural .panel .right{flex:0 0 auto;width:100%;min-width:0}}@media print{html,body{background:#fff}body{margin:0}#theme-toggle{display:none!important}.content-grid main a{-webkit-text-decoration:none;text-decoration:none;border-bottom:1px solid rgba(0,0,0,.2)}.content-grid main pre,.content-grid main blockquote,.content-grid main table,.content-grid main figure{-moz-column-break-inside:avoid;break-inside:avoid;page-break-inside:avoid}.content-grid main h2{page-break-before:auto;page-break-after:avoid;-moz-column-break-after:avoid;break-after:avoid-page}.code-lang-chip{display:none!important}:root{--border-color: rgba(0,0,0,.2);--link-underline: rgba(0,0,0,.3);--link-underline-hover: rgba(0,0,0,.4)}.content-grid{grid-template-columns:1fr!important}.table-of-contents,.right-aside,.table-of-contents-mobile{display:none!important}main>nav:first-of-type{display:none!important}.hero,.hero-banner,.d3-banner,.d3-banner svg,.html-embed__card,.js-plotly-plot,figure,pre,table,blockquote,.wide,.full-width{-moz-column-break-inside:avoid;break-inside:avoid;page-break-inside:avoid}.hero{page-break-after:avoid}}@media print{.meta-container-cell--pdf{display:none!important}}code{font-size:14px;font-family:ui-monospace,SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier New,monospace;background-color:#f6f8fa;background-color:var(--code-bg);border-radius:.3em;border:1px solid rgba(0,0,0,.1);border:1px solid var(--border-color);color:#000000d9;color:var(--text-color);font-weight:400;line-height:1.5}p code,.note code{white-space:nowrap;padding:calc(8px/3) 4px;padding:calc(var(--spacing-1)/3) calc(var(--spacing-1)/2)}.astro-code{position:relative;border:1px solid rgba(0,0,0,.1);border:1px solid var(--border-color);border-radius:6px;padding:0;font-size:14px;--code-gutter-width: 2.5em}.astro-code,section.content-grid pre{width:100%;max-width:100%;box-sizing:border-box;-webkit-overflow-scrolling:touch;padding:0;margin-bottom:24px!important;margin-bottom:var(--block-spacing-y)!important;overflow-x:auto}section.content-grid pre.astro-code{margin:0;padding:8px 0;padding:var(--spacing-1) 
0}section.content-grid pre code{display:inline-block;min-width:100%}@media (max-width: 1100px){.astro-code,section.content-grid pre{white-space:pre-wrap;word-wrap:anywhere;word-break:break-word}section.content-grid pre code{white-space:pre-wrap;display:block;min-width:0}}[data-theme=light] .astro-code{background-color:#f6f8fa;background-color:var(--code-bg)}[data-theme=light] .astro-code span{color:var(--shiki-light)!important}[data-theme=dark] .astro-code span{color:var(--shiki-dark)!important}[data-theme=light] .astro-code{--shiki-foreground: #24292f;--shiki-background: #ffffff}.astro-code code{counter-reset:astro-code-line;display:block;background:none;border:none}.astro-code .line{display:inline-block;position:relative;padding-left:calc(var(--code-gutter-width) + 8px);padding-left:calc(var(--code-gutter-width) + var(--spacing-1));min-height:1.25em}.astro-code .line:before{counter-increment:astro-code-line;content:counter(astro-code-line);position:absolute;left:0;top:0;bottom:0;width:calc(var(--code-gutter-width));text-align:right;color:#0009;color:var(--muted-color);opacity:.3;-webkit-user-select:none;-moz-user-select:none;user-select:none;padding-right:12px;padding-right:var(--spacing-2);border-right:1px solid rgba(0,0,0,.1);border-right:1px solid var(--border-color)}.astro-code .line:empty:after{content:" "}.astro-code code>.line:last-child:empty{display:none}.code-card{position:relative}.code-card .code-copy{position:absolute;top:12px;top:var(--spacing-2);right:12px;right:var(--spacing-2);z-index:3;display:none}.code-card:hover .code-copy{display:block}.code-card .code-copy svg{width:16px;height:16px;display:block;fill:currentColor}.code-card pre{margin:0 0 8px;margin:0 0 var(--spacing-1)}.code-card.no-copy:after{top:8px;right:8px}.accordion .astro-code{padding:0;border:none}.accordion .astro-code{margin-bottom:0!important}.accordion .code-output{border:none;border-top:1px solid rgba(0,0,0,.1)!important;border-top:1px solid var(--border-color)!important}.accordion pre{margin-bottom:0!important}.accordion .code-card pre{margin:0!important}.accordion .astro-code:after{right:0;bottom:0}.code-output{position:relative;background:#f4f6f8;border:1px solid rgba(0,0,0,.1);border:1px solid var(--border-color);border-radius:6px;margin-top:0;margin-bottom:24px;margin-bottom:var(--block-spacing-y);padding:0!important}@supports (color: lab(from red l 1 1% / calc(alpha + .1))){.code-output{background:oklch(from var(--code-bg) calc(l - .005) c h)}}.code-output pre{padding:22px 16px 16px!important;padding:calc(var(--spacing-3) + 6px) var(--spacing-3) var(--spacing-3) var(--spacing-3)!important}.code-card+.code-output,.astro-code+.code-output,section.content-grid pre+.code-output{margin-top:0;border-top:none;border-top-left-radius:0;border-top-right-radius:0;box-shadow:inset 0 8px 12px -12px #00000026}.astro-code:has(+.code-output){margin-bottom:0!important}.code-card:has(+.code-output) .astro-code{margin-bottom:0!important}section.content-grid pre:has(+.code-output){margin-bottom:0!important}.astro-code:has(+.code-output){border-bottom-left-radius:0;border-bottom-right-radius:0}.code-card:has(+.code-output) .astro-code{border-bottom-left-radius:0;border-bottom-right-radius:0}section.content-grid 
pre:has(+.code-output){border-bottom-left-radius:0;border-bottom-right-radius:0}.code-output:before{content:"Output";position:absolute;top:0;right:0;font-size:10px;line-height:1;color:#0009;color:var(--muted-color);text-transform:uppercase;letter-spacing:.04em;border-top:none;border-right:none;border-radius:0 0 0 6px;padding:10px}.code-output>:where(*):first-child{margin-top:0!important}.code-output>:where(*):last-child{margin-bottom:0!important}.code-filename{display:inline-block;font-size:12px;line-height:1;color:#0009;color:var(--muted-color);background:#fafafa;background:var(--surface-bg);border:1px solid rgba(0,0,0,.1);border:1px solid var(--border-color);border-bottom:none;border-radius:6px 6px 0 0;padding:4px 8px;margin:0}.code-filename+.code-card .astro-code,.code-filename+.astro-code,.code-filename+section.content-grid pre{border-top-left-radius:0;border-top-right-radius:6px}button,.button{-webkit-appearance:none;-moz-appearance:none;appearance:none;background:linear-gradient(15deg,#de90ca,#ce80ba 35%);background:linear-gradient(15deg,var(--primary-color) 0%,var(--primary-color-hover) 35%);color:#fff;border:1px solid transparent;border-radius:6px;border-radius:var(--button-radius);padding:8px 12px;padding:var(--button-padding-y) var(--button-padding-x);font-size:14px;font-size:var(--button-font-size);line-height:1;cursor:pointer;display:inline-block;-webkit-text-decoration:none;text-decoration:none;transition:background-color .15s ease,border-color .15s ease,box-shadow .15s ease,transform .02s ease}button:has(>svg:only-child),.button:has(>svg:only-child){padding:8px;padding:var(--button-icon-padding)}button:hover,.button:hover{filter:brightness(96%)}button:active,.button:active{transform:translateY(1px)}button:focus-visible,.button:focus-visible{outline:none}button:disabled,.button:disabled{opacity:.6;cursor:not-allowed}.button--ghost{background:transparent!important;color:#de90ca!important;color:var(--primary-color)!important;border-color:#de90ca!important;border-color:var(--primary-color)!important}.button--ghost:hover{color:#ce80ba!important;color:var(--primary-color-hover)!important;border-color:#ce80ba!important;border-color:var(--primary-color-hover)!important;filter:none}.button.button--big{padding:12px 16px;padding:var(--button-big-padding-y) var(--button-big-padding-x);font-size:16px;font-size:var(--button-big-font-size)}.button.button--big:has(>svg:only-child){padding:12px;padding:var(--button-big-icon-padding)}.button-group .button{margin:5px}.content-grid main table{border-collapse:collapse;table-layout:auto;margin:0}.content-grid main th,.content-grid main td{border-bottom:1px solid rgba(0,0,0,.1);border-bottom:1px solid var(--border-color);padding:6px 8px;font-size:15px;white-space:nowrap;word-break:auto-phrase;white-space:break-spaces;vertical-align:top}.content-grid main thead th{border-bottom:1px solid rgba(0,0,0,.1);border-bottom:1px solid var(--border-color)}.content-grid main thead th{background:#f3f3f3;background:var(--table-header-bg);padding-top:10px;padding-bottom:10px;font-weight:600}.content-grid main hr{border:none;border-bottom:1px solid rgba(0,0,0,.1);border-bottom:1px solid var(--border-color);margin:32px 0;margin:var(--spacing-5) 0}.content-grid main .table-scroll{width:100%;overflow-x:auto;-webkit-overflow-scrolling:touch;border:1px solid rgba(0,0,0,.1);border:1px solid var(--border-color);border-radius:8px;border-radius:var(--table-border-radius);background:#fafafa;background:var(--surface-bg);margin:0 0 24px;margin:0 0 
var(--block-spacing-y)}.content-grid main .table-scroll>table{width:-moz-fit-content;width:fit-content;min-width:100%;max-width:none}.content-grid main .table-scroll>table th,.content-grid main .table-scroll>table td{border-right:1px solid rgba(0,0,0,.1);border-right:1px solid var(--border-color)}.content-grid main .table-scroll>table th:last-child,.content-grid main .table-scroll>table td:last-child{border-right:none}.content-grid main .table-scroll>table thead th:first-child{border-top-left-radius:8px;border-top-left-radius:var(--table-border-radius)}.content-grid main .table-scroll>table thead th:last-child{border-top-right-radius:8px;border-top-right-radius:var(--table-border-radius)}.content-grid main .table-scroll>table tbody tr:last-child td:first-child{border-bottom-left-radius:8px;border-bottom-left-radius:var(--table-border-radius)}.content-grid main .table-scroll>table tbody tr:last-child td:last-child{border-bottom-right-radius:8px;border-bottom-right-radius:var(--table-border-radius)}.content-grid main .table-scroll>table tbody tr:nth-child(odd) td{background:#f7f7f7;background:var(--table-row-odd-bg)}.content-grid main .table-scroll>table tbody tr:last-child td{border-bottom:none}.accordion .accordion__content .table-scroll{border:none;border-radius:0;margin:0;margin-bottom:0!important}.accordion .accordion__content table{margin:0!important}.accordion .accordion__content .table-scroll>table thead th:first-child,.accordion .accordion__content .table-scroll>table thead th:last-child,.accordion .accordion__content .table-scroll>table tbody tr:last-child td:first-child,.accordion .accordion__content .table-scroll>table tbody tr:last-child td:last-child{border-radius:0}@supports not ((width: -moz-fit-content) or (width: fit-content)){.content-grid main .table-scroll>table{width:-moz-max-content;width:max-content;min-width:100%}}.tag-list{display:flex;flex-wrap:wrap;gap:8px;margin:8px 0 16px}.tag{display:inline-flex;align-items:center;gap:6px;padding:8px 12px;font-size:12px;line-height:1;border-radius:6px;border-radius:var(--button-radius);background:#fafafa;background:var(--surface-bg);border:1px solid rgba(0,0,0,.1);border:1px solid var(--border-color);color:#000000d9;color:var(--text-color)}.card{background:#fafafa;background:var(--surface-bg);border:1px solid rgba(0,0,0,.1);border:1px solid var(--border-color);border-radius:10px;padding:12px;padding:var(--spacing-2);z-index:11;z-index:calc(var(--z-elevated) + 1);position:relative;margin-bottom:24px;margin-bottom:var(--block-spacing-y)}select{background-color:#fff;background-color:var(--page-bg);border:1px solid rgba(202,131,183,.55);border-radius:6px;border-radius:var(--button-radius);padding:8px 12px;padding:var(--button-padding-y) var(--button-padding-x) var(--button-padding-y) var(--button-padding-x);font-family:Source Sans Pro,ui-sans-serif,system-ui,-apple-system,Segoe UI,Roboto,Ubuntu,Cantarell,Noto Sans,sans-serif,"Apple Color Emoji","Segoe UI Emoji",Segoe UI Symbol,"Noto Color Emoji";font-family:var(--default-font-family);font-size:14px;font-size:var(--button-font-size);color:#000000d9;color:var(--text-color);background-image:url("data:image/svg+xml;charset=utf-8,%3Csvg xmlns='http://www.w3.org/2000/svg' width='12' height='12' viewBox='0 0 12 12'%3E%3Cpath fill='%23666' d='M6 8.825L1.175 4 2.35 2.825 6 6.475 9.65 2.825 10.825 4z'/%3E%3C/svg%3E");background-repeat:no-repeat;background-position:right 14px center;background-position:right calc(var(--button-padding-x) + 2px) 
center;background-size:12px;cursor:pointer;transition:border-color .2s ease,box-shadow .2s ease;-webkit-appearance:none;-moz-appearance:none;appearance:none}@supports (color: color-mix(in lch,red,blue)){select{border:1px solid color-mix(in srgb,var(--primary-color) 50%,var(--border-color))}}select:hover,select:focus,select:active{border-color:#de90ca;border-color:var(--primary-color)}select:focus{outline:none;border-color:#de90ca;border-color:var(--primary-color);box-shadow:0 0 0 2px #de90ca1a}@supports (color: lab(from red l 1 1% / calc(alpha + .1))){select:focus{box-shadow:0 0 0 2px rgba(from var(--primary-color) r g b / .1)}}select:disabled{opacity:.6;cursor:not-allowed;background-color:#fafafa;background-color:var(--surface-bg)}[data-theme=dark] select{background-image:url("data:image/svg+xml;charset=utf-8,%3Csvg xmlns='http://www.w3.org/2000/svg' width='12' height='12' viewBox='0 0 12 12'%3E%3Cpath fill='%23bbb' d='M6 8.825L1.175 4 2.35 2.825 6 6.475 9.65 2.825 10.825 4z'/%3E%3C/svg%3E")}input[type=checkbox]{-webkit-appearance:none;-moz-appearance:none;appearance:none;width:16px;height:16px;border:2px solid rgba(0,0,0,.1);border:2px solid var(--border-color);border-radius:3px;background-color:#fff;background-color:var(--page-bg);cursor:pointer;position:relative;transition:all .2s ease;margin-right:12px;margin-right:var(--spacing-2)}input[type=checkbox]:hover{border-color:#de90ca;border-color:var(--primary-color)}input[type=checkbox]:focus{outline:none;border-color:#de90ca;border-color:var(--primary-color);box-shadow:0 0 0 2px #de90ca1a}@supports (color: lab(from red l 1 1% / calc(alpha + .1))){input[type=checkbox]:focus{box-shadow:0 0 0 2px rgba(from var(--primary-color) r g b / .1)}}input[type=checkbox]:checked{background-color:#de90ca;background-color:var(--primary-color);border-color:#de90ca;border-color:var(--primary-color)}input[type=checkbox]:checked:before{content:"";position:absolute;top:1px;left:4px;width:4px;height:8px;border:solid #ffffff;border:solid var(--on-primary);border-width:0 2px 2px 0;transform:rotate(45deg)}input[type=checkbox]:disabled{opacity:.6;cursor:not-allowed}input[type=radio]{-webkit-appearance:none;-moz-appearance:none;appearance:none;width:16px;height:16px;border:2px solid rgba(0,0,0,.1);border:2px solid var(--border-color);border-radius:50%;background-color:#fff;background-color:var(--page-bg);cursor:pointer;position:relative;transition:all .2s ease;margin-right:12px;margin-right:var(--spacing-2)}input[type=radio]:hover{border-color:#de90ca;border-color:var(--primary-color)}input[type=radio]:focus{outline:none;border-color:#de90ca;border-color:var(--primary-color);box-shadow:0 0 0 2px #de90ca1a}@supports (color: lab(from red l 1 1% / calc(alpha + .1))){input[type=radio]:focus{box-shadow:0 0 0 2px rgba(from var(--primary-color) r g b / .1)}}input[type=radio]:checked{border-color:#de90ca;border-color:var(--primary-color)}input[type=radio]:checked:before{content:"";position:absolute;top:2px;left:2px;width:8px;height:8px;border-radius:50%;background-color:#de90ca;background-color:var(--primary-color)}input[type=radio]:disabled{opacity:.6;cursor:not-allowed}input[type=text],input[type=email],input[type=password],input[type=number],input[type=url],input[type=search],textarea{-webkit-appearance:none;-moz-appearance:none;appearance:none;background-color:#fff;background-color:var(--page-bg);border:1px solid rgba(0,0,0,.1);border:1px solid var(--border-color);border-radius:6px;border-radius:var(--button-radius);padding:8px 12px;padding:var(--button-padding-y) 
var(--button-padding-x);font-family:Source Sans Pro,ui-sans-serif,system-ui,-apple-system,Segoe UI,Roboto,Ubuntu,Cantarell,Noto Sans,sans-serif,"Apple Color Emoji","Segoe UI Emoji",Segoe UI Symbol,"Noto Color Emoji";font-family:var(--default-font-family);font-size:14px;font-size:var(--button-font-size);color:#000000d9;color:var(--text-color);transition:border-color .2s ease,box-shadow .2s ease;width:100%}input[type=text]:hover,input[type=email]:hover,input[type=password]:hover,input[type=number]:hover,input[type=url]:hover,input[type=search]:hover,textarea:hover{border-color:#de90ca;border-color:var(--primary-color)}input[type=text]:focus,input[type=email]:focus,input[type=password]:focus,input[type=number]:focus,input[type=url]:focus,input[type=search]:focus,textarea:focus{outline:none;border-color:#de90ca;border-color:var(--primary-color);box-shadow:0 0 0 2px #de90ca1a}@supports (color: lab(from red l 1 1% / calc(alpha + .1))){input[type=text]:focus,input[type=email]:focus,input[type=password]:focus,input[type=number]:focus,input[type=url]:focus,input[type=search]:focus,textarea:focus{box-shadow:0 0 0 2px rgba(from var(--primary-color) r g b / .1)}}input[type=text]:disabled,input[type=email]:disabled,input[type=password]:disabled,input[type=number]:disabled,input[type=url]:disabled,input[type=search]:disabled,textarea:disabled{opacity:.6;cursor:not-allowed;background-color:#fafafa;background-color:var(--surface-bg)}label{display:flex;align-items:center;font-size:14px;font-size:var(--button-font-size);color:#000000d9;color:var(--text-color);cursor:pointer;margin-bottom:0;line-height:1.4;-webkit-user-select:none;-moz-user-select:none;user-select:none}.form-group{margin-bottom:24px;margin-bottom:var(--spacing-4);display:flex;align-items:center;gap:12px;gap:var(--spacing-2)}.form-group label{margin-bottom:0}.form-group.vertical{flex-direction:column;align-items:flex-start}.form-group.vertical label{margin-bottom:8px;margin-bottom:var(--spacing-1)}.form-inline{display:flex;align-items:center;gap:12px;gap:var(--spacing-2);margin-bottom:16px;margin-bottom:var(--spacing-3)}.form-inline label{margin-bottom:0}div[style*="display: flex"] label,div[class*=flex] label,.trackio-controls label,.scale-controls label,.theme-selector label{margin-bottom:0!important;align-self:center}.tenet-list{margin:3rem 0}.tenet-list ol{counter-reset:tenet-counter -1;list-style:none;padding-left:0;display:grid;grid-template-columns:1fr;grid-gap:2.5rem;gap:2.5rem;max-width:900px;margin:0 auto}.tenet-list li.tenet{counter-increment:tenet-counter;background:linear-gradient(135deg,#fff,#f8f9fa);border:2px solid #e2e8f0;border-radius:16px;padding:2rem 2rem 2rem 4rem;margin:0;position:relative;box-shadow:0 12px 35px #0000001f;transition:all .3s ease;cursor:pointer}.tenet-list li.tenet:hover{transform:translateY(-8px) scale(1.02);box-shadow:0 20px 50px #00000040;border-color:#007bff80;background:linear-gradient(135deg,#fff,#f0f8ff)}.tenet-list li.tenet:nth-child(1):before{background:linear-gradient(135deg,#667eea,#764ba2)}.tenet-list li.tenet:nth-child(2):before{background:linear-gradient(135deg,#f093fb,#f5576c)}.tenet-list li.tenet:nth-child(3):before{background:linear-gradient(135deg,#4facfe,#00f2fe)}.tenet-list li.tenet:nth-child(4):before{background:linear-gradient(135deg,#43e97b,#38f9d7)}.tenet-list li.tenet:nth-child(5):before{background:linear-gradient(135deg,#fa709a,#fee140)}.tenet-list li.tenet:nth-child(6):before{background:linear-gradient(135deg,#a8edea,#fed6e3)}.tenet-list 
li.tenet:nth-child(7):before{background:linear-gradient(135deg,#ff9a9e,#fecfef)}.tenet-list li.tenet:nth-child(8):before{background:linear-gradient(135deg,#a18cd1,#fbc2eb)}.tenet-list li.tenet:nth-child(9):before{background:linear-gradient(135deg,#ffecd2,#fcb69f)}.tenet-list li.tenet:before{content:counter(tenet-counter);position:absolute;top:-12px;left:-12px;color:#fff;width:48px;height:48px;border-radius:50%;display:flex;align-items:center;justify-content:center;font-size:1.2em;font-weight:700;box-shadow:0 4px 12px #00000026;border:3px solid white}.tenet-list li.tenet strong{color:#1a202c;font-size:1.1em;display:block;margin-bottom:.5rem}.tenet-list li.tenet em{color:#4a5568;font-size:.95em;font-style:italic;display:block;margin-top:.75rem;padding:1rem;background:#00000008;border-radius:8px;border-left:3px solid #e2e8f0}.tenet-list li.tenet p{color:#2d3748;line-height:1.6;margin:.5rem 0}@keyframes pulse-glow{0%{box-shadow:0 4px 12px #00000026}50%{box-shadow:0 4px 20px #00000040}to{box-shadow:0 4px 12px #00000026}}.tenet-list li.tenet:hover:before{animation:pulse-glow 2s ease-in-out infinite}[data-theme=dark] .tenet-list li.tenet{background:linear-gradient(135deg,#1a202c,#2d3748);border-color:#4a5568}[data-theme=dark] .tenet-list li.tenet:hover{background:linear-gradient(135deg,#2d3748,#374151);border-color:#667eea80}[data-theme=dark] .tenet-list li.tenet strong{color:#e2e8f0}[data-theme=dark] .tenet-list li.tenet p{color:#cbd5e0}[data-theme=dark] .tenet-list li.tenet em{color:#a0aec0;background:#ffffff0d;border-left-color:#4a5568}@media (max-width: 768px){.tenet-list li.tenet{padding:1.5rem}}.crumbs{background:linear-gradient(135deg,#f0f4ff,#e6eeff);border-left:5px solid #667eea;padding:1.25rem 1.75rem;margin:2.5rem 0;border-radius:0 8px 8px 0;box-shadow:0 2px 8px #667eea1f;font-size:.95em;line-height:1.6;color:#4a5568}.crumbs strong{color:#667eea;font-weight:700}.crumbs code{background:#667eea1a;padding:.15em .4em;border-radius:3px;font-size:.9em;color:#4c51bf}.crumbs a{color:#667eea;font-weight:500}[data-theme=dark] .crumbs{background:linear-gradient(135deg,#1e293b,#334155);border-left-color:#818cf8;color:#cbd5e0}[data-theme=dark] .crumbs strong{color:#a5b4fc}[data-theme=dark] .crumbs code{background:#818cf833;color:#c7d2fe}[data-theme=dark] .crumbs a{color:#a5b4fc}main a[href^="http://"],main a[href^="https://"]{background:linear-gradient(135deg,#e3f2fd,#bbdefb);color:#1565c0;-webkit-text-decoration:none;text-decoration:none;padding:.15em .5em;border-radius:12px;border:1px solid #90caf9;display:inline-block;transition:all .3s ease;font-weight:500;box-shadow:0 1px 3px #1565c026}main a[href^="http://"]:hover,main a[href^="https://"]:hover{background:linear-gradient(135deg,#2196f3,#1976d2);color:#fff;border-color:#1565c0;transform:translateY(-1px);box-shadow:0 4px 12px #1565c04d}main a[href^="http://"]:active,main a[href^="https://"]:active{transform:translateY(0);box-shadow:0 1px 3px #1565c033}a[href^="#source-of-truth"],a[href^="#one-model-one-file"],a[href^="#code-is-product"],a[href^="#standardize-dont-abstract"],a[href^="#do-repeat-yourself"],a[href^="#minimal-user-api"],a[href^="#backwards-compatibility"],a[href^="#consistent-public-surface"],a[href^="#modular"]{position:relative;color:#667eea;font-weight:600;-webkit-text-decoration:underline;text-decoration:underline;text-decoration-color:#667eea4d;transition:all .3s 
ease}a[href^="#source-of-truth"]:hover,a[href^="#one-model-one-file"]:hover,a[href^="#code-is-product"]:hover,a[href^="#standardize-dont-abstract"]:hover,a[href^="#do-repeat-yourself"]:hover,a[href^="#minimal-user-api"]:hover,a[href^="#backwards-compatibility"]:hover,a[href^="#consistent-public-surface"]:hover,a[href^="#modular"]:hover{color:#4c51bf;text-decoration-color:#4c51bf;background:#667eea1a;padding:2px 4px;border-radius:4px}a[href^="#source-of-truth"]:after{content:"Model implementations should be reliable, reproducible, and faithful to original performances."}a[href^="#one-model-one-file"]:after{content:"All inference and training core logic visible, top‑to‑bottom, in a single file."}a[href^="#code-is-product"]:after{content:"Optimize for reading, diffing, and tweaking. Code quality matters as much as functionality."}a[href^="#standardize-dont-abstract"]:after{content:"Model-specific logic belongs in the model file, not hidden behind abstractions."}a[href^="#do-repeat-yourself"]:after{content:"Strategic duplication can improve readability and maintainability when done thoughtfully."}a[href^="#minimal-user-api"]:after{content:"Config, model, preprocessing; from_pretrained, save_pretrained, push_to_hub. Least amount of codepaths."}a[href^="#backwards-compatibility"]:after{content:"Any artifact once on the hub must remain loadable. Breaking changes are unacceptable."}a[href^="#consistent-public-surface"]:after{content:"Uniform naming, signatures, and conventions across all models for predictability."}a[href^="#modular"]:after{content:"Architecture components shared via modular system, removing boilerplate while keeping expanded files visible."}a[href^="#source-of-truth"]:after,a[href^="#one-model-one-file"]:after,a[href^="#code-is-product"]:after,a[href^="#standardize-dont-abstract"]:after,a[href^="#do-repeat-yourself"]:after,a[href^="#minimal-user-api"]:after,a[href^="#backwards-compatibility"]:after,a[href^="#consistent-public-surface"]:after,a[href^="#modular"]:after{position:absolute;bottom:100%;left:50%;transform:translate(-50%);background:#1a202c;color:#fff;padding:.75rem 1rem;border-radius:8px;font-size:.85em;font-weight:400;white-space:normal;width:300px;line-height:1.4;z-index:1001;opacity:0;visibility:hidden;transition:opacity .3s ease,visibility .3s ease;pointer-events:none;box-shadow:0 4px 12px #0003;margin-bottom:.5rem}a[href^="#source-of-truth"]:hover:after,a[href^="#one-model-one-file"]:hover:after,a[href^="#code-is-product"]:hover:after,a[href^="#standardize-dont-abstract"]:hover:after,a[href^="#do-repeat-yourself"]:hover:after,a[href^="#minimal-user-api"]:hover:after,a[href^="#backwards-compatibility"]:hover:after,a[href^="#consistent-public-surface"]:hover:after,a[href^="#modular"]:hover:after{opacity:1;visibility:visible}[data-theme=dark] main a[href^="http://"],[data-theme=dark] main a[href^="https://"]{background:linear-gradient(135deg,#1e3a5f,#2563eb);color:#bfdbfe;border-color:#3b82f6}[data-theme=dark] main a[href^="http://"]:hover,[data-theme=dark] main a[href^="https://"]:hover{background:linear-gradient(135deg,#2563eb,#1d4ed8);color:#fff;border-color:#60a5fa}[data-theme=dark] a[href^="#source-of-truth"]:after,[data-theme=dark] a[href^="#one-model-one-file"]:after,[data-theme=dark] a[href^="#code-is-product"]:after,[data-theme=dark] a[href^="#standardize-dont-abstract"]:after,[data-theme=dark] a[href^="#do-repeat-yourself"]:after,[data-theme=dark] a[href^="#minimal-user-api"]:after,[data-theme=dark] a[href^="#backwards-compatibility"]:after,[data-theme=dark] 
a[href^="#consistent-public-surface"]:after,[data-theme=dark] a[href^="#modular"]:after{background:#2d3748;color:#e2e8f0}[data-theme=dark] a[href^="#source-of-truth"],[data-theme=dark] a[href^="#one-model-one-file"],[data-theme=dark] a[href^="#code-is-product"],[data-theme=dark] a[href^="#standardize-dont-abstract"],[data-theme=dark] a[href^="#do-repeat-yourself"],[data-theme=dark] a[href^="#minimal-user-api"],[data-theme=dark] a[href^="#backwards-compatibility"],[data-theme=dark] a[href^="#consistent-public-surface"],[data-theme=dark] a[href^="#modular"]{color:#a5b4fc;text-decoration-color:#a5b4fc4d}[data-theme=dark] a[href^="#source-of-truth"]:hover,[data-theme=dark] a[href^="#one-model-one-file"]:hover,[data-theme=dark] a[href^="#code-is-product"]:hover,[data-theme=dark] a[href^="#standardize-dont-abstract"]:hover,[data-theme=dark] a[href^="#do-repeat-yourself"]:hover,[data-theme=dark] a[href^="#minimal-user-api"]:hover,[data-theme=dark] a[href^="#backwards-compatibility"]:hover,[data-theme=dark] a[href^="#consistent-public-surface"]:hover,[data-theme=dark] a[href^="#modular"]:hover{color:#c7d2fe;background:#a5b4fc26}.demo-wide,.demo-full-width{display:flex;flex-direction:column;align-items:center;justify-content:center;width:100%;min-height:150px;color:#0009;color:var(--muted-color);font-size:12px;border:2px dashed rgba(0,0,0,.1);border:2px dashed var(--border-color);border-radius:8px;background:#fafafa;background:var(--surface-bg);margin-bottom:24px;margin-bottom:var(--block-spacing-y)}.mermaid{background:none!important;margin-bottom:24px!important;margin-bottom:var(--block-spacing-y)!important}.content-grid main img{max-width:100%;height:auto;width:min(1100px,100vw - 16px * 2);width:min(1100px,100vw - var(--content-padding-x) * 2);margin-left:50%;transform:translate(-50%);display:block}.content-grid main .figure-legend{text-align:center;font-size:.9rem;color:#0009;color:var(--muted-color);font-style:italic;margin:12px 0 24px;margin:var(--spacing-2) 0 var(--spacing-4);width:min(1100px,100vw - 16px * 2);width:min(1100px,100vw - var(--content-padding-x) * 2);margin-left:50%;transform:translate(-50%)}@media (max-width: 1024px){.content-grid main img,.content-grid main .figure-legend{width:100%;margin-left:0;transform:none}}
dist/static/popular_models_barplot.png → app/dist/_astro/index.beJ178IL.css.gz RENAMED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:2c75ec717c86a3c71f95f4686f5d27f5ed14ceb875f4438283095ce4cbfee299
3
- size 43484
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:b295619b7b1df79e3034566f78788f08d65d11da3dd3d3c5ac113a256470d828
3
+ size 18469
app/dist/index.html CHANGED
@@ -12,12 +12,12 @@
12
  document.documentElement.setAttribute("data-theme", theme);
13
  } catch {}
14
  })();
15
- </script><script type="module" src="/scripts/color-palettes.js"></script><!-- TO MANAGE PROPERLY --><script src="https://cdn.plot.ly/plotly-3.0.0.min.js" charset="utf-8"></script><link rel="stylesheet" href="/_astro/index.DT_nyxPT.css"><script type="module" src="/_astro/hoisted.DK-CdsVg.js"></script>
16
- <script type="module" src="/_astro/page.CH0W_C1Z.js"></script></head> <body> <button id="theme-toggle" aria-label="Toggle color theme" data-astro-cid-x3pjskd3> <svg class="icon light" width="20" height="20" viewBox="0 0 24 24" aria-hidden="true" focusable="false" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" data-astro-cid-x3pjskd3> <circle cx="12" cy="12" r="5" data-astro-cid-x3pjskd3></circle> <line x1="12" y1="1" x2="12" y2="4" data-astro-cid-x3pjskd3></line> <line x1="12" y1="20" x2="12" y2="23" data-astro-cid-x3pjskd3></line> <line x1="1" y1="12" x2="4" y2="12" data-astro-cid-x3pjskd3></line> <line x1="20" y1="12" x2="23" y2="12" data-astro-cid-x3pjskd3></line> <line x1="4.22" y1="4.22" x2="6.34" y2="6.34" data-astro-cid-x3pjskd3></line> <line x1="17.66" y1="17.66" x2="19.78" y2="19.78" data-astro-cid-x3pjskd3></line> <line x1="4.22" y1="19.78" x2="6.34" y2="17.66" data-astro-cid-x3pjskd3></line> <line x1="17.66" y1="6.34" x2="19.78" y2="4.22" data-astro-cid-x3pjskd3></line> </svg> <svg class="icon dark" width="20" height="20" viewBox="0 0 24 24" aria-hidden="true" focusable="false" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" data-astro-cid-x3pjskd3> <path d="M21 12.79A9 9 0 1 1 11.21 3 7 7 0 0 0 21 12.79z" data-astro-cid-x3pjskd3></path> </svg> </button> <section class="hero" data-astro-cid-bbe6dxrz> <h1 class="hero-title" data-astro-cid-bbe6dxrz>Maintain the unmaintainable:<br/>1M python loc, 400+ models</h1> <div class="hero-banner" data-astro-cid-bbe6dxrz> <figure class="html-embed"><div class="html-embed__card is-frameless"><div id="frag-prwee7njsdh"><style>
17
  @import url('https://fonts.googleapis.com/css2?family=Inter:wght@500;600&display=swap');
18
 
19
  .banner-container {
20
- width: 300%;
21
  height: 600px;
22
  position: relative;
23
  overflow: visible;
@@ -400,13 +400,13 @@ Download PDF
400
  if (document.readyState === 'loading') {
401
  document.addEventListener('DOMContentLoaded', buildTOC, { once: true });
402
  } else { buildTOC(); }
403
- </script> <main> <h2 id="introduction"><a href="#introduction">Introduction</a></h2>
404
  <p>One million lines of <code>python</code> code. Through them, the <code>transformers</code> library supports more than 400 model architectures, from state-of-the-art LLMs and VLMs to specialized models for audio, video, and tables.</p>
405
- <p>Built on <code>PyTorch</code>, it’s a foundational tool for modern LLM usage, research, education, and tens of thousands of other open-source projects. Each AI model is added by the community, harmonized into a consistent interface, and tested daily on a CI to ensure reproducibility.</p>
406
  <p>This scale presents a monumental engineering challenge.</p>
407
  <p>How do you keep such a ship afloat, made of so many moving, unrelated parts, contributed to by a buzzing hivemind? Especially as the pace of ML research accelerates? We receive constant feedback on everything from function signatures with hundreds of arguments to duplicated code and optimization concerns, and we listen to all of it, or try to. The library’s usage keeps on growing, and we are a small team of maintainers and contributors, backed by hundreds of open-source community members.
408
  We continue to support all new models and expect to do so for the foreseeable future.</p>
409
- <p>This post dissects the design philosophy that makes this possible today. It’s a continuation of our older principles, detailed on our previous <a href="https://huggingface.co/docs/transformers/en/philosophy">philosophy</a> page, as well as its accompanying <a href="https://huggingface.co/blog/transformers-design-philosophy">blog post from 2022</a>. More recently, a blog post about <a href="https://huggingface.co/blog/faster-transformers">recent upgrades to transformers</a> explained in particular what makes the library faster today; I recommend reading it if you haven’t yet. Again, all of that development was only made possible thanks to these principles.</p>
410
  <p>We codify the “tenets” that guide our development, demonstrate how they are implemented in code, and show the measurable impact they have on the library’s sustainability and growth.</p>
411
  <p>For any OSS maintainer, power user, or contributor, this is the map to understanding, using, and building upon <code>transformers</code>, but not only: any project of comparable size will require you to make deep choices, not only on design and choice of abstraction, but on the very mindset of the software you are building.</p>
412
  <p><a href="#source-of-truth">Tenets exemplified</a> will have their summary available on hover.</p>
@@ -439,7 +439,7 @@ We continue to support all new models and expect to do so for the foreseeable fu
439
<p>It works as follows: to contribute a model, you define a <code>modular_</code> file that can inherit from <em>any function across all other modeling, configuration and processor files</em>.
440
This modular file can use inheritance across models, and it is then unravelled into a fully functional modeling file.</p>
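<p>Before looking at the real GLM example below, here is a minimal, hypothetical sketch of what such a file can look like (the model name and the choice of Llama as the parent are illustrative, not taken from the library):</p>
<pre><code class="language-python"># modular_mymodel.py -- hypothetical example
# Reuse an existing architecture wholesale, overriding only what differs.
from transformers.models.llama.modeling_llama import (
    LlamaAttention,
    LlamaForCausalLM,
)

class MyModelAttention(LlamaAttention):
    # Identical attention; only the class prefix changes in the generated file.
    pass

class MyModelForCausalLM(LlamaForCausalLM):
    # Inherit the whole causal LM; the conversion utility expands this
    # into a complete, self-contained modeling_mymodel.py.
    pass
</code></pre>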
441
  <summary id="generated-modeling">Auto-generated modeling code</summary>
442
- <figure class="html-embed"><div class="html-embed__card"><div id="frag-11jqh7tr83j"><div class="code-compare" style="display: grid; grid-template-columns: 1fr 1fr; gap: 1rem; margin: 1.5rem 0;">
443
  <div class="code-column" style="border: 1px solid #e2e8f0; border-radius: 8px; overflow: hidden;">
444
  <div class="code-header" style="background: #f8f9fa; padding: 0.75rem 1rem; font-weight: 600; color: #495057; border-bottom: 1px solid #e2e8f0;">
445
  modular_glm.py
@@ -593,41 +593,43 @@ class GlmRMSNorm(nn.Module):
593
<p>What is the consequence? When adding a model, we do not need to go over the entire modeling file. The modular file (left side above) is enough.</p>
594
<p>When <code>AutoModel.from_pretrained(...)</code> is called, it is indeed the modeling file (right side) that is run, and all the tests are run on the modeling code.</p>
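<p>Concretely, loading a checkpoint only ever exercises the expanded file (checkpoint name illustrative):</p>
<pre><code class="language-python">from transformers import AutoModel

# This resolves the checkpoint's architecture to its modeling_*.py;
# for modular models, that is the auto-generated file. The modular_*.py
# shard is a development-time artifact only.
model = AutoModel.from_pretrained("gpt2")
</code></pre>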
595
  <p>What does that give us?</p>
596
- <div class="crumbs"><p>A small <code>modular_*.py</code> declares reuse; the expanded modeling file stays visible (<a href="#one-model-one-file">tenet kept</a>). Reviewers and contributors maintain the shard, not the repetition. <strong>Next:</strong> the measurable effect on effective LOC and maintenance cost.</p></div>
597
  <h3 id="a-maintainable-control-surface"><a href="#a-maintainable-control-surface">A maintainable control surface</a></h3>
598
  <p>The effect of modular can be measured in lines of code (LOC). If a model only has a modeling file, we add its LOC count.
599
However, if a model has a <code>modular_*.py</code> and a corresponding automatically generated <code>modeling_*.py</code>, we only count the LOC under the modular file. The modeling code has no maintenance cost, as it is strictly dependent on the modular file.</p>
600
<p>That gives an “effective LOC” curve: the <strong>maintenance surface</strong>.</p>
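<p>A minimal sketch of how such a count could be computed (the repository layout is assumed; this is not the exact measurement script):</p>
<pre><code class="language-python">from pathlib import Path

def effective_loc(model_dir: Path) -> int:
    """Count modular LOC when a modular shard exists, else modeling LOC."""
    modular = sorted(model_dir.glob("modular_*.py"))
    files = modular or sorted(model_dir.glob("modeling_*.py"))
    return sum(len(f.read_text(encoding="utf-8").splitlines()) for f in files)

models_root = Path("src/transformers/models")  # assumed checkout layout
total = sum(effective_loc(d) for d in models_root.iterdir() if d.is_dir())
print(total)
</code></pre>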
601
- <p>Measured on git history, raw <code>modeling_*.py</code> grew at ~362 LOC/day before modular; counting only modular shards yields ~25 LOC/day after — about <strong>15× lower</strong>. The curve represents the <strong>maintenance surface</strong> today: what maintainers actually read and review.</p>
602
- <p>Less code to hand-maintain means fewer places to break. LOC is not complexity, but the two correlate in review effort and change risk.</p>
603
- <figure class="html-embed"><div class="html-embed__card"><div id="frag-efyo0fxayo7"><iframe
604
  src="https://molbap-loc-1.hf.space"
605
  style="width:100%; height:900px; border:0"
606
  allow="clipboard-read; clipboard-write; fullscreen"
607
  referrerpolicy="no-referrer-when-downgrade"
608
  ></iframe></div></div></figure>
609
- <p>There’s a sharp drop near the end; it’s due to us <a href="https://github.com/huggingface/transformers/commit/4df2529d79d75f44e70396df5888a32ffa02d61e#diff-60849db3e9922197854ef1cac92bf4aba08b5d7fd3fe6f3c16a3511e29e0eacc">removing support for Jax and TensorFlow</a> library-wide.</p>
610
- <p>Of course, it is not only this effort that allowed us to reduce the maintenance load.</p>
611
- <p>A related optimization concerns attention. You’ve likely heard about <a href="https://huggingface.co/docs/text-generation-inference/en/conceptual/flash_attention">flash attention</a> and its several variants.</p>
612
<p>The <em>attention computation</em> happens at a <em>lower</em> level of abstraction than the model itself.</p>
613
- <p>However, we were adding specific torch operations for each backend (sdpa, flash-attention iterations, flex attention), and the result was not a <a href="#minimal-user-api">minimal user api</a>.</p>
614
- <div class="crumbs"><p>Evidence: effective LOC drops ~15× when counting shards instead of expanded modeling. Less to read, fewer places to break. Related cleanups: attention backends moved behind a function interface. <strong>Next:</strong> how the attention interface stays standard without hiding semantics.</p></div>
615
  <h3 id="-external-attention-classes"><a href="#-external-attention-classes"><a id="attention-classes"></a> External Attention classes</a></h3>
616
- <p>The solution we chose for the “attention abstraction problem” was to move to an <a href="https://huggingface.co/docs/transformers/en/attention_interface">attention interface</a> that allows the following:</p>
617
- <p>We keep a <code>Callable</code> for the naive implementation of attention, called “eager” computation. We thus name this Callable <code>eager_attention_forward</code>; it can run as long as the user has <code>torch</code> installed, which is a requirement in any case.</p>
618
- <p>In other words, we moved from a class interface to a function interface: to use a more complex attention implementation, the config is checked, and another Callable can be dispatched instead, including kernel bindings that are much faster, if they are available.</p>
619
- <p>This exemplifies the fact that we prefer to have an interface that is <a href="#standardize-dont-abstract">standard, but not abstract</a>.</p>
620
  <div class="code-card"><button class="code-copy button--ghost" type="button" aria-label="Copy code"><svg viewBox="0 0 24 24" aria-hidden="true" focusable="false"><path d="M16 1H4c-1.1 0-2 .9-2 2v12h2V3h12V1zm3 4H8c-1.1 0-2 .9-2 2v14c0 1.1.9 2 2 2h11c1.1 0 2-.9 2-2V7c0-1.1-.9-2-2-2zm0 16H8V7h11v14z"></path></svg></button><pre class="astro-code astro-code-themes github-light github-dark" style="--shiki-light:#24292e;--shiki-dark:#e1e4e8;--shiki-light-bg:#fff;--shiki-dark-bg:#24292e;overflow-x:auto" tabindex="0" data-language="python"><code><span class="line"><span style="--shiki-light:#24292E;--shiki-dark:#E1E4E8">attention_interface: Callable </span><span style="--shiki-light:#D73A49;--shiki-dark:#F97583">=</span><span style="--shiki-light:#24292E;--shiki-dark:#E1E4E8"> eager_attention_forward</span></span>
621
  <span class="line"><span style="--shiki-light:#D73A49;--shiki-dark:#F97583">if</span><span style="--shiki-light:#005CC5;--shiki-dark:#79B8FF"> self</span><span style="--shiki-light:#24292E;--shiki-dark:#E1E4E8">.config._attn_implementation </span><span style="--shiki-light:#D73A49;--shiki-dark:#F97583">!=</span><span style="--shiki-light:#032F62;--shiki-dark:#9ECBFF"> &quot;eager&quot;</span><span style="--shiki-light:#24292E;--shiki-dark:#E1E4E8">:</span></span>
622
  <span class="line"><span style="--shiki-light:#24292E;--shiki-dark:#E1E4E8"> attention_interface </span><span style="--shiki-light:#D73A49;--shiki-dark:#F97583">=</span><span style="--shiki-light:#005CC5;--shiki-dark:#79B8FF"> ALL_ATTENTION_FUNCTIONS</span><span style="--shiki-light:#24292E;--shiki-dark:#E1E4E8">[</span><span style="--shiki-light:#005CC5;--shiki-dark:#79B8FF">self</span><span style="--shiki-light:#24292E;--shiki-dark:#E1E4E8">.config._attn_implementation]</span></span>
623
  <span class="line"></span></code></pre></div>
624
- <p>A strength of the new attention interface is the possibility to enforce specific kwargs, which are needed by kernel providers and other dependencies. We know that kwargs are often a necessary evil that plagues tools with widespread compatibility; it is something we have aimed to reduce, and will continue to reduce, in order to improve readability - with them, the current system is a <a href="#minimal-user-api">minimal user api</a>.</p>
625
- <p>Hence, backend integrations sometimes require specific kwargs. We reduce that surface and document expectations; where flexibility is necessary, we plan to use <code>typing.Annotated</code> to convey shapes and invariants without constraining integrations. Such an implementation could look like this in the future:</p>
 
 
626
  <div class="code-card"><button class="code-copy button--ghost" type="button" aria-label="Copy code"><svg viewBox="0 0 24 24" aria-hidden="true" focusable="false"><path d="M16 1H4c-1.1 0-2 .9-2 2v12h2V3h12V1zm3 4H8c-1.1 0-2 .9-2 2v14c0 1.1.9 2 2 2h11c1.1 0 2-.9 2-2V7c0-1.1-.9-2-2-2zm0 16H8V7h11v14z"></path></svg></button><pre class="astro-code astro-code-themes github-light github-dark" style="--shiki-light:#24292e;--shiki-dark:#e1e4e8;--shiki-light-bg:#fff;--shiki-dark-bg:#24292e;overflow-x:auto" tabindex="0" data-language="python"><code><span class="line"><span style="--shiki-light:#D73A49;--shiki-dark:#F97583">from</span><span style="--shiki-light:#24292E;--shiki-dark:#E1E4E8"> typing </span><span style="--shiki-light:#D73A49;--shiki-dark:#F97583">import</span><span style="--shiki-light:#24292E;--shiki-dark:#E1E4E8"> Annotated</span></span>
627
  <span class="line"></span>
628
  <span class="line"><span style="--shiki-light:#24292E;--shiki-dark:#E1E4E8">MyModelOutputAnnotated </span><span style="--shiki-light:#D73A49;--shiki-dark:#F97583">=</span><span style="--shiki-light:#24292E;--shiki-dark:#E1E4E8"> Annotated[MyModelOutput, </span><span style="--shiki-light:#032F62;--shiki-dark:#9ECBFF">&quot;shape: (B, C, H, W)&quot;</span><span style="--shiki-light:#24292E;--shiki-dark:#E1E4E8">]</span></span>
629
  <span class="line"></span></code></pre></div>
630
- <div class="crumbs"><p>Semantics remain in <code>eager_attention_forward</code>; faster backends are opt-in via config. We inform via types/annotations rather than enforce rigid kwargs, preserving integrations. <strong>Next:</strong> distribution concerns are declared as a plan, not model surgery.</p></div>
631
  <h3 id="-configurable-tensor-parallelism"><a href="#-configurable-tensor-parallelism"><a id="simpler-tensor-parallelism"></a> Configurable Tensor Parallelism</a></h3>
632
  <p>If you’re not familiar with the different flavours of parallelism, I recommend checking out <a href="https://huggingface.co/blog/accelerate-nd-parallel">this blog post</a> first; a full <a href="https://huggingface.co/spaces/nanotron/ultrascale-playbook">dive into the ultra-scale playbook</a> is, of course, always worth it.</p>
633
  <p>The essential part is that, as <a href="https://huggingface.co/docs/transformers/v4.56.2/perf_train_gpu_many#tensor-parallelism">the documentation states</a>, when tensors get too large to fit on a single GPU, they are sliced along a particular dimension and every slice is sent to a different GPU.</p>
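  <p>To make the two basic strategies concrete, here is a hedged, shapes-only sketch (sizes are illustrative, and no distributed runtime is involved): column-wise sharding splits the output features of a <code>Linear</code>, row-wise sharding splits its input features.</p>
  <pre><code class="language-python">import torch

# nn.Linear stores its weight as (out_features, in_features)
w = torch.randn(8, 8)

colwise = w.chunk(2, dim=0)  # each rank keeps half the output features; outputs are concatenated
rowwise = w.chunk(2, dim=1)  # each rank keeps half the input features; partial outputs are summed

print(colwise[0].shape)  # torch.Size([4, 8])
print(rowwise[0].shape)  # torch.Size([8, 4])
</code></pre>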
@@ -637,7 +639,7 @@ We choose to place the level of abstraction higher than the device placement: a
637
  <p>Hence, we want to touch the modeling code <a href="#minimal-user-api">minimally</a>, and only modify it when <em>architectural changes</em> are involved. For instance, for tensor parallelism, we instead now specify a simple <code>tp_plan</code>.</p>
638
  <p>The alternative would be to modify parent classes specific to each parallelism scheme, scattering device-placement logic across the modeling code.</p>
639
  <p>It is written once in the config and passed to <code>.from_pretrained()</code>. The plan maps module name patterns to partitioning strategies. Strategies are resolved by the internal <code>ParallelInterface</code>, which wires to sharding implementations such as <code>ColwiseParallel</code>, <code>RowwiseParallel</code>, packed variants, and so on.</p>
640
- <figure class="html-embed"><div class="html-embed__card"><div id="frag-pys3d4xoy6a"><pre><code class="language-python"># In the model's config (example: ERNIE 4.5-style decoder blocks)
641
  base_model_tp_plan = {
642
  "layers.*.self_attn.q_proj": "colwise",
643
  "layers.*.self_attn.k_proj": "colwise",
@@ -666,7 +668,7 @@ out = model(**inputs)</code></pre></div></div></figure>
666
  <p>Semantics stay in the model (a Linear stays a Linear), while distribution is orthogonal and declared via strings: “colwise” splits columns of weights/bias across ranks; “rowwise” splits rows; packed variants shard fused weights. The mapping keys accept glob patterns like <code>layers.*.mlp.down_proj</code> to target repeated submodules.</p>
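  <p>In practice, loading with a plan is a one-liner; a hedged usage sketch (the model id is illustrative), launched under <code>torchrun</code> so that each process picks up its rank:</p>
  <pre><code class="language-python"># torchrun --nproc-per-node 4 run_tp.py
import torch
from transformers import AutoModelForCausalLM

model = AutoModelForCausalLM.from_pretrained(
    "Qwen/Qwen2.5-7B-Instruct",
    torch_dtype=torch.bfloat16,
    tp_plan="auto",  # resolve the model's predefined tensor-parallel plan
)
</code></pre>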
667
  <div class="crumbs"><p>Sharding is configuration (<code>tp_plan</code>), not edits to <code>Linear</code>s. Glob patterns target repeated blocks; modeling semantics stay intact. <strong>Next:</strong> per-layer attention/caching schedules declared in config, not hardcoded.</p></div>
668
  <h3 id="-layers-attentions-and-caches"><a href="#-layers-attentions-and-caches"><a id="layers-attentions-caches"></a> Layers, attentions and caches</a></h3>
669
- <p>Following the same logic, the <em>nature</em> of attention and caching per layer of a model should not be hardcoded. We should be able to specify, in a configuration-based fashion, how each layer is implemented. Thus we define a mapping of allowed layer types that the configuration can then reference:</p>
670
  <div class="code-card"><button class="code-copy button--ghost" type="button" aria-label="Copy code"><svg viewBox="0 0 24 24" aria-hidden="true" focusable="false"><path d="M16 1H4c-1.1 0-2 .9-2 2v12h2V3h12V1zm3 4H8c-1.1 0-2 .9-2 2v14c0 1.1.9 2 2 2h11c1.1 0 2-.9 2-2V7c0-1.1-.9-2-2-2zm0 16H8V7h11v14z"></path></svg></button><pre class="astro-code astro-code-themes github-light github-dark" style="--shiki-light:#24292e;--shiki-dark:#e1e4e8;--shiki-light-bg:#fff;--shiki-dark-bg:#24292e;overflow-x:auto" tabindex="0" data-language="python"><code><span class="line"><span style="--shiki-light:#005CC5;--shiki-dark:#79B8FF">ALLOWED_LAYER_TYPES</span><span style="--shiki-light:#D73A49;--shiki-dark:#F97583"> =</span><span style="--shiki-light:#24292E;--shiki-dark:#E1E4E8"> (</span></span>
671
  <span class="line"><span style="--shiki-light:#032F62;--shiki-dark:#9ECBFF"> &quot;full_attention&quot;</span><span style="--shiki-light:#24292E;--shiki-dark:#E1E4E8">,</span></span>
672
  <span class="line"><span style="--shiki-light:#032F62;--shiki-dark:#9ECBFF"> &quot;sliding_attention&quot;</span><span style="--shiki-light:#24292E;--shiki-dark:#E1E4E8">,</span></span>
@@ -675,7 +677,7 @@ out = model(**inputs)</code></pre></div></div></figure>
675
  <span class="line"><span style="--shiki-light:#005CC5;--shiki-dark:#79B8FF"> ...</span></span>
676
  <span class="line"><span style="--shiki-light:#24292E;--shiki-dark:#E1E4E8">)</span></span>
677
  <span class="line"></span></code></pre></div>
678
- <p>and the configuration can be <em>explicit</em> about which attention type is in which layer; see e.g. gpt-oss, which alternates sliding and full attention:</p>
679
  <div class="code-card"><button class="code-copy button--ghost" type="button" aria-label="Copy code"><svg viewBox="0 0 24 24" aria-hidden="true" focusable="false"><path d="M16 1H4c-1.1 0-2 .9-2 2v12h2V3h12V1zm3 4H8c-1.1 0-2 .9-2 2v14c0 1.1.9 2 2 2h11c1.1 0 2-.9 2-2V7c0-1.1-.9-2-2-2zm0 16H8V7h11v14z"></path></svg></button><pre class="astro-code astro-code-themes github-light github-dark" style="--shiki-light:#24292e;--shiki-dark:#e1e4e8;--shiki-light-bg:#fff;--shiki-dark-bg:#24292e;overflow-x:auto" tabindex="0" data-language="python"><code><span class="line"><span style="--shiki-light:#032F62;--shiki-dark:#9ECBFF"> &quot;layer_types&quot;</span><span style="--shiki-light:#24292E;--shiki-dark:#E1E4E8">: [</span></span>
680
  <span class="line"><span style="--shiki-light:#032F62;--shiki-dark:#9ECBFF"> &quot;sliding_attention&quot;</span><span style="--shiki-light:#24292E;--shiki-dark:#E1E4E8">,</span></span>
681
  <span class="line"><span style="--shiki-light:#032F62;--shiki-dark:#9ECBFF"> &quot;full_attention&quot;</span><span style="--shiki-light:#24292E;--shiki-dark:#E1E4E8">,</span></span>
@@ -684,8 +686,8 @@ out = model(**inputs)</code></pre></div></div></figure>
684
  <span class="line"><span style="--shiki-light:#032F62;--shiki-dark:#9ECBFF"> &quot;full_attention&quot;</span></span>
685
  <span class="line"><span style="--shiki-light:#24292E;--shiki-dark:#E1E4E8"> ],</span></span>
686
  <span class="line"></span></code></pre></div>
687
- <p>This is <a href="#minimal-user-api">minimal</a> to implement on the user side, and allows us to keep the modeling untouched. It is also easy to tweak.</p>
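<p>Tweaking a schedule is then a pure configuration change; a hedged sketch (the layer count is illustrative) of producing the gpt-oss-style alternation programmatically:</p>
<pre><code class="language-python">num_hidden_layers = 24

# Alternate sliding and full attention purely in config; the modeling
# code never changes.
layer_types = [
    "sliding_attention" if i % 2 == 0 else "full_attention"
    for i in range(num_hidden_layers)
]
</code></pre>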
688
- <div class="crumbs"><p>Allowed layer types are explicit; schedules (e.g., sliding/full alternation) live in config. This keeps the file readable and easy to tweak. <strong>Next:</strong> speedups come from kernels that don’t change semantics.</p></div>
689
  <h3 id="community-kernels"><a href="#community-kernels"><a id="community-kernels"></a>Community Kernels</a></h3>
690
  <p>The same principle extends to normalization, activation, and other code paths. The model defines <strong>semantics</strong>; a kernel defines <strong>how</strong> to execute them faster. We annotate the module to borrow a community‑provided forward, keeping a <a href="#consistent-public-surface">consistent public surface</a>:</p>
691
  <div class="code-card"><button class="code-copy button--ghost" type="button" aria-label="Copy code"><svg viewBox="0 0 24 24" aria-hidden="true" focusable="false"><path d="M16 1H4c-1.1 0-2 .9-2 2v12h2V3h12V1zm3 4H8c-1.1 0-2 .9-2 2v14c0 1.1.9 2 2 2h11c1.1 0 2-.9 2-2V7c0-1.1-.9-2-2-2zm0 16H8V7h11v14z"></path></svg></button><pre class="astro-code astro-code-themes github-light github-dark" style="--shiki-light:#24292e;--shiki-dark:#e1e4e8;--shiki-light-bg:#fff;--shiki-dark-bg:#24292e;overflow-x:auto" tabindex="0" data-language="python"><code><span class="line"><span style="--shiki-light:#6F42C1;--shiki-dark:#B392F0">@use_kernel_forward_from_hub</span><span style="--shiki-light:#24292E;--shiki-dark:#E1E4E8">(</span><span style="--shiki-light:#032F62;--shiki-dark:#9ECBFF">&quot;RMSNorm&quot;</span><span style="--shiki-light:#24292E;--shiki-dark:#E1E4E8">)</span></span>
@@ -707,36 +709,37 @@ So I wanted to take a look at the current <strong>state of modularity</strong> a
707
  </ol>
708
  <p>So what do we see? Llama is a basis for many models, and it shows.
709
  Radically different architectures such as mamba have spawned their own dependency subgraph.</p>
710
- <figure class="html-embed"><div class="html-embed__card"><div id="frag-jliqcg2oprj"><iframe
711
  src="https://molbap-dependencies-1.hf.space"
712
  style="width:100%; height:680px; border:0"
713
  allow="clipboard-read; clipboard-write; fullscreen"
714
  referrerpolicy="no-referrer-when-downgrade"
715
  ></iframe></div></div></figure>
716
- <p>However, even if llava defines a few VLMs, there are far too many vision-based architectures that are not yet defined as modulars of other existing archs. In other words, there is no strong software reference point for vision models.
717
  As you can see, there is a small DETR island, a little llava pocket, and so on, but it’s not comparable to the centrality observed for llama.</p>
718
- <p>Another problem: this graph only covers <code>modular</code> models. Several models do NOT have a modular file yet.</p>
719
  <p>How do we spot them, and how do we identify modularisable models?</p>
720
  <div class="crumbs"><p>Graph reading guide: nodes are models; edges are modular imports. Llama-lineage is a hub; several VLMs remain islands — engineering opportunity for shared parents. <strong>Next:</strong> timeline + similarity signals to spot candidates.</p></div>
721
  <h3 id="many-models-but-not-enough-yet-are-alike"><a href="#many-models-but-not-enough-yet-are-alike">Many models, but not enough yet, are alike</a></h3>
722
- <p>So I looked into Jaccard similarity, which measures the overlap between two sets. Code is, of course, more than a set of characters strung together. We also tried code-embedding models, which ranked candidates better in practice, but for this post we stick to the deterministic Jaccard index.</p>
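<p>As a reminder, the Jaccard index of two sets is the size of their intersection over the size of their union. A minimal sketch of applying it to two modeling files (whitespace tokenization is a deliberate simplification):</p>
<pre><code class="language-python">def jaccard(code_a: str, code_b: str) -> float:
    set_a, set_b = set(code_a.split()), set(code_b.split())
    return len(set_a.intersection(set_b)) / len(set_a.union(set_b))
</code></pre>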
723
- <p>It is interesting, for that, to look at <em>when</em> we deployed this modular logic and what its rippling effect on the library was. You can check the <a href="https://huggingface.co/spaces/Molbap/transformers-modular-refactor">larger space</a> to play around, but the gist is: adding modular allowed us to connect more and more models to solid reference points. We still have a lot of gaps to fill.</p>
 
724
  <p>Zoom out below - it’s full of models. You can click on a node to see its connections better, or use the text box to search for a model.</p>
725
- <figure class="html-embed"><div class="html-embed__card"><div id="frag-71fc1hpzhie"> <iframe
726
  src="https://molbap-timeline-1.hf.space"
727
  style="width:100%; height:680px; border:0"
728
  allow="clipboard-read; clipboard-write; fullscreen"
729
  referrerpolicy="no-referrer-when-downgrade"
730
  ></iframe></div></div></figure>
731
  <p>If you’ve checked out llava, you’ve seen that llava_video is a red node, connected by a red edge to llava: it’s a candidate, something that we can <em>likely</em> remodularize, <a href="#backwards-compatibility">not touching the actual model</a> but being much more readable with <a href="#do-repeat-yourself">DRY*</a>.</p>
732
- <div class="crumbs"><p>Similarity (Jaccard; embeddings tried separately) surfaces likely parents; the timeline shows consolidation after modular landed. Red nodes/edges = candidates (e.g., <code>llava_video</code> → <code>llava</code>) for refactors that preserve behavior. <strong>Next:</strong> concrete VLM choices that avoid leaky abstractions.</p></div>
733
  <h3 id="vlm-improvements-avoiding-abstraction"><a href="#vlm-improvements-avoiding-abstraction">VLM improvements, avoiding abstraction</a></h3>
734
- <p>We don’t have a cookbook for common VLM patterns (image-token scatter, multi‑tower encoders, cross‑attn bridges). This is one of the main areas where we can improve.</p>
735
  <p>For instance, we thought of abstracting away the mixing of <code>inputs_embeds</code>, the tensor fed into an LLM decoder in 95% of the existing VLMs. It would have looked something like this:</p>
736
  <div class="code-card"><button class="code-copy button--ghost" type="button" aria-label="Copy code"><svg viewBox="0 0 24 24" aria-hidden="true" focusable="false"><path d="M16 1H4c-1.1 0-2 .9-2 2v12h2V3h12V1zm3 4H8c-1.1 0-2 .9-2 2v14c0 1.1.9 2 2 2h11c1.1 0 2-.9 2-2V7c0-1.1-.9-2-2-2zm0 16H8V7h11v14z"></path></svg></button><pre class="astro-code astro-code-themes github-light github-dark" style="--shiki-light:#24292e;--shiki-dark:#e1e4e8;--shiki-light-bg:#fff;--shiki-dark-bg:#24292e;overflow-x:auto" tabindex="0" data-language="python"><code><span class="line"><span style="--shiki-light:#D73A49;--shiki-dark:#F97583">class</span><span style="--shiki-light:#6F42C1;--shiki-dark:#B392F0"> InputsEmbeddingMixerMixin</span><span style="--shiki-light:#24292E;--shiki-dark:#E1E4E8">(</span><span style="--shiki-light:#6F42C1;--shiki-dark:#B392F0">nn</span><span style="--shiki-light:#24292E;--shiki-dark:#E1E4E8">.</span><span style="--shiki-light:#6F42C1;--shiki-dark:#B392F0">Module</span><span style="--shiki-light:#24292E;--shiki-dark:#E1E4E8">):</span></span>
737
  <span class="line"><span style="--shiki-light:#6A737D;--shiki-dark:#6A737D"> #</span></span>
738
  <span class="line"></span></code></pre></div>
739
- <p>But this is <a href="#standardize-dont-abstract">abstracting away an important component of the modeling</a>. The embedding mixing is part of the model; removing it would break it. A user opening <a href="https://huggingface.co/collections/Qwen/qwen25-vl-6795ffac22b334a837c0f9a5"><code>modeling_qwen2.5_vl</code></a> should not have to go to another file to understand how it works.</p>
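<p>For reference, the pattern that stays visible inside each modeling file is small enough to read in place. A hedged sketch of the usual scatter step (names are illustrative, not the exact Qwen2.5-VL code):</p>
<pre><code class="language-python">import torch

def merge_image_features(inputs_embeds, image_features, input_ids, image_token_id):
    # Place the projected image embeddings at the positions of the
    # image placeholder tokens in the text embedding sequence.
    mask = (input_ids == image_token_id).unsqueeze(-1).expand_as(inputs_embeds)
    return inputs_embeds.masked_scatter(mask, image_features.to(inputs_embeds.dtype))
</code></pre>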
740
  <p>What is the current state of these “abstractions” across the codebase?
741
  You will see all the imports around a modeling file, here <a href="https://huggingface.co/google/gemma-3n-E4B-it">Gemma3n</a>.</p>
742
  <p><img src="/images/transformers/still_graph_bloat.png" alt="Gemma3n graph"/></p>
@@ -796,10 +799,11 @@ That means every decision we make to abstract something else has to be extremely
796
  <p>Having a framework means forcing users into it. It restrains flexibility and creativity, which are the fertile soil for new ideas to grow.</p>
797
  <p>Among the most valuable contributions to <code>transformers</code> is of course the addition of new models. Very recently, <a href="https://huggingface.co/blog/welcome-openai-gpt-oss">OpenAI added GPT-OSS</a>, which prompted the addition of many new features to the library in order to support <a href="https://huggingface.co/openai/gpt-oss-120b">their model</a>.</p>
798
  <p>A second one is the ability to fine-tune and pipeline these models into many other software. Check here on the hub how many finetunes are registered for <a href="https://huggingface.co/models?other=base_model:finetune:openai/gpt-oss-120b">gpt-oss 120b</a>, despite its size!</p>
799
- <div class="crumbs"><p>The shape of a contribution: add a model (or variant) with a small modular shard; the community and serving stacks pick it up immediately. Popularity trends (encoders/embeddings) guide where we invest. <strong>Next:</strong> power tools enabled by a consistent API.</p></div>
 
800
  <h3 id="-models-popularity"><a href="#-models-popularity"><a id="encoders-ftw"></a> Models popularity</a></h3>
801
- <p>Talking about dependencies, we can look at download counts to gauge the popularity of transformers models. One thing we see is the prominence of encoders: their usage is driven by embeddings; just check out <a href="https://huggingface.co/blog/embeddinggemma">EmbeddingGemma</a> for a modern recap. Hence, it is vital to keep the encoder side viable, usable, and fine-tunable.</p>
802
- <div><figure class="html-embed"><div class="html-embed__card"><div id="frag-34smeunec26"><html>
803
  <head><meta charset="utf-8" /></head>
804
  <body>
805
  <div> <script type="text/javascript">window.PlotlyConfig = {MathJaxConfig: 'local'};</script>
@@ -4687,15 +4691,16 @@ return Plotly;
4687
  }));</script> <div id="b7de70fd-22d7-40bc-923f-0a3dc91875e6" class="plotly-graph-div" style="height:1000px; width:100%;"></div> <script type="text/javascript"> window.PLOTLYENV=window.PLOTLYENV || {}; if (document.getElementById("b7de70fd-22d7-40bc-923f-0a3dc91875e6")) { Plotly.newPlot( "b7de70fd-22d7-40bc-923f-0a3dc91875e6", [{"hovertext":["clip","vit","vit","clip","resnet","resnet","clip","vit","vit","clip","vit","vitmatte","table_transformer","vit","d_fine","qwen2_vl","whisper","clip","vit","resnet","vit","siglip","qwen2_vl","align","clip","vit","whisper","blip","siglip2","siglip","vit","clip","clipseg","blip","whisper","gemma3","vit","dinov2","dinov2","whisper","vit","clip","d_fine","layoutlmv3","musicgen","resnet","table_transformer","dinov2","internvl","whisper","vit","gemma3","qwen2_vl","mobilevit","vit","bit","blip","blip_2","dinov2","table_transformer"],"marker":{"color":["#FFA15A","#FF97FF","#FF97FF","#FFA15A","#FFA15A","#FFA15A","#FFA15A","#FF97FF","#FF97FF","#FFA15A","#FF97FF","#FECB52","#B6E880","#FF97FF","#FF6692","#AB63FA","#636EFA","#FFA15A","#FF97FF","#FFA15A","#FF97FF","#19D3F3","#AB63FA","#636EFA","#FFA15A","#FF97FF","#636EFA","#00CC96","#FF6692","#19D3F3","#FF97FF","#FFA15A","#19D3F3","#00CC96","#636EFA","#FF97FF","#FF97FF","#B6E880","#B6E880","#636EFA","#FF97FF","#FFA15A","#FF6692","#636EFA","#00CC96","#FFA15A","#B6E880","#B6E880","#FECB52","#636EFA","#FF97FF","#FF97FF","#AB63FA","#EF553B","#FF97FF","#EF553B","#00CC96","#AB63FA","#B6E880","#B6E880"]},"name":"By Downloads","orientation":"h","visible":true,"x":{"dtype":"i4","bdata":"JFdPASRXTwHoJBsB6CQbAVyTGAHtzgMBMX5HADF+RwCsCUIArAlCAOy5PwDsuT8AJrk6ALmaOQDt+zYAlfk2AGgLNgBRnDQAKB80AF6mMwAJIjIALGUxAOhUMQAGJjEAwVQuAMFULgBO5S0Am6MqAF15JwBdeScAcQomAGoZJQBqGSUAq+YkAOtPIACUJx8AauYeAGrmHgAMHx4AArgcABh\u002fFwAYfxcAK1YWACK+FQBOnxUAMQwVAPySEwAsXhMAqTMTAOkdEwBRrBIAcTkSAArmEAC60hAAt2AQALdgEAAFKBAABSgQAGUZEAAPARAA"},"y":["openai\u002fclip-vit-large-patch14","openai\u002fclip-vit-large-patch14","openai\u002fclip-vit-base-patch32","openai\u002fclip-vit-base-patch32","timm\u002fresnet50.a1_in1k","pyannote\u002fwespeaker-voxceleb-resnet34-LM","openai\u002fclip-vit-large-patch14-336","openai\u002fclip-vit-large-patch14-336","openai\u002fclip-vit-base-patch16","openai\u002fclip-vit-base-patch16","hustvl\u002fvitmatte-small-composition-1k","hustvl\u002fvitmatte-small-composition-1k","microsoft\u002ftable-transformer-detection","google\u002fvit-base-patch16-224","distilbert\u002fdistilbert-base-uncased-finetuned-sst-2-english","Qwen\u002fQwen2.5-VL-7B-Instruct","openai\u002fwhisper-large-v3-turbo","patrickjohncyh\u002ffashion-clip","nateraw\u002fvit-age-classifier","timm\u002fresnet18.a1_in1k","google\u002fvit-base-patch16-224-in21k","google\u002fsiglip-so400m-patch14-384","Qwen\u002fQwen2.5-VL-3B-Instruct","MahmoudAshraf\u002fmms-300m-1130-forced-aligner","laion\u002fCLIP-ViT-bigG-14-laion2B-39B-b160k","laion\u002fCLIP-ViT-bigG-14-laion2B-39B-b160k","openai\u002fwhisper-large-v3","Salesforce\u002fblip-image-captioning-base","google\u002fsiglip2-so400m-patch16-naflex","google\u002fsiglip2-so400m-patch16-naflex","lpiccinelli\u002funidepth-v2-vitl14","CIDAS\u002fclipseg-rd64-refined","CIDAS\u002fclipseg-rd64-refined","Salesforce\u002fblip-image-captioning-large","openai\u002fwhisper-medium","google\u002fgemma-3-1b-it","timm\u002fvit_large_patch14_reg4_dinov2.lvd142m","timm\u002fvit_large_patch14_reg4_dinov2.lvd142m","facebook\u002fdinov2-base","openai\u002fwhisper-base.en","laion\u002fCLIP-ViT-H-14-laion2B-s32B-b79K","laion\u002fCLIP-ViT-H-1
4-laion2B-s32B-b79K","dbmdz\u002fbert-large-cased-finetuned-conll03-english","microsoft\u002flayoutlmv3-base","facebook\u002fmusicgen-medium","microsoft\u002fresnet-50","microsoft\u002ftable-transformer-structure-recognition","facebook\u002fdinov2-small","OpenGVLab\u002fInternVL3-78B","openai\u002fwhisper-small","AdamCodd\u002fvit-base-nsfw-detector","google\u002fgemma-3-4b-it","Qwen\u002fQwen2-VL-2B-Instruct","apple\u002fmobilevit-small","google\u002fvit-hybrid-base-bit-384","google\u002fvit-hybrid-base-bit-384","Salesforce\u002fblip2-opt-2.7b","Salesforce\u002fblip2-opt-2.7b","facebook\u002fdinov2-large","microsoft\u002ftable-transformer-structure-recognition-v1.1-all"],"type":"bar"},{"hovertext":["internvl","qwen2_vl","qwen2_vl","gemma3","gemma3","vit","mobilevit","siglip","siglip2","blip","blip","blip","blip_2","clip","vit","vit","clip","resnet","resnet","dinov2","vit","qwen2_vl","clipseg","clip","vit","whisper","align","siglip","clip","whisper","vit","resnet","layoutlmv3","vitmatte","vit","whisper","whisper","clip","vit","resnet","vit","whisper","dinov2","d_fine","table_transformer","musicgen","vit","clip","vit","bit","d_fine","table_transformer","table_transformer","dinov2","dinov2","vit","clip","vit","vit","clip"],"marker":{"color":["#FECB52","#AB63FA","#AB63FA","#FF97FF","#FF97FF","#FF97FF","#EF553B","#19D3F3","#FF6692","#00CC96","#00CC96","#00CC96","#AB63FA","#FFA15A","#FF97FF","#FF97FF","#FFA15A","#FFA15A","#FFA15A","#B6E880","#FF97FF","#AB63FA","#19D3F3","#FFA15A","#FF97FF","#636EFA","#636EFA","#19D3F3","#FFA15A","#636EFA","#FF97FF","#FFA15A","#636EFA","#FECB52","#FF97FF","#636EFA","#636EFA","#FFA15A","#FF97FF","#FFA15A","#FF97FF","#636EFA","#B6E880","#FF6692","#B6E880","#00CC96","#FF97FF","#FFA15A","#FF97FF","#EF553B","#FF6692","#B6E880","#B6E880","#B6E880","#B6E880","#FF97FF","#FFA15A","#FF97FF","#FF97FF","#FFA15A"]},"name":"By Last 
Modified","orientation":"h","visible":false,"x":{"dtype":"i4","bdata":"qTMTAOhUMQCV+TYAlCcfAHE5EgBxCiYAutIQAF15JwBdeScAm6MqAKvmJAAFKBAABSgQABh\u002fFwAYfxcAwVQuAMFULgBckxgBXqYzAGrmHgBq5h4ACuYQAGoZJQBqGSUAUawSAGgLNgAGJjEALGUxAFGcNABO5S0AKB80AO3OAwEivhUA7Lk\u002fAOy5PwDrTyAA6R0TAOgkGwHoJBsBMQwVAAkiMgACuBwADB8eAO37NgAPARAATp8VACRXTwEkV08Bt2AQALdgEAArVhYA\u002fJITACa5OgAsXhMAZRkQALmaOQCsCUIArAlCADF+RwAxfkcA"},"y":["OpenGVLab\u002fInternVL3-78B","Qwen\u002fQwen2.5-VL-3B-Instruct","Qwen\u002fQwen2.5-VL-7B-Instruct","google\u002fgemma-3-1b-it","google\u002fgemma-3-4b-it","lpiccinelli\u002funidepth-v2-vitl14","apple\u002fmobilevit-small","google\u002fsiglip2-so400m-patch16-naflex","google\u002fsiglip2-so400m-patch16-naflex","Salesforce\u002fblip-image-captioning-base","Salesforce\u002fblip-image-captioning-large","Salesforce\u002fblip2-opt-2.7b","Salesforce\u002fblip2-opt-2.7b","laion\u002fCLIP-ViT-H-14-laion2B-s32B-b79K","laion\u002fCLIP-ViT-H-14-laion2B-s32B-b79K","laion\u002fCLIP-ViT-bigG-14-laion2B-39B-b160k","laion\u002fCLIP-ViT-bigG-14-laion2B-39B-b160k","timm\u002fresnet50.a1_in1k","timm\u002fresnet18.a1_in1k","timm\u002fvit_large_patch14_reg4_dinov2.lvd142m","timm\u002fvit_large_patch14_reg4_dinov2.lvd142m","Qwen\u002fQwen2-VL-2B-Instruct","CIDAS\u002fclipseg-rd64-refined","CIDAS\u002fclipseg-rd64-refined","AdamCodd\u002fvit-base-nsfw-detector","openai\u002fwhisper-large-v3-turbo","MahmoudAshraf\u002fmms-300m-1130-forced-aligner","google\u002fsiglip-so400m-patch14-384","patrickjohncyh\u002ffashion-clip","openai\u002fwhisper-large-v3","nateraw\u002fvit-age-classifier","pyannote\u002fwespeaker-voxceleb-resnet34-LM","microsoft\u002flayoutlmv3-base","hustvl\u002fvitmatte-small-composition-1k","hustvl\u002fvitmatte-small-composition-1k","openai\u002fwhisper-medium","openai\u002fwhisper-small","openai\u002fclip-vit-base-patch32","openai\u002fclip-vit-base-patch32","microsoft\u002fresnet-50","google\u002fvit-base-patch16-224-in21k","openai\u002fwhisper-base.en","facebook\u002fdinov2-base","distilbert\u002fdistilbert-base-uncased-finetuned-sst-2-english","microsoft\u002ftable-transformer-structure-recognition-v1.1-all","facebook\u002fmusicgen-medium","openai\u002fclip-vit-large-patch14","openai\u002fclip-vit-large-patch14","google\u002fvit-hybrid-base-bit-384","google\u002fvit-hybrid-base-bit-384","dbmdz\u002fbert-large-cased-finetuned-conll03-english","microsoft\u002ftable-transformer-structure-recognition","microsoft\u002ftable-transformer-detection","facebook\u002fdinov2-small","facebook\u002fdinov2-large","google\u002fvit-base-patch16-224","openai\u002fclip-vit-base-patch16","openai\u002fclip-vit-base-patch16","openai\u002fclip-vit-large-patch14-336","openai\u002fclip-vit-large-patch14-336"],"type":"bar"}], 
{"template":{"data":{"barpolar":[{"marker":{"line":{"color":"rgb(17,17,17)","width":0.5},"pattern":{"fillmode":"overlay","size":10,"solidity":0.2}},"type":"barpolar"}],"bar":[{"error_x":{"color":"#f2f5fa"},"error_y":{"color":"#f2f5fa"},"marker":{"line":{"color":"rgb(17,17,17)","width":0.5},"pattern":{"fillmode":"overlay","size":10,"solidity":0.2}},"type":"bar"}],"carpet":[{"aaxis":{"endlinecolor":"#A2B1C6","gridcolor":"#506784","linecolor":"#506784","minorgridcolor":"#506784","startlinecolor":"#A2B1C6"},"baxis":{"endlinecolor":"#A2B1C6","gridcolor":"#506784","linecolor":"#506784","minorgridcolor":"#506784","startlinecolor":"#A2B1C6"},"type":"carpet"}],"choropleth":[{"colorbar":{"outlinewidth":0,"ticks":""},"type":"choropleth"}],"contourcarpet":[{"colorbar":{"outlinewidth":0,"ticks":""},"type":"contourcarpet"}],"contour":[{"colorbar":{"outlinewidth":0,"ticks":""},"colorscale":[[0.0,"#0d0887"],[0.1111111111111111,"#46039f"],[0.2222222222222222,"#7201a8"],[0.3333333333333333,"#9c179e"],[0.4444444444444444,"#bd3786"],[0.5555555555555556,"#d8576b"],[0.6666666666666666,"#ed7953"],[0.7777777777777778,"#fb9f3a"],[0.8888888888888888,"#fdca26"],[1.0,"#f0f921"]],"type":"contour"}],"heatmap":[{"colorbar":{"outlinewidth":0,"ticks":""},"colorscale":[[0.0,"#0d0887"],[0.1111111111111111,"#46039f"],[0.2222222222222222,"#7201a8"],[0.3333333333333333,"#9c179e"],[0.4444444444444444,"#bd3786"],[0.5555555555555556,"#d8576b"],[0.6666666666666666,"#ed7953"],[0.7777777777777778,"#fb9f3a"],[0.8888888888888888,"#fdca26"],[1.0,"#f0f921"]],"type":"heatmap"}],"histogram2dcontour":[{"colorbar":{"outlinewidth":0,"ticks":""},"colorscale":[[0.0,"#0d0887"],[0.1111111111111111,"#46039f"],[0.2222222222222222,"#7201a8"],[0.3333333333333333,"#9c179e"],[0.4444444444444444,"#bd3786"],[0.5555555555555556,"#d8576b"],[0.6666666666666666,"#ed7953"],[0.7777777777777778,"#fb9f3a"],[0.8888888888888888,"#fdca26"],[1.0,"#f0f921"]],"type":"histogram2dcontour"}],"histogram2d":[{"colorbar":{"outlinewidth":0,"ticks":""},"colorscale":[[0.0,"#0d0887"],[0.1111111111111111,"#46039f"],[0.2222222222222222,"#7201a8"],[0.3333333333333333,"#9c179e"],[0.4444444444444444,"#bd3786"],[0.5555555555555556,"#d8576b"],[0.6666666666666666,"#ed7953"],[0.7777777777777778,"#fb9f3a"],[0.8888888888888888,"#fdca26"],[1.0,"#f0f921"]],"type":"histogram2d"}],"histogram":[{"marker":{"pattern":{"fillmode":"overlay","size":10,"solidity":0.2}},"type":"histogram"}],"mesh3d":[{"colorbar":{"outlinewidth":0,"ticks":""},"type":"mesh3d"}],"parcoords":[{"line":{"colorbar":{"outlinewidth":0,"ticks":""}},"type":"parcoords"}],"pie":[{"automargin":true,"type":"pie"}],"scatter3d":[{"line":{"colorbar":{"outlinewidth":0,"ticks":""}},"marker":{"colorbar":{"outlinewidth":0,"ticks":""}},"type":"scatter3d"}],"scattercarpet":[{"marker":{"colorbar":{"outlinewidth":0,"ticks":""}},"type":"scattercarpet"}],"scattergeo":[{"marker":{"colorbar":{"outlinewidth":0,"ticks":""}},"type":"scattergeo"}],"scattergl":[{"marker":{"line":{"color":"#283442"}},"type":"scattergl"}],"scattermapbox":[{"marker":{"colorbar":{"outlinewidth":0,"ticks":""}},"type":"scattermapbox"}],"scattermap":[{"marker":{"colorbar":{"outlinewidth":0,"ticks":""}},"type":"scattermap"}],"scatterpolargl":[{"marker":{"colorbar":{"outlinewidth":0,"ticks":""}},"type":"scatterpolargl"}],"scatterpolar":[{"marker":{"colorbar":{"outlinewidth":0,"ticks":""}},"type":"scatterpolar"}],"scatter":[{"marker":{"line":{"color":"#283442"}},"type":"scatter"}],"scatterternary":[{"marker":{"colorbar":{"outlinewidth":0,"ticks":""}},"type":"scatterternary"}],"
surface":[{"colorbar":{"outlinewidth":0,"ticks":""},"colorscale":[[0.0,"#0d0887"],[0.1111111111111111,"#46039f"],[0.2222222222222222,"#7201a8"],[0.3333333333333333,"#9c179e"],[0.4444444444444444,"#bd3786"],[0.5555555555555556,"#d8576b"],[0.6666666666666666,"#ed7953"],[0.7777777777777778,"#fb9f3a"],[0.8888888888888888,"#fdca26"],[1.0,"#f0f921"]],"type":"surface"}],"table":[{"cells":{"fill":{"color":"#506784"},"line":{"color":"rgb(17,17,17)"}},"header":{"fill":{"color":"#2a3f5f"},"line":{"color":"rgb(17,17,17)"}},"type":"table"}]},"layout":{"annotationdefaults":{"arrowcolor":"#f2f5fa","arrowhead":0,"arrowwidth":1},"autotypenumbers":"strict","coloraxis":{"colorbar":{"outlinewidth":0,"ticks":""}},"colorscale":{"diverging":[[0,"#8e0152"],[0.1,"#c51b7d"],[0.2,"#de77ae"],[0.3,"#f1b6da"],[0.4,"#fde0ef"],[0.5,"#f7f7f7"],[0.6,"#e6f5d0"],[0.7,"#b8e186"],[0.8,"#7fbc41"],[0.9,"#4d9221"],[1,"#276419"]],"sequential":[[0.0,"#0d0887"],[0.1111111111111111,"#46039f"],[0.2222222222222222,"#7201a8"],[0.3333333333333333,"#9c179e"],[0.4444444444444444,"#bd3786"],[0.5555555555555556,"#d8576b"],[0.6666666666666666,"#ed7953"],[0.7777777777777778,"#fb9f3a"],[0.8888888888888888,"#fdca26"],[1.0,"#f0f921"]],"sequentialminus":[[0.0,"#0d0887"],[0.1111111111111111,"#46039f"],[0.2222222222222222,"#7201a8"],[0.3333333333333333,"#9c179e"],[0.4444444444444444,"#bd3786"],[0.5555555555555556,"#d8576b"],[0.6666666666666666,"#ed7953"],[0.7777777777777778,"#fb9f3a"],[0.8888888888888888,"#fdca26"],[1.0,"#f0f921"]]},"colorway":["#636efa","#EF553B","#00cc96","#ab63fa","#FFA15A","#19d3f3","#FF6692","#B6E880","#FF97FF","#FECB52"],"font":{"color":"#f2f5fa"},"geo":{"bgcolor":"rgb(17,17,17)","lakecolor":"rgb(17,17,17)","landcolor":"rgb(17,17,17)","showlakes":true,"showland":true,"subunitcolor":"#506784"},"hoverlabel":{"align":"left"},"hovermode":"closest","mapbox":{"style":"dark"},"paper_bgcolor":"rgb(17,17,17)","plot_bgcolor":"rgb(17,17,17)","polar":{"angularaxis":{"gridcolor":"#506784","linecolor":"#506784","ticks":""},"bgcolor":"rgb(17,17,17)","radialaxis":{"gridcolor":"#506784","linecolor":"#506784","ticks":""}},"scene":{"xaxis":{"backgroundcolor":"rgb(17,17,17)","gridcolor":"#506784","gridwidth":2,"linecolor":"#506784","showbackground":true,"ticks":"","zerolinecolor":"#C8D4E3"},"yaxis":{"backgroundcolor":"rgb(17,17,17)","gridcolor":"#506784","gridwidth":2,"linecolor":"#506784","showbackground":true,"ticks":"","zerolinecolor":"#C8D4E3"},"zaxis":{"backgroundcolor":"rgb(17,17,17)","gridcolor":"#506784","gridwidth":2,"linecolor":"#506784","showbackground":true,"ticks":"","zerolinecolor":"#C8D4E3"}},"shapedefaults":{"line":{"color":"#f2f5fa"}},"sliderdefaults":{"bgcolor":"#C8D4E3","bordercolor":"rgb(17,17,17)","borderwidth":1,"tickwidth":0},"ternary":{"aaxis":{"gridcolor":"#506784","linecolor":"#506784","ticks":""},"baxis":{"gridcolor":"#506784","linecolor":"#506784","ticks":""},"bgcolor":"rgb(17,17,17)","caxis":{"gridcolor":"#506784","linecolor":"#506784","ticks":""}},"title":{"x":0.05},"updatemenudefaults":{"bgcolor":"#506784","borderwidth":0},"xaxis":{"automargin":true,"gridcolor":"#283442","linecolor":"#506784","ticks":"","title":{"standoff":15},"zerolinecolor":"#283442","zerolinewidth":2},"yaxis":{"automargin":true,"gridcolor":"#283442","linecolor":"#506784","ticks":"","title":{"standoff":15},"zerolinecolor":"#283442","zerolinewidth":2}}},"xaxis":{"title":{"text":"Downloads"},"type":"log"},"yaxis":{"autorange":"reversed"},"updatemenus":[{"buttons":[{"args":[{"visible":[true,false]},{"title":"Sorted by Downloads"}],"label":"Sort by 
Downloads","method":"update"},{"args":[{"visible":[false,true]},{"title":"Sorted by Last Modified"}],"label":"Sort by Last Modified","method":"update"}],"direction":"down","showactive":true,"x":1.05,"xanchor":"left","y":1.15,"yanchor":"top"}],"title":{"text":"Model Popularity (Toggle Sort)"},"height":1000}, {"responsive": true} ) }; </script> </div>
4688
  </body>
4689
  </html></div></div></figure></div>
4690
- <p>As the codebase grows, we also need to maintain our friend codebase, <a href="https://huggingface.co/sentence-transformers">Sentence Transformers</a>. Retrieval use-cases and smart databases, like FAISS-based indexing, rely on it, and thus indirectly on transformers.</p>
4691
  <p>In that regard, we DO want to be a modular toolbox, being <a href="#minimal-user-api">minimal</a> enough and well documented enough so any ML/AI developer can use <code>transformers</code> without having to think about it. We aim to reduce the cognitive load brought about by model development, not increase it.</p>
4692
  <p>So, how do these design choices, these “tenets” influence development of models and overall usage of transformers?</p>
4693
- <div class="crumbs"><p>Encoders remain critical for embeddings and retrieval; maintaining them well benefits the broader ecosystem (e.g., Sentence Transformers, FAISS). <strong>Next:</strong> dev tools that leverage unified attention APIs and PyTorch-only internals.</p></div>
4694
  <h2 id="a-surgical-toolbox-for-model-development"><a href="#a-surgical-toolbox-for-model-development">A surgical toolbox for model development</a></h2>
4695
  <h3 id="attention-visualisation"><a href="#attention-visualisation">Attention visualisation</a></h3>
4696
- <p>All models have the same internal API for attention computation, thanks to <a href="#external-attention-classes">the externalisation of attention classes</a>. It allows us to build cool tools to visualize the inner workings of the attention mechanism.</p>
 
4697
  <p>One particular piece of machinery is the <code>attention mask</code>. Here you see the famous bidirectional attention pattern for the whole prefix (text + image) in PaliGemma and all Gemma2+ models, contrasting with the usual “causal-only” models.</p>
4698
- <figure class="html-embed"><div class="html-embed__card"><div id="frag-4lcs3vw6jdj"><!-- Minimal HTML fragment: terminal-style ASCII attention masks -->
4699
  <div style="max-width: 940px; margin: 16px 0; border:1px solid #2a2f3a; border-radius:8px; background:#0b0f19; font-family: ui-monospace, SFMono-Regular, Menlo, Monaco, Consolas, 'Liberation Mono', 'Courier New', monospace; color:#e5e7eb;">
4700
  <div style="display:flex; align-items:center; gap:8px; padding:8px 10px; border-bottom:1px solid #1f2430; background:#111827; border-top-left-radius:8px; border-top-right-radius:8px;">
4701
  <span style="width:10px; height:10px; background:#ef4444; border-radius:50%; display:inline-block;"></span>
@@ -4740,7 +4745,8 @@ return Plotly;
4740
  </div>
4741
  </div>
4742
  </div></div></figure>
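  <p>In code, that prefix pattern is just a boolean mask; a minimal sketch (a causal mask whose prefix block is made fully visible):</p>
  <pre><code class="language-python">import torch

def prefix_lm_mask(seq_len: int, prefix_len: int) -> torch.Tensor:
    # True means "may attend"
    mask = torch.tril(torch.ones(seq_len, seq_len, dtype=torch.bool))  # causal
    mask[:prefix_len, :prefix_len] = True  # bidirectional over the image+text prefix
    return mask
</code></pre>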
4743
- <div class="crumbs"><p>Uniform attention APIs enable cross-model diagnostics (e.g., PaliGemma prefix bidirectionality vs causal). <strong>Next:</strong> whole-model tracing for ports and regressions.</p></div>
 
4744
  <h3 id="logging-entire-model-activations"><a href="#logging-entire-model-activations">Logging entire model activations</a></h3>
4745
  <p>Further, because everything is PyTorch (even more so now that we support only PyTorch), we can easily <a href="https://huggingface.co/docs/transformers/internal/model_debugging_utils">debug any model</a> when we want to add it to transformers. We now have a power-user tool for porting or adding models: it wraps a forward pass, intercepts every submodule call, and logs shapes, dtypes, and sample statistics of inputs/outputs to nested JSON.</p>
4746
  <p>It just works with PyTorch models and is especially useful when aligning outputs with a reference implementation, aligned with our <a href="#source-of-truth">core guideline</a>.</p>
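  <p>The underlying mechanism can be sketched with plain PyTorch forward hooks; the actual transformers utility is much richer, so everything below is illustrative:</p>
  <pre><code class="language-python">import json
import torch

def trace_forward(model, *inputs):
    log = {}
    def make_hook(name):
        def hook(module, args, output):
            if isinstance(output, torch.Tensor):
                log[name] = {
                    "shape": list(output.shape),
                    "dtype": str(output.dtype),
                    "mean": output.float().mean().item(),
                }
        return hook
    handles = [m.register_forward_hook(make_hook(n)) for n, m in model.named_modules() if n]
    try:
        model(*inputs)
    finally:
        for h in handles:
            h.remove()
    return json.dumps(log, indent=2)
</code></pre>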
@@ -4748,7 +4754,7 @@ return Plotly;
4748
  <div class="crumbs"><p>Forward interception and nested JSON logging align ports to reference implementations, reinforcing “Source of Truth.” <strong>Next:</strong> CUDA warmup reduces load-time stalls without touching modeling semantics.</p></div>
4749
  <h3 id="cooking-faster-cuda-warmups"><a href="#cooking-faster-cuda-warmups">Cooking faster CUDA warmups</a></h3>
4750
  <p>Having a clean <em>external</em> API allows us to work on the <a href="#code-is-product">true inner workings of transformers</a>. One recent addition was the <em>CUDA warmup</em> via <code>caching_allocator_warmup</code>, which massively improved the loading footprint by pre-allocating GPU memory to avoid malloc bottlenecks during model loading: a 7x speedup for an 8B model, 6x for a 32B one. Check out <a href="https://github.com/huggingface/transformers/pull/36380">the source</a>!</p>
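  <p>The intuition fits in a few lines: reserve one big block up front so the many per-parameter allocations during loading are served from PyTorch’s caching allocator instead of each hitting an expensive <code>cudaMalloc</code>. A hedged sketch of the idea (not the actual implementation):</p>
  <pre><code class="language-python">import torch

def warm_allocator(total_param_bytes: int, device: str = "cuda"):
    # One large allocation primes the caching allocator...
    buffer = torch.empty(total_param_bytes, dtype=torch.uint8, device=device)
    # ...and freeing it returns the block to the cache, not to the driver,
    # so the subsequent weight loads reuse it without new mallocs.
    del buffer
</code></pre>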
4751
- <figure class="html-embed"><div class="html-embed__card"><div id="frag-oi4dqpay1do"><style>
4752
  /* 1) Scope tokens to the widget */
4753
  .warmup-demo{
4754
  --page-bg:#ffffff;
@@ -5052,27 +5058,35 @@ return Plotly;
5052
  <p>It’s hard to overstate how much of a lifesaver that is when you’re trying to load a model as fast as possible, as it’s the narrowest bottleneck for your iteration speed.</p>
5053
  <div class="crumbs"><p>Pre-allocating GPU memory removes malloc spikes (e.g., 7× for 8B, 6× for 32B in the referenced PR). <strong>Next:</strong> serving benefits directly from consistent interfaces and modularity.</p></div>
5054
  <h3 id="transformers-serve-and-continuous-batching"><a href="#transformers-serve-and-continuous-batching">Transformers-serve and continuous batching</a></h3>
5055
- <p>Having all these models readily available allows us to serve any of them with transformers-serve, and to interface with them through an OpenAI-compatible API. As a reminder, the hub also opens access to various <a href="https://huggingface.co/docs/inference-providers/en/index">inference providers</a> if you’re interested in model deployment in general.</p>
5056
  <div class="code-card"><button class="code-copy button--ghost" type="button" aria-label="Copy code"><svg viewBox="0 0 24 24" aria-hidden="true" focusable="false"><path d="M16 1H4c-1.1 0-2 .9-2 2v12h2V3h12V1zm3 4H8c-1.1 0-2 .9-2 2v14c0 1.1.9 2 2 2h11c1.1 0 2-.9 2-2V7c0-1.1-.9-2-2-2zm0 16H8V7h11v14z"></path></svg></button><pre class="astro-code astro-code-themes github-light github-dark" style="--shiki-light:#24292e;--shiki-dark:#e1e4e8;--shiki-light-bg:#fff;--shiki-dark-bg:#24292e;overflow-x:auto" tabindex="0" data-language="bash"><code><span class="line"><span style="--shiki-light:#6F42C1;--shiki-dark:#B392F0">transformers</span><span style="--shiki-light:#032F62;--shiki-dark:#9ECBFF"> serve</span></span>
5057
  <span class="line"></span>
5058
  <span class="line"><span style="--shiki-light:#6F42C1;--shiki-dark:#B392F0">curl</span><span style="--shiki-light:#005CC5;--shiki-dark:#79B8FF"> -X</span><span style="--shiki-light:#032F62;--shiki-dark:#9ECBFF"> POST</span><span style="--shiki-light:#032F62;--shiki-dark:#9ECBFF"> http://localhost:8000/v1/chat/completions</span><span style="--shiki-light:#005CC5;--shiki-dark:#79B8FF"> \</span></span>
5059
  <span class="line"><span style="--shiki-light:#24292E;--shiki-dark:#E1E4E8">-H </span><span style="--shiki-light:#032F62;--shiki-dark:#9ECBFF">&quot;Content-Type: application/json&quot;</span><span style="--shiki-light:#005CC5;--shiki-dark:#79B8FF"> \</span></span>
5060
  <span class="line"><span style="--shiki-light:#24292E;--shiki-dark:#E1E4E8">-d </span><span style="--shiki-light:#032F62;--shiki-dark:#9ECBFF">&#39;{&quot;messages&quot;: [{&quot;role&quot;: &quot;system&quot;, &quot;content&quot;: &quot;hello&quot;}], &quot;temperature&quot;: 0.9, &quot;max_tokens&quot;: 1000, &quot;stream&quot;: true, &quot;model&quot;: &quot;Qwen/Qwen2.5-0.5B-Instruct&quot;}&#39;</span></span>
5061
  <span class="line"></span></code></pre></div>
5062
- <p>This provides an OpenAI-compatible API with features like <a href="https://github.com/huggingface/transformers/pull/38085">continuous batching</a> (also check <a href="https://github.com/huggingface/transformers/pull/40426">here</a>) for better GPU utilization.</p>
5063
- <p>Continuous batching is itself closely linked to the great work of vLLM on the <code>paged attention kernel</code>, further justifying our support for <a href="#community-kernels">external kernels</a>.</p>
5064
- <div class="crumbs"><p>OpenAI-compatible surface + continuous batching; kernels/backends slot in because the modeling API stayed stable. <strong>Next:</strong> reuse across vLLM/SGLang relies on the same consistency.</p></div>
 
 
 
 
 
 
 
5065
  <h2 id="community-reusability"><a href="#community-reusability">Community reusability</a></h2>
5066
- <p>Transformers-serve is transformers-first, for sure, but the library is made first and foremost to be <em>reused</em> at large by the open-source ecosystem.</p>
5067
  <p>Adding a model to transformers means:</p>
5068
  <ul>
5069
  <li>having it immediately available to the community</li>
5070
  <li>having it immediately usable in vLLM, <a href="https://huggingface.co/blog/transformers-backend-sglang">SGLang</a>, and so on without additional code. In April 2025, transformers was added as a backend to run models on vLLM, which optimizes throughput/latency on top of existing transformers architectures <a href="https://blog.vllm.ai/2025/04/11/transformers-backend.html">as seen in this great vLLM x HF blog post.</a></li>
5071
  </ul>
5072
  <p>This cements the need even more for a <a href="#consistent-public-surface">consistent public surface</a>: we are now a backend, and there’s more optimized software than us to handle serving. At the time of writing, more effort is done in that direction. We already have compatible configs for VLMs for vLLM (say that three times fast), <a href="https://github.com/huggingface/transformers/pull/40696/files">here for GLM4 video support</a>, and here for <a href="https://github.com/huggingface/transformers/pull/40132">MoE support</a> for instance.</p>
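  <p>Concretely, on the vLLM side (see the linked blog post), running an architecture through the transformers modeling code is a single argument; a hedged sketch with an illustrative model id:</p>
  <pre><code class="language-python">from vllm import LLM, SamplingParams

# model_impl="transformers" asks vLLM to run the architecture through
# transformers modeling code instead of a native reimplementation.
llm = LLM(model="new-org/brand-new-llm", model_impl="transformers")
outputs = llm.generate(["Hello, my name is"], SamplingParams(max_tokens=32))
</code></pre>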
5073
- <div class="crumbs"><p>Being a good backend consumer requires a consistent public surface; modular shards and configs make that stability practical. <strong>Next:</strong> what changes in v5 without breaking the promise of visible semantics.</p></div>
 
5074
  <h2 id="what-is-coming-next"><a href="#what-is-coming-next">What is coming next</a></h2>
5075
- <p>The next major version of <code>transformers</code> is just around the corner (and will have another blog post to its name when it comes out). When v5 is released, we aim to keep <a href="#backwards-compatibility">backwards compatibility</a> as solid as possible. The changes we make now are in service of that goal.</p>
5076
  <p>We will lean further into a modular toolbox, not a framework. You should not be forced to rewrite modeling code. It’s better when a model can inherit from <code>PreTrainedModel</code> and opt into Tensor Parallel, <code>from_pretrained</code>, sharding, <code>push_to_hub</code>, loss plumbing, and external stacks like PEFT/TRL/SGLang/vLLM.</p> </main> </section> <footer class="footer"> <div class="footer-inner"> <section class="citation-block"> <h3>Citation</h3> <p>For attribution, cite this work as</p> <pre class="citation short">Pablo Montalvo (2025). &quot;Maintain the unmaintainable: 1M python loc, 400+ models&quot;.</pre> <p>BibTeX citation</p> <pre class="citation long">@misc{montalvo2025_maintain_the_unmaintaina,
5077
  title={Maintain the unmaintainable: 1M python loc, 400+ models},
5078
  author={Pablo Montalvo},
 
12
  document.documentElement.setAttribute("data-theme", theme);
13
  } catch {}
14
  })();
15
+ </script><script type="module" src="/scripts/color-palettes.js"></script><!-- TO MANAGE PROPERLY --><script src="https://cdn.plot.ly/plotly-3.0.0.min.js" charset="utf-8"></script><link rel="stylesheet" href="/_astro/index.beJ178IL.css"><script type="module" src="/_astro/hoisted.DK-CdsVg.js"></script>
16
+ <script type="module" src="/_astro/page.CH0W_C1Z.js"></script></head> <body> <button id="theme-toggle" aria-label="Toggle color theme" data-astro-cid-x3pjskd3> <svg class="icon light" width="20" height="20" viewBox="0 0 24 24" aria-hidden="true" focusable="false" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" data-astro-cid-x3pjskd3> <circle cx="12" cy="12" r="5" data-astro-cid-x3pjskd3></circle> <line x1="12" y1="1" x2="12" y2="4" data-astro-cid-x3pjskd3></line> <line x1="12" y1="20" x2="12" y2="23" data-astro-cid-x3pjskd3></line> <line x1="1" y1="12" x2="4" y2="12" data-astro-cid-x3pjskd3></line> <line x1="20" y1="12" x2="23" y2="12" data-astro-cid-x3pjskd3></line> <line x1="4.22" y1="4.22" x2="6.34" y2="6.34" data-astro-cid-x3pjskd3></line> <line x1="17.66" y1="17.66" x2="19.78" y2="19.78" data-astro-cid-x3pjskd3></line> <line x1="4.22" y1="19.78" x2="6.34" y2="17.66" data-astro-cid-x3pjskd3></line> <line x1="17.66" y1="6.34" x2="19.78" y2="4.22" data-astro-cid-x3pjskd3></line> </svg> <svg class="icon dark" width="20" height="20" viewBox="0 0 24 24" aria-hidden="true" focusable="false" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" data-astro-cid-x3pjskd3> <path d="M21 12.79A9 9 0 1 1 11.21 3 7 7 0 0 0 21 12.79z" data-astro-cid-x3pjskd3></path> </svg> </button> <section class="hero" data-astro-cid-bbe6dxrz> <h1 class="hero-title" data-astro-cid-bbe6dxrz>Maintain the unmaintainable:<br/>1M python loc, 400+ models</h1> <div class="hero-banner" data-astro-cid-bbe6dxrz> <figure class="html-embed"><div class="html-embed__card is-frameless"><div id="frag-7ye2najyjjf"><style>
17
  @import url('https://fonts.googleapis.com/css2?family=Inter:wght@500;600&display=swap');
18
 
19
  .banner-container {
20
+ width: 100%;
21
  height: 600px;
22
  position: relative;
23
  overflow: visible;
 
400
  if (document.readyState === 'loading') {
401
  document.addEventListener('DOMContentLoaded', buildTOC, { once: true });
402
  } else { buildTOC(); }
403
+ </script> <main> <h2 id="preface"><a href="#preface">Preface</a></h2>
404
  <p>One million lines of <code>python</code> code. Through them, the <code>transformers</code> library supports more than 400 model architectures, from state-of-the-art LLMs and VLMs to specialized models for audio, video, and tables.</p>
405
+ <p>Built on <code>PyTorch</code>, transformers is a foundational tool for modern LLM usage, research, education, and tens of thousands of other open-source projects. Each AI model is added by the community, harmonized into a consistent interface, and tested daily on a CI to ensure reproducibility.</p>
406
  <p>This scale presents a monumental engineering challenge.</p>
407
  <p>How do you keep such a ship afloat, made of so many moving, unrelated parts, contributed to by a buzzing hivemind? Especially as the pace of ML research accelerates? We receive constant feedback on everything from function signatures with hundreds of arguments to duplicated code and optimization concerns, and we listen to all of it, or try to. The library’s usage keeps on growing, and we are a small team of maintainers and contributors, backed by hundreds of open-source community members.
408
  We continue to support all new models and expect to do so for the foreseeable future.</p>
409
+ <p>This post dissects the design philosophy that makes this possible. It’s the result of an evolution from our older principles, detailed on our previous <a href="https://huggingface.co/docs/transformers/en/philosophy">philosophy</a> page, as well as its accompanying <a href="https://huggingface.co/blog/transformers-design-philosophy">blog post from 2022</a>. More recently (and we strongly recommend the read) we published a blog post about <a href="https://huggingface.co/blog/faster-transformers">recent upgrades to transformers</a>, focusing on what makes the library faster today. All of these developments are only made possible thanks to these principles.</p>
410
  <p>We codify the “tenets” that guide our development, demonstrate how they are implemented in code, and show the measurable impact they have on the library’s sustainability and growth.</p>
411
  <p>For any OSS maintainer, power user, or contributor, this is the map to understanding, using, and building upon <code>transformers</code>, but not only: any project of comparable size will require you to make deep choices, not only on design and choice of abstraction, but on the very mindset of the software you are building.</p>
412
  <p><a href="#source-of-truth">Tenets exemplified</a> will have their summary available on hover.</p>
 
439
  <p>It works as follows: to contribute a model, you define a <code>modular_</code> file that can inherit from <em>any function across all other modeling, configuration and processor files</em>.
440
This modular file can use inheritance across models, and it is then unravelled into a fully functional modeling file.</p>
441
  <summary id="generated-modeling">Auto-generated modeling code</summary>
442
+ <figure class="html-embed"><div class="html-embed__card"><div id="frag-971efks994o"><div class="code-compare" style="display: grid; grid-template-columns: 1fr 1fr; gap: 1rem; margin: 1.5rem 0;">
443
  <div class="code-column" style="border: 1px solid #e2e8f0; border-radius: 8px; overflow: hidden;">
444
  <div class="code-header" style="background: #f8f9fa; padding: 0.75rem 1rem; font-weight: 600; color: #495057; border-bottom: 1px solid #e2e8f0;">
445
  modular_glm.py
 
593
  <p>What is the consequence? When adding a model, we do not need to go over the entire modeling file. The modular (left side above) is enough.</p>
594
  <p>When <code>AutoModel.from_pretrained(...)</code> is called, it is indeed the modeling (right side) that is run, and all the tests are run on the modeling code.</p>
595
  <p>What does that give us?</p>
596
+ <div class="crumbs"><p><strong>TL;DR:</strong> A small <code>modular_*.py</code> declares reuse; the expanded modeling file stays visible (<a href="#one-model-one-file">One Model, One File tenet preserved</a>). Reviewers and contributors maintain the shard, not the repetition.</p><p><strong>Next:</strong> the measurable effect on effective LOC and maintenance cost.</p></div>
597
  <h3 id="a-maintainable-control-surface"><a href="#a-maintainable-control-surface">A maintainable control surface</a></h3>
598
  <p>The effect of modular can be measured in lines of code (LOC). If a model only has a modeling file, we add its LOC count.
599
However, if a model has a <code>modular_*.py</code> and a corresponding automatically generated <code>modeling_*.py</code>, we only count the LOC under the modular file. The modeling code has no maintenance cost as it is strictly dependent on the modular file.</p>
600
  <p>That gives an “effective LOC” curve: the 𝗺𝗮𝗶𝗻𝘁𝗲𝗻𝗮𝗻𝗰𝗲 𝘀𝘂𝗿𝗳𝗮𝗰𝗲.</p>
601
+ <p>Measured on git history, raw <code>modeling_*.py</code> grew at ~362 LOC/day before modular; counting only modular shards yields ~25 LOC/day after — about <strong>15× lower</strong>. The effective curve (blue line below) represents the <strong>maintenance surface</strong> today: what maintainers actually read and review.</p>
602
+ <p>Less code to hand-maintain means fewer places to break. LOC is not a direct measure of complexity, but it correlates with review effort and change risk.</p>
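<p>The counting rule itself fits in a few lines; a hedged sketch (paths are illustrative):</p>
<pre><code class="language-python">from pathlib import Path

def effective_loc(model_dir: Path) -> int:
    # A modular shard, when present, is the only file maintainers edit;
    # otherwise the full modeling file is the maintenance surface.
    modular = sorted(model_dir.glob("modular_*.py"))
    target = modular[0] if modular else next(model_dir.glob("modeling_*.py"))
    return len(target.read_text(encoding="utf-8").splitlines())
</code></pre>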
603
+ <figure class="html-embed"><div class="html-embed__card"><div id="frag-b9s6rzaeqn8"><iframe
604
  src="https://molbap-loc-1.hf.space"
605
  style="width:100%; height:900px; border:0"
606
  allow="clipboard-read; clipboard-write; fullscreen"
607
  referrerpolicy="no-referrer-when-downgrade"
608
  ></iframe></div></div></figure>
609
+ <p>If you zoom in, you’ll notice there’s a sharp drop near the end; it’s essentially due to us <a href="https://github.com/huggingface/transformers/commit/4df2529d79d75f44e70396df5888a32ffa02d61e#diff-60849db3e9922197854ef1cac92bf4aba08b5d7fd3fe6f3c16a3511e29e0eacc">removing support for Jax and TensorFlow</a> library-wide.</p>
610
+ <p>But this was not the only effort that allowed us to reduce maintenance load.</p>
611
+ <p>We recently underwent a deep refactor of the attention implementation. You’ve likely heard about <a href="https://huggingface.co/docs/text-generation-inference/en/conceptual/flash_attention">flash attention</a> and its several variants.</p>
612
  <p>The <em>attention computation</em> itself happens at a <em>lower</em> level of abstraction than the model itself.</p>
613
+ <p>However, we were adding specific torch operations for each backend (sdpa, the several flash-attention iterations, flex attention), but it wasn’t a <a href="#minimal-user-api">minimal user api</a>. The next section explains what we did about it.</p>
614
+ <div class="crumbs"><p>Evidence: effective (i.e., maintenable) LOC growth drops ~15× when counting shards instead of expanded modeling files. Less code to read, fewer places to break.</p><p><strong>Next:</strong> how the attention interface stays standard without hiding semantics.</p></div>
615
  <h3 id="-external-attention-classes"><a href="#-external-attention-classes"><a id="attention-classes"></a> External Attention classes</a></h3>
616
+ <p>The solution for the “attention abstraction problem” was to move to a standard <a href="https://huggingface.co/docs/transformers/en/attention_interface">attention interface</a> that allows the following:</p>
617
+ <p>The naive implementation of attention, called “eager”, is available by default. We use a <code>Callable</code> called <code>eager_attention_forward</code>, which can run as long as the user has PyTorch installed - a requirement in any case.</p>
618
+ <p>Instead of using a class interface and a class hierarchy, we just moved to a function interface. When a more complex attention implementation is needed, we use other Callables, including much faster kernel bindings when available. The decision to use a different attention implementation is based on the model configuration file we download from the Hub, and it can also be overridden by the user.</p>
619
+ <p>This is a clear example of the fact that we prefer an interface that is <a href="#standardize-dont-abstract">standard, but not abstract</a>. To be completely precise, this is what the interface selection looks like in transformers code:</p>
620
  <div class="code-card"><button class="code-copy button--ghost" type="button" aria-label="Copy code"><svg viewBox="0 0 24 24" aria-hidden="true" focusable="false"><path d="M16 1H4c-1.1 0-2 .9-2 2v12h2V3h12V1zm3 4H8c-1.1 0-2 .9-2 2v14c0 1.1.9 2 2 2h11c1.1 0 2-.9 2-2V7c0-1.1-.9-2-2-2zm0 16H8V7h11v14z"></path></svg></button><pre class="astro-code astro-code-themes github-light github-dark" style="--shiki-light:#24292e;--shiki-dark:#e1e4e8;--shiki-light-bg:#fff;--shiki-dark-bg:#24292e;overflow-x:auto" tabindex="0" data-language="python"><code><span class="line"><span style="--shiki-light:#24292E;--shiki-dark:#E1E4E8">attention_interface: Callable </span><span style="--shiki-light:#D73A49;--shiki-dark:#F97583">=</span><span style="--shiki-light:#24292E;--shiki-dark:#E1E4E8"> eager_attention_forward</span></span>
621
  <span class="line"><span style="--shiki-light:#D73A49;--shiki-dark:#F97583">if</span><span style="--shiki-light:#005CC5;--shiki-dark:#79B8FF"> self</span><span style="--shiki-light:#24292E;--shiki-dark:#E1E4E8">.config._attn_implementation </span><span style="--shiki-light:#D73A49;--shiki-dark:#F97583">!=</span><span style="--shiki-light:#032F62;--shiki-dark:#9ECBFF"> &quot;eager&quot;</span><span style="--shiki-light:#24292E;--shiki-dark:#E1E4E8">:</span></span>
622
  <span class="line"><span style="--shiki-light:#24292E;--shiki-dark:#E1E4E8"> attention_interface </span><span style="--shiki-light:#D73A49;--shiki-dark:#F97583">=</span><span style="--shiki-light:#005CC5;--shiki-dark:#79B8FF"> ALL_ATTENTION_FUNCTIONS</span><span style="--shiki-light:#24292E;--shiki-dark:#E1E4E8">[</span><span style="--shiki-light:#005CC5;--shiki-dark:#79B8FF">self</span><span style="--shiki-light:#24292E;--shiki-dark:#E1E4E8">.config._attn_implementation]</span></span>
623
  <span class="line"></span></code></pre></div>
624
+ <p>A strength of the new attention interface is that it lets us enforce the specific kwargs that kernel providers and other backend integrations require.</p>
626
+ <p>We know that kwargs are a necessary evil that plagues tools aiming for widespread compatibility; we have worked to reduce them and will continue to do so to improve readability. Even with them, the current system is a <a href="#minimal-user-api">minimal user api</a>.</p>
627
+ <p>We reduce that surface and document expectations; where flexibility is necessary, we plan to use <code>typing.Annotated</code> to convey shapes and invariants without constraining integrations. Such an implementation could look like this in the future:</p>
628
  <div class="code-card"><button class="code-copy button--ghost" type="button" aria-label="Copy code"><svg viewBox="0 0 24 24" aria-hidden="true" focusable="false"><path d="M16 1H4c-1.1 0-2 .9-2 2v12h2V3h12V1zm3 4H8c-1.1 0-2 .9-2 2v14c0 1.1.9 2 2 2h11c1.1 0 2-.9 2-2V7c0-1.1-.9-2-2-2zm0 16H8V7h11v14z"></path></svg></button><pre class="astro-code astro-code-themes github-light github-dark" style="--shiki-light:#24292e;--shiki-dark:#e1e4e8;--shiki-light-bg:#fff;--shiki-dark-bg:#24292e;overflow-x:auto" tabindex="0" data-language="python"><code><span class="line"><span style="--shiki-light:#D73A49;--shiki-dark:#F97583">from</span><span style="--shiki-light:#24292E;--shiki-dark:#E1E4E8"> typing </span><span style="--shiki-light:#D73A49;--shiki-dark:#F97583">import</span><span style="--shiki-light:#24292E;--shiki-dark:#E1E4E8"> Annotated</span></span>
629
  <span class="line"></span>
630
  <span class="line"><span style="--shiki-light:#24292E;--shiki-dark:#E1E4E8">MyModelOutputAnnotated </span><span style="--shiki-light:#D73A49;--shiki-dark:#F97583">=</span><span style="--shiki-light:#24292E;--shiki-dark:#E1E4E8"> Annotated[MyModelOutput, </span><span style="--shiki-light:#032F62;--shiki-dark:#9ECBFF">&quot;shape: (B, C, H, W)&quot;</span><span style="--shiki-light:#24292E;--shiki-dark:#E1E4E8">]</span></span>
631
  <span class="line"></span></code></pre></div>
632
+ <div class="crumbs"><p>Attention semantics remain in <code>eager_attention_forward</code>; faster backends are opt-in via config. We inform via types/annotations rather than enforce rigid kwargs, preserving integrations.</p><p><strong>Next:</strong> parallel partitioning is declared as a plan, not through model surgery.</p></div>
633
  <h3 id="-configurable-tensor-parallelism"><a href="#-configurable-tensor-parallelism"><a id="simpler-tensor-parallelism"></a> Configurable Tensor Parallelism</a></h3>
634
  <p>If you’re not familiar with the different flavours of parallelism, I recommend checking out <a href="https://huggingface.co/blog/accelerate-nd-parallel">this blog post</a> first, and of course a full <a href="https://huggingface.co/spaces/nanotron/ultrascale-playbook">dive into the ultra-scale playbook</a> is always recommended.</p>
635
  <p>The essential part is that, as <a href="https://huggingface.co/docs/transformers/v4.56.2/perf_train_gpu_many#tensor-parallelism">the documentation states</a> when tensors get too large to fit on a single GPU, they are sliced along a particular dimension and every slice is sent to a different GPU.</p>
 
639
  <p>Hence, we want to touch the modeling code <a href="#minimal-user-api">minimally</a>, and only modify it when <em>architectural changes</em> are involved. For instance, for tensor parallelism, we instead now specify a simple <code>tp_plan</code>.</p>
640
  <p>The alternative would be to modify parent classes with parallelization logic specific to each model, leaking distribution concerns into the modeling code.</p>
641
  <p>It is written once in the config and passed to <code>.from_pretrained()</code>. The plan maps module name patterns to partitioning strategies. Strategies are resolved by the internal <code>ParallelInterface</code>, which wires to sharding implementations <code>ColwiseParallel</code>, <code>RowwiseParallel</code>, packed variants, and so on.</p>
642
+ <figure class="html-embed"><div class="html-embed__card"><div id="frag-cnu461t3tc"><pre><code class="language-python"># In the model's config (example: ERNIE 4.5-style decoder blocks)
643
  base_model_tp_plan = {
644
  "layers.*.self_attn.q_proj": "colwise",
645
  "layers.*.self_attn.k_proj": "colwise",
 
668
  <p>Semantics stay in the model (a Linear stays a Linear), distribution is orthogonal and declared via strings: “colwise” splits columns of weights/bias across ranks; “rowwise” splits rows; packed variants shard fused weights; The mapping keys accept glob patterns like <code>layers.*.mlp.down_proj</code> to target repeated submodules.</p>
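+ <p>On the user side this stays a one-liner; a sketch of what loading with the plan looks like (the model id is illustrative, and the script is meant to be launched with <code>torchrun</code> across several GPUs):</p>
+ <pre><code class="language-python">import torch
+ from transformers import AutoModelForCausalLM
+ 
+ # "auto" picks up the tp_plan shipped with the model's config
+ model = AutoModelForCausalLM.from_pretrained(
+     "meta-llama/Llama-3.1-8B-Instruct",
+     torch_dtype=torch.bfloat16,
+     tp_plan="auto",
+ )
+ # e.g. torchrun --nproc-per-node 4 run_tp.py
+ </code></pre>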
669
  <div class="crumbs"><p>Sharding is configuration (<code>tp_plan</code>), not edits to <code>Linear</code>s. Glob patterns target repeated blocks; modeling semantics stay intact. <strong>Next:</strong> per-layer attention/caching schedules declared in config, not hardcoded.</p></div>
670
  <h3 id="-layers-attentions-and-caches"><a href="#-layers-attentions-and-caches"><a id="layers-attentions-caches"></a> Layers, attentions and caches</a></h3>
671
+ <p>Following the same logic, the <em>nature</em> of attention and per-layer caching should not be hardcoded. We should be able to specify in the configuration how each layer is implemented. Thus, we define a mapping like:</p>
672
  <div class="code-card"><button class="code-copy button--ghost" type="button" aria-label="Copy code"><svg viewBox="0 0 24 24" aria-hidden="true" focusable="false"><path d="M16 1H4c-1.1 0-2 .9-2 2v12h2V3h12V1zm3 4H8c-1.1 0-2 .9-2 2v14c0 1.1.9 2 2 2h11c1.1 0 2-.9 2-2V7c0-1.1-.9-2-2-2zm0 16H8V7h11v14z"></path></svg></button><pre class="astro-code astro-code-themes github-light github-dark" style="--shiki-light:#24292e;--shiki-dark:#e1e4e8;--shiki-light-bg:#fff;--shiki-dark-bg:#24292e;overflow-x:auto" tabindex="0" data-language="python"><code><span class="line"><span style="--shiki-light:#005CC5;--shiki-dark:#79B8FF">ALLOWED_LAYER_TYPES</span><span style="--shiki-light:#D73A49;--shiki-dark:#F97583"> =</span><span style="--shiki-light:#24292E;--shiki-dark:#E1E4E8"> (</span></span>
673
  <span class="line"><span style="--shiki-light:#032F62;--shiki-dark:#9ECBFF"> &quot;full_attention&quot;</span><span style="--shiki-light:#24292E;--shiki-dark:#E1E4E8">,</span></span>
674
  <span class="line"><span style="--shiki-light:#032F62;--shiki-dark:#9ECBFF"> &quot;sliding_attention&quot;</span><span style="--shiki-light:#24292E;--shiki-dark:#E1E4E8">,</span></span>
 
677
  <span class="line"><span style="--shiki-light:#005CC5;--shiki-dark:#79B8FF"> ...</span></span>
678
  <span class="line"><span style="--shiki-light:#24292E;--shiki-dark:#E1E4E8">)</span></span>
679
  <span class="line"></span></code></pre></div>
680
+ <p>and the configuration can be <em>explicit</em> about which attention type is in which layer. See, for example, <a href="https://huggingface.co/openai/gpt-oss-120b/blob/main/config.json#L15">gpt-oss</a>, which alternates sliding and full attention:</p>
681
  <div class="code-card"><button class="code-copy button--ghost" type="button" aria-label="Copy code"><svg viewBox="0 0 24 24" aria-hidden="true" focusable="false"><path d="M16 1H4c-1.1 0-2 .9-2 2v12h2V3h12V1zm3 4H8c-1.1 0-2 .9-2 2v14c0 1.1.9 2 2 2h11c1.1 0 2-.9 2-2V7c0-1.1-.9-2-2-2zm0 16H8V7h11v14z"></path></svg></button><pre class="astro-code astro-code-themes github-light github-dark" style="--shiki-light:#24292e;--shiki-dark:#e1e4e8;--shiki-light-bg:#fff;--shiki-dark-bg:#24292e;overflow-x:auto" tabindex="0" data-language="python"><code><span class="line"><span style="--shiki-light:#032F62;--shiki-dark:#9ECBFF"> &quot;layer_types&quot;</span><span style="--shiki-light:#24292E;--shiki-dark:#E1E4E8">: [</span></span>
682
  <span class="line"><span style="--shiki-light:#032F62;--shiki-dark:#9ECBFF"> &quot;sliding_attention&quot;</span><span style="--shiki-light:#24292E;--shiki-dark:#E1E4E8">,</span></span>
683
  <span class="line"><span style="--shiki-light:#032F62;--shiki-dark:#9ECBFF"> &quot;full_attention&quot;</span><span style="--shiki-light:#24292E;--shiki-dark:#E1E4E8">,</span></span>
 
686
  <span class="line"><span style="--shiki-light:#032F62;--shiki-dark:#9ECBFF"> &quot;full_attention&quot;</span></span>
687
  <span class="line"><span style="--shiki-light:#24292E;--shiki-dark:#E1E4E8"> ],</span></span>
688
  <span class="line"></span></code></pre></div>
689
+ <p>This is <a href="#minimal-user-api">minimal</a> to implement on the user side, and allows us to keep the modeling code untouched. It is also easy to tweak.</p>
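+ <p>To make the mechanics concrete, here is a toy sketch (names are illustrative, not the library internals) of how a per-layer schedule read from the config can drive each layer’s attention type:</p>
+ <pre><code class="language-python">from dataclasses import dataclass, field
+ 
+ @dataclass
+ class TinyConfig:
+     # alternating schedule, as in gpt-oss's config.json
+     layer_types: list = field(default_factory=lambda: ["sliding_attention", "full_attention"] * 2)
+     sliding_window: int = 128
+ 
+ def window_for(config, layer_idx):
+     # full attention gets no window; sliding attention uses the configured one
+     if config.layer_types[layer_idx] == "sliding_attention":
+         return config.sliding_window
+     return None
+ 
+ cfg = TinyConfig()
+ print([window_for(cfg, i) for i in range(len(cfg.layer_types))])  # [128, None, 128, None]
+ </code></pre>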
690
+ <div class="crumbs"><p>Allowed layer types are explicit; schedules (e.g., sliding/full alternation) live in config. This keeps the file readable and easy to tweak.</p><p><strong>Next:</strong> speedups come from kernels that don’t change semantics.</p></div>
691
  <h3 id="community-kernels"><a href="#community-kernels"><a id="community-kernels"></a>Community Kernels</a></h3>
692
  <p>The same principle extends to normalization, activation, and other code paths. The model defines <strong>semantics</strong>; a kernel defines <strong>how</strong> to execute them faster. We annotate the module to borrow a community‑provided forward, keeping a <a href="#consistent-public-surface">consistent public surface</a>:</p>
693
  <div class="code-card"><button class="code-copy button--ghost" type="button" aria-label="Copy code"><svg viewBox="0 0 24 24" aria-hidden="true" focusable="false"><path d="M16 1H4c-1.1 0-2 .9-2 2v12h2V3h12V1zm3 4H8c-1.1 0-2 .9-2 2v14c0 1.1.9 2 2 2h11c1.1 0 2-.9 2-2V7c0-1.1-.9-2-2-2zm0 16H8V7h11v14z"></path></svg></button><pre class="astro-code astro-code-themes github-light github-dark" style="--shiki-light:#24292e;--shiki-dark:#e1e4e8;--shiki-light-bg:#fff;--shiki-dark-bg:#24292e;overflow-x:auto" tabindex="0" data-language="python"><code><span class="line"><span style="--shiki-light:#6F42C1;--shiki-dark:#B392F0">@use_kernel_forward_from_hub</span><span style="--shiki-light:#24292E;--shiki-dark:#E1E4E8">(</span><span style="--shiki-light:#032F62;--shiki-dark:#9ECBFF">&quot;RMSNorm&quot;</span><span style="--shiki-light:#24292E;--shiki-dark:#E1E4E8">)</span></span>
 
709
  </ol>
710
  <p>So what do we see? Llama is a basis for many models, and it shows.
711
  Radically different architectures such as mamba have spawned their own dependency subgraph.</p>
712
+ <figure class="html-embed"><div class="html-embed__card"><div id="frag-3popmj9j337"><iframe
713
  src="https://molbap-dependencies-1.hf.space"
714
  style="width:100%; height:680px; border:0"
715
  allow="clipboard-read; clipboard-write; fullscreen"
716
  referrerpolicy="no-referrer-when-downgrade"
717
  ></iframe></div></div></figure>
718
+ <p>In the case of VLMs, there are far too many vision-based architectures that are not yet defined as modulars of other existing archs. In other words, there is no strong software reference point for vision models.
719
  As you can see, there is a small DETR island, a little llava pocket, and so on, but it’s not comparable to the centrality observed for llama.</p>
720
+ <p>Another problem is that this visualization only shows <code>modular</code> models; several models still do NOT have a modular file.</p>
721
  <p>How do we spot them, and how do we identify modularisable models?</p>
722
  <div class="crumbs"><p>Graph reading guide: nodes are models; edges are modular imports. Llama-lineage is a hub; several VLMs remain islands — engineering opportunity for shared parents. <strong>Next:</strong> timeline + similarity signals to spot candidates.</p></div>
723
  <h3 id="many-models-but-not-enough-yet-are-alike"><a href="#many-models-but-not-enough-yet-are-alike">Many models, but not enough yet, are alike</a></h3>
724
+ <p>I looked into Jaccard similarity, which we use to measure set differences, to find similarities across models. I know that code is more than a set of characters strung together. We also tried code-embedding models that ranked candidates better in practice, but for this post we stick to the deterministic Jaccard index. You can take a look at <a href="https://github.com/huggingface/transformers/pull/41289">the corresponding PR</a> for the embedding method.</p>
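+ <p>The metric itself is simple; a rough sketch of what this looks like over two modeling files (the paths are hypothetical, and identifiers are a crude lexical fingerprint):</p>
+ <pre><code class="language-python">import re
+ from pathlib import Path
+ 
+ def token_set(path):
+     # crude fingerprint: the set of identifiers appearing in the file
+     return set(re.findall(r"[A-Za-z_]\w+", Path(path).read_text()))
+ 
+ def jaccard(a, b):
+     union = len(a.union(b))
+     return len(a.intersection(b)) / union if union else 0.0
+ 
+ sim = jaccard(
+     token_set("src/transformers/models/llava/modeling_llava.py"),
+     token_set("src/transformers/models/llava_next/modeling_llava_next.py"),
+ )
+ print(f"Jaccard similarity: {sim:.2f}")
+ </code></pre>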
725
+ <p>It is interesting, for our comparison, to look at <em>when</em> we deployed this modular logic and what its rippling effect on the library was. You can check the <a href="https://huggingface.co/spaces/Molbap/transformers-modular-refactor">larger space</a> to play around, but the gist is: adding modular support allowed us to connect more and more models to solid reference points.</p>
726
+ <p>Yet, we still have a lot of gaps to fill.</p>
727
  <p>Zoom out below - it’s full of models. You can click on a node to see its connections better, or use the text box to search for a model.</p>
728
+ <figure class="html-embed"><div class="html-embed__card"><div id="frag-n93vsx8na5e"> <iframe
729
  src="https://molbap-timeline-1.hf.space"
730
  style="width:100%; height:680px; border:0"
731
  allow="clipboard-read; clipboard-write; fullscreen"
732
  referrerpolicy="no-referrer-when-downgrade"
733
  ></iframe></div></div></figure>
734
  <p>If you’ve checked out llava, you’ve seen that llava_video is a red node, connected by a red edge to llava: it’s a candidate, something that we can <em>likely</em> remodularize, <a href="#backwards-compatibility">not touching the actual model</a> while being much more readable with <a href="#do-repeat-yourself">DRY*</a>.</p>
735
+ <div class="crumbs"><p>Similarity metrics (Jaccard index or embeddings) surfaces likely parents; the timeline shows consolidation after modular landed. Red nodes/edges = candidates (e.g., <code>llava_video</code> → <code>llava</code>) for refactors that preserve behavior. <strong>Next:</strong> concrete VLM choices that avoid leaky abstractions.</p></div>
736
  <h3 id="vlm-improvements-avoiding-abstraction"><a href="#vlm-improvements-avoiding-abstraction">VLM improvements, avoiding abstraction</a></h3>
737
+ <p>We don’t yet have a cookbook for common VLM patterns (image token scatter, multi‑tower encoders, cross‑attention bridges). This is one of the main areas where we can improve.</p>
738
  <p>For instance, we thought of abstracting away the mixing of <code>inputs_embeds</code>, the tensor fed into an LLM decoder in 95% of the existing VLMs. It would have looked something like this:</p>
739
  <div class="code-card"><button class="code-copy button--ghost" type="button" aria-label="Copy code"><svg viewBox="0 0 24 24" aria-hidden="true" focusable="false"><path d="M16 1H4c-1.1 0-2 .9-2 2v12h2V3h12V1zm3 4H8c-1.1 0-2 .9-2 2v14c0 1.1.9 2 2 2h11c1.1 0 2-.9 2-2V7c0-1.1-.9-2-2-2zm0 16H8V7h11v14z"></path></svg></button><pre class="astro-code astro-code-themes github-light github-dark" style="--shiki-light:#24292e;--shiki-dark:#e1e4e8;--shiki-light-bg:#fff;--shiki-dark-bg:#24292e;overflow-x:auto" tabindex="0" data-language="python"><code><span class="line"><span style="--shiki-light:#D73A49;--shiki-dark:#F97583">class</span><span style="--shiki-light:#6F42C1;--shiki-dark:#B392F0"> InputsEmbeddingMixerMixin</span><span style="--shiki-light:#24292E;--shiki-dark:#E1E4E8">(</span><span style="--shiki-light:#6F42C1;--shiki-dark:#B392F0">nn</span><span style="--shiki-light:#24292E;--shiki-dark:#E1E4E8">.</span><span style="--shiki-light:#6F42C1;--shiki-dark:#B392F0">Module</span><span style="--shiki-light:#24292E;--shiki-dark:#E1E4E8">):</span></span>
740
  <span class="line"><span style="--shiki-light:#6A737D;--shiki-dark:#6A737D"> #</span></span>
741
  <span class="line"></span></code></pre></div>
742
+ <p>But this is <a href="#standardize-dont-abstract">abstracting away an important component of the modeling</a>. The embedding mixing is part of the model; pulling it out would obscure it. A user opening <a href="https://github.com/huggingface/transformers/blob/b3bd815786c36f4e6c3791fae0a96cac86658b32/src/transformers/models/qwen2_5_vl/modeling_qwen2_5_vl.py#L1358"><code>modeling_qwen2.5_vl</code></a> (check out the <a href="https://huggingface.co/collections/Qwen/qwen25-vl-6795ffac22b334a837c0f9a5">Qwen2.5VL collection</a>) should not have to go to another file to understand how it works.</p>
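+ <p>Instead, the mixing stays inline in each modeling file. A minimal sketch of that common pattern, with illustrative shapes and token id (not the exact qwen2.5_vl code):</p>
+ <pre><code class="language-python">import torch
+ 
+ inputs_embeds = torch.randn(1, 10, 64)   # (batch, seq, hidden) from the text embeddings
+ image_embeds = torch.randn(4, 64)        # 4 vision "patches" from the vision tower
+ input_ids = torch.tensor([[1, 5, 9, 9, 9, 9, 2, 3, 4, 6]])
+ image_token_id = 9                       # placeholder token marking image positions
+ 
+ # scatter image embeddings into the sequence where the placeholder appears
+ mask = (input_ids == image_token_id).unsqueeze(-1).expand_as(inputs_embeds)
+ inputs_embeds = inputs_embeds.masked_scatter(mask, image_embeds)
+ </code></pre>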
743
  <p>What is the current state of these “abstractions” across the codebase?
744
  You will see all the imports around a modeling file, here <a href="https://huggingface.co/google/gemma-3n-E4B-it">Gemma3n</a>.</p>
745
  <p><img src="/images/transformers/still_graph_bloat.png" alt="Gemma3n graph"/></p>
 
799
  <p>Having a framework means forcing users into it. It restrains flexibility and creativity, which are the fertile soil for new ideas to grow.</p>
800
  <p>Among the most valuable contributions to <code>transformers</code> is of course the addition of new models. Very recently, <a href="https://huggingface.co/blog/welcome-openai-gpt-oss">OpenAI added GPT-OSS</a>, which prompted the addition of many new features to the library in order to support <a href="https://huggingface.co/openai/gpt-oss-120b">their model</a>.</p>
801
  <p>A second one is the ability to fine-tune and pipeline these models into many other software. Check here on the hub how many finetunes are registered for <a href="https://huggingface.co/models?other=base_model:finetune:openai/gpt-oss-120b">gpt-oss 120b</a>, despite its size!</p>
802
+ <div class="crumbs"><p>The shape of a contribution: add a model (or variant) with a small modular shard; the community and serving stacks pick it up immediately. Popularity trends (encoders/embeddings) guide where we invest.
803
+ <strong>Next:</strong> power tools enabled by a consistent API.</p></div>
804
  <h3 id="-models-popularity"><a href="#-models-popularity"><a id="encoders-ftw"></a> Models popularity</a></h3>
805
+ <p>Talking about dependencies, we can take a look at the number of downloads as a measure of popularity. One thing we see is the prominence of encoders, despite the apparent prevalence of decoder LLMs. The reason is that encoders are used to generate embeddings, which have multiple downstream uses. Just check out <a href="https://huggingface.co/blog/embeddinggemma">EmbeddingGemma</a> for a modern recap. Hence, it is vital to keep the encoders portion of the library viable, usable, and fine-tunable.</p>
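+ <p>As a reminder of why this matters, producing an embedding from an encoder is only a few lines; a sketch with mean pooling (the model id is just a familiar example):</p>
+ <pre><code class="language-python">import torch
+ from transformers import AutoModel, AutoTokenizer
+ 
+ tok = AutoTokenizer.from_pretrained("google-bert/bert-base-uncased")
+ model = AutoModel.from_pretrained("google-bert/bert-base-uncased")
+ 
+ batch = tok(["transformers as a toolbox"], return_tensors="pt")
+ with torch.no_grad():
+     out = model(**batch)
+ 
+ # mean-pool token states into one sentence vector, ignoring padding
+ mask = batch["attention_mask"].unsqueeze(-1)
+ embedding = (out.last_hidden_state * mask).sum(1) / mask.sum(1)
+ </code></pre>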
806
+ <div><figure class="html-embed"><div class="html-embed__card"><div id="frag-0yx46sf56oel"><html>
807
  <head><meta charset="utf-8" /></head>
808
  <body>
809
  <div> <script type="text/javascript">window.PlotlyConfig = {MathJaxConfig: 'local'};</script>
 
4691
  }));</script> <div id="b7de70fd-22d7-40bc-923f-0a3dc91875e6" class="plotly-graph-div" style="height:1000px; width:100%;"></div> <script type="text/javascript"> window.PLOTLYENV=window.PLOTLYENV || {}; if (document.getElementById("b7de70fd-22d7-40bc-923f-0a3dc91875e6")) { Plotly.newPlot( "b7de70fd-22d7-40bc-923f-0a3dc91875e6", [{"hovertext":["clip","vit","vit","clip","resnet","resnet","clip","vit","vit","clip","vit","vitmatte","table_transformer","vit","d_fine","qwen2_vl","whisper","clip","vit","resnet","vit","siglip","qwen2_vl","align","clip","vit","whisper","blip","siglip2","siglip","vit","clip","clipseg","blip","whisper","gemma3","vit","dinov2","dinov2","whisper","vit","clip","d_fine","layoutlmv3","musicgen","resnet","table_transformer","dinov2","internvl","whisper","vit","gemma3","qwen2_vl","mobilevit","vit","bit","blip","blip_2","dinov2","table_transformer"],"marker":{"color":["#FFA15A","#FF97FF","#FF97FF","#FFA15A","#FFA15A","#FFA15A","#FFA15A","#FF97FF","#FF97FF","#FFA15A","#FF97FF","#FECB52","#B6E880","#FF97FF","#FF6692","#AB63FA","#636EFA","#FFA15A","#FF97FF","#FFA15A","#FF97FF","#19D3F3","#AB63FA","#636EFA","#FFA15A","#FF97FF","#636EFA","#00CC96","#FF6692","#19D3F3","#FF97FF","#FFA15A","#19D3F3","#00CC96","#636EFA","#FF97FF","#FF97FF","#B6E880","#B6E880","#636EFA","#FF97FF","#FFA15A","#FF6692","#636EFA","#00CC96","#FFA15A","#B6E880","#B6E880","#FECB52","#636EFA","#FF97FF","#FF97FF","#AB63FA","#EF553B","#FF97FF","#EF553B","#00CC96","#AB63FA","#B6E880","#B6E880"]},"name":"By Downloads","orientation":"h","visible":true,"x":{"dtype":"i4","bdata":"JFdPASRXTwHoJBsB6CQbAVyTGAHtzgMBMX5HADF+RwCsCUIArAlCAOy5PwDsuT8AJrk6ALmaOQDt+zYAlfk2AGgLNgBRnDQAKB80AF6mMwAJIjIALGUxAOhUMQAGJjEAwVQuAMFULgBO5S0Am6MqAF15JwBdeScAcQomAGoZJQBqGSUAq+YkAOtPIACUJx8AauYeAGrmHgAMHx4AArgcABh\u002fFwAYfxcAK1YWACK+FQBOnxUAMQwVAPySEwAsXhMAqTMTAOkdEwBRrBIAcTkSAArmEAC60hAAt2AQALdgEAAFKBAABSgQAGUZEAAPARAA"},"y":["openai\u002fclip-vit-large-patch14","openai\u002fclip-vit-large-patch14","openai\u002fclip-vit-base-patch32","openai\u002fclip-vit-base-patch32","timm\u002fresnet50.a1_in1k","pyannote\u002fwespeaker-voxceleb-resnet34-LM","openai\u002fclip-vit-large-patch14-336","openai\u002fclip-vit-large-patch14-336","openai\u002fclip-vit-base-patch16","openai\u002fclip-vit-base-patch16","hustvl\u002fvitmatte-small-composition-1k","hustvl\u002fvitmatte-small-composition-1k","microsoft\u002ftable-transformer-detection","google\u002fvit-base-patch16-224","distilbert\u002fdistilbert-base-uncased-finetuned-sst-2-english","Qwen\u002fQwen2.5-VL-7B-Instruct","openai\u002fwhisper-large-v3-turbo","patrickjohncyh\u002ffashion-clip","nateraw\u002fvit-age-classifier","timm\u002fresnet18.a1_in1k","google\u002fvit-base-patch16-224-in21k","google\u002fsiglip-so400m-patch14-384","Qwen\u002fQwen2.5-VL-3B-Instruct","MahmoudAshraf\u002fmms-300m-1130-forced-aligner","laion\u002fCLIP-ViT-bigG-14-laion2B-39B-b160k","laion\u002fCLIP-ViT-bigG-14-laion2B-39B-b160k","openai\u002fwhisper-large-v3","Salesforce\u002fblip-image-captioning-base","google\u002fsiglip2-so400m-patch16-naflex","google\u002fsiglip2-so400m-patch16-naflex","lpiccinelli\u002funidepth-v2-vitl14","CIDAS\u002fclipseg-rd64-refined","CIDAS\u002fclipseg-rd64-refined","Salesforce\u002fblip-image-captioning-large","openai\u002fwhisper-medium","google\u002fgemma-3-1b-it","timm\u002fvit_large_patch14_reg4_dinov2.lvd142m","timm\u002fvit_large_patch14_reg4_dinov2.lvd142m","facebook\u002fdinov2-base","openai\u002fwhisper-base.en","laion\u002fCLIP-ViT-H-14-laion2B-s32B-b79K","laion\u002fCLIP-ViT-H-1
4-laion2B-s32B-b79K","dbmdz\u002fbert-large-cased-finetuned-conll03-english","microsoft\u002flayoutlmv3-base","facebook\u002fmusicgen-medium","microsoft\u002fresnet-50","microsoft\u002ftable-transformer-structure-recognition","facebook\u002fdinov2-small","OpenGVLab\u002fInternVL3-78B","openai\u002fwhisper-small","AdamCodd\u002fvit-base-nsfw-detector","google\u002fgemma-3-4b-it","Qwen\u002fQwen2-VL-2B-Instruct","apple\u002fmobilevit-small","google\u002fvit-hybrid-base-bit-384","google\u002fvit-hybrid-base-bit-384","Salesforce\u002fblip2-opt-2.7b","Salesforce\u002fblip2-opt-2.7b","facebook\u002fdinov2-large","microsoft\u002ftable-transformer-structure-recognition-v1.1-all"],"type":"bar"},{"hovertext":["internvl","qwen2_vl","qwen2_vl","gemma3","gemma3","vit","mobilevit","siglip","siglip2","blip","blip","blip","blip_2","clip","vit","vit","clip","resnet","resnet","dinov2","vit","qwen2_vl","clipseg","clip","vit","whisper","align","siglip","clip","whisper","vit","resnet","layoutlmv3","vitmatte","vit","whisper","whisper","clip","vit","resnet","vit","whisper","dinov2","d_fine","table_transformer","musicgen","vit","clip","vit","bit","d_fine","table_transformer","table_transformer","dinov2","dinov2","vit","clip","vit","vit","clip"],"marker":{"color":["#FECB52","#AB63FA","#AB63FA","#FF97FF","#FF97FF","#FF97FF","#EF553B","#19D3F3","#FF6692","#00CC96","#00CC96","#00CC96","#AB63FA","#FFA15A","#FF97FF","#FF97FF","#FFA15A","#FFA15A","#FFA15A","#B6E880","#FF97FF","#AB63FA","#19D3F3","#FFA15A","#FF97FF","#636EFA","#636EFA","#19D3F3","#FFA15A","#636EFA","#FF97FF","#FFA15A","#636EFA","#FECB52","#FF97FF","#636EFA","#636EFA","#FFA15A","#FF97FF","#FFA15A","#FF97FF","#636EFA","#B6E880","#FF6692","#B6E880","#00CC96","#FF97FF","#FFA15A","#FF97FF","#EF553B","#FF6692","#B6E880","#B6E880","#B6E880","#B6E880","#FF97FF","#FFA15A","#FF97FF","#FF97FF","#FFA15A"]},"name":"By Last 
Modified","orientation":"h","visible":false,"x":{"dtype":"i4","bdata":"qTMTAOhUMQCV+TYAlCcfAHE5EgBxCiYAutIQAF15JwBdeScAm6MqAKvmJAAFKBAABSgQABh\u002fFwAYfxcAwVQuAMFULgBckxgBXqYzAGrmHgBq5h4ACuYQAGoZJQBqGSUAUawSAGgLNgAGJjEALGUxAFGcNABO5S0AKB80AO3OAwEivhUA7Lk\u002fAOy5PwDrTyAA6R0TAOgkGwHoJBsBMQwVAAkiMgACuBwADB8eAO37NgAPARAATp8VACRXTwEkV08Bt2AQALdgEAArVhYA\u002fJITACa5OgAsXhMAZRkQALmaOQCsCUIArAlCADF+RwAxfkcA"},"y":["OpenGVLab\u002fInternVL3-78B","Qwen\u002fQwen2.5-VL-3B-Instruct","Qwen\u002fQwen2.5-VL-7B-Instruct","google\u002fgemma-3-1b-it","google\u002fgemma-3-4b-it","lpiccinelli\u002funidepth-v2-vitl14","apple\u002fmobilevit-small","google\u002fsiglip2-so400m-patch16-naflex","google\u002fsiglip2-so400m-patch16-naflex","Salesforce\u002fblip-image-captioning-base","Salesforce\u002fblip-image-captioning-large","Salesforce\u002fblip2-opt-2.7b","Salesforce\u002fblip2-opt-2.7b","laion\u002fCLIP-ViT-H-14-laion2B-s32B-b79K","laion\u002fCLIP-ViT-H-14-laion2B-s32B-b79K","laion\u002fCLIP-ViT-bigG-14-laion2B-39B-b160k","laion\u002fCLIP-ViT-bigG-14-laion2B-39B-b160k","timm\u002fresnet50.a1_in1k","timm\u002fresnet18.a1_in1k","timm\u002fvit_large_patch14_reg4_dinov2.lvd142m","timm\u002fvit_large_patch14_reg4_dinov2.lvd142m","Qwen\u002fQwen2-VL-2B-Instruct","CIDAS\u002fclipseg-rd64-refined","CIDAS\u002fclipseg-rd64-refined","AdamCodd\u002fvit-base-nsfw-detector","openai\u002fwhisper-large-v3-turbo","MahmoudAshraf\u002fmms-300m-1130-forced-aligner","google\u002fsiglip-so400m-patch14-384","patrickjohncyh\u002ffashion-clip","openai\u002fwhisper-large-v3","nateraw\u002fvit-age-classifier","pyannote\u002fwespeaker-voxceleb-resnet34-LM","microsoft\u002flayoutlmv3-base","hustvl\u002fvitmatte-small-composition-1k","hustvl\u002fvitmatte-small-composition-1k","openai\u002fwhisper-medium","openai\u002fwhisper-small","openai\u002fclip-vit-base-patch32","openai\u002fclip-vit-base-patch32","microsoft\u002fresnet-50","google\u002fvit-base-patch16-224-in21k","openai\u002fwhisper-base.en","facebook\u002fdinov2-base","distilbert\u002fdistilbert-base-uncased-finetuned-sst-2-english","microsoft\u002ftable-transformer-structure-recognition-v1.1-all","facebook\u002fmusicgen-medium","openai\u002fclip-vit-large-patch14","openai\u002fclip-vit-large-patch14","google\u002fvit-hybrid-base-bit-384","google\u002fvit-hybrid-base-bit-384","dbmdz\u002fbert-large-cased-finetuned-conll03-english","microsoft\u002ftable-transformer-structure-recognition","microsoft\u002ftable-transformer-detection","facebook\u002fdinov2-small","facebook\u002fdinov2-large","google\u002fvit-base-patch16-224","openai\u002fclip-vit-base-patch16","openai\u002fclip-vit-base-patch16","openai\u002fclip-vit-large-patch14-336","openai\u002fclip-vit-large-patch14-336"],"type":"bar"}], 
{"template":{"data":{"barpolar":[{"marker":{"line":{"color":"rgb(17,17,17)","width":0.5},"pattern":{"fillmode":"overlay","size":10,"solidity":0.2}},"type":"barpolar"}],"bar":[{"error_x":{"color":"#f2f5fa"},"error_y":{"color":"#f2f5fa"},"marker":{"line":{"color":"rgb(17,17,17)","width":0.5},"pattern":{"fillmode":"overlay","size":10,"solidity":0.2}},"type":"bar"}],"carpet":[{"aaxis":{"endlinecolor":"#A2B1C6","gridcolor":"#506784","linecolor":"#506784","minorgridcolor":"#506784","startlinecolor":"#A2B1C6"},"baxis":{"endlinecolor":"#A2B1C6","gridcolor":"#506784","linecolor":"#506784","minorgridcolor":"#506784","startlinecolor":"#A2B1C6"},"type":"carpet"}],"choropleth":[{"colorbar":{"outlinewidth":0,"ticks":""},"type":"choropleth"}],"contourcarpet":[{"colorbar":{"outlinewidth":0,"ticks":""},"type":"contourcarpet"}],"contour":[{"colorbar":{"outlinewidth":0,"ticks":""},"colorscale":[[0.0,"#0d0887"],[0.1111111111111111,"#46039f"],[0.2222222222222222,"#7201a8"],[0.3333333333333333,"#9c179e"],[0.4444444444444444,"#bd3786"],[0.5555555555555556,"#d8576b"],[0.6666666666666666,"#ed7953"],[0.7777777777777778,"#fb9f3a"],[0.8888888888888888,"#fdca26"],[1.0,"#f0f921"]],"type":"contour"}],"heatmap":[{"colorbar":{"outlinewidth":0,"ticks":""},"colorscale":[[0.0,"#0d0887"],[0.1111111111111111,"#46039f"],[0.2222222222222222,"#7201a8"],[0.3333333333333333,"#9c179e"],[0.4444444444444444,"#bd3786"],[0.5555555555555556,"#d8576b"],[0.6666666666666666,"#ed7953"],[0.7777777777777778,"#fb9f3a"],[0.8888888888888888,"#fdca26"],[1.0,"#f0f921"]],"type":"heatmap"}],"histogram2dcontour":[{"colorbar":{"outlinewidth":0,"ticks":""},"colorscale":[[0.0,"#0d0887"],[0.1111111111111111,"#46039f"],[0.2222222222222222,"#7201a8"],[0.3333333333333333,"#9c179e"],[0.4444444444444444,"#bd3786"],[0.5555555555555556,"#d8576b"],[0.6666666666666666,"#ed7953"],[0.7777777777777778,"#fb9f3a"],[0.8888888888888888,"#fdca26"],[1.0,"#f0f921"]],"type":"histogram2dcontour"}],"histogram2d":[{"colorbar":{"outlinewidth":0,"ticks":""},"colorscale":[[0.0,"#0d0887"],[0.1111111111111111,"#46039f"],[0.2222222222222222,"#7201a8"],[0.3333333333333333,"#9c179e"],[0.4444444444444444,"#bd3786"],[0.5555555555555556,"#d8576b"],[0.6666666666666666,"#ed7953"],[0.7777777777777778,"#fb9f3a"],[0.8888888888888888,"#fdca26"],[1.0,"#f0f921"]],"type":"histogram2d"}],"histogram":[{"marker":{"pattern":{"fillmode":"overlay","size":10,"solidity":0.2}},"type":"histogram"}],"mesh3d":[{"colorbar":{"outlinewidth":0,"ticks":""},"type":"mesh3d"}],"parcoords":[{"line":{"colorbar":{"outlinewidth":0,"ticks":""}},"type":"parcoords"}],"pie":[{"automargin":true,"type":"pie"}],"scatter3d":[{"line":{"colorbar":{"outlinewidth":0,"ticks":""}},"marker":{"colorbar":{"outlinewidth":0,"ticks":""}},"type":"scatter3d"}],"scattercarpet":[{"marker":{"colorbar":{"outlinewidth":0,"ticks":""}},"type":"scattercarpet"}],"scattergeo":[{"marker":{"colorbar":{"outlinewidth":0,"ticks":""}},"type":"scattergeo"}],"scattergl":[{"marker":{"line":{"color":"#283442"}},"type":"scattergl"}],"scattermapbox":[{"marker":{"colorbar":{"outlinewidth":0,"ticks":""}},"type":"scattermapbox"}],"scattermap":[{"marker":{"colorbar":{"outlinewidth":0,"ticks":""}},"type":"scattermap"}],"scatterpolargl":[{"marker":{"colorbar":{"outlinewidth":0,"ticks":""}},"type":"scatterpolargl"}],"scatterpolar":[{"marker":{"colorbar":{"outlinewidth":0,"ticks":""}},"type":"scatterpolar"}],"scatter":[{"marker":{"line":{"color":"#283442"}},"type":"scatter"}],"scatterternary":[{"marker":{"colorbar":{"outlinewidth":0,"ticks":""}},"type":"scatterternary"}],"
surface":[{"colorbar":{"outlinewidth":0,"ticks":""},"colorscale":[[0.0,"#0d0887"],[0.1111111111111111,"#46039f"],[0.2222222222222222,"#7201a8"],[0.3333333333333333,"#9c179e"],[0.4444444444444444,"#bd3786"],[0.5555555555555556,"#d8576b"],[0.6666666666666666,"#ed7953"],[0.7777777777777778,"#fb9f3a"],[0.8888888888888888,"#fdca26"],[1.0,"#f0f921"]],"type":"surface"}],"table":[{"cells":{"fill":{"color":"#506784"},"line":{"color":"rgb(17,17,17)"}},"header":{"fill":{"color":"#2a3f5f"},"line":{"color":"rgb(17,17,17)"}},"type":"table"}]},"layout":{"annotationdefaults":{"arrowcolor":"#f2f5fa","arrowhead":0,"arrowwidth":1},"autotypenumbers":"strict","coloraxis":{"colorbar":{"outlinewidth":0,"ticks":""}},"colorscale":{"diverging":[[0,"#8e0152"],[0.1,"#c51b7d"],[0.2,"#de77ae"],[0.3,"#f1b6da"],[0.4,"#fde0ef"],[0.5,"#f7f7f7"],[0.6,"#e6f5d0"],[0.7,"#b8e186"],[0.8,"#7fbc41"],[0.9,"#4d9221"],[1,"#276419"]],"sequential":[[0.0,"#0d0887"],[0.1111111111111111,"#46039f"],[0.2222222222222222,"#7201a8"],[0.3333333333333333,"#9c179e"],[0.4444444444444444,"#bd3786"],[0.5555555555555556,"#d8576b"],[0.6666666666666666,"#ed7953"],[0.7777777777777778,"#fb9f3a"],[0.8888888888888888,"#fdca26"],[1.0,"#f0f921"]],"sequentialminus":[[0.0,"#0d0887"],[0.1111111111111111,"#46039f"],[0.2222222222222222,"#7201a8"],[0.3333333333333333,"#9c179e"],[0.4444444444444444,"#bd3786"],[0.5555555555555556,"#d8576b"],[0.6666666666666666,"#ed7953"],[0.7777777777777778,"#fb9f3a"],[0.8888888888888888,"#fdca26"],[1.0,"#f0f921"]]},"colorway":["#636efa","#EF553B","#00cc96","#ab63fa","#FFA15A","#19d3f3","#FF6692","#B6E880","#FF97FF","#FECB52"],"font":{"color":"#f2f5fa"},"geo":{"bgcolor":"rgb(17,17,17)","lakecolor":"rgb(17,17,17)","landcolor":"rgb(17,17,17)","showlakes":true,"showland":true,"subunitcolor":"#506784"},"hoverlabel":{"align":"left"},"hovermode":"closest","mapbox":{"style":"dark"},"paper_bgcolor":"rgb(17,17,17)","plot_bgcolor":"rgb(17,17,17)","polar":{"angularaxis":{"gridcolor":"#506784","linecolor":"#506784","ticks":""},"bgcolor":"rgb(17,17,17)","radialaxis":{"gridcolor":"#506784","linecolor":"#506784","ticks":""}},"scene":{"xaxis":{"backgroundcolor":"rgb(17,17,17)","gridcolor":"#506784","gridwidth":2,"linecolor":"#506784","showbackground":true,"ticks":"","zerolinecolor":"#C8D4E3"},"yaxis":{"backgroundcolor":"rgb(17,17,17)","gridcolor":"#506784","gridwidth":2,"linecolor":"#506784","showbackground":true,"ticks":"","zerolinecolor":"#C8D4E3"},"zaxis":{"backgroundcolor":"rgb(17,17,17)","gridcolor":"#506784","gridwidth":2,"linecolor":"#506784","showbackground":true,"ticks":"","zerolinecolor":"#C8D4E3"}},"shapedefaults":{"line":{"color":"#f2f5fa"}},"sliderdefaults":{"bgcolor":"#C8D4E3","bordercolor":"rgb(17,17,17)","borderwidth":1,"tickwidth":0},"ternary":{"aaxis":{"gridcolor":"#506784","linecolor":"#506784","ticks":""},"baxis":{"gridcolor":"#506784","linecolor":"#506784","ticks":""},"bgcolor":"rgb(17,17,17)","caxis":{"gridcolor":"#506784","linecolor":"#506784","ticks":""}},"title":{"x":0.05},"updatemenudefaults":{"bgcolor":"#506784","borderwidth":0},"xaxis":{"automargin":true,"gridcolor":"#283442","linecolor":"#506784","ticks":"","title":{"standoff":15},"zerolinecolor":"#283442","zerolinewidth":2},"yaxis":{"automargin":true,"gridcolor":"#283442","linecolor":"#506784","ticks":"","title":{"standoff":15},"zerolinecolor":"#283442","zerolinewidth":2}}},"xaxis":{"title":{"text":"Downloads"},"type":"log"},"yaxis":{"autorange":"reversed"},"updatemenus":[{"buttons":[{"args":[{"visible":[true,false]},{"title":"Sorted by Downloads"}],"label":"Sort by 
Downloads","method":"update"},{"args":[{"visible":[false,true]},{"title":"Sorted by Last Modified"}],"label":"Sort by Last Modified","method":"update"}],"direction":"down","showactive":true,"x":1.05,"xanchor":"left","y":1.15,"yanchor":"top"}],"title":{"text":"Model Popularity (Toggle Sort)"},"height":1000}, {"responsive": true} ) }; </script> </div>
4692
  </body>
4693
  </html></div></div></figure></div>
4694
+ <p>As the codebase grows, we need to maintain it in coordination with our friends at the <a href="https://huggingface.co/sentence-transformers">Sentence Transformers codebase</a>. Retrieval use-cases, smart databases, and FAISS-based indexing rely on it, and thus indirectly on transformers.</p>
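+ <p>A typical downstream pipeline, sketched with Sentence Transformers and FAISS (the model id and corpus are illustrative):</p>
+ <pre><code class="language-python">import faiss
+ from sentence_transformers import SentenceTransformer
+ 
+ model = SentenceTransformer("sentence-transformers/all-MiniLM-L6-v2")
+ docs = ["transformers is a model library", "faiss indexes dense vectors"]
+ vecs = model.encode(docs, normalize_embeddings=True)
+ 
+ index = faiss.IndexFlatIP(vecs.shape[1])  # inner product = cosine on normalized vectors
+ index.add(vecs)
+ query = model.encode(["vector search"], normalize_embeddings=True)
+ scores, ids = index.search(query, 1)
+ </code></pre>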
4695
  <p>In that regard, we DO want to be a modular toolbox, being <a href="#minimal-user-api">minimal</a> enough and well documented enough so any ML/AI developer can use <code>transformers</code> without having to think about it. We aim to reduce the cognitive load brought about by model development, not increase it.</p>
4696
  <p>So, how do these design choices, these “tenets” influence development of models and overall usage of transformers?</p>
4697
+ <div class="crumbs"><p>Encoders remain critical for embeddings and retrieval; maintaining them well benefits the broader ecosystem (e.g., Sentence Transformers, FAISS).</p><p><strong>Next:</strong> dev tools that leverage unified attention APIs and PyTorch-only internals.</p></div>
4698
  <h2 id="a-surgical-toolbox-for-model-development"><a href="#a-surgical-toolbox-for-model-development">A surgical toolbox for model development</a></h2>
4699
  <h3 id="attention-visualisation"><a href="#attention-visualisation">Attention visualisation</a></h3>
4700
+ <p>All models have the same API for attention computation, thanks to <a href="#external-attention-classes">the externalisation of attention classes</a>.</p>
4701
+ <p>This uniformity allows us to build cool tools to visualize the inner workings of the attention mechanism.</p>
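+ <p>Because attention resolution is uniform, pulling the weights out for inspection works the same way on every model; a sketch (eager is required so weights are actually returned):</p>
+ <pre><code class="language-python">import torch
+ from transformers import AutoModelForCausalLM, AutoTokenizer
+ 
+ tok = AutoTokenizer.from_pretrained("gpt2")
+ model = AutoModelForCausalLM.from_pretrained("gpt2", attn_implementation="eager")
+ 
+ inputs = tok("The quick brown fox", return_tensors="pt")
+ with torch.no_grad():
+     out = model(**inputs, output_attentions=True)
+ 
+ # one (batch, heads, seq, seq) tensor per layer
+ print(len(out.attentions), out.attentions[0].shape)
+ </code></pre>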
4702
  <p>One particular piece of machinery is the <code>attention mask</code>. Here you see the famous bidirectional attention pattern for the whole prefix (text + image) in PaliGemma and all Gemma2+ models, contrasting with the usual “causal-only” models.</p>
4703
+ <figure class="html-embed"><div class="html-embed__card"><div id="frag-xi8y9of6l1"><!-- Minimal HTML fragment: terminal-style ASCII attention masks -->
4704
  <div style="max-width: 940px; margin: 16px 0; border:1px solid #2a2f3a; border-radius:8px; background:#0b0f19; font-family: ui-monospace, SFMono-Regular, Menlo, Monaco, Consolas, 'Liberation Mono', 'Courier New', monospace; color:#e5e7eb;">
4705
  <div style="display:flex; align-items:center; gap:8px; padding:8px 10px; border-bottom:1px solid #1f2430; background:#111827; border-top-left-radius:8px; border-top-right-radius:8px;">
4706
  <span style="width:10px; height:10px; background:#ef4444; border-radius:50%; display:inline-block;"></span>
 
4745
  </div>
4746
  </div>
4747
  </div></div></figure>
4748
+ <div class="crumbs"><p>Uniform attention APIs enable cross-model diagnostics (e.g., PaliGemma prefix bidirectionality vs causal).
4749
+ <strong>Next:</strong> whole-model tracing for ports and regressions.</p></div>
4750
  <h3 id="logging-entire-model-activations"><a href="#logging-entire-model-activations">Logging entire model activations</a></h3>
4751
  <p>Further, because it is all PyTorch (even more so now that we support only PyTorch), we can easily <a href="https://huggingface.co/docs/transformers/internal/model_debugging_utils">debug any model</a> when we want to add it to transformers. We now have a power-user tool for porting or adding models that wraps a forward pass, intercepts every submodule call, and logs shapes, dtypes, and sample statistics of inputs/outputs to nested JSON.</p>
4752
  <p>It just works with PyTorch models and is especially useful when aligning outputs with a reference implementation, aligned with our <a href="#source-of-truth">core guideline</a>.</p>
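+ <p>A minimal sketch of how the tool is used, following the linked docs (the exact argument names may differ across versions):</p>
+ <pre><code class="language-python">from transformers import AutoModelForCausalLM, AutoTokenizer
+ from transformers.model_debugging_utils import model_addition_debugger_context
+ 
+ tok = AutoTokenizer.from_pretrained("gpt2")
+ model = AutoModelForCausalLM.from_pretrained("gpt2")
+ inputs = tok("hello", return_tensors="pt")
+ 
+ # wraps the forward pass and dumps per-module shapes/dtypes/stats to nested JSON
+ with model_addition_debugger_context(model, debug_path="debug_gpt2"):
+     _ = model(**inputs)
+ </code></pre>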
 
4754
  <div class="crumbs"><p>Forward interception and nested JSON logging align ports to reference implementations, reinforcing “Source of Truth.” <strong>Next:</strong> CUDA warmup reduces load-time stalls without touching modeling semantics.</p></div>
4755
  <h3 id="cooking-faster-cuda-warmups"><a href="#cooking-faster-cuda-warmups">Cooking faster CUDA warmups</a></h3>
4756
  <p>Having a clean <em>external</em> API allows us to work on the <a href="#code-is-product">true inner workings of transformers</a>. One of the more recent additions was a <em>CUDA warmup</em> via <code>caching_allocator_warmup</code>, which massively improved the loading footprint by pre-allocating GPU memory to avoid malloc bottlenecks during model loading: a 7x speedup for an 8B model, 6x for a 32B one. Check out <a href="https://github.com/huggingface/transformers/pull/36380">the source</a>!</p>
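+ <p>The idea, stripped to its core, looks like the simplified sketch below (not the library’s actual implementation): grab one large block up front so the allocator doesn’t grow through many small, synchronizing mallocs while weights stream in.</p>
+ <pre><code class="language-python">import torch
+ 
+ def naive_warmup(total_param_bytes, device="cuda"):
+     # reserve one large block; freeing it keeps the memory cached in
+     # torch's CUDA caching allocator, ready for the real weight tensors
+     block = torch.empty(total_param_bytes, dtype=torch.uint8, device=device)
+     del block
+ </code></pre>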
4757
+ <figure class="html-embed"><div class="html-embed__card"><div id="frag-d8fi5kw4x4v"><style>
4758
  /* 1) Scope tokens to the widget */
4759
  .warmup-demo{
4760
  --page-bg:#ffffff;
 
5058
  <p>It’s hard to overstate how much of a lifesaver that is when you’re trying to load a model as fast as possible, as it’s the narrowest bottleneck for your iteration speed.</p>
5059
  <div class="crumbs"><p>Pre-allocating GPU memory removes malloc spikes (e.g., 7× for 8B, 6× for 32B in the referenced PR). <strong>Next:</strong> serving benefits directly from consistent interfaces and modularity.</p></div>
5060
  <h3 id="transformers-serve-and-continuous-batching"><a href="#transformers-serve-and-continuous-batching">Transformers-serve and continuous batching</a></h3>
5061
+ <p>Having all these models readily available and sharing the same interface allowed us to implement transformers-serve, a CLI tool to expose models through a standard OpenAI-compatible HTTP API.</p>
5062
  <div class="code-card"><button class="code-copy button--ghost" type="button" aria-label="Copy code"><svg viewBox="0 0 24 24" aria-hidden="true" focusable="false"><path d="M16 1H4c-1.1 0-2 .9-2 2v12h2V3h12V1zm3 4H8c-1.1 0-2 .9-2 2v14c0 1.1.9 2 2 2h11c1.1 0 2-.9 2-2V7c0-1.1-.9-2-2-2zm0 16H8V7h11v14z"></path></svg></button><pre class="astro-code astro-code-themes github-light github-dark" style="--shiki-light:#24292e;--shiki-dark:#e1e4e8;--shiki-light-bg:#fff;--shiki-dark-bg:#24292e;overflow-x:auto" tabindex="0" data-language="bash"><code><span class="line"><span style="--shiki-light:#6F42C1;--shiki-dark:#B392F0">transformers</span><span style="--shiki-light:#032F62;--shiki-dark:#9ECBFF"> serve</span></span>
5063
  <span class="line"></span>
5064
  <span class="line"><span style="--shiki-light:#6F42C1;--shiki-dark:#B392F0">curl</span><span style="--shiki-light:#005CC5;--shiki-dark:#79B8FF"> -X</span><span style="--shiki-light:#032F62;--shiki-dark:#9ECBFF"> POST</span><span style="--shiki-light:#032F62;--shiki-dark:#9ECBFF"> http://localhost:8000/v1/chat/completions</span><span style="--shiki-light:#005CC5;--shiki-dark:#79B8FF"> \</span></span>
5065
  <span class="line"><span style="--shiki-light:#24292E;--shiki-dark:#E1E4E8">-H </span><span style="--shiki-light:#032F62;--shiki-dark:#9ECBFF">&quot;Content-Type: application/json&quot;</span><span style="--shiki-light:#005CC5;--shiki-dark:#79B8FF"> \</span></span>
5066
  <span class="line"><span style="--shiki-light:#24292E;--shiki-dark:#E1E4E8">-d </span><span style="--shiki-light:#032F62;--shiki-dark:#9ECBFF">&#39;{&quot;messages&quot;: [{&quot;role&quot;: &quot;system&quot;, &quot;content&quot;: &quot;hello&quot;}], &quot;temperature&quot;: 0.9, &quot;max_tokens&quot;: 1000, &quot;stream&quot;: true, &quot;model&quot;: &quot;Qwen/Qwen2.5-0.5B-Instruct&quot;}&#39;</span></span>
5067
  <span class="line"></span></code></pre></div>
5068
+ <p><code>transformers-serve</code> uses continuous batching (see <a href="https://github.com/huggingface/transformers/pull/38085">this PR</a> and also <a href="https://github.com/huggingface/transformers/pull/40426">this one</a>) for better GPU utilization, and is very much linked to the great work of vLLM with the <code>paged attention kernel</code> – a further justification of <a href="#community-kernels">external kernels</a>.</p>
5069
+ <p><code>transformers-serve</code> is not meant for user-facing production services; tools like vLLM or SGLang are heavily optimized for that. But it’s useful for several use cases:</p>
5070
+ <ul>
5071
+ <li>Quickly verify that your model is compatible with continuous batching and paged attention.</li>
5072
+ <li>Run ad-hoc vibe tests on any model, without having to deploy anything.</li>
5073
+ <li>Run evaluations efficiently, again without having to spend a lot of time engineering your infrastructure.</li>
5074
+ </ul>
5075
+ <p>For model deployment, check <a href="https://huggingface.co/docs/inference-providers/en/index">Inference Providers</a> or roll your own solution using any of the excellent serving libraries.</p>
5076
+ <div class="crumbs"><p>OpenAI-compatible surface + continuous batching; kernels/backends slot in because the modeling API stayed stable.
5077
+ <strong>Next:</strong> reuse across vLLM/SGLang relies on the same consistency.</p></div>
5078
  <h2 id="community-reusability"><a href="#community-reusability">Community reusability</a></h2>
5079
+ <p>The transformers-serve CLI is built on transformers, for sure, but the library is made first and foremost to be <em>reused</em> at large by the open-source ecosystem.</p>
5080
  <p>Adding a model to transformers means:</p>
5081
  <ul>
5082
  <li>having it immediately available to the community</li>
5083
  <li>having it immediately usable in vLLM, <a href="https://huggingface.co/blog/transformers-backend-sglang">SGLang</a>, and so on without additional code. In April 2025, transformers was added as a backend to run models on vLLM, which optimizes throughput/latency on top of existing transformers architectures <a href="https://blog.vllm.ai/2025/04/11/transformers-backend.html">as seen in this great vLLM x HF blog post.</a></li>
5084
  </ul>
5085
  <p>This cements the need even more for a <a href="#consistent-public-surface">consistent public surface</a>: we are now a backend, and there’s more optimized software than us to handle serving. At the time of writing, more effort is done in that direction. We already have compatible configs for VLMs for vLLM (say that three times fast), <a href="https://github.com/huggingface/transformers/pull/40696/files">here for GLM4 video support</a>, and here for <a href="https://github.com/huggingface/transformers/pull/40132">MoE support</a> for instance.</p>
5086
+ <div class="crumbs"><p>Being a good backend consumer requires a consistent public surface; modular shards and configs make that stability practical.
5087
+ <strong>Next:</strong> what changes in v5 without breaking the promise of visible semantics.</p></div>
5088
  <h2 id="what-is-coming-next"><a href="#what-is-coming-next">What is coming next</a></h2>
5089
+ <p>The next major version of <code>transformers</code> is just around the corner (and will have another blog post to its name when it comes out). When v5 is released, we aim to keep <a href="#backwards-compatibility">backwards compatibility</a> as solid as possible. The changes we make now are in service of that goal.</p>
5090
  <p>We will lean further into a modular toolbox, not a framework. You should not be forced to rewrite modeling code. It’s better when a model can inherit from <code>PreTrainedModel</code> and opt into Tensor Parallel, <code>from_pretrained</code>, sharding, <code>push_to_hub</code>, loss plumbing, and external stacks like PEFT/TRL/SGLang/vLLM.</p> </main> </section> <footer class="footer"> <div class="footer-inner"> <section class="citation-block"> <h3>Citation</h3> <p>For attribution, cite this work as</p> <pre class="citation short">Pablo Montalvo (2025). &quot;Maintain the unmaintainable: 1M python loc, 400+ models&quot;.</pre> <p>BibTeX citation</p> <pre class="citation long">@misc{montalvo2025_maintain_the_unmaintaina,
5091
  title={Maintain the unmaintainable: 1M python loc, 400+ models},
5092
  author={Pablo Montalvo},
app/dist/index.html.gz CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:69707f0e4ede2db07fd44e93b4f7cbf2bd110cbd19dd18e6fee579bb5f760da3
3
- size 1488182
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:ed0105da49dc4bd2501866a3127d0d5d87cece9f044a8d3acf75e7007e611f9f
3
+ size 1488729
app/package-lock.json CHANGED
@@ -21,7 +21,7 @@
21
  "devDependencies": {
22
  "@astrojs/mdx": "^3.1.9",
23
  "@astrojs/svelte": "^5.5.0",
24
- "astro": "^4.10.0",
25
  "astro-compressor": "^0.4.1",
26
  "astro-mermaid": "^1.0.4",
27
  "mermaid": "^11.10.1",
@@ -336,6 +336,7 @@
336
  "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.28.4.tgz",
337
  "integrity": "sha512-2BCOP7TN8M+gVDj7/ht3hsaO/B/n5oDbiAyyvnRlNOs+u1o+JWNYTQrmpuNp1/Wq2gcFrI01JAW+paEKDMx/CA==",
338
  "dev": true,
 
339
  "dependencies": {
340
  "@babel/code-frame": "^7.27.1",
341
  "@babel/generator": "^7.28.3",
@@ -655,6 +656,7 @@
655
  "resolved": "https://registry.npmjs.org/@citation-js/core/-/core-0.7.18.tgz",
656
  "integrity": "sha512-EjLuZWA5156dIFGdF7OnyPyWFBW43B8Ckje6Sn/W2RFxHDu0oACvW4/6TNgWT80jhEA4bVFm7ahrZe9MJ2B2UQ==",
657
  "dev": true,
 
658
  "dependencies": {
659
  "@citation-js/date": "^0.5.0",
660
  "@citation-js/name": "^0.4.2",
@@ -839,6 +841,7 @@
839
  "url": "https://opencollective.com/csstools"
840
  }
841
  ],
 
842
  "engines": {
843
  "node": ">=18"
844
  },
@@ -861,6 +864,7 @@
861
  "url": "https://opencollective.com/csstools"
862
  }
863
  ],
 
864
  "engines": {
865
  "node": ">=18"
866
  }
@@ -3122,7 +3126,6 @@
3122
  "resolved": "https://registry.npmjs.org/@shikijs/core/-/core-3.13.0.tgz",
3123
  "integrity": "sha512-3P8rGsg2Eh2qIHekwuQjzWhKI4jV97PhvYjYUzGqjvJfqdQPz+nMlfWahU24GZAyW1FxFI1sYjyhfh5CoLmIUA==",
3124
  "dev": true,
3125
- "peer": true,
3126
  "dependencies": {
3127
  "@shikijs/types": "3.13.0",
3128
  "@shikijs/vscode-textmate": "^10.0.2",
@@ -3135,7 +3138,6 @@
3135
  "resolved": "https://registry.npmjs.org/@shikijs/engine-javascript/-/engine-javascript-3.13.0.tgz",
3136
  "integrity": "sha512-Ty7xv32XCp8u0eQt8rItpMs6rU9Ki6LJ1dQOW3V/56PKDcpvfHPnYFbsx5FFUP2Yim34m/UkazidamMNVR4vKg==",
3137
  "dev": true,
3138
- "peer": true,
3139
  "dependencies": {
3140
  "@shikijs/types": "3.13.0",
3141
  "@shikijs/vscode-textmate": "^10.0.2",
@@ -3147,7 +3149,6 @@
3147
  "resolved": "https://registry.npmjs.org/@shikijs/engine-oniguruma/-/engine-oniguruma-3.13.0.tgz",
3148
  "integrity": "sha512-O42rBGr4UDSlhT2ZFMxqM7QzIU+IcpoTMzb3W7AlziI1ZF7R8eS2M0yt5Ry35nnnTX/LTLXFPUjRFCIW+Operg==",
3149
  "dev": true,
3150
- "peer": true,
3151
  "dependencies": {
3152
  "@shikijs/types": "3.13.0",
3153
  "@shikijs/vscode-textmate": "^10.0.2"
@@ -3158,7 +3159,6 @@
3158
  "resolved": "https://registry.npmjs.org/@shikijs/langs/-/langs-3.13.0.tgz",
3159
  "integrity": "sha512-672c3WAETDYHwrRP0yLy3W1QYB89Hbpj+pO4KhxK6FzIrDI2FoEXNiNCut6BQmEApYLfuYfpgOZaqbY+E9b8wQ==",
3160
  "dev": true,
3161
- "peer": true,
3162
  "dependencies": {
3163
  "@shikijs/types": "3.13.0"
3164
  }
@@ -3168,7 +3168,6 @@
3168
  "resolved": "https://registry.npmjs.org/@shikijs/themes/-/themes-3.13.0.tgz",
3169
  "integrity": "sha512-Vxw1Nm1/Od8jyA7QuAenaV78BG2nSr3/gCGdBkLpfLscddCkzkL36Q5b67SrLLfvAJTOUzW39x4FHVCFriPVgg==",
3170
  "dev": true,
3171
- "peer": true,
3172
  "dependencies": {
3173
  "@shikijs/types": "3.13.0"
3174
  }
@@ -3178,7 +3177,6 @@
3178
  "resolved": "https://registry.npmjs.org/@shikijs/types/-/types-3.13.0.tgz",
3179
  "integrity": "sha512-oM9P+NCFri/mmQ8LoFGVfVyemm5Hi27330zuOBp0annwJdKH1kOLndw3zCtAVDehPLg9fKqoEx3Ht/wNZxolfw==",
3180
  "dev": true,
3181
- "peer": true,
3182
  "dependencies": {
3183
  "@shikijs/vscode-textmate": "^10.0.2",
3184
  "@types/hast": "^3.0.4"
@@ -3195,6 +3193,7 @@
3195
  "resolved": "https://registry.npmjs.org/@sveltejs/vite-plugin-svelte/-/vite-plugin-svelte-3.1.2.tgz",
3196
  "integrity": "sha512-Txsm1tJvtiYeLUVRNqxZGKR/mI+CzuIQuc2gn+YCs9rMTowpNZ2Nqt53JdL8KF9bLhAf2ruR/dr9eZCwdTriRA==",
3197
  "dev": true,
 
3198
  "dependencies": {
3199
  "@sveltejs/vite-plugin-svelte-inspector": "^2.1.0",
3200
  "debug": "^4.3.4",
@@ -3642,6 +3641,7 @@
3642
  "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.15.0.tgz",
3643
  "integrity": "sha512-NZyJarBfL7nWwIq+FDL6Zp/yHEhePMNnnJ0y3qfieCrmNvYct8uvtiV41UvlSe6apAfk0fY1FbWx+NwfmpvtTg==",
3644
  "dev": true,
 
3645
  "bin": {
3646
  "acorn": "bin/acorn"
3647
  },
@@ -3771,6 +3771,8 @@
3771
  "resolved": "https://registry.npmjs.org/astro/-/astro-4.16.19.tgz",
3772
  "integrity": "sha512-baeSswPC5ZYvhGDoj25L2FuzKRWMgx105FetOPQVJFMCAp0o08OonYC7AhwsFdhvp7GapqjnC1Fe3lKb2lupYw==",
3773
  "dev": true,
 
 
3774
  "dependencies": {
3775
  "@astrojs/compiler": "^2.10.3",
3776
  "@astrojs/internal-helpers": "0.4.1",
@@ -4140,6 +4142,7 @@
4140
  "url": "https://github.com/sponsors/ai"
4141
  }
4142
  ],
 
4143
  "dependencies": {
4144
  "baseline-browser-mapping": "^2.8.3",
4145
  "caniuse-lite": "^1.0.30001741",
@@ -4276,6 +4279,7 @@
4276
  "resolved": "https://registry.npmjs.org/chevrotain/-/chevrotain-11.0.3.tgz",
4277
  "integrity": "sha512-ci2iJH6LeIkvP9eJW6gpueU8cnZhv85ELY8w8WiFtNjMHA5ad6pQLaJo9mEly/9qUyCpvqX8/POVUTf18/HFdw==",
4278
  "dev": true,
 
4279
  "dependencies": {
4280
  "@chevrotain/cst-dts-gen": "11.0.3",
4281
  "@chevrotain/gast": "11.0.3",
@@ -4617,6 +4621,7 @@
4617
  "resolved": "https://registry.npmjs.org/cytoscape/-/cytoscape-3.33.1.tgz",
4618
  "integrity": "sha512-iJc4TwyANnOGR1OmWhsS9ayRS3s+XQ185FmuHObThD+5AeJCakAAbWv8KimMTt08xCCLNgneQwFp+JRJOr9qGQ==",
4619
  "dev": true,
 
4620
  "engines": {
4621
  "node": ">=0.10"
4622
  }
@@ -4992,6 +4997,7 @@
4992
  "version": "3.0.0",
4993
  "resolved": "https://registry.npmjs.org/d3-selection/-/d3-selection-3.0.0.tgz",
4994
  "integrity": "sha512-fmTRWbNMmsmWq6xJV8D19U/gw/bwrHfNXxrIN+HfZgnzqTHp9jOmKMhsTUjXOJnZOdZY9Q28y4yebKzqDKlxlQ==",
 
4995
  "engines": {
4996
  "node": ">=12"
4997
  }
@@ -7231,6 +7237,7 @@
7231
  "resolved": "https://registry.npmjs.org/mermaid/-/mermaid-11.12.0.tgz",
7232
  "integrity": "sha512-ZudVx73BwrMJfCFmSSJT84y6u5brEoV8DOItdHomNLz32uBjNrelm7mg95X7g+C6UoQH/W6mBLGDEDv73JdxBg==",
7233
  "dev": true,
 
7234
  "dependencies": {
7235
  "@braintree/sanitize-url": "^7.1.1",
7236
  "@iconify/utils": "^3.0.1",
@@ -8528,15 +8535,13 @@
8528
  "version": "0.12.1",
8529
  "resolved": "https://registry.npmjs.org/oniguruma-parser/-/oniguruma-parser-0.12.1.tgz",
8530
  "integrity": "sha512-8Unqkvk1RYc6yq2WBYRj4hdnsAxVze8i7iPfQr8e4uSP3tRv0rpZcbGUDvxfQQcdwHt/e9PrMvGCsa8OqG9X3w==",
8531
- "dev": true,
8532
- "peer": true
8533
  },
8534
  "node_modules/oniguruma-to-es": {
8535
  "version": "4.3.3",
8536
  "resolved": "https://registry.npmjs.org/oniguruma-to-es/-/oniguruma-to-es-4.3.3.tgz",
8537
  "integrity": "sha512-rPiZhzC3wXwE59YQMRDodUwwT9FZ9nNBwQQfsd1wfdtlKEyCdRV0avrTcSZ5xlIvGRVPd/cx6ZN45ECmS39xvg==",
8538
  "dev": true,
8539
- "peer": true,
8540
  "dependencies": {
8541
  "oniguruma-parser": "^0.12.1",
8542
  "regex": "^6.0.1",
@@ -8879,6 +8884,7 @@
8879
  "url": "https://github.com/sponsors/ai"
8880
  }
8881
  ],
 
8882
  "dependencies": {
8883
  "nanoid": "^3.3.11",
8884
  "picocolors": "^1.1.1",
@@ -9570,6 +9576,7 @@
9570
  "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-7.1.0.tgz",
9571
  "integrity": "sha512-8sLjZwK0R+JlxlYcTuVnyT2v+htpdrjDOKuMcOVdYjt52Lh8hWRYpxBPoKx/Zg+bcjc3wx6fmQevMmUztS/ccA==",
9572
  "dev": true,
 
9573
  "dependencies": {
9574
  "cssesc": "^3.0.0",
9575
  "util-deprecate": "^1.0.2"
@@ -9765,7 +9772,6 @@
9765
  "resolved": "https://registry.npmjs.org/regex/-/regex-6.0.1.tgz",
9766
  "integrity": "sha512-uorlqlzAKjKQZ5P+kTJr3eeJGSVroLKoHmquUj4zHWuR+hEyNqlXsSKlYYF5F4NI6nl7tWCs0apKJ0lmfsXAPA==",
9767
  "dev": true,
9768
- "peer": true,
9769
  "dependencies": {
9770
  "regex-utilities": "^2.3.0"
9771
  }
@@ -9775,7 +9781,6 @@
9775
  "resolved": "https://registry.npmjs.org/regex-recursion/-/regex-recursion-6.0.2.tgz",
9776
  "integrity": "sha512-0YCaSCq2VRIebiaUviZNs0cBz1kg5kVS2UKUfNIx8YVs1cN3AV7NTctO5FOKBA+UT2BPJIWZauYHPqJODG50cg==",
9777
  "dev": true,
9778
- "peer": true,
9779
  "dependencies": {
9780
  "regex-utilities": "^2.3.0"
9781
  }
@@ -10678,6 +10683,7 @@
10678
  "resolved": "https://registry.npmjs.org/svelte/-/svelte-4.2.20.tgz",
10679
  "integrity": "sha512-eeEgGc2DtiUil5ANdtd8vPwt9AgaMdnuUFnPft9F5oMvU/FHu5IHFic+p1dR/UOB7XU2mX2yHW+NcTch4DCh5Q==",
10680
  "dev": true,
 
10681
  "dependencies": {
10682
  "@ampproject/remapping": "^2.2.1",
10683
  "@jridgewell/sourcemap-codec": "^1.4.15",
@@ -11146,6 +11152,7 @@
11146
  "resolved": "https://registry.npmjs.org/vite/-/vite-5.4.20.tgz",
11147
  "integrity": "sha512-j3lYzGC3P+B5Yfy/pfKNgVEg4+UtcIJcVRt2cDjIOmhLourAqPqf8P7acgxeiSgUB7E3p2P8/3gNIgDLpwzs4g==",
11148
  "dev": true,
 
11149
  "dependencies": {
11150
  "esbuild": "^0.21.3",
11151
  "postcss": "^8.4.43",
@@ -11394,6 +11401,7 @@
11394
  "resolved": "https://registry.npmjs.org/zod/-/zod-3.25.76.tgz",
11395
  "integrity": "sha512-gzUt/qt81nXsFGKIFcC3YnfEAx5NkunCfnDlvuBSSFS02bcXu4Lmea0AFIUwbLWxWPx3d9p8S5QoaujKcNQxcQ==",
11396
  "dev": true,
 
11397
  "funding": {
11398
  "url": "https://github.com/sponsors/colinhacks"
11399
  }
 
21
  "devDependencies": {
22
  "@astrojs/mdx": "^3.1.9",
23
  "@astrojs/svelte": "^5.5.0",
24
+ "astro": "^4.16.19",
25
  "astro-compressor": "^0.4.1",
26
  "astro-mermaid": "^1.0.4",
27
  "mermaid": "^11.10.1",
 
336
  "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.28.4.tgz",
337
  "integrity": "sha512-2BCOP7TN8M+gVDj7/ht3hsaO/B/n5oDbiAyyvnRlNOs+u1o+JWNYTQrmpuNp1/Wq2gcFrI01JAW+paEKDMx/CA==",
338
  "dev": true,
339
+ "peer": true,
340
  "dependencies": {
341
  "@babel/code-frame": "^7.27.1",
342
  "@babel/generator": "^7.28.3",
 
656
  "resolved": "https://registry.npmjs.org/@citation-js/core/-/core-0.7.18.tgz",
657
  "integrity": "sha512-EjLuZWA5156dIFGdF7OnyPyWFBW43B8Ckje6Sn/W2RFxHDu0oACvW4/6TNgWT80jhEA4bVFm7ahrZe9MJ2B2UQ==",
658
  "dev": true,
659
+ "peer": true,
660
  "dependencies": {
661
  "@citation-js/date": "^0.5.0",
662
  "@citation-js/name": "^0.4.2",
 
841
  "url": "https://opencollective.com/csstools"
842
  }
843
  ],
844
+ "peer": true,
845
  "engines": {
846
  "node": ">=18"
847
  },
 
864
  "url": "https://opencollective.com/csstools"
865
  }
866
  ],
867
+ "peer": true,
868
  "engines": {
869
  "node": ">=18"
870
  }
 
3126
  "resolved": "https://registry.npmjs.org/@shikijs/core/-/core-3.13.0.tgz",
3127
  "integrity": "sha512-3P8rGsg2Eh2qIHekwuQjzWhKI4jV97PhvYjYUzGqjvJfqdQPz+nMlfWahU24GZAyW1FxFI1sYjyhfh5CoLmIUA==",
3128
  "dev": true,
 
3129
  "dependencies": {
3130
  "@shikijs/types": "3.13.0",
3131
  "@shikijs/vscode-textmate": "^10.0.2",
 
3138
  "resolved": "https://registry.npmjs.org/@shikijs/engine-javascript/-/engine-javascript-3.13.0.tgz",
3139
  "integrity": "sha512-Ty7xv32XCp8u0eQt8rItpMs6rU9Ki6LJ1dQOW3V/56PKDcpvfHPnYFbsx5FFUP2Yim34m/UkazidamMNVR4vKg==",
3140
  "dev": true,
 
3141
  "dependencies": {
3142
  "@shikijs/types": "3.13.0",
3143
  "@shikijs/vscode-textmate": "^10.0.2",
 
3149
  "resolved": "https://registry.npmjs.org/@shikijs/engine-oniguruma/-/engine-oniguruma-3.13.0.tgz",
3150
  "integrity": "sha512-O42rBGr4UDSlhT2ZFMxqM7QzIU+IcpoTMzb3W7AlziI1ZF7R8eS2M0yt5Ry35nnnTX/LTLXFPUjRFCIW+Operg==",
3151
  "dev": true,
 
3152
  "dependencies": {
3153
  "@shikijs/types": "3.13.0",
3154
  "@shikijs/vscode-textmate": "^10.0.2"
 
3159
  "resolved": "https://registry.npmjs.org/@shikijs/langs/-/langs-3.13.0.tgz",
3160
  "integrity": "sha512-672c3WAETDYHwrRP0yLy3W1QYB89Hbpj+pO4KhxK6FzIrDI2FoEXNiNCut6BQmEApYLfuYfpgOZaqbY+E9b8wQ==",
3161
  "dev": true,
 
3162
  "dependencies": {
3163
  "@shikijs/types": "3.13.0"
3164
  }
 
3168
  "resolved": "https://registry.npmjs.org/@shikijs/themes/-/themes-3.13.0.tgz",
3169
  "integrity": "sha512-Vxw1Nm1/Od8jyA7QuAenaV78BG2nSr3/gCGdBkLpfLscddCkzkL36Q5b67SrLLfvAJTOUzW39x4FHVCFriPVgg==",
3170
  "dev": true,
 
3171
  "dependencies": {
3172
  "@shikijs/types": "3.13.0"
3173
  }
 
3177
  "resolved": "https://registry.npmjs.org/@shikijs/types/-/types-3.13.0.tgz",
3178
  "integrity": "sha512-oM9P+NCFri/mmQ8LoFGVfVyemm5Hi27330zuOBp0annwJdKH1kOLndw3zCtAVDehPLg9fKqoEx3Ht/wNZxolfw==",
3179
  "dev": true,
 
3180
  "dependencies": {
3181
  "@shikijs/vscode-textmate": "^10.0.2",
3182
  "@types/hast": "^3.0.4"
 
3193
  "resolved": "https://registry.npmjs.org/@sveltejs/vite-plugin-svelte/-/vite-plugin-svelte-3.1.2.tgz",
3194
  "integrity": "sha512-Txsm1tJvtiYeLUVRNqxZGKR/mI+CzuIQuc2gn+YCs9rMTowpNZ2Nqt53JdL8KF9bLhAf2ruR/dr9eZCwdTriRA==",
3195
  "dev": true,
3196
+ "peer": true,
3197
  "dependencies": {
3198
  "@sveltejs/vite-plugin-svelte-inspector": "^2.1.0",
3199
  "debug": "^4.3.4",
 
3641
  "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.15.0.tgz",
3642
  "integrity": "sha512-NZyJarBfL7nWwIq+FDL6Zp/yHEhePMNnnJ0y3qfieCrmNvYct8uvtiV41UvlSe6apAfk0fY1FbWx+NwfmpvtTg==",
3643
  "dev": true,
3644
+ "peer": true,
3645
  "bin": {
3646
  "acorn": "bin/acorn"
3647
  },
 
3771
  "resolved": "https://registry.npmjs.org/astro/-/astro-4.16.19.tgz",
3772
  "integrity": "sha512-baeSswPC5ZYvhGDoj25L2FuzKRWMgx105FetOPQVJFMCAp0o08OonYC7AhwsFdhvp7GapqjnC1Fe3lKb2lupYw==",
3773
  "dev": true,
3774
+ "license": "MIT",
3775
+ "peer": true,
3776
  "dependencies": {
3777
  "@astrojs/compiler": "^2.10.3",
3778
  "@astrojs/internal-helpers": "0.4.1",
 
4142
  "url": "https://github.com/sponsors/ai"
4143
  }
4144
  ],
4145
+ "peer": true,
4146
  "dependencies": {
4147
  "baseline-browser-mapping": "^2.8.3",
4148
  "caniuse-lite": "^1.0.30001741",
 
4279
  "resolved": "https://registry.npmjs.org/chevrotain/-/chevrotain-11.0.3.tgz",
4280
  "integrity": "sha512-ci2iJH6LeIkvP9eJW6gpueU8cnZhv85ELY8w8WiFtNjMHA5ad6pQLaJo9mEly/9qUyCpvqX8/POVUTf18/HFdw==",
4281
  "dev": true,
4282
+ "peer": true,
4283
  "dependencies": {
4284
  "@chevrotain/cst-dts-gen": "11.0.3",
4285
  "@chevrotain/gast": "11.0.3",
 
4621
  "resolved": "https://registry.npmjs.org/cytoscape/-/cytoscape-3.33.1.tgz",
4622
  "integrity": "sha512-iJc4TwyANnOGR1OmWhsS9ayRS3s+XQ185FmuHObThD+5AeJCakAAbWv8KimMTt08xCCLNgneQwFp+JRJOr9qGQ==",
4623
  "dev": true,
4624
+ "peer": true,
4625
  "engines": {
4626
  "node": ">=0.10"
4627
  }
 
4997
  "version": "3.0.0",
4998
  "resolved": "https://registry.npmjs.org/d3-selection/-/d3-selection-3.0.0.tgz",
4999
  "integrity": "sha512-fmTRWbNMmsmWq6xJV8D19U/gw/bwrHfNXxrIN+HfZgnzqTHp9jOmKMhsTUjXOJnZOdZY9Q28y4yebKzqDKlxlQ==",
5000
+ "peer": true,
5001
  "engines": {
5002
  "node": ">=12"
5003
  }
 
7237
  "resolved": "https://registry.npmjs.org/mermaid/-/mermaid-11.12.0.tgz",
7238
  "integrity": "sha512-ZudVx73BwrMJfCFmSSJT84y6u5brEoV8DOItdHomNLz32uBjNrelm7mg95X7g+C6UoQH/W6mBLGDEDv73JdxBg==",
7239
  "dev": true,
7240
+ "peer": true,
7241
  "dependencies": {
7242
  "@braintree/sanitize-url": "^7.1.1",
7243
  "@iconify/utils": "^3.0.1",
 
8535
  "version": "0.12.1",
8536
  "resolved": "https://registry.npmjs.org/oniguruma-parser/-/oniguruma-parser-0.12.1.tgz",
8537
  "integrity": "sha512-8Unqkvk1RYc6yq2WBYRj4hdnsAxVze8i7iPfQr8e4uSP3tRv0rpZcbGUDvxfQQcdwHt/e9PrMvGCsa8OqG9X3w==",
8538
+ "dev": true
 
8539
  },
8540
  "node_modules/oniguruma-to-es": {
8541
  "version": "4.3.3",
8542
  "resolved": "https://registry.npmjs.org/oniguruma-to-es/-/oniguruma-to-es-4.3.3.tgz",
8543
  "integrity": "sha512-rPiZhzC3wXwE59YQMRDodUwwT9FZ9nNBwQQfsd1wfdtlKEyCdRV0avrTcSZ5xlIvGRVPd/cx6ZN45ECmS39xvg==",
8544
  "dev": true,
 
8545
  "dependencies": {
8546
  "oniguruma-parser": "^0.12.1",
8547
  "regex": "^6.0.1",
 
8884
  "url": "https://github.com/sponsors/ai"
8885
  }
8886
  ],
8887
+ "peer": true,
8888
  "dependencies": {
8889
  "nanoid": "^3.3.11",
8890
  "picocolors": "^1.1.1",
 
9576
  "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-7.1.0.tgz",
9577
  "integrity": "sha512-8sLjZwK0R+JlxlYcTuVnyT2v+htpdrjDOKuMcOVdYjt52Lh8hWRYpxBPoKx/Zg+bcjc3wx6fmQevMmUztS/ccA==",
9578
  "dev": true,
9579
+ "peer": true,
9580
  "dependencies": {
9581
  "cssesc": "^3.0.0",
9582
  "util-deprecate": "^1.0.2"
 
9772
  "resolved": "https://registry.npmjs.org/regex/-/regex-6.0.1.tgz",
9773
  "integrity": "sha512-uorlqlzAKjKQZ5P+kTJr3eeJGSVroLKoHmquUj4zHWuR+hEyNqlXsSKlYYF5F4NI6nl7tWCs0apKJ0lmfsXAPA==",
9774
  "dev": true,
 
9775
  "dependencies": {
9776
  "regex-utilities": "^2.3.0"
9777
  }
 
9781
  "resolved": "https://registry.npmjs.org/regex-recursion/-/regex-recursion-6.0.2.tgz",
9782
  "integrity": "sha512-0YCaSCq2VRIebiaUviZNs0cBz1kg5kVS2UKUfNIx8YVs1cN3AV7NTctO5FOKBA+UT2BPJIWZauYHPqJODG50cg==",
9783
  "dev": true,
 
9784
  "dependencies": {
9785
  "regex-utilities": "^2.3.0"
9786
  }
 
10683
  "resolved": "https://registry.npmjs.org/svelte/-/svelte-4.2.20.tgz",
10684
  "integrity": "sha512-eeEgGc2DtiUil5ANdtd8vPwt9AgaMdnuUFnPft9F5oMvU/FHu5IHFic+p1dR/UOB7XU2mX2yHW+NcTch4DCh5Q==",
10685
  "dev": true,
10686
+ "peer": true,
10687
  "dependencies": {
10688
  "@ampproject/remapping": "^2.2.1",
10689
  "@jridgewell/sourcemap-codec": "^1.4.15",
 
11152
  "resolved": "https://registry.npmjs.org/vite/-/vite-5.4.20.tgz",
11153
  "integrity": "sha512-j3lYzGC3P+B5Yfy/pfKNgVEg4+UtcIJcVRt2cDjIOmhLourAqPqf8P7acgxeiSgUB7E3p2P8/3gNIgDLpwzs4g==",
11154
  "dev": true,
11155
+ "peer": true,
11156
  "dependencies": {
11157
  "esbuild": "^0.21.3",
11158
  "postcss": "^8.4.43",
 
11401
  "resolved": "https://registry.npmjs.org/zod/-/zod-3.25.76.tgz",
11402
  "integrity": "sha512-gzUt/qt81nXsFGKIFcC3YnfEAx5NkunCfnDlvuBSSFS02bcXu4Lmea0AFIUwbLWxWPx3d9p8S5QoaujKcNQxcQ==",
11403
  "dev": true,
11404
+ "peer": true,
11405
  "funding": {
11406
  "url": "https://github.com/sponsors/colinhacks"
11407
  }
app/package.json CHANGED
@@ -50,7 +50,7 @@
50
  "devDependencies": {
51
  "@astrojs/mdx": "^3.1.9",
52
  "@astrojs/svelte": "^5.5.0",
53
- "astro": "^4.10.0",
54
  "astro-compressor": "^0.4.1",
55
  "astro-mermaid": "^1.0.4",
56
  "mermaid": "^11.10.1",
@@ -82,4 +82,4 @@
82
  "prism-themes": "^1.9.0",
83
  "stream-browserify": "^3.0.0"
84
  }
85
- }
 
50
  "devDependencies": {
51
  "@astrojs/mdx": "^3.1.9",
52
  "@astrojs/svelte": "^5.5.0",
53
+ "astro": "^4.16.19",
54
  "astro-compressor": "^0.4.1",
55
  "astro-mermaid": "^1.0.4",
56
  "mermaid": "^11.10.1",
 
82
  "prism-themes": "^1.9.0",
83
  "stream-browserify": "^3.0.0"
84
  }
85
+ }
app/src/components/HtmlEmbed.astro CHANGED
@@ -70,9 +70,13 @@ const htmlWithId = id && html ? html.replace(/<div class="([^"]*)"[^>]*>/, `<div
70
  </script>
71
 
72
  <style is:global>
73
- .html-embed { margin: 0 0 var(--block-spacing-y);
 
74
  z-index: var(--z-elevated);
75
  position: relative;
 
 
 
76
  }
77
  .html-embed__title {
78
  text-align: left;
@@ -150,6 +154,15 @@ const htmlWithId = id && html ? html.replace(/<div class="([^"]*)"[^>]*>/, `<div
150
  .force-light-mode { filter: invert(0); color-scheme: light; background: white; padding: 20px; border-radius: 10px; }
151
  [data-theme="dark"] .force-light-mode .html-embed__card { background: white !important; border-color: #ddd !important; }
152
  [data-theme="dark"] .force-light-mode * { color: #333 !important; }
 
 
 
 
 
 
 
 
 
153
  @media print {
154
  .html-embed, .html-embed__card { max-width: 100% !important; width: 100% !important; margin-left: 0 !important; margin-right: 0 !important; }
155
  .html-embed__card { padding: 6px; }
 
70
  </script>
71
 
72
  <style is:global>
73
+ .html-embed {
74
+ margin: 0 0 var(--block-spacing-y);
75
  z-index: var(--z-elevated);
76
  position: relative;
77
+ width: min(1100px, 100vw - var(--content-padding-x) * 2);
78
+ margin-left: 50%;
79
+ transform: translateX(-50%);
80
  }
81
  .html-embed__title {
82
  text-align: left;
 
154
  .force-light-mode { filter: invert(0); color-scheme: light; background: white; padding: 20px; border-radius: 10px; }
155
  [data-theme="dark"] .force-light-mode .html-embed__card { background: white !important; border-color: #ddd !important; }
156
  [data-theme="dark"] .force-light-mode * { color: #333 !important; }
157
+
158
+ @media (max-width: 1024px) {
159
+ .html-embed {
160
+ width: 100%;
161
+ margin-left: 0;
162
+ transform: none;
163
+ }
164
+ }
165
+
166
  @media print {
167
  .html-embed, .html-embed__card { max-width: 100% !important; width: 100% !important; margin-left: 0 !important; margin-right: 0 !important; }
168
  .html-embed__card { padding: 6px; }
app/src/content/article.mdx CHANGED
@@ -16,18 +16,18 @@ tableOfContentsAutoCollapse: true
16
 
17
  import HtmlEmbed from "../components/HtmlEmbed.astro";
18
 
19
- ## Introduction
20
 
21
  One million lines of `python` code. Through them, the `transformers` library supports more than 400 model architectures, from state-of-the-art LLMs and VLMs to specialized models for audio, video, and tables.
22
 
23
- Built on `PyTorch`, it's a foundational tool for modern LLM usage, research, education, and tens of thousands of other open-source projects. Each AI model is added by the community, harmonized into a consistent interface, and tested daily on a CI to ensure reproducibility.
24
 
25
  This scale presents a monumental engineering challenge.
26
 
27
  How do you keep such a ship afloat, made of so many moving, unrelated parts, contributed to by a buzzing hivemind? Especially as the pace of ML research accelerates? We receive constant feedback on everything from function signatures with hundreds of arguments to duplicated code and optimization concerns, and we listen to all of it, or try to. The library's usage keeps on growing, and we are a small team of maintainers and contributors, backed by hundreds of open-source community members.
28
  We continue to support all new models and expect to do so for the foreseeable future.
29
 
30
- This post dissects the design philosophy that makes this possible today. It's a continuation of our older principles, detailed on our previous [philosophy](https://huggingface.co/docs/transformers/en/philosophy) page, as well as its accompanying [blog post from 2022](https://huggingface.co/blog/transformers-design-philosophy). More recently, and I recommend the read if it's not done yet, a blog post about [recent upgrades to transformers](https://huggingface.co/blog/faster-transformers) was written, explaining in particular what makes the library faster today. Again, all of that development was only made possible thanks to these principles.
31
 
32
  We codify the "tenets" that guide our development, demonstrate how they are implemented in code, and show the measurable impact they have on the library's sustainability and growth.
33
 
@@ -162,7 +162,9 @@ When `AutoModel.from_pretrained(...)` is called, it is indeed the modeling (righ
162
  What does that give us?
163
 
164
  <div class="crumbs">
165
- A small <code>modular_*.py</code> declares reuse; the expanded modeling file stays visible (<a href="#one-model-one-file">tenet kept</a>). Reviewers and contributors maintain the shard, not the repetition. <strong>Next:</strong> the measurable effect on effective LOC and maintenance cost.
 
 
166
  </div>
167
 
168
 
@@ -173,35 +175,37 @@ However, if a model has a modular_*.py and a corresponding automatically generat
173
 
174
  That gives an "effective LOC" curve: the 𝗺𝗮𝗶𝗻𝘁𝗲𝗻𝗮𝗻𝗰𝗲 𝘀𝘂𝗿𝗳𝗮𝗰𝗲.
175
 
176
- Measured on git history, raw `modeling_*.py` grew at ~362 LOC/day before modular; counting only modular shards yields ~25 LOC/day after — about **15× lower**. The curve represents the **maintenance surface** today: what maintainers actually read and review.
177
 
178
- Less code to hand-maintain means fewer places to break. LOC is not complexity, but they correlate in review effort and change risk.
179
 
180
  <HtmlEmbed src="transformers/loc-growth.html" />
181
 
182
- There's a sharp drop near the end, it's due to us [removing support for Jax and TensorFlow](https://github.com/huggingface/transformers/commit/4df2529d79d75f44e70396df5888a32ffa02d61e#diff-60849db3e9922197854ef1cac92bf4aba08b5d7fd3fe6f3c16a3511e29e0eacc) library-wide.
183
 
184
- Of course, it is not only this effort that allowed to reduce the maintenance load.
185
 
186
- A related optimization was the following one. You've likely heard about [flash attention](https://huggingface.co/docs/text-generation-inference/en/conceptual/flash_attention) and its several variants.
187
 
188
  The _attention computation_ itself happens at a _lower_ level of abstraction than the model itself.
189
 
190
- However, we were adding specific torch operations for each backend (sdpa, flash-attention iterations, flex attention) but it wasn't a [minimal user api](#minimal-user-api).
191
 
192
  <div class="crumbs">
193
- Evidence: effective LOC drops ~15× when counting shards instead of expanded modeling. Less to read, fewer places to break. Related cleanups: attention backends moved behind a function interface. <strong>Next:</strong> how the attention interface stays standard without hiding semantics.
 
 
194
  </div>
195
 
196
  ### <a id="attention-classes"></a> External Attention classes
197
 
198
- The solution of the "attention abstraction problem" we chose was to move to an [attention interface](https://huggingface.co/docs/transformers/en/attention_interface) that allows the following:
199
 
200
- We keep a `Callable` for the naive implementation of the attention, called "eager" computation. We thus name this Callable `eager_attention_forward`, and it can be run as long as the user had `torch` installed, which is a requirement in any case.
201
 
202
- In other words, we moved from a class interface to a function interface: in order to use more complex attention implementations, the config is checked, and can use other Callables, including kernel bindings that are much faster, if they are available.
203
 
204
- This exemplifies the fact that we prefer to have an interface that is [standard, but not abstract](#standardize-dont-abstract).
205
 
206
  ```python
207
  attention_interface: Callable = eager_attention_forward
@@ -209,9 +213,13 @@ if self.config._attn_implementation != "eager":
209
  attention_interface = ALL_ATTENTION_FUNCTIONS[self.config._attn_implementation]
210
  ```
211
 
212
- A strength of the new attention interface is the possibility to enforce specific kwargs, which are needed by kernel providers and other dependencies. We know that kwargs are often a necessary evil that plagues tools with widespread compatibility; and it is something we have aimed to reduce, and will continue reduce in order to improve readability - with them, the current system is a [minimal user api](#minimal-user-api).
 
 
 
 
213
 
214
- Hence, backend integrations sometimes require specific kwargs. We reduce that surface and document expectations; where flexibility is necessary, we plan to use `typing.Annotated` to convey shapes and invariants without constraining integrations. Such an implementation could look like this in the future:
215
 
216
  ```python
217
  from typing import Annotated
@@ -221,7 +229,9 @@ MyModelOutputAnnotated = Annotated[MyModelOutput, "shape: (B, C, H, W)"]
221
 
222
 
223
  <div class="crumbs">
224
- Semantics remain in <code>eager_attention_forward</code>; faster backends are opt-in via config. We inform via types/annotations rather than enforce rigid kwargs, preserving integrations. <strong>Next:</strong> distribution concerns are declared as a plan, not model surgery.
 
 
225
  </div>
226
 
227
  ### <a id="simpler-tensor-parallelism"></a> Configurable Tensor Parallelism
@@ -256,7 +266,7 @@ Sharding is configuration (<code>tp_plan</code>), not edits to <code>Linear</cod
256
 
257
  ### <a id="layers-attentions-caches"></a> Layers, attentions and caches
258
 
259
- Following the same logic, the _nature_ of attention and caching per layer of a model should not be hardcoded. We should be able to specify in a configuration-based fashion how each layer is implemented. Thus we define a mapping that can be then
260
 
261
 
262
  ```python
@@ -269,7 +279,7 @@ ALLOWED_LAYER_TYPES = (
269
  )
270
  ```
271
 
272
- and the configuration can be _explicit_ about which attention type is in which layer, see e.g. gpt-oss, which alternates sliding and full attention:
273
 
274
  ```python
275
  "layer_types": [
@@ -281,10 +291,12 @@ and the configuration can be _explicit_ about which attention type is in which l
281
  ],
282
  ```
283
 
284
- This is [minimal](#minimal-user-api) to implement on the user side, and allows to keep the modeling untouched. It is also easy to tweak.
285
 
286
  <div class="crumbs">
287
- Allowed layer types are explicit; schedules (e.g., sliding/full alternation) live in config. This keeps the file readable and easy to tweak. <strong>Next:</strong> speedups come from kernels that don't change semantics.
 
 
288
  </div>
289
 
290
 
@@ -324,10 +336,10 @@ Radically different architectures such as mamba have spawned their own dependenc
324
 
325
  <HtmlEmbed src="transformers/dependency-graph.html" />
326
 
327
- However, even if llava defines a few VLMs, there's far too many vision-based architectures that are not yet defined as modulars of other existing archs. In other words, there is no strong reference point in terms of software for vision models.
328
  As you can see, there is a small DETR island, a little llava pocket, and so on, but it's not comparable to the centrality observed for llama.
329
 
330
- Another problem is, this is only for `modular` models. Several models do NOT have a modular file.
331
 
332
  How do we spot them, and how do we identify modularisable models?
333
 
@@ -338,10 +350,11 @@ Graph reading guide: nodes are models; edges are modular imports. Llama-lineage
338
 
339
  ### Many models, but not enough yet, are alike
340
 
341
- So I looked into Jaccard similarity, which we use to measure set differences. I know that code is more than a set of characters stringed together. We also tried code-embedding models that ranked candidates better in practice, but for this post we stick to the deterministic Jaccard index.
342
 
 
343
 
344
- It is interesting, for that, to look at _when_ we deployed this modular logic and what was its rippling effect on the library. You can check the [larger space](https://huggingface.co/spaces/Molbap/transformers-modular-refactor) to play around, but the gist is: adding modular allowed to connect more and more models to solid reference points. We have a lot of gaps to fill in still.
345
 
346
  Zoom out below - it's full of models. You can click on a node to see its connections better, or use the text box to search for a model.
347
 
@@ -350,12 +363,12 @@ Zoom out below - it's full of models. You can click on a node to see its connect
350
  If you've checked out llava, you've seen that llava_video is a red node, connected by a red edge to llava: it's a candidate, something that we can _likely_ remodularize, [not touching the actual model](#backwards-compatibility) but being much more readable with [DRY*](#do-repeat-yourself).
351
 
352
  <div class="crumbs">
353
- Similarity (Jaccard; embeddings tried separately) surfaces likely parents; the timeline shows consolidation after modular landed. Red nodes/edges = candidates (e.g., <code>llava_video</code> → <code>llava</code>) for refactors that preserve behavior. <strong>Next:</strong> concrete VLM choices that avoid leaky abstractions.
354
  </div>
355
 
356
  ### VLM improvements, avoiding abstraction
357
 
358
- We don't have cookbook for common VLM patterns (image token scatter, multi‑tower encoders, cross‑attn bridges). This is one of the main improvement points where we can work.
359
 
360
  For instance, we thought of abstracting away the mixing of `inputs_embeds`, the tensor fed into an LLM decoder in 95% of the existing VLMs. It would have looked something like this:
361
 
@@ -364,7 +377,7 @@ class InputsEmbeddingMixerMixin(nn.Module):
364
  #
365
  ```
366
 
367
- But this is [abstracting away an important component of the modeling.](#standardize-dont-abstract). Embedding mixin is part of the model, removing it would break it. A user opening [`modeling_qwen2.5_vl`](https://huggingface.co/collections/Qwen/qwen25-vl-6795ffac22b334a837c0f9a5) should not have to go to another file to understand how it works.
368
 
369
  What is the current state of these “abstractions” across the codebase?
370
  You will see all the imports around a modeling file, here [Gemma3n](https://huggingface.co/google/gemma-3n-E4B-it).
@@ -452,27 +465,30 @@ A second one is the ability to fine-tune and pipeline these models into many oth
452
 
453
 
454
  <div class="crumbs">
455
- The shape of a contribution: add a model (or variant) with a small modular shard; the community and serving stacks pick it up immediately. Popularity trends (encoders/embeddings) guide where we invest. <strong>Next:</strong> power tools enabled by a consistent API.
 
456
  </div>
457
 
458
 
459
  ### <a id="encoders-ftw"></a> Models popularity
460
 
461
- Talking about dependencies, we can take a look at the number of downloads for transformer models popularity. One thing we see is the prominence of encoders: This is because the usage of encoders lies in embeddings, just check out [EmbeddingGemma](https://huggingface.co/blog/embeddinggemma) for a modern recap. Hence, it is vital to keep the encoders part viable, usable, fine-tune-able.
 
462
 
463
  <div>
464
  <HtmlEmbed src="transformers/model-visualisation.html" />
465
  </div>
466
 
467
- As the codebase grows, with our friend codebase [Sentence Transformers](https://huggingface.co/sentence-transformers), we need to maintain this one as well. Retrieval use-cases, smart databases, like FAISS-based indexing rely on it, and thus indirectly on transformers.
468
-
469
 
470
  In that regard, we DO want to be a modular toolbox, being [minimal](#minimal-user-api) enough and well documented enough so any ML/AI developer can use `transformers` without having to think about it. We aim to reduce the cognitive load brought about by model development, not increase it.
471
 
472
  So, how do these design choices, these "tenets" influence development of models and overall usage of transformers?
473
 
474
  <div class="crumbs">
475
- Encoders remain critical for embeddings and retrieval; maintaining them well benefits the broader ecosystem (e.g., Sentence Transformers, FAISS). <strong>Next:</strong> dev tools that leverage unified attention APIs and PyTorch-only internals.
 
 
476
  </div>
477
 
478
 
@@ -480,14 +496,17 @@ Encoders remain critical for embeddings and retrieval; maintaining them well ben
480
 
481
  ### Attention visualisation
482
 
483
- All models have the same API internally for attention computation, thanks to [the externalisation of attention classes](#external-attention-classes). it allows us to build cool tools to visualize the inner workings of the attention mechanism.
 
 
484
 
485
  One particular piece of machinery is the `attention mask`. Here you see the famous bidirectional attention pattern for the whole prefix (text + image) in PaliGemma and all Gemma2+ models, contrasting with the usual "causal-only" models.
486
 
487
  <HtmlEmbed src="transformers/attention-visualizer.html" />
488
 
489
  <div class="crumbs">
490
- Uniform attention APIs enable cross-model diagnostics (e.g., PaliGemma prefix bidirectionality vs causal). <strong>Next:</strong> whole-model tracing for ports and regressions.
 
491
  </div>
492
 
493
 
@@ -521,7 +540,7 @@ Pre-allocating GPU memory removes malloc spikes (e.g., 7× for 8B, 6× for 32B i
521
 
522
  ### Transformers-serve and continuous batching
523
 
524
- Having all these models readily available allows to use all of them with transformers-serve, and enable interfacing with them with an Open API-like pattern. As a reminder, the hub also opens access to various [inference providers](https://huggingface.co/docs/inference-providers/en/index) if you're interested in model deployment in general.
525
 
526
  ```bash
527
  transformers serve
@@ -531,18 +550,25 @@ curl -X POST http://localhost:8000/v1/chat/completions \
531
  -d '{"messages": [{"role": "system", "content": "hello"}], "temperature": 0.9, "max_tokens": 1000, "stream": true, "model": "Qwen/Qwen2.5-0.5B-Instruct"}'
532
  ```
533
 
534
- This provides an OpenAI-compatible API with features like [continuous batching](https://github.com/huggingface/transformers/pull/38085) (also check [here](https://github.com/huggingface/transformers/pull/40426)) for better GPU utilization.
535
 
536
- Continuous batching is in itself very much linked to the great work of vLLM with the `paged attention kernel`, further justifying the facilitation of [external kernels](#community-kernels).
 
 
 
 
 
 
 
537
 
538
  <div class="crumbs">
539
- OpenAI-compatible surface + continuous batching; kernels/backends slot in because the modeling API stayed stable. <strong>Next:</strong> reuse across vLLM/SGLang relies on the same consistency.
 
540
  </div>
541
 
542
 
543
  ## Community reusability
544
 
545
- Transformers-serve is transformers-first, for sure, but the library is made first and foremost to be _reused_ at large by the open-source ecosystem.
546
 
547
  Adding a model to transformers means:
548
 
@@ -553,11 +579,13 @@ This cements the need even more for a [consistent public surface](#consistent-pu
553
 
554
 
555
  <div class="crumbs">
556
- Being a good backend consumer requires a consistent public surface; modular shards and configs make that stability practical. <strong>Next:</strong> what changes in v5 without breaking the promise of visible semantics.
 
557
  </div>
558
 
559
  ## What is coming next
560
 
561
- The next major version of `transformers` is just around the corner (and will have another blog post to its name when it comes out.). When v5 is released, we aim to keep [backwards compatibility](#backwards-compatibility) as solid as possible. The changes we make now are in service of that goal.
 
 
562
 
563
- We will lean further into a modular toolbox, not a framework. You should not be forced to rewrite modeling code. It’s better when a model can inherit from `PreTrainedModel` and opt into Tensor Parallel, `from_pretrained`, sharding, `push_to_hub`, loss plumbing, and external stacks like PEFT/TRL/SGLang/vLLM.
 
16
 
17
  import HtmlEmbed from "../components/HtmlEmbed.astro";
18
 
19
+ ## Preface
20
 
21
  One million lines of `python` code. Through them, the `transformers` library supports more than 400 model architectures, from state-of-the-art LLMs and VLMs to specialized models for audio, video, and tables.
22
 
23
+ Built on `PyTorch`, transformers is a foundational tool for modern LLM usage, research, education, and tens of thousands of other open-source projects. Each AI model is added by the community, harmonized into a consistent interface, and tested daily on a CI to ensure reproducibility.
24
 
25
  This scale presents a monumental engineering challenge.
26
 
27
  How do you keep such a ship afloat, made of so many moving, unrelated parts, contributed to by a buzzing hivemind? Especially as the pace of ML research accelerates? We receive constant feedback on everything from function signatures with hundreds of arguments to duplicated code and optimization concerns, and we listen to all of it, or try to. The library's usage keeps on growing, and we are a small team of maintainers and contributors, backed by hundreds of open-source community members.
28
  We continue to support all new models and expect to do so for the foreseeable future.
29
 
30
+ This post dissects the design philosophy that makes this possible. It's the result of an evolution from our older principles, detailed on our previous [philosophy](https://huggingface.co/docs/transformers/en/philosophy) page, as well as its accompanying [blog post from 2022](https://huggingface.co/blog/transformers-design-philosophy). More recently (and we strongly recommend the read) we published a blog post about [recent upgrades to transformers](https://huggingface.co/blog/faster-transformers), focusing on what makes the library faster today. All of these developments are only made possible thanks to these principles.
31
 
32
  We codify the "tenets" that guide our development, demonstrate how they are implemented in code, and show the measurable impact they have on the library's sustainability and growth.
33
 
 
162
  What does that give us?
163
 
164
  <div class="crumbs">
165
+ <strong>TL;DR:</strong> A small <code>modular_*.py</code> declares reuse; the expanded modeling file stays visible (<a href="#one-model-one-file">One Model, One File tenet preserved</a>). Reviewers and contributors maintain the shard, not the repetition.
166
+
167
+ <strong>Next:</strong> the measurable effect on effective LOC and maintenance cost.
168
  </div>
169
 
170
 
 
175
 
176
  That gives an "effective LOC" curve: the 𝗺𝗮𝗶𝗻𝘁𝗲𝗻𝗮𝗻𝗰𝗲 𝘀𝘂𝗿𝗳𝗮𝗰𝗲.
177
 
178
+ Measured on git history, raw `modeling_*.py` grew at ~362 LOC/day before modular; counting only modular shards yields ~25 LOC/day after — about **15× lower**. The effective curve (blue line below) represents the **maintenance surface** today: what maintainers actually read and review.
179
 
180
+ Less code to hand-maintain means fewer places to break. LOC is not a direct measure of complexity, but the two correlate in review effort and change risk.
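+ As a hedged illustration of that counting rule, here is a minimal sketch (an illustrative script, not the exact one behind the plot below): when a model folder ships a `modular_*.py` shard, count the shard; otherwise count the expanded `modeling_*.py`.
+
+ ```python
+ # Illustrative sketch of the "effective LOC" counting rule (not the exact
+ # script used for the plot): prefer the modular shard when it exists.
+ import glob
+ import os
+
+ def loc(path):
+     with open(path, encoding="utf-8") as f:
+         return sum(1 for _ in f)
+
+ def effective_loc(models_dir="src/transformers/models"):
+     total = 0
+     for model in sorted(os.listdir(models_dir)):
+         folder = os.path.join(models_dir, model)
+         if not os.path.isdir(folder):
+             continue
+         modular = glob.glob(os.path.join(folder, "modular_*.py"))
+         modeling = glob.glob(os.path.join(folder, "modeling_*.py"))
+         # count the shard if present, else the expanded modeling file(s)
+         total += sum(loc(p) for p in (modular or modeling))
+     return total
+ ```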
181
 
182
  <HtmlEmbed src="transformers/loc-growth.html" />
183
 
184
+ If you zoom in, you'll notice a sharp drop near the end; it's essentially due to us [removing support for JAX and TensorFlow](https://github.com/huggingface/transformers/commit/4df2529d79d75f44e70396df5888a32ffa02d61e#diff-60849db3e9922197854ef1cac92bf4aba08b5d7fd3fe6f3c16a3511e29e0eacc) library-wide.
185
 
186
+ But this was not the only effort that allowed us to reduce maintenance load.
187
 
188
+ We recently undertook a deep refactor of the attention implementation. You've likely heard about [flash attention](https://huggingface.co/docs/text-generation-inference/en/conceptual/flash_attention) and its several variants.
189
 
190
  The _attention computation_ itself happens at a _lower_ level of abstraction than the model itself.
191
 
192
+ However, we were adding specific torch operations for each backend (sdpa, the several flash-attention iterations, flex attention), and the result wasn't a [minimal user api](#minimal-user-api). The next section explains what we did.
193
 
194
  <div class="crumbs">
195
+ Evidence: effective (i.e., maintainable) LOC growth drops ~15× when counting shards instead of expanded modeling files. Less code to read, fewer places to break.
196
+
197
+ <strong>Next:</strong> how the attention interface stays standard without hiding semantics.
198
  </div>
199
 
200
  ### <a id="attention-classes"></a> External Attention classes
201
 
202
+ The solution for the "attention abstraction problem" was to move to a standard [attention interface](https://huggingface.co/docs/transformers/en/attention_interface) that allows the following:
203
 
204
+ The naive implementation of attention, called "eager", is available by default. We use a `Callable` called `eager_attention_forward`, which can run as long as the user has PyTorch installed (a requirement in any case).
205
 
206
+ Instead of using a class interface and a class hierarchy, we just moved to a function interface. When a more complex attention implementation is needed, we use other Callables, including much faster kernel bindings when available. The decision to use a different attention implementation is based on the model configuration file we download from the Hub, and it can also be overridden by the user.
207
 
208
+ This is a clear example of our preference for an interface that is [standard, but not abstract](#standardize-dont-abstract). To be completely precise, this is what the interface selection looks like in transformers code:
209
 
210
  ```python
211
  attention_interface: Callable = eager_attention_forward
212
  if self.config._attn_implementation != "eager":
 
213
      attention_interface = ALL_ATTENTION_FUNCTIONS[self.config._attn_implementation]
214
  ```
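+ To see how this function interface composes in practice, here is a sketch based on the documented attention interface (the `AttentionInterface.register` entry point follows the docs linked above; exact names may vary across versions):
+
+ ```python
+ # Hedged sketch: wrap an existing attention Callable and select it by name.
+ from transformers import AttentionInterface, AutoModelForCausalLM
+ from transformers.integrations.sdpa_attention import sdpa_attention_forward
+
+ def logged_sdpa(*args, **kwargs):
+     # same calling convention as eager_attention_forward
+     print("entering attention")
+     return sdpa_attention_forward(*args, **kwargs)
+
+ AttentionInterface.register("logged_sdpa", logged_sdpa)
+
+ model = AutoModelForCausalLM.from_pretrained(
+     "Qwen/Qwen2.5-0.5B-Instruct", attn_implementation="logged_sdpa"
+ )
+ ```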
215
 
216
+ A strength of the new attention interface is the possibility to enforce specific kwargs, which are needed by kernel providers and other dependencies.
217
+
218
+ Backend integrations sometimes require specific kwargs.
219
+
220
+ We know that kwargs are often a necessary evil that plagues tools with widespread compatibility; they are something we have aimed to reduce, and will continue to reduce, in order to improve readability - with them, the current system is a [minimal user api](#minimal-user-api).
221
 
222
+ We reduce that surface and document expectations; where flexibility is necessary, we plan to use `typing.Annotated` to convey shapes and invariants without constraining integrations. Such an implementation could look like this in the future:
223
 
224
  ```python
225
  from typing import Annotated
226
  MyModelOutputAnnotated = Annotated[MyModelOutput, "shape: (B, C, H, W)"]
227
  ```
 
229
 
230
 
231
  <div class="crumbs">
232
+ Attention semantics remain in <code>eager_attention_forward</code>; faster backends are opt-in via config. We inform via types/annotations rather than enforce rigid kwargs, preserving integrations.
233
+
234
+ <strong>Next:</strong> parallel partitioning is declared as a plan, not through model surgery.
235
  </div>
236
 
237
  ### <a id="simpler-tensor-parallelism"></a> Configurable Tensor Parallelism
 
266
 
267
  ### <a id="layers-attentions-caches"></a> Layers, attentions and caches
268
 
269
+ Following the same logic, the _nature_ of attention and per-layer caching should not be hardcoded. We should be able to specify in the configuration how each layer is implemented. Thus, we define a mapping like:
270
 
271
 
272
  ```python
  ALLOWED_LAYER_TYPES = (
 
279
  )
280
  ```
281
 
282
+ and the configuration can be _explicit_ about which attention type is in which layer. See, for example, [gpt-oss](https://huggingface.co/openai/gpt-oss-120b/blob/main/config.json#L15), which alternates sliding and full attention:
283
 
284
  ```python
285
  "layer_types": [
 
291
  ],
292
  ```
293
 
294
+ This is [minimal](#minimal-user-api) to implement on the user side, and allows us to keep the modeling code untouched. It is also easy to tweak.
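+ As a minimal sketch of how such a schedule can be consumed (hypothetical names, not the actual transformers code):
+
+ ```python
+ # Hypothetical sketch: the schedule is data, not code. Swapping the list in
+ # config.json changes the attention pattern without touching the modeling file.
+ from dataclasses import dataclass, field
+
+ @dataclass
+ class TinyConfig:
+     layer_types: list = field(
+         default_factory=lambda: ["sliding_attention", "full_attention"] * 2
+     )
+
+ def mask_for(layer_type, layer_idx):
+     return f"{layer_type} mask for layer {layer_idx}"
+
+ def masks_per_layer(config):
+     return [mask_for(t, i) for i, t in enumerate(config.layer_types)]
+
+ print(masks_per_layer(TinyConfig()))
+ ```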
295
 
296
  <div class="crumbs">
297
+ Allowed layer types are explicit; schedules (e.g., sliding/full alternation) live in config. This keeps the file readable and easy to tweak.
298
+
299
+ <strong>Next:</strong> speedups come from kernels that don't change semantics.
300
  </div>
301
 
302
 
 
336
 
337
  <HtmlEmbed src="transformers/dependency-graph.html" />
338
 
339
+ In the case of VLMs, there are far too many vision-based architectures that are not yet defined as modulars of other existing architectures. In other words, there is no strong reference point in terms of software for vision models.
340
  As you can see, there is a small DETR island, a little llava pocket, and so on, but it's not comparable to the centrality observed for llama.
341
 
342
+ Another problem is that this visualization only shows `modular` models. Several models still do NOT have a modular file.
343
 
344
  How do we spot them, and how do we identify modularisable models?
345
 
 
350
 
351
  ### Many models, but not enough yet, are alike
352
 
353
+ I looked into Jaccard similarity, which we use to measure set differences, to find similarities across models. I know that code is more than a set of characters strung together. We also tried code-embedding models that ranked candidates better in practice, but for this post we stick to the deterministic Jaccard index. You can take a look at [the corresponding PR](https://github.com/huggingface/transformers/pull/41289) for the embedding method.
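+ For intuition, here is a minimal sketch of the Jaccard index over two modeling files, treating each file as a set of tokens (illustrative only, not the exact pipeline):
+
+ ```python
+ # Illustrative sketch: Jaccard similarity between two modeling files,
+ # with each file reduced to its set of whitespace-separated tokens.
+ def jaccard(path_a, path_b):
+     with open(path_a, encoding="utf-8") as f:
+         tokens_a = set(f.read().split())
+     with open(path_b, encoding="utf-8") as f:
+         tokens_b = set(f.read().split())
+     return len(tokens_a & tokens_b) / len(tokens_a | tokens_b)
+
+ # a score close to 1.0 between two modeling files flags a likely
+ # remodularization candidate (e.g., llava_video against llava)
+ ```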
354
 
355
+ It is interesting, for our comparison, to look at _when_ we deployed this modular logic and what was its rippling effect on the library. You can check the [larger space](https://huggingface.co/spaces/Molbap/transformers-modular-refactor) to play around, but the gist is: adding modular allowed us to connect more and more models to solid reference points.
356
 
357
+ Yet, we still have a lot of gaps to fill.
358
 
359
  Zoom out below - it's full of models. You can click on a node to see its connections better, or use the text box to search for a model.
360
 
 
363
  If you've checked out llava, you've seen that llava_video is a red node, connected by a red edge to llava: it's a candidate, something that we can _likely_ remodularize, [not touching the actual model](#backwards-compatibility) but being much more readable with [DRY*](#do-repeat-yourself).
364
 
365
  <div class="crumbs">
366
+ Similarity metrics (Jaccard index or embeddings) surface likely parents; the timeline shows consolidation after modular landed. Red nodes/edges = candidates (e.g., <code>llava_video</code> → <code>llava</code>) for refactors that preserve behavior. <strong>Next:</strong> concrete VLM choices that avoid leaky abstractions.
367
  </div>
368
 
369
  ### VLM improvements, avoiding abstraction
370
 
371
+ We don't yet have a cookbook for common VLM patterns (image token scatter, multi‑tower encoders, cross‑attention bridges). This is one of the main areas where we can improve.
372
 
373
  For instance, we thought of abstracting away the mixing of `inputs_embeds`, the tensor fed into an LLM decoder in 95% of the existing VLMs. It would have looked something like this:
374
 
 
377
  #
378
  ```
379
 
380
+ But this is [abstracting away an important component of the modeling](#standardize-dont-abstract). The embedding mixin is part of the model; removing it would break it. A user opening [`modeling_qwen2.5_vl`](https://github.com/huggingface/transformers/blob/b3bd815786c36f4e6c3791fae0a96cac86658b32/src/transformers/models/qwen2_5_vl/modeling_qwen2_5_vl.py#L1358) (check out the [Qwen2.5VL collection](https://huggingface.co/collections/Qwen/qwen25-vl-6795ffac22b334a837c0f9a5)) should not have to go to another file to understand how it works.
381
 
382
  What is the current state of these “abstractions” across the codebase?
383
  You will see all the imports around a modeling file, here [Gemma3n](https://huggingface.co/google/gemma-3n-E4B-it).
 
465
 
466
 
467
  <div class="crumbs">
468
+ The shape of a contribution: add a model (or variant) with a small modular shard; the community and serving stacks pick it up immediately. Popularity trends (encoders/embeddings) guide where we invest.
469
+ <strong>Next:</strong> power tools enabled by a consistent API.
470
  </div>
471
 
472
 
473
  ### <a id="encoders-ftw"></a> Models popularity
474
 
475
+ Talking about dependencies, we can take a look at the number of downloads as a measure of popularity. One thing we see is the prominence of encoders, despite the apparent prevalence of decoder LLMs. The reason is that encoders are used to generate embeddings, which have multiple downstream uses. Just check out [EmbeddingGemma](https://huggingface.co/blog/embeddinggemma) for a modern recap. Hence, it is vital to keep the encoders portion of the library viable, usable, fine-tune-able.
476
+
477
 
478
  <div>
479
  <HtmlEmbed src="transformers/model-visualisation.html" />
480
  </div>
481
 
482
+ As the codebase grows, we need to maintain it in coordination with our friend, the [Sentence Transformers codebase](https://huggingface.co/sentence-transformers). Retrieval use-cases, smart databases, and FAISS-based indexing rely on it, and thus indirectly on transformers.
 
483
 
484
  In that regard, we DO want to be a modular toolbox, being [minimal](#minimal-user-api) enough and well documented enough so any ML/AI developer can use `transformers` without having to think about it. We aim to reduce the cognitive load brought about by model development, not increase it.
485
 
486
  So, how do these design choices, these "tenets" influence development of models and overall usage of transformers?
487
 
488
  <div class="crumbs">
489
+ Encoders remain critical for embeddings and retrieval; maintaining them well benefits the broader ecosystem (e.g., Sentence Transformers, FAISS).
490
+
491
+ <strong>Next:</strong> dev tools that leverage unified attention APIs and PyTorch-only internals.
492
  </div>
493
 
494
 
 
496
 
497
  ### Attention visualisation
498
 
499
+ All models have the same API for attention computation, thanks to [the externalisation of attention classes](#external-attention-classes).
500
+
501
+ This uniformity allows us to build cool tools to visualize the inner workings of the attention mechanism.
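+ A sketch of such a diagnostic, assuming the `AttentionMaskVisualizer` utility available in recent transformers versions (exact import path and signature may differ):
+
+ ```python
+ # Hedged sketch: render the attention mask a given model builds for a prompt.
+ from transformers.utils.attention_visualizer import AttentionMaskVisualizer
+
+ visualizer = AttentionMaskVisualizer("meta-llama/Llama-3.2-1B")
+ visualizer("A short prompt to visualize")
+ ```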
502
 
503
  One particular piece of machinery is the `attention mask`. Here you see the famous bidirectional attention pattern for the whole prefix (text + image) in PaliGemma and all Gemma2+ models, contrasting with the usual "causal-only" models.
504
 
505
  <HtmlEmbed src="transformers/attention-visualizer.html" />
506
 
507
  <div class="crumbs">
508
+ Uniform attention APIs enable cross-model diagnostics (e.g., PaliGemma prefix bidirectionality vs causal).
509
+ <strong>Next:</strong> whole-model tracing for ports and regressions.
510
  </div>
511
 
512
 
 
540
 
541
  ### Transformers-serve and continuous batching
542
 
543
+ Having all these models readily available and sharing the same interface allowed us to implement transformers-serve, a CLI tool to expose models through a standard OpenAI-compatible HTTP API.
544
 
545
  ```bash
546
  transformers serve
  curl -X POST http://localhost:8000/v1/chat/completions \
 
550
  -d '{"messages": [{"role": "system", "content": "hello"}], "temperature": 0.9, "max_tokens": 1000, "stream": true, "model": "Qwen/Qwen2.5-0.5B-Instruct"}'
551
  ```
552
 
 
553
 
554
+ `transformers-serve` uses continuous batching (see [this PR](https://github.com/huggingface/transformers/pull/38085) and also [this one](https://github.com/huggingface/transformers/pull/40426)) for better GPU utilization, and is very much linked to the great work of vLLM with the `paged attention kernel`, a further justification of [external kernels](#community-kernels).
555
+
556
+ `transformers-serve` is not meant for user-facing production services (tools like vLLM or SGLang are heavily optimized for that), but it's useful for several use cases:
557
+ - Quickly verify that your model is compatible with continuous batching and paged attention.
558
+ - Run ad-hoc vibe tests on any model, without having to deploy anything.
559
+ - Run evaluations efficiently, again without having to spend a lot of time engineering your infrastructure.
560
+
561
+ For model deployment, check [Inference Providers](https://huggingface.co/docs/inference-providers/en/index) or roll your own solution using any of the excellent serving libraries.
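+ Because the surface is OpenAI-compatible, any standard client should work against it. A hedged sketch, reusing the model and port from the curl example above:
+
+ ```python
+ # Hedged sketch: point the standard OpenAI client at transformers-serve.
+ from openai import OpenAI
+
+ client = OpenAI(base_url="http://localhost:8000/v1", api_key="unused")
+ response = client.chat.completions.create(
+     model="Qwen/Qwen2.5-0.5B-Instruct",
+     messages=[{"role": "user", "content": "hello"}],
+     max_tokens=32,
+ )
+ print(response.choices[0].message.content)
+ ```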
562
 
563
  <div class="crumbs">
564
+ OpenAI-compatible surface + continuous batching; kernels/backends slot in because the modeling API stayed stable.
565
+ <strong>Next:</strong> reuse across vLLM/SGLang relies on the same consistency.
566
  </div>
567
 
568
 
569
  ## Community reusability
570
 
571
+ The transformers-serve CLI is built on transformers, of course, but the library is made first and foremost to be _reused_ at large by the open-source ecosystem.
572
 
573
  Adding a model to transformers means:
574
 
 
579
 
580
 
581
  <div class="crumbs">
582
+ Being a good backend consumer requires a consistent public surface; modular shards and configs make that stability practical.
583
+ <strong>Next:</strong> what changes in v5 without breaking the promise of visible semantics.
584
  </div>
585
 
586
  ## What is coming next
587
 
588
+ The next major version of `transformers` is just around the corner (and will have another blog post to its name when it comes out). When v5 is released, we aim to keep [backwards compatibility](#backwards-compatibility) as solid as possible. The changes we make now are in service of that goal.
589
+
590
+ We will lean further into a modular toolbox, not a framework. You should not be forced to rewrite modeling code. It’s better when a model can inherit from `PreTrainedModel` and opt into Tensor Parallel, `from_pretrained`, sharding, `push_to_hub`, loss plumbing, and external stacks like PEFT/TRL/SGLang/vLLM.
591
 
 
app/src/content/embeds/banner.html CHANGED
@@ -2,7 +2,7 @@
2
  @import url('https://fonts.googleapis.com/css2?family=Inter:wght@500;600&display=swap');
3
 
4
  .banner-container {
5
- width: 300%;
6
  height: 600px;
7
  position: relative;
8
  overflow: visible;
 
2
  @import url('https://fonts.googleapis.com/css2?family=Inter:wght@500;600&display=swap');
3
 
4
  .banner-container {
5
+ width: 100%;
6
  height: 600px;
7
  position: relative;
8
  overflow: visible;
app/src/content/new_article.mdx ADDED
@@ -0,0 +1,633 @@
1
+ ---
2
+ title: "Maintain the unmaintainable:\n1M python loc, 400+ models"
3
+ subtitle: "A peek into software engineering for the transformers library"
4
+ description: "A peek into software engineering for the transformers library"
5
+ authors:
6
+ - name: "Pablo Montalvo"
7
+ url: "https://huggingface.co/Molbap"
8
+ affiliations: [1]
9
+ affiliations:
10
+ - name: "Hugging Face"
11
+ url: "https://huggingface.co"
12
+ published: "October 2, 2025"
13
+ tags: [transformers, engineering, design-philosophy]
14
+ tableOfContentsAutoCollapse: true
15
+ ---
16
+
17
+ import HtmlEmbed from "../components/HtmlEmbed.astro";
18
+
19
+ ## Preface
20
+
21
+ One million lines of `python` code. Through them, the `transformers` library supports more than 400 model architectures, from state-of-the-art LLMs and VLMs to specialized models for audio, video, and tables.
22
+
23
+ Built on `PyTorch`, transformers is a foundational tool for modern LLM usage, research, education, and tens of thousands of other open-source projects. Each AI model is added by the community, harmonized into a consistent interface, and tested daily on a CI to ensure reproducibility.
24
+
25
+ This scale presents a monumental engineering challenge.
26
+
27
+ How do you keep such a ship afloat, made of so many moving, unrelated parts, contributed to by a buzzing hivemind? Especially as the pace of ML research accelerates? We receive constant feedback on everything from function signatures with hundreds of arguments to duplicated code and optimization concerns, and we listen to all of it, or try to. The library's usage keeps on growing, and we are a small team of maintainers and contributors, backed by hundreds of open-source community members.
28
+ We continue to support all new models and expect to do so for the foreseeable future.
29
+
30
+ This post dissects the design philosophy that makes this possible. It's the result of a gradual evolution from our older principles, detailed on our previous [philosophy](https://huggingface.co/docs/transformers/en/philosophy) page, as well as its accompanying [blog post from 2022](https://huggingface.co/blog/transformers-design-philosophy). More recently (and I do recommend the read), we wrote a blog post about [recent upgrades to transformers](https://huggingface.co/blog/faster-transformers) with a special focus on what makes the library faster today. All of these developments were only made possible thanks to these principles.
31
+
32
+ We formalize and articulate the "tenets" that have been guiding our development, demonstrate how they are implemented in code, and show the measurable impact they have on the library's sustainability and growth.
33
+
34
+ For any OSS maintainer, power user, or contributor, this is the map to understanding, using, and building upon `transformers`. But not only that: any project of comparable size will force deep choices, not only about design and abstractions, but about the very mindset of the software you are building. These tenets may or may not be applicable to your project, but they provide a glimpse into how we work that could be helpful or inspirational.
35
+
36
+ Conventions used throughout this post:
37
+
38
+ * [Tenets exemplified](#source-of-truth) will have their summary available on hover.
39
+
40
+ * [External links](https://huggingface.co/blog/welcome-openai-gpt-oss) to articles will help you solidify your knowledge.
41
+
42
+ * [Several interactive visualisations](#generated-modeling) are available as you go - scroll, zoom, drag away to explore.
43
+
44
+ <div class="crumbs">
45
+ * Breadcrumb boxes summarize what you just learned, connect it to the tenets, and point to what's coming <strong>Next</strong>. Think of them as narrative signposts to help you keep track.
46
+ </div>
47
+
48
+ We will get started by enumerating the tenets. Then we'll look at concrete examples that show how they shape our decision-making. These examples are necessarily detailed, and sometimes complex, because they illustrate the challenges of maintaining and growing a large codebase that caters to multiple collectives, has millions of users and hundreds of contributors, and always strives for simplicity and consistency.
49
+
50
+ ## The core tenets of transformers
51
+
52
+
53
+ We summarize the foundations on which we've built everything, and write the "tenets" of the library. They behave like _software interfaces_, hence it is crucial that they are explicitly written down. However opinionated they are, they have evolved over time.
54
+
55
+ These principles were not decided in a vacuum. The library _evolved_ towards them, and once they _emerged_, they were recognized as critical.
56
+
57
+ <div class="tenet-list">
58
+ <ol>
59
+ <li class="tenet">
60
+ <a id="source-of-truth"></a>
61
+ <strong>Source of Truth</strong>
62
+ <p>We aim to be a [source of truth for all model definitions](https://huggingface.co/blog/transformers-model-definition). This is more of a goal than a tenet, but it strongly guides our decisions. Model implementations should be reliable, reproducible, and faithful to the original implementations. If we are successful, they should become reference baselines for the ecosystem, so they'll be easily adopted by downstream libraries and projects. It's much easier for a project to _always_ refer to the transformers implementation than to learn a different research codebase every time a new architecture is released.</p>
63
+ <em>This overarching guideline ensures quality and reproducibility across all models in the library, and aspires to make the community work easier.</em>
64
+ </li>
65
+
66
+ <li class="tenet">
67
+ <a id="one-model-one-file"></a>
68
+ <strong>One Model, One File</strong>
69
+ <p>All inference and training core logic has to be visible, top‑to‑bottom, to maximize each model's hackability.</p>
70
+ <em>Every model should be completely understandable and hackable by reading a single file from top to bottom.</em>
71
+ </li>
72
+ <li class="tenet">
73
+ <a id="code-is-product"></a>
74
+ <strong>Code is The Product</strong>
75
+ <p>Optimize for reading, diffing, and tweaking. Our users are power users. Variables are explicit, we use full words, and even several words. Readability is primordial.</p>
76
+ <em>Code quality matters as much as functionality - optimize for human readers, not just computers.</em>
77
+ </li>
78
+ <li class="tenet">
79
+ <a id="standardize-dont-abstract"></a>
80
+ <strong>Standardize, Don't Abstract</strong>
81
+ <p>If it's model behavior, keep it in the file; only use abstractions for generic infra.</p>
82
+ <em>Model-specific logic belongs in the model file, not hidden behind abstractions.</em>
83
+ </li>
84
+ <li class="tenet">
85
+ <a id="do-repeat-yourself"></a>
86
+ <strong>DRY* (DO Repeat Yourself)</strong>
87
+ <p>Copy code when it helps users; keep successors in sync without centralizing behavior.</p>
88
+ <p><strong>Evolution:</strong> With the introduction and global adoption of <a href="#modular">modular</a> transformers, we do not repeat any logic in the modular files, but end user files remain faithful to the original tenet as if code had been copied to make modeling files standalone.</p>
89
+ <em>Strategic duplication can improve readability and maintainability when done thoughtfully.</em>
90
+ </li>
91
+ <li class="tenet">
92
+ <a id="minimal-user-api"></a>
93
+ <strong>Minimal User API</strong>
94
+ <p>Config, model, preprocessing; `from_pretrained`, `save_pretrained`, `push_to_hub`. We want the least amount of codepaths. Reading should be obvious, configurations should be obvious.</p>
95
+ <em>Keep the public interface simple and predictable, users should know what to expect.</em>
96
+ </li>
97
+ <li class="tenet">
98
+ <a id="backwards-compatibility"></a>
99
+ <strong>Backwards Compatibility</strong>
100
+ <p>Evolve by additive standardization, never break public APIs.</p>
101
+ <p>Any artifact that was once on the hub and loadable with transformers should be usable indefinitely with the same interface. Further, public methods should not change to avoid breaking dependencies.</p>
102
+ <em>Once something is public, it stays public. Evolution through addition, not breaking changes.</em>
103
+ </li>
104
+ <li class="tenet">
105
+ <a id="consistent-public-surface"></a>
106
+ <strong>Consistent Public Surface</strong>
107
+ <p>Same argument names, same outputs, hidden states and attentions exposed, enforced by tests. This is a goal as well as a tenet.</p>
108
+ <em>All models should feel familiar - consistent interfaces reduce cognitive load.</em>
109
+ </li>
110
+ </ol>
111
+ </div>
112
+
113
+
114
+ When a PR is merged, it is because the contribution is worthwhile, and because the `transformers` team finds the design of the contribution to be aligned with these principles.
115
+
116
+ Does all the code in the library strictly follow these tenets? No. The library is a gigantic house with connected nooks, corridors, crannies everywhere, built by thousands of different workers. We _try_ to make it so all the code added is compliant, because if we fail and merge it, we cannot change it lest we break [backwards compatibility](#backwards-compatibility).
117
+
118
+ <!-- I found the transition to the following example confusing. It implied (because of the previous paragraph and the `for instance` clause) that it's not following the tenets, where in fact it's something we WANT to do. Suggesting some slight reordering. -->
119
+
120
+ To see what constitutes adherence to the tenets, let's take the example of code repetition.
121
+
122
+ The following function, which is essential to the implementation of [Rotary Positional Embeddings](https://huggingface.co/papers/2104.09864), can be found in 70 `modeling_<file>.py` files across `src/transformers/models/`. Why keep it? Because we want all the model logic to be [contained in the modeling file](#one-model-one-file). In order to do that, we [do repeat ourselves](#do-repeat-yourself).
123
+
124
+ ```python
125
+ def rotate_half(x):
126
+ """Rotates half the hidden dims of the input."""
127
+ x1 = x[..., : x.shape[-1] // 2]
128
+ x2 = x[..., x.shape[-1] // 2 :]
129
+ return torch.cat((-x2, x1), dim=-1)
130
+ ```
131
+
132
+ You can use a simple regex, like [this one](), to list all methods of a given name across the codebase and compare their differences and similarities.
133
+ <!-- I'd maybe remove the previous line altogether and just use a link in the paragraph above -->
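+
+ For instance, a rough sketch of such a search, using nothing but the standard library (paths assume a local checkout of the repository):
+
+ ```python
+ # a rough sketch: list every modeling file that defines `rotate_half`
+ import re
+ from pathlib import Path
+
+ pattern = re.compile(r"^def rotate_half\(", re.MULTILINE)
+ for path in sorted(Path("src/transformers/models").rglob("modeling_*.py")):
+     if pattern.search(path.read_text()):
+         print(path)
+ ```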
134
+
135
+ We want all models to have self-contained modeling code. Every core functionality _must_ be in the modeling code, every non-core functionality _can_ be outside of it.
136
+
137
+ This comes at a great cost. For a long time we used the `#Copied from...` mechanism: we added comments that documented that some code was copied from another model, saving time both for the reviewers and for the CI: we had tooling to ensure that the copied blocks remained in sync. But the LOC count kept creeping up. Each new model copied over hundreds of lines that we considered largely boilerplate, yet, we could not remove them.
138
+
139
+ We needed to separate two principles that had so far been intertwined: [repetition](#do-repeat-yourself) and [hackability](#one-model-one-file).
140
+
141
+ What was the solution to this? Let's talk about modular transformers.
142
+
143
+ <div class="crumbs">
144
+ <strong>TL;DR:</strong> Read the code in one place (<a href="#one-model-one-file">One Model, One File</a>). Keep semantics local (<a href="#standardize-dont-abstract">Standardize, Don't Abstract</a>). Allow strategic duplication for end users (<a href="#do-repeat-yourself">DRY*</a>). Keep the public surface minimal and stable (<a href="#minimal-user-api">Minimal API</a>, <a href="#backwards-compatibility">Backwards Compatibility</a>, <a href="#consistent-public-surface">Consistent Surface</a>).
145
+
146
+ <strong>Next:</strong> how modular transformers honor these while removing boilerplate.
147
+ </div>
148
+
149
+
150
+ ## <a id="modular"></a> Modular transformers
151
+
152
+ Transformers is an opinionated library. The previous [philosophy](https://huggingface.co/docs/transformers/en/philosophy) page, and the [2022 blog post](https://huggingface.co/blog/transformers-design-philosophy), were already pointing at the drawbacks mentioned just above, which have been iteratively addressed. [`modular` transformers was introduced](https://huggingface.co/docs/transformers/en/modular_transformers) to allow a form of inheritance without breaking [One model, One file](#one-model-one-file).
153
+
154
+ We amended the principle of [DRY*](#do-repeat-yourself) by progressively removing all pieces of code that were "copied from" another file.
155
+
156
+ It works as follows. In order to contribute a model – GLM, for instance – we define a `modular_` file that can inherit from _any function across all other modeling, configuration and processor files already available in the library_. The modular file can use inheritance across models, and it is then unravelled into a fully functional and standalone modeling file.
157
+
158
+ <summary id="generated-modeling">Auto-generated modeling code</summary>
159
+
160
+ <HtmlEmbed src="transformers/glm-compare.html" />
161
+
162
+ As you can see, we can define a new model as a _modular_ combination of fragments taken from others.
163
+
164
+ You might think "well that's just how inheritance works". The crucial difference is that we do _visibly_ what is essentially the _compiler_'s job: by unrolling the inheritances, we make visible all of the modeling code, keeping it [all in one piece](#one-model-one-file).
165
+
166
+ <!-- some ideas for additional hand-holding: link to the implementation of `LlamaAttention` to show it was copied (and modified), or maybe provide a git diff view between the GlmAttention and LlamaAttention implementations -->
167
+
168
+ What is the consequence? When adding a model, we do not need to go over the entire modeling file. The modular (left side above) is enough.
169
+
170
+ When `AutoModel.from_pretrained(...)` is called, it is indeed the modeling (right side) that is run, and all the tests run on the modeling code. More importantly, the auto-generated modeling file is what users _read_ to understand the code, what they step through in their debuggers and what they hack for their needs.
171
+
172
+ What does that give us?
173
+
174
+ <div class="crumbs">
175
+ <strong>TL;DR:</strong> A small <code>modular_*.py</code> declares reuse; the expanded modeling file stays visible (<a href="#one-model-one-file">One Model, One File tenet preserved</a>). Reviewers and contributors maintain the shard, not the repetition.
176
+
177
+ <strong>Next:</strong> the measurable effect on effective LOC and maintenance cost.
178
+ </div>
179
+
180
+
181
+ ### A maintainable control surface
182
+
183
+ The effect of modular can be measured in lines of code (LOC). If a model only has a modeling file, we add its LOC count.
184
+ However, if a model has a `modular_*.py` and a corresponding automatically generated `modeling_*.py`, we only count the LOC in the modular file. The generated modeling code carries no maintenance cost of its own, since it is strictly derived from the modular file.
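+
+ A sketch of that counting rule, assuming a local checkout of the repository:
+
+ ```python
+ # effective LOC: count modular_*.py when present, the modeling files otherwise
+ from pathlib import Path
+
+ def effective_loc(model_dir: Path) -> int:
+     modulars = list(model_dir.glob("modular_*.py"))
+     files = modulars if modulars else list(model_dir.glob("modeling_*.py"))
+     return sum(len(f.read_text().splitlines()) for f in files)
+
+ models = Path("src/transformers/models")
+ print(sum(effective_loc(d) for d in models.iterdir() if d.is_dir()))
+ ```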
185
+
186
+ That gives an "effective LOC" curve: the **maintenance surface**.
187
+
188
+ Measured on git history, raw `modeling_*.py` grew at ~362 LOC/day before modular; counting only modular shards yields ~25 LOC/day after — about **15× lower**. The effective curve (blue line below) represents the **maintenance surface** today: what maintainers actually read and review.
189
+
190
+ <!-- Yeah, super good point that effective == maintenable -->
191
+
192
+ Less code to hand-maintain means fewer places to break. Of course, LOC is not a direct measure of complexity, but it correlates with review effort and change risk.
193
+
194
+ <HtmlEmbed src="transformers/loc-growth.html" />
195
+
196
+ <!-- What is "Modeling LOC (included)"? The modeling code, not counting the files that have a modular counterpart? If so, perhaps we can say that the blue line (effective) is the sum of the red + green, whereas the yellow would have been the progression without modular. Also worth mentioning imo that the surface area has been essentially constant (in LOC) since modular. -->
197
+
198
+ Notice the sharp drop at the end of the curves: this is mostly due to us [removing support for Jax and TensorFlow](https://github.com/huggingface/transformers/commit/4df2529d79d75f44e70396df5888a32ffa02d61e#diff-60849db3e9922197854ef1cac92bf4aba08b5d7fd3fe6f3c16a3511e29e0eacc) library-wide.
199
+
200
+ But this was not the only effort that allowed us to reduce maintenance load.
201
+
202
+ We recently undertook a thoughtful refactor of the attention implementation. You've likely heard about [flash attention](https://huggingface.co/docs/text-generation-inference/en/conceptual/flash_attention) and its several variants.
203
+
204
+ _Attention computation_ happens at a _lower_ level of abstraction than the model itself.
205
+
206
+ However, we had been adding backend-specific torch operations (sdpa, various flash-attention versions, flex attention) to every model, which was far from a [minimal user api](#minimal-user-api). The next section explains what we did about it.
207
+
208
+ <div class="crumbs">
209
+ Evidence: effective (i.e., maintainable) LOC growth drops ~15× when counting shards instead of expanded modeling files. Less code to read, fewer places to break.
210
+
211
+ <strong>Next:</strong> how the attention interface stays standard without hiding semantics.
212
+ </div>
213
+
214
+ ### <a id="attention-classes"></a> External Attention classes
215
+
216
+ The solution for the "attention abstraction problem" was to move to a standard [attention interface](https://huggingface.co/docs/transformers/en/attention_interface) that allows the following:
217
+
218
+ The naive implementation of attention, called "eager", is available by default. We use a `Callable` called `eager_attention_forward`, which can run as long as the user has PyTorch installed – which is a requirement anyway.
219
+
220
+ Instead of using a class interface and a class hierarchy, we just moved to a function interface. When a more complex attention implementation is needed, we use other Callables, including much faster kernel bindings when available. The decision to use a different attention implementation is based on the model configuration file we download from the Hub, and it can also be overridden by the user.
221
+
222
+ This is a clear example that we prefer an interface that is [standard, but not abstract](#standardize-dont-abstract). To be completely precise, this is what the interface selection looks like in transformers code:
223
+
224
+ ```python
225
+ attention_interface: Callable = eager_attention_forward
226
+ if self.config._attn_implementation != "eager":
227
+     attention_interface = ALL_ATTENTION_FUNCTIONS[self.config._attn_implementation]
228
+ ```
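+
+ In practice, the implementation is read from the checkpoint's config and can be overridden at load time; a minimal sketch (checkpoint name illustrative):
+
+ ```python
+ from transformers import AutoModelForCausalLM
+
+ model = AutoModelForCausalLM.from_pretrained(
+     "meta-llama/Llama-3.2-1B",  # illustrative checkpoint
+     attn_implementation="sdpa",  # or "eager", or "flash_attention_2" if installed
+ )
+ ```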
229
+
230
+ A strength of the new attention interface is the possibility to enforce specific kwargs, which are needed by kernel providers and other dependencies. We know that kwargs are often a necessary evil that plagues tools aiming for widespread compatibility; it is something we have aimed to reduce, and will continue to reduce, in order to improve readability – even with them, the current system remains a [minimal user api](#minimal-user-api).
231
+
232
+ <!-- not fully following the transition here -->
233
+
234
+ Backend integrations sometimes require specific kwargs. We reduce that surface and document expectations; where flexibility is necessary, we plan to use `typing.Annotated` to convey shapes and invariants without constraining integrations. Such an implementation could look like this in the future:
235
+
236
+ ```python
237
+ from typing import Annotated
238
+
239
+ MyModelOutputAnnotated = Annotated[MyModelOutput, "shape: (B, C, H, W)"]
240
+ ```
241
+
242
+
243
+ <div class="crumbs">
244
+ Attention semantics remain in <code>eager_attention_forward</code>; faster backends are opt-in via config. We inform via types/annotations rather than enforce rigid kwargs, preserving integrations.
245
+
246
+ <strong>Next:</strong> parallel partitioning is declared as a plan, not through model surgery.
247
+ </div>
248
+
249
+ ### <a id="simpler-tensor-parallelism"></a> Configurable Tensor Parallelism
250
+
251
+ If you're not familiar with the different flavours of parallelism, I recommend checking out [this blog post](https://huggingface.co/blog/accelerate-nd-parallel) first, and of course a full [dive into the ultra-scale playbook](https://huggingface.co/spaces/nanotron/ultrascale-playbook) is always recommended.
252
+
253
+ The essential part is that, as [the documentation states](https://huggingface.co/docs/transformers/v4.56.2/perf_train_gpu_many#tensor-parallelism), when tensors get too large to fit on a single GPU, they are sliced along a particular dimension and every slice is sent to a different GPU.
254
+
255
+ Why does it matter?
256
+
257
+ Because we want to avoid code modifications that are unrelated to the model.
258
+
259
+ We choose to place the level of abstraction higher than the device placement: a matrix multiplication – an `nn.Linear` layer – should always be expressed in the same way, regardless of how it is placed.
260
+
261
+ Hence, we want to touch the modeling code [minimally](#minimal-user-api), and only modify it when _architectural changes_ are involved – not when changing the way you run it. For tensor parallelism, we simply specify a `tp_plan`:
262
+
263
+ <HtmlEmbed src="transformers/tp-plan.html" />
264
+
265
+ The plan is written once, saved as part of the config, and passed to `.from_pretrained()`. It maps module name patterns to partitioning strategies. Strategies are resolved by the internal `ParallelInterface`, which wires them to sharding implementations such as `ColwiseParallel`, `RowwiseParallel`, packed variants, and so on.
266
+
267
+ The alternative would have been to modify model classes depending on the supported types of parallelism.
268
+
269
+ The `tp_plan` solution allows users to run the same model on a single GPU, or distribute it using multiple processes per node, e.g. 4 GPUs:
270
+
271
+ `torchrun --nproc-per-node 4 demo.py`
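+
+ where `demo.py` can be as small as the following sketch (the checkpoint name is illustrative; the plan itself ships with the config):
+
+ ```python
+ # demo.py - minimal sketch: the tp_plan is resolved from the checkpoint's config
+ import torch
+ from transformers import AutoModelForCausalLM, AutoTokenizer
+
+ model_id = "your/model-or-local-checkpoint"  # illustrative
+ model = AutoModelForCausalLM.from_pretrained(model_id, dtype=torch.bfloat16, tp_plan="auto")
+ tok = AutoTokenizer.from_pretrained(model_id)
+ inputs = tok("Hello", return_tensors="pt").to(model.device)
+ out = model(**inputs)
+ ```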
272
+
273
+ Semantics stay in the model (a Linear stays a Linear), while parallelization is orthogonal and declared via strings: "colwise" splits columns of weights/bias across ranks; "rowwise" splits rows; packed variants shard fused weights. The mapping keys accept glob patterns like `layers.*.mlp.down_proj` to target repeated submodules.
274
+
275
+ <div class="crumbs">
276
+ Parallelization is specified in the configuration (<code>tp_plan</code>), not through edits to <code>Linear</code>s. Glob patterns target repeated blocks; modeling semantics stay intact.
277
+
278
+ <strong>Next:</strong> per-layer attention/caching schedules declared in config, not hardcoded.
279
+ </div>
280
+
281
+ ### <a id="layers-attentions-caches"></a> Layers, attentions and caches
282
+
283
+ Following the same logic, the _nature_ of attention and per-layer caching should not be hardcoded. We should be able to specify in the configuration how each layer is implemented. Thus, we define a mapping like:
284
+
285
+
286
+ ```python
287
+ ALLOWED_LAYER_TYPES = (
288
+ "full_attention",
289
+ "sliding_attention",
290
+ "chunked_attention",
291
+ "linear_attention",
292
+ ...
293
+ )
294
+ ```
295
+
296
+ and the configuration can be _explicit_ about which attention type is in which layer. See, for example, [gpt-oss](https://huggingface.co/openai/gpt-oss-120b/blob/main/config.json#L15), which alternates sliding and full attention:
297
+
298
+ ```json
299
+ "layer_types": [
300
+     "sliding_attention",
301
+     "full_attention",
302
+     ...,
303
+     "sliding_attention",
304
+     "full_attention"
305
+ ],
306
+ ```
307
+
308
+ This is [minimal](#minimal-user-api) to implement on the user side, and it allows us to keep the modeling code untouched. It is also easy to tweak.
309
+
310
+ <div class="crumbs">
311
+ Allowed layer types are explicit; schedules (e.g., sliding/full alternation) live in config. This keeps the file readable and easy to tweak.
312
+
313
+ <strong>Next:</strong> speedups come from kernels that don't change semantics.
314
+ </div>
315
+
316
+
317
+ ### <a id="community-kernels"></a>Community Kernels
318
+
319
+ The same principle extends to normalization, activation, and other code paths. The model defines **semantics**; a kernel defines **how** to execute them faster. We annotate the module to borrow a community‑provided forward, keeping a [consistent public surface](#consistent-public-surface):
320
+
321
+ ```python
322
+ @use_kernel_forward_from_hub("RMSNorm")
323
+ class GlmRMSNorm(nn.Module):
324
+ ...
325
+ ```
326
+
327
+ This also opens another contribution path: GPU specialists can contribute optimized kernels to the [Kernels Hub](https://huggingface.co/kernels-community), and have them immediately available to use in `transformers` and other libraries. You can check the [kernel community blog post](https://huggingface.co/blog/hello-hf-kernels) to learn more about it!
328
+
329
+ Even more resources have been added, like the formidable [kernel builder](https://github.com/huggingface/kernel-builder) with its connected resources to [help you build kernels with it](https://github.com/huggingface/kernel-builder/blob/main/docs/writing-kernels.md) and [with nix](https://github.com/huggingface/kernel-builder/blob/main/docs/nix.md).
330
+
331
+ <div class="crumbs">
332
+ Models define semantics; kernels define how to run them faster. Use decorations to borrow community forwards while keeping a consistent public surface.
333
+
334
+ <strong>Next:</strong> what modularity looks like across the repo.
335
+ </div>
336
+
337
+ ## The State of Modular
338
+
339
+ Modular provides a form of inheritance in our codebase. Some models become standards, and model contributors have the opportunity to _define standards_ if their architectures are adopted. When that effort is made, pushing the boundaries of scientific knowledge can also push the boundaries of engineering, and we're striving for it.
340
+
341
+ It's hard to conceptualize very large libraries and how their components interact with each other, regardless of your capacity for abstraction.
342
+ So I wanted to take a look at the current **state of modularity** across the repository. How many models are defined using components of others?
343
+
344
+ To build the graph below, I used a heuristic based on modular inheritance:
345
+ 1. Does this model have a `modular` file?
346
+ 2. In this `modular` file, which models, configurations and processors are imported?
347
+ 3. Recurse through the model list that way (see the sketch just below).
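+
+ A rough version of this, run from a checkout of the repository (the regex is approximate):
+
+ ```python
+ # collect, per model, which other models its modular_*.py imports from
+ import re
+ from pathlib import Path
+
+ def modular_parents(model_dir: Path) -> set[str]:
+     parents: set[str] = set()
+     for modular in model_dir.glob("modular_*.py"):
+         # matches e.g. `from ..llama.modeling_llama import LlamaAttention`
+         parents |= set(re.findall(r"from \.+(\w+)\.\w+ import", modular.read_text()))
+     return parents
+
+ models = Path("src/transformers/models")
+ edges = {d.name: modular_parents(d) for d in models.iterdir() if d.is_dir()}
+ ```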
348
+
349
+ So what do we see? Llama is a basis and an influence for many models, and it shows.
350
+ Radically different architectures such as mamba have spawned their own dependency subgraph.
351
+
352
+ <!-- A couple of ideas here:
353
+ - Use screenshots to clearly show the points we make. For example, the cluster with Llama in the center, or the one about DETR/llava below.
354
+ - Use a link to open the viewer full-screen for better manipulation and exploration.
355
+ -->
356
+
357
+ (Graph reading guide: nodes are models; edges are modular imports).
358
+
359
+ <HtmlEmbed src="transformers/dependency-graph.html" />
360
+
361
+ In the case of VLMs, there are far too many vision-based architectures that are not yet defined as modulars of other existing archs. In other words, there is no strong software reference point for vision models yet.
362
+ As you can see, there is a small DETR island, a little llava pocket, and so on, but nothing comparable to the centrality observed around llama.
363
+
364
+ Another problem: this visualization only shows `modular` models. Several models still do NOT have a modular file.
365
+
366
+ How do we spot them, and how do we identify modularisable models?
367
+
368
+ <div class="crumbs">
369
+ Llama-lineage is a hub; several VLMs remain islands — engineering opportunity for shared parents.
370
+
371
+ <strong>Next:</strong> timeline + similarity signals to spot modularisable candidates.
372
+ </div>
373
+
374
+
375
+ ### Many models, but not enough yet, are alike
376
+
377
+ To find similarities across models, I looked into the Jaccard index, which measures the overlap between two sets. I know that code is more than a set of characters strung together. We also tried code-embedding models, which ranked candidates better in practice, but for this post we stick to the deterministic Jaccard index.
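+
+ As a crude illustration of the set view (file paths assume a local checkout):
+
+ ```python
+ # Jaccard index over whitespace-separated tokens of two modeling files
+ from pathlib import Path
+
+ def jaccard(a: str, b: str) -> float:
+     sa, sb = set(a.split()), set(b.split())
+     return len(sa & sb) / len(sa | sb)
+
+ llama = Path("src/transformers/models/llama/modeling_llama.py").read_text()
+ glm = Path("src/transformers/models/glm/modeling_glm.py").read_text()
+ print(f"{jaccard(llama, glm):.2f}")
+ ```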
378
+
379
+ It is interesting, for our comparison, to look at _when_ we deployed the modular logic and what its rippling effect on the library was. You can check the [larger space](https://huggingface.co/spaces/Molbap/transformers-modular-refactor) to play around, but the gist is: adding modular allowed us to connect more and more models to solid reference points. But we still have a lot of gaps to fill.
380
+
381
+ Zoom out below - it's full of models. You can click on a node to see its connections better, or use the text box to search for a model.
382
+
383
+ <HtmlEmbed src="transformers/model-timeline.html" />
384
+
385
+ <!-- screenshot would be helpful -->
386
+
387
+ If you check llava, you'll see that llava_video is a red node, connected by a red edge to llava: it's a candidate, something we can _likely_ remodularize, [without touching the actual model](#backwards-compatibility) while becoming much more readable thanks to [DRY*](#do-repeat-yourself).
388
+
389
+ <div class="crumbs">
390
+ Similarity metrics (Jaccard or embeddings) surface likely parents; the timeline shows consolidation after modular landed. Red nodes/edges = candidates (e.g., <code>llava_video</code> → <code>llava</code>) for refactors that preserve behavior.
391
+
392
+ <strong>Next:</strong> concrete VLM choices that avoid leaky abstractions.
393
+ </div>
394
+
395
+ ### VLM improvements, avoiding abstraction
396
+
397
+ We don't yet have a cookbook for common VLM patterns (image token scatter, multi‑tower encoders, cross‑attention bridges). This is one of the main areas where we can improve.
398
+
399
+ For instance, we thought of abstracting away the mixing of `inputs_embeds`, the tensor fed into an LLM decoder in 95% of the existing VLMs. It would have looked something like this:
400
+
401
+ ```python
402
+ class InputsEmbeddingMixerMixin(nn.Module):
403
+     ...
404
+ ```
405
+
406
+ But this is [abstracting away an important component of the modeling](#standardize-dont-abstract). The embedding mixer is part of the model, and removing it would break it. A user opening [`modeling_qwen2.5_vl`](https://huggingface.co/collections/Qwen/qwen25-vl-6795ffac22b334a837c0f9a5) should not have to go to another file to understand how it works.
407
+
408
+ <!-- ^ should we link to the code instead? -->
409
+
410
+ What is the current state of these “abstractions” across the codebase?
411
+ Below you can see all the imports around a modeling file, here [Gemma3n](https://huggingface.co/google/gemma-3n-E4B-it).
412
+
413
+ ![Gemma3n graph](/images/transformers/still_graph_bloat.png)
414
+
415
+
416
+ As you can see, the `GenerationMixin` node is already very heavy. It encompasses all of the utilities around `.generate`, and it is second only to `nn.Module`.
417
+ That means every decision we make to abstract something else has to be taken extremely carefully.
418
+
419
+ The following [Pull request to standardize placeholder masking](https://github.com/huggingface/transformers/pull/39777) is a good example of the kind of changes that are acceptable. In a VLM, we always need to insert embeddings from various encoders at various positions, so we can have a function to do it. For Qwen2 VL, for instance, it looks like this:
420
+
421
+ ```python
422
+ def get_placeholder_mask(
423
+     self,
424
+     input_ids: torch.LongTensor,
425
+     inputs_embeds: torch.FloatTensor,
426
+     image_features: torch.FloatTensor = None,
427
+     video_features: torch.FloatTensor = None,
428
+ ):
429
+     """
430
+     Obtains multimodal placeholder mask from `input_ids` or `inputs_embeds`, and checks that the placeholder token count is
431
+     equal to the length of multimodal features. If the lengths are different, an error is raised.
432
+     """
433
+     if input_ids is None:
434
+         special_image_mask = inputs_embeds == self.get_input_embeddings()(
435
+             torch.tensor(self.config.image_token_id, dtype=torch.long, device=inputs_embeds.device)
436
+         )
437
+         special_image_mask = special_image_mask.all(-1)
438
+         special_video_mask = inputs_embeds == self.get_input_embeddings()(
439
+             torch.tensor(self.config.video_token_id, dtype=torch.long, device=inputs_embeds.device)
440
+         )
441
+         special_video_mask = special_video_mask.all(-1)
442
+     else:
443
+         special_image_mask = input_ids == self.config.image_token_id
444
+         special_video_mask = input_ids == self.config.video_token_id
445
+
446
+     n_image_tokens = special_image_mask.sum()
447
+     special_image_mask = special_image_mask.unsqueeze(-1).expand_as(inputs_embeds).to(inputs_embeds.device)
448
+     if image_features is not None and inputs_embeds[special_image_mask].numel() != image_features.numel():
449
+         raise ValueError(
450
+             f"Image features and image tokens do not match: tokens: {n_image_tokens}, features {image_features.shape[0]}"
451
+         )
452
+
453
+     n_video_tokens = special_video_mask.sum()
454
+     special_video_mask = special_video_mask.unsqueeze(-1).expand_as(inputs_embeds).to(inputs_embeds.device)
455
+     if video_features is not None and inputs_embeds[special_video_mask].numel() != video_features.numel():
456
+         raise ValueError(
457
+             f"Video features and video tokens do not match: tokens: {n_video_tokens}, features {video_features.shape[0]}"
458
+         )
459
+
460
+     return special_image_mask, special_video_mask
461
+ ```
462
+
463
+ But this lives _within_ the modeling file, not in the `PreTrainedModel` base class. It will not move out of it, because that would break the [self-contained logic](#one-model-one-file) of the model.
464
+
465
+ <!-- So the main conclusion here is that VLMs should use modular more to come up with de-facto standard modules without abstracting them away? -->
466
+
467
+ <div class="crumbs">
468
+ Keep VLM embedding mix in the modeling file (semantics), standardize safe helpers (e.g., placeholder masking), don't migrate behavior to <code>PreTrainedModel</code>.
469
+
470
+ <strong>Next:</strong> pipeline-level wins that came from PyTorch-first choices (fast processors).
471
+ </div>
472
+
473
+
474
+ ### On image processing and processors
475
+
476
+ Deciding to become a `torch`-first library meant shedding a tremendous amount of support for `jax` and `TensorFlow`, and it also meant that we could be more liberal in the amount of torch-dependent utilities we accept. One of these is the _fast processing_ of images. Where inputs were once minimally assumed to be ndarrays, enforcing native `torch` and `torchvision` inputs allowed us to massively improve processing speed for each model.
477
+
478
+ The gains in performance are immense, up to a 20x speedup for most models when using compiled torchvision ops. Furthermore, this allows the whole pipeline to run solely on the GPU.
479
+
480
+ ![Fast Image Processors Performance](/images/transformers/fast_image_processors.png)
481
+ <p class="figure-legend">Thanks <a href="https://huggingface.co/yonigozlan">Yoni Gozlan</a> for the great work!</p>
482
+
483
+ <div class="crumbs">
484
+ PyTorch-first lets processors assume torch/torchvision and run the whole pipeline on GPU; big per-model speedups.
485
+
486
+ <strong>Next:</strong> how this lowers friction for contributors and downstream users.
487
+ </div>
488
+
489
+
490
+ ## Reduce barrier to entry/contribution
491
+
492
+ This is an overall objective: there's no `transformers` without its community.
493
+
494
+ Having a framework means forcing users into it, and that constrains flexibility and creativity – which are the fertile soil where new ideas grow.
495
+
496
+ Among the most valuable contributions to `transformers` is of course the addition of new models. Very recently, [OpenAI added GPT-OSS](https://huggingface.co/blog/welcome-openai-gpt-oss), which prompted the addition of many new features to the library in order to support [their model](https://huggingface.co/openai/gpt-oss-120b). These additions are immediately available for other models to use.
497
+
498
+ Another important advantage is the ability to fine-tune and pipeline these models into many other libraries and tools. Check on the Hub how many fine-tunes are registered for [gpt-oss 120b](https://huggingface.co/models?other=base_model:finetune:openai/gpt-oss-120b), despite its size!
499
+
500
+
501
+ <div class="crumbs">
502
+ The shape of a contribution: add a model (or variant) with a small modular shard; the community and serving stacks pick it up immediately. Popularity trends (encoders/embeddings) guide where we invest.
503
+
504
+ <strong>Next:</strong> power tools enabled by a consistent API.
505
+ </div>
506
+
507
+
508
+ ### <a id="encoders-ftw"></a> Models popularity
509
+
510
+ Talking about dependencies, we can take a look at the number of downloads as a measure of popularity. One thing we see is the prominence of encoders, despite the apparent prevalence of decoder LLMs. The reason is that encoders are used to generate embeddings, which have multiple downstream uses. Just check out [EmbeddingGemma](https://huggingface.co/blog/embeddinggemma) for a modern recap. Hence, it is vital to keep the encoders portion of the library viable, usable and fine-tunable.
511
+
512
+ <div>
513
+ <HtmlEmbed src="transformers/model-visualisation.html" />
514
+ </div>
515
+
516
+ As the codebase grows, we need to maintain it in coordination with the [Sentence Transformers](https://huggingface.co/sentence-transformers) codebase. Retrieval use-cases, smart databases and FAISS-based indexing rely on it, and thus indirectly on transformers.
517
+
518
+ In that regard, we DO want to be a modular toolbox, being [minimal](#minimal-user-api) enough and well documented enough so any ML/AI developer can use `transformers` without having to think about it. We aim to reduce the cognitive load brought about by model development, not increase it.
519
+
520
+ So, how do these design choices, these "tenets", influence the development of models and the overall usage of transformers?
521
+
522
+ <div class="crumbs">
523
+ Encoders remain critical for embeddings and retrieval; maintaining them well benefits the broader ecosystem (e.g., Sentence Transformers, FAISS).
524
+
525
+ <strong>Next:</strong> dev tools that leverage unified attention APIs and PyTorch-only internals.
526
+ </div>
527
+
528
+
529
+ ## A surgical toolbox for model development
530
+
531
+ Transformers provides many tools that can help you while adding a new architecture, or help you understand the inner workings of the library.
532
+
533
+ ### Attention visualisation
534
+
535
+ All models have the same internal API for attention computation, thanks to [the externalisation of attention classes](#attention-classes). This allows us to build cool tools to visualize the inner workings of the attention mechanism.
536
+
537
+ One particular piece of machinery is the attention mask. Here you can see the famous bidirectional attention pattern for the whole prefix (text + image) in PaliGemma and all Gemma2+ models, contrasting with the usual "causal-only" models.
538
+
539
+ <HtmlEmbed src="transformers/attention-visualizer.html" />
540
+
541
+ <div class="crumbs">
542
+ Uniform attention APIs enable cross-model diagnostics (e.g., PaliGemma prefix bidirectionality vs causal).
543
+
544
+ <strong>Next:</strong> whole-model tracing for ports and regressions.
545
+ </div>
546
+
547
+
548
+ ### Logging entire model activations
549
+
550
+ Because everything is PyTorch, we can easily [debug any model](https://huggingface.co/docs/transformers/internal/model_debugging_utils) when we want to add it to transformers. We now have a power-user tool for porting or adding models that wraps a forward pass, intercepts every submodule call, and logs shapes, dtypes, and sample statistics of inputs/outputs to nested JSON.
551
+
552
+ It just works with PyTorch models and is especially useful when aligning outputs with a reference implementation, to match our [Source of Truth guideline](#source-of-truth).
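+
+ A sketch adapted from that documentation page - the exact import path and signature may differ across versions, so treat this as illustrative:
+
+ ```python
+ import torch
+ from transformers import AutoModelForCausalLM, AutoTokenizer
+ from transformers.model_debugging_utils import model_addition_debugger_context
+
+ model_id = "meta-llama/Llama-3.2-1B"  # illustrative checkpoint
+ tok = AutoTokenizer.from_pretrained(model_id)
+ model = AutoModelForCausalLM.from_pretrained(model_id)
+ inputs = tok("Hello", return_tensors="pt")
+
+ # every submodule call is intercepted and logged to nested JSON under debug_path
+ with model_addition_debugger_context(model, debug_path="debug_traces", do_prune_layers=False):
+     _ = model(**inputs)
+ ```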
553
+
554
+ ![Model debugger interface](/images/transformers/model_debugger.png)
555
+
556
+
557
+ <div class="crumbs">
558
+ Forward interception and nested JSON logging align ports to reference implementations, reinforcing "Source of Truth."
559
+
560
+ <strong>Next:</strong> CUDA warmup reduces load-time without touching modeling semantics.
561
+ </div>
562
+
563
+
564
+
565
+ ### Cooking faster CUDA warmups
566
+
567
+ Having a clean _external_ API allows us to work on the [true inner workings of transformers](#code-is-product). One of a few recent additions was the _CUDA warmup_ via `caching_allocator_warmup`, which dramatically improved loading times by pre-allocating GPU memory to avoid malloc bottlenecks during model loading. It can achieve a 7x speedup factor for an 8B model, or 6x for a 32B one, as you can check in [the PR](https://github.com/huggingface/transformers/pull/36380)!
568
+
569
+ <HtmlEmbed src="transformers/warmup_demo.html" />
570
+
571
+ It's hard to overstate how much of a lifesaver that is when you're trying to load a model as fast as possible, as it's the narrowest bottleneck for your iteration speed.
572
+
573
+ <div class="crumbs">
574
+ Pre-allocating GPU memory removes malloc spikes (e.g., 7× for 8B, 6× for 32B in the referenced PR).
575
+
576
+ <strong>Next:</strong> consistent interfaces allow transformers-serve.
577
+ </div>
578
+
579
+
580
+ ### Transformers-serve and continuous batching
581
+
582
+ Having all these models readily available and sharing the same interface allowed us to implement transformers-serve, a CLI tool to expose models through a standard OpenAI-compatible HTTP API.
583
+
584
+ ```bash
585
+ transformers serve
586
+
587
+ curl -X POST http://localhost:8000/v1/chat/completions \
588
+ -H "Content-Type: application/json" \
589
+ -d '{"messages": [{"role": "system", "content": "hello"}], "temperature": 0.9, "max_tokens": 1000, "stream": true, "model": "Qwen/Qwen2.5-0.5B-Instruct"}'
590
+ ```
591
+
592
+ transformers-serve uses continuous batching (see [this PR](https://github.com/huggingface/transformers/pull/38085) and also [this one](https://github.com/huggingface/transformers/pull/40426)) for better GPU utilization, and is very much linked to the great work of vLLM with the `paged attention kernel` – a further justification of [external kernels](#community-kernels).
593
+
594
+ transformers-serve is not meant for user-facing production services – tools like vLLM or SGLang are super optimized for that – but it's useful for several use cases:
595
+ - Quickly verify that your model is compatible with continuous batching and paged attention.
596
+ - Run ad-hoc vibe tests on any model, without worrying about deploying anything.
597
+ - Run evaluations efficiently, again without having to spend a lot of time engineering your infrastructure.
598
+
599
+ For model deployment, check [Inference Providers](https://huggingface.co/docs/inference-providers/en/index) or roll your own solution using any of the excellent serving libraries.
600
+
601
+ <div class="crumbs">
602
+ OpenAI-compatible surface + continuous batching; kernels/backends slot in because the modeling API stayed stable.
603
+
604
+ <strong>Next:</strong> reuse across vLLM/SGLang relies on the same consistency.
605
+ </div>
606
+
607
+
608
+ ## Community reusability
609
+
610
+ The transformers-serve CLI is built on transformers, of course, but the library is made first and foremost to be _reused_ at large by the open-source ecosystem.
611
+
612
+ Adding a model to transformers means:
613
+
614
+ - having it immediately available to the community
615
+ - having it immediately usable in vLLM, [SGLang](https://huggingface.co/blog/transformers-backend-sglang), and so on without additional code. In the case of vLLM, transformers was added as a backend to run models on vLLM, which optimizes throughput/latency on top of existing transformers architectures, [as seen in this great vLLM x HF blog post](https://blog.vllm.ai/2025/04/11/transformers-backend.html).
616
+ - being the reference code for implementations in MLX, llama.cpp and other libraries.
617
+
618
+ This further cements the need for a [consistent public surface](#consistent-public-surface): we are a backend and a reference, and there's more software than us to handle serving. At the time of writing, more effort is being put in that direction. We already have vLLM-compatible configs for VLMs (say that three times fast); check [here for GLM4 video support](https://github.com/huggingface/transformers/pull/40696/files), and here for [MoE support](https://github.com/huggingface/transformers/pull/40132), for instance.
619
+
620
+
621
+ <div class="crumbs">
622
+ Being a good backend consumer requires a consistent public surface; modular shards and configs make that stability practical.
623
+
624
+ <strong>Next:</strong> what changes in v5 without breaking the promise of visible semantics.
625
+ </div>
626
+
627
+ ## What is coming next
628
+
629
+ The next major version of `transformers` is just around the corner (and will have another blog post to its name when it comes out). When v5 is released, we aim to keep [backwards compatibility](#backwards-compatibility) as solid as possible. The changes we make now are in service of that goal.
630
+
631
+ We will lean further into a modular toolbox, not a framework. You should not be forced to rewrite modeling code. It’s better when a model can inherit from `PreTrainedModel` and opt into Tensor Parallel, `from_pretrained`, sharding, `push_to_hub`, loss plumbing, and external stacks like PEFT/TRL/SGLang/vLLM.
632
+
633
+ <!-- Maybe end with some statement that shows lots of excitement -->
app/src/styles/global.css CHANGED
@@ -32,4 +32,33 @@
32
  .mermaid {
33
  background: none!important;
34
  margin-bottom: var(--block-spacing-y) !important;
35
+ }
36
+
37
+ .content-grid main img {
38
+ max-width: 100%;
39
+ height: auto;
40
+ width: min(1100px, 100vw - var(--content-padding-x) * 2);
41
+ margin-left: 50%;
42
+ transform: translateX(-50%);
43
+ display: block;
44
+ }
45
+
46
+ .content-grid main .figure-legend {
47
+ text-align: center;
48
+ font-size: 0.9rem;
49
+ color: var(--muted-color);
50
+ font-style: italic;
51
+ margin: var(--spacing-2) 0 var(--spacing-4);
52
+ width: min(1100px, 100vw - var(--content-padding-x) * 2);
53
+ margin-left: 50%;
54
+ transform: translateX(-50%);
55
+ }
56
+
57
+ @media (max-width: 1024px) {
58
+ .content-grid main img,
59
+ .content-grid main .figure-legend {
60
+ width: 100%;
61
+ margin-left: 0;
62
+ transform: none;
63
+ }
64
  }
dist/distill.bundle.js DELETED
The diff for this file is too large to render. See raw diff
 
dist/distill.bundle.js.map DELETED
The diff for this file is too large to render. See raw diff
 
dist/fragments/attention-visualizer.html DELETED
@@ -1,45 +0,0 @@
1
- <!-- Minimal HTML fragment: terminal-style ASCII attention masks -->
2
- <div style="max-width: 940px; margin: 16px 0; border:1px solid #2a2f3a; border-radius:8px; background:#0b0f19; font-family: ui-monospace, SFMono-Regular, Menlo, Monaco, Consolas, 'Liberation Mono', 'Courier New', monospace; color:#e5e7eb;">
3
- <div style="display:flex; align-items:center; gap:8px; padding:8px 10px; border-bottom:1px solid #1f2430; background:#111827; border-top-left-radius:8px; border-top-right-radius:8px;">
4
- <span style="width:10px; height:10px; background:#ef4444; border-radius:50%; display:inline-block;"></span>
5
- <span style="width:10px; height:10px; background:#f59e0b; border-radius:50%; display:inline-block;"></span>
6
- <span style="width:10px; height:10px; background:#22c55e; border-radius:50%; display:inline-block;"></span>
7
- <span style="margin-left:8px; font-size:12px; color:#9ca3af;">attention-mask-visualizer</span>
8
- </div>
9
- <div style="padding:12px 14px; overflow:auto; font-size:12.5px; line-height:1.4;">
10
- <pre style="margin:0; white-space:pre; tab-size:2;">
11
- ATTN MASK — GPT-2 (causal)
12
- Tokens: [The, cat, sat, on, the, mat]
13
- Legend: x = can attend, . = masked (future)
14
-
15
- The cat sat on the mat
16
- The x
17
- cat x x
18
- sat x x x
19
- on x x x x
20
- the x x x x x
21
- mat x x x x x x
22
-
23
-
24
- ATTN MASK — PaliGemma-style (bidirectional prefix + causal suffix)
25
- Prefix: [&lt;i0&gt; &lt;i1&gt; &lt;i2&gt; &lt;i3&gt; &lt;i4&gt; What is this]
26
- Suffix: [A great duck]
27
- Legend: ✓ = can attend, ✗ = cannot
28
-
29
- &lt;i0&gt;&lt;i1&gt;&lt;i2&gt;&lt;i3&gt;&lt;i4&gt; What is this | A great duck
30
- &lt;i0&gt; ✓ ✓ ✓ ✓ ✓ ✓ ✓ ✓ ✗ ✗ ✗
31
- &lt;i1&gt; ✓ ✓ ✓ ✓ ✓ ✓ ✓ ✓ ✗ ✗ ✗
32
- &lt;i2&gt; ✓ ✓ ✓ ✓ ✓ ✓ ✓ ✓ ✗ ✗ ✗
33
- &lt;i3&gt; ✓ ✓ ✓ ✓ ✓ ✓ ✓ ✓ ✗ ✗ ✗
34
- &lt;i4&gt; ✓ ✓ ✓ ✓ ✓ ✓ ✓ ✓ ✗ ✗ ✗
35
- What ✓ ✓ ✓ ✓ ✓ ✓ ✓ ✓ ✗ ✗ ✗
36
- is ✓ ✓ ✓ ✓ ✓ ✓ ✓ ✓ ✗ ✗ ✗
37
- this ✓ ✓ ✓ ✓ ✓ ✓ ✓ ✓ ✗ ✗ ✗
38
- --------------------------------------------------------------------
39
- A ✓ ✓ ✓ ✓ ✓ ✓ ✓ ✓ ✓ ✗ ✗
40
- great ✓ ✓ ✓ ✓ ✓ ✓ ✓ ✓ ✓ ✓ ✗
41
- duck ✓ ✓ ✓ ✓ ✓ ✓ ✓ ✓ ✓ ✓ ✓
42
- </pre>
43
- </div>
44
- </div>
45
-
dist/fragments/d3-graph.html DELETED
@@ -1,12 +0,0 @@
1
- <div class="interactive-demo">
2
- <div class="demo-header">
3
- <h3>🔗 Model Dependency Graph</h3>
4
- </div>
5
- <div class="demo-content">
6
- <iframe src="static/d3_dependency_graph.html" width="100%" height="600px" frameborder="0" style="border-radius: 8px; background: white;"></iframe>
7
- </div>
8
- <div class="demo-footer">
9
- Interactive dependency graph showing real relationships between Transformers models. 🟡 Base models (HuggingFace logo), 🔵 Derived modular models. Click and drag to explore!
10
- </div>
11
- </div>
12
-
dist/fragments/dependency-graph.html DELETED
@@ -1,6 +0,0 @@
1
- <iframe
2
- src="https://molbap-dependencies-1.hf.space"
3
- style="width:100%; height:680px; border:0"
4
- allow="clipboard-read; clipboard-write; fullscreen"
5
- referrerpolicy="no-referrer-when-downgrade"
6
- ></iframe>
dist/fragments/glm-compare.html DELETED
@@ -1,149 +0,0 @@
1
- <div class="code-compare" style="display: grid; grid-template-columns: 1fr 1fr; gap: 1rem; margin: 1.5rem 0;">
2
- <div class="code-column" style="border: 1px solid #e2e8f0; border-radius: 8px; overflow: hidden;">
3
- <div class="code-header" style="background: #f8f9fa; padding: 0.75rem 1rem; font-weight: 600; color: #495057; border-bottom: 1px solid #e2e8f0;">
4
- modular_glm.py
5
- </div>
6
- <pre style="margin: 0; padding: 1rem; background: #ffffff; overflow-x: auto; font-size: 0.9em;"><code class="language-python">class GlmMLP(Phi3MLP):
7
- pass
8
-
9
- class GlmAttention(LlamaAttention):
10
- def __init__(self, config, layer_idx=None):
11
- super().__init__(config, layer_idx)
12
- self.o_proj = nn.Linear(
13
- config.num_attention_heads * self.head_dim,
14
- config.hidden_size,
15
- bias=False
16
- )
17
-
18
- class GlmForCausalLM(LlamaForCausalLM):
19
- pass</code></pre>
20
- </div>
21
-
22
- <div class="code-column" style="border: 1px solid #e2e8f0; border-radius: 8px; overflow: hidden;">
23
- <div class="code-header" style="background: #f8f9fa; padding: 0.75rem 1rem; font-weight: 600; color: #495057; border-bottom: 1px solid #e2e8f0;">
24
- modeling_glm.py (auto-expanded)
25
- </div>
26
- <pre style="margin: 0; padding: 1rem; background: #ffffff; overflow-x: auto; font-size: 0.9em; max-height: 400px;"><code class="language-python">class GlmMLP(nn.Module):
27
- def __init__(self, config):
28
- super().__init__()
29
- self.config = config
30
- self.gate_up_proj = nn.Linear(
31
- config.hidden_size,
32
- 2 * config.intermediate_size,
33
- bias=False
34
- )
35
- self.down_proj = nn.Linear(
36
- config.intermediate_size,
37
- config.hidden_size,
38
- bias=False
39
- )
40
- self.activation_fn = ACT2FN[config.hidden_act]
41
-
42
- def forward(self, hidden_states: torch.FloatTensor) -> torch.FloatTensor:
43
- up_states = self.gate_up_proj(hidden_states)
44
- gate, up_states = up_states.chunk(2, dim=-1)
45
- up_states = up_states * self.activation_fn(gate)
46
- return self.down_proj(up_states)
47
-
48
-
49
- class GlmAttention(nn.Module):
50
- """Multi-headed attention from 'Attention Is All You Need' paper"""
51
-
52
- def __init__(self, config: GlmConfig, layer_idx: Optional[int] = None):
53
- super().__init__()
54
- self.config = config
55
- self.layer_idx = layer_idx
56
- self.head_dim = getattr(
57
- config, "head_dim",
58
- config.hidden_size // config.num_attention_heads
59
- )
60
- self.num_key_value_groups = (
61
- config.num_attention_heads // config.num_key_value_heads
62
- )
63
- self.scaling = self.head_dim**-0.5
64
- self.attention_dropout = config.attention_dropout
65
- self.is_causal = True
66
-
67
- self.q_proj = nn.Linear(
68
- config.hidden_size,
69
- config.num_attention_heads * self.head_dim,
70
- bias=config.attention_bias
71
- )
72
- self.k_proj = nn.Linear(
73
- config.hidden_size,
74
- config.num_key_value_heads * self.head_dim,
75
- bias=config.attention_bias
76
- )
77
- self.v_proj = nn.Linear(
78
- config.hidden_size,
79
- config.num_key_value_heads * self.head_dim,
80
- bias=config.attention_bias
81
- )
82
- self.o_proj = nn.Linear(
83
- config.num_attention_heads * self.head_dim,
84
- config.hidden_size,
85
- bias=False
86
- )
87
-
88
- def forward(
89
- self,
90
- hidden_states: torch.Tensor,
91
- position_embeddings: Tuple[torch.Tensor, torch.Tensor],
92
- attention_mask: Optional[torch.Tensor],
93
- past_key_value: Optional[Cache] = None,
94
- cache_position: Optional[torch.LongTensor] = None,
95
- **kwargs: Unpack[FlashAttentionKwargs],
96
- ) -> Tuple[torch.Tensor, Optional[torch.Tensor], Optional[Tuple[torch.Tensor]]]:
97
- input_shape = hidden_states.shape[:-1]
98
- hidden_shape = (*input_shape, -1, self.head_dim)
99
-
100
- query_states = self.q_proj(hidden_states).view(hidden_shape).transpose(1, 2)
101
- key_states = self.k_proj(hidden_states).view(hidden_shape).transpose(1, 2)
102
- value_states = self.v_proj(hidden_states).view(hidden_shape).transpose(1, 2)
103
-
104
- cos, sin = position_embeddings
105
- query_states, key_states = apply_rotary_pos_emb(query_states, key_states, cos, sin)
106
-
107
- if past_key_value is not None:
108
- cache_kwargs = {"sin": sin, "cos": cos, "cache_position": cache_position}
109
- key_states, value_states = past_key_value.update(
110
- key_states, value_states, self.layer_idx, cache_kwargs
111
- )
112
-
113
- attention_interface: Callable = eager_attention_forward
114
- if self.config._attn_implementation != "eager":
115
- attention_interface = ALL_ATTENTION_FUNCTIONS[self.config._attn_implementation]
116
-
117
- attn_output, attn_weights = attention_interface(
118
- self, query_states, key_states, value_states,
119
- attention_mask, dropout=0.0 if not self.training else self.attention_dropout,
120
- scaling=self.scaling, **kwargs,
121
- )
122
-
123
- attn_output = attn_output.reshape(*input_shape, -1).contiguous()
124
- attn_output = self.o_proj(attn_output)
125
- return attn_output, attn_weights
126
-
127
-
128
- @use_kernel_forward_from_hub("RMSNorm")
129
- class GlmRMSNorm(nn.Module):
130
- def __init__(self, hidden_size, eps=1e-6):
131
- super().__init__()
132
- self.weight = nn.Parameter(torch.ones(hidden_size))
133
- self.variance_epsilon = eps
134
-
135
- def forward(self, hidden_states):
136
- input_dtype = hidden_states.dtype
137
- hidden_states = hidden_states.to(torch.float32)
138
- variance = hidden_states.pow(2).mean(-1, keepdim=True)
139
- hidden_states = hidden_states * torch.rsqrt(variance + self.variance_epsilon)
140
- return self.weight * hidden_states.to(input_dtype)
141
-
142
- # ... (many more classes and functions would follow)</code></pre>
143
- </div>
144
- </div>
145
-
146
- <p style="text-align: center; font-style: italic; color: #6c757d; margin-top: 1rem;">
147
- <strong>Left:</strong> Clean modular definition with inheritance.
148
- <strong>Right:</strong> Auto-expanded version with all inherited functionality visible.
149
- </p>
dist/fragments/loc-growth.html DELETED
@@ -1,6 +0,0 @@
1
- <iframe
2
- src="https://molbap-loc-1.hf.space"
3
- style="width:100%; height:680px; border:0"
4
- allow="clipboard-read; clipboard-write; fullscreen"
5
- referrerpolicy="no-referrer-when-downgrade"
6
- ></iframe>
dist/fragments/memory-profiler.html DELETED
@@ -1,16 +0,0 @@
1
- <div style="border: 1px solid #e2e8f0; border-radius: 8px; background: white; margin: 1.5rem 0;">
2
- <div style="padding: 1rem; border-bottom: 1px solid #e2e8f0; background: #f8f9fa;">
3
- <h4 style="margin: 0 0 0.5rem 0; color: #495057;">🚀 CUDA Warmup Efficiency Benchmark</h4>
4
- <p style="margin: 0; font-size: 0.9em; color: #6c757d;">
5
- Real CUDA warmup benchmarking with actual Transformers models. Measure the performance impact of the caching_allocator_warmup function.
6
- </p>
7
- </div>
8
-
9
- <div style="padding: 1rem;">
10
- <iframe src=https://molbap-cuda-warmup-transformers.hf.space width=100% height=800px frameborder=0 style="border-radius: 8px; background: white;"></iframe>
11
- </div>
12
-
13
- <div style="padding: 1rem; border-top: 1px solid #e2e8f0; background: #f8f9fa; font-size: 0.9em; color: #6c757d;">
14
- Real CUDA warmup benchmarking with actual Transformers models. Measure the performance impact of the <code>caching_allocator_warmup</code> function at <code>transformers/src/transformers/modeling_utils.py:6186</code>. This interactive tool loads models twice - once with warmup disabled and once with warmup enabled - to demonstrate the significant loading time improvements.
15
- </div>
16
- </div>
dist/fragments/model-timeline.html DELETED
@@ -1,6 +0,0 @@
1
- <iframe
2
- src="https://molbap-timeline-1.hf.space"
3
- style="width:100%; height:680px; border:0"
4
- allow="clipboard-read; clipboard-write; fullscreen"
5
- referrerpolicy="no-referrer-when-downgrade"
6
- ></iframe>
dist/fragments/model-visualisation.html DELETED
The diff for this file is too large to render. See raw diff
 
dist/fragments/terminal.html DELETED
@@ -1,43 +0,0 @@
1
- <div style="background: #f8f9fa; border: 1px solid #e9ecef; border-radius: 8px; padding: 1rem; margin: 1.5rem 0;">
2
- <h4 style="margin-top: 0; color: #495057;">Interactive Terminal</h4>
3
- <div style="background: #2d3748; color: #e2e8f0; padding: 1rem; border-radius: 6px; font-family: 'Consolas', 'Monaco', monospace;">
4
- <div style="margin-bottom: 1rem;">
5
- <input type="text"
6
- id="terminal-input"
7
- placeholder="python -c 'import torch; print(torch.__version__)'"
8
- style="width: calc(100% - 80px); padding: 0.5rem; background: #1a202c; border: 1px solid #4a5568; color: #e2e8f0; border-radius: 4px;">
9
- <button id="terminal-run"
10
- style="width: 70px; padding: 0.5rem; margin-left: 8px; background: #3182ce; color: white; border: none; border-radius: 4px; cursor: pointer;">Run</button>
11
- </div>
12
- <pre id="terminal-output" style="background: #1a202c; padding: 1rem; border-radius: 4px; min-height: 100px; margin: 0; overflow-x: auto;">$ Ready to run commands...</pre>
13
- </div>
14
- <p style="font-size: 0.9em; color: #6c757d; margin-top: 0.5rem;">
15
- <em>Note: This is a simulated terminal. In the original Gradio app, this would execute real Python commands with proper security restrictions.</em>
16
- </p>
17
- </div>
18
-
19
- <script>
20
- document.addEventListener('DOMContentLoaded', function() {
21
- const input = document.getElementById('terminal-input');
22
- const button = document.getElementById('terminal-run');
23
- const output = document.getElementById('terminal-output');
24
-
25
- function runCommand() {
26
- const command = input.value.trim();
27
- if (!command) return;
28
-
29
- // Simulate command execution
30
- output.textContent = `$ ${command}\nSimulated output for: ${command}\n\n` +
31
- `This would execute the command in the original app.\n` +
32
- `Example outputs:\n` +
33
- `- torch version: 2.0.1+cu117\n` +
34
- `- import checks: Success\n` +
35
- `- memory info: Available`;
36
- }
37
-
38
- button.addEventListener('click', runCommand);
39
- input.addEventListener('keypress', function(e) {
40
- if (e.key === 'Enter') runCommand();
41
- });
42
- });
43
- </script>
dist/fragments/tp-plan.html DELETED
@@ -1,24 +0,0 @@
1
- <pre><code class="language-python"># In the model's config (example: ERNIE 4.5-style decoder blocks)
2
- base_model_tp_plan = {
3
- "layers.*.self_attn.q_proj": "colwise",
4
- "layers.*.self_attn.k_proj": "colwise",
5
- "layers.*.self_attn.v_proj": "colwise",
6
- "layers.*.self_attn.o_proj": "rowwise",
7
- "layers.*.mlp.gate_proj": "colwise",
8
- "layers.*.mlp.up_proj": "colwise",
9
- "layers.*.mlp.down_proj": "rowwise",
10
- }
11
-
12
- # Runtime
13
- import torch
14
- from transformers import AutoModelForCausalLM, AutoTokenizer
15
-
16
- model_id = "your/model-or-local-checkpoint"
17
- model = AutoModelForCausalLM.from_pretrained(
18
- model_id,
19
- dtype=torch.bfloat16,
20
- tp_plan=base_model_tp_plan, # <-- plan defined above
21
- )
22
- tok = AutoTokenizer.from_pretrained(model_id)
23
- inputs = tok("Hello", return_tensors="pt").to(model.device)
24
- out = model(**inputs)</code></pre>
dist/fragments/warmup_demo.html DELETED
@@ -1,398 +0,0 @@
1
- <style>
2
- .warmup-demo body {
3
- font-family: 'Segoe UI', Tahoma, Geneva, Verdana, sans-serif;
4
- margin: 0;
5
- padding: 20px;
- background-color: #f5f5f5;
- }
-
- .warmup-demo .container {
- max-width: 1200px;
- margin: 0 auto;
- background: white;
- border-radius: 12px;
- padding: 30px;
- box-shadow: 0 4px 6px rgba(0, 0, 0, 0.1);
- }
-
- .warmup-demo h1 {
- text-align: center;
- color: #333;
- margin-bottom: 10px;
- }
-
- .warmup-demo .subtitle {
- text-align: center;
- color: #666;
- margin-bottom: 30px;
- font-size: 16px;
- }
-
- .warmup-demo .demo-container {
- display: flex;
- gap: 40px;
- margin-bottom: 30px;
- }
-
- .warmup-demo .side {
- flex: 1;
- border: 2px solid #ddd;
- border-radius: 8px;
- padding: 20px;
- background: #fafafa;
- }
-
- .warmup-demo .side h2 {
- text-align: center;
- margin-top: 0;
- color: #333;
- }
-
- .warmup-demo .no-warmup h2 {
- color: #d63384;
- }
-
- .warmup-demo .with-warmup h2 {
- color: #198754;
- }
-
- .warmup-demo .memory-area {
- height: 400px;
- border: 2px dashed #ccc;
- border-radius: 6px;
- padding: 10px;
- margin: 20px 0;
- background: #fff;
- position: relative;
- overflow: hidden;
- }
-
- .warmup-demo .layer-box {
- width: 80px;
- height: 30px;
- border: 2px solid #666;
- border-radius: 4px;
- margin: 3px;
- display: inline-block;
- position: relative;
- background: #fff;
- transition: all 0.3s ease;
- }
-
- .warmup-demo .layer-box.allocating {
- background: #e9ecef;
- border-color: #adb5bd;
- }
-
- .warmup-demo .layer-box.allocating::after {
- content: "malloc";
- position: absolute;
- top: 50%;
- left: 50%;
- transform: translate(-50%, -50%);
- font-size: 10px;
- color: #666;
- font-weight: bold;
- }
-
- .warmup-demo .layer-box.loaded {
- background: #d1e7dd;
- border-color: #198754;
- }
-
- .warmup-demo .layer-box.loaded::after {
- content: "data";
- position: absolute;
- top: 50%;
- left: 50%;
- transform: translate(-50%, -50%);
- font-size: 10px;
- color: #198754;
- font-weight: bold;
- }
-
- .warmup-demo .warmup-container {
- width: 100%;
- height: 60px;
- border: 3px solid #666;
- border-radius: 6px;
- margin-bottom: 20px;
- background: #fff;
- position: relative;
- overflow: hidden;
- }
-
- .warmup-demo .warmup-container.allocated {
- border-color: #0d6efd;
- background: #e7f1ff;
- }
-
- .warmup-demo .warmup-container::before {
- content: "Pre-allocated Memory Pool";
- position: absolute;
- top: 50%;
- left: 50%;
- transform: translate(-50%, -50%);
- font-size: 14px;
- color: #666;
- font-weight: bold;
- z-index: 1;
- }
-
- .warmup-demo .warmup-container.allocated::before {
- color: #0d6efd;
- }
-
- .warmup-demo .warmup-fill {
- height: 100%;
- background: linear-gradient(90deg, #198754, #20c997);
- width: 0%;
- transition: width 0.5s ease;
- border-radius: 3px;
- position: relative;
- z-index: 2;
- }
-
- .warmup-demo .warmup-fill::after {
- content: "Layer Data Loading";
- position: absolute;
- top: 50%;
- left: 50%;
- transform: translate(-50%, -50%);
- font-size: 12px;
- color: white;
- font-weight: bold;
- white-space: nowrap;
- }
-
- .warmup-demo .timing {
- text-align: center;
- font-size: 24px;
- font-weight: bold;
- margin: 15px 0;
- min-height: 30px;
- }
-
- .warmup-demo .no-warmup .timing {
- color: #d63384;
- }
-
- .warmup-demo .with-warmup .timing {
- color: #198754;
- }
-
- .warmup-demo .controls {
- text-align: center;
- margin: 30px 0;
- }
-
- .warmup-demo .btn {
- background: #0d6efd;
- color: white;
- border: none;
- padding: 12px 24px;
- border-radius: 6px;
- font-size: 16px;
- cursor: pointer;
- margin: 0 10px;
- transition: background 0.3s ease;
- }
-
- .warmup-demo .btn:hover {
- background: #0b5ed7;
- }
-
- .warmup-demo .btn:disabled {
- background: #6c757d;
- cursor: not-allowed;
- }
-
- .warmup-demo .description {
- background: #f8f9fa;
- padding: 15px;
- border-radius: 6px;
- margin-top: 15px;
- font-size: 14px;
- line-height: 1.5;
- }
-
- .warmup-demo .phase-indicator {
- font-size: 14px;
- color: #666;
- text-align: center;
- margin-top: 10px;
- min-height: 20px;
- }
-
- .warmup-demo .layer-counter {
- text-align: center;
- font-size: 16px;
- color: #495057;
- margin: 10px 0;
- }
- </style>
-
- <div class="warmup-demo">
- <div class="container">
- <p class="subtitle">Mem allocation patterns during model loading</p>
-
- <div class="controls">
- <button class="btn" id="startBtn" onclick="startDemo()">Start Animation</button>
- <button class="btn" id="resetBtn" onclick="resetDemo()">Reset</button>
- </div>
-
- <div class="demo-container">
- <div class="side no-warmup">
- <h4 data-no-toc>❌ Without Warmup</h4>
- <div class="timing" id="noWarmupTime">0.00s</div>
- <div class="layer-counter" id="noWarmupCounter">Layers loaded: 0/10</div>
- <div class="phase-indicator" id="noWarmupPhase"></div>
- <div class="memory-area" id="noWarmupArea"></div>
- <div class="description">
- <strong>Individual Allocations:</strong><br>
- Each model layer triggers a separate cudaMalloc() call, creating memory fragmentation and allocation overhead.
- <br><br>
- 📦 <strong>Grey "malloc"</strong> = Memory allocation overhead<br>
- ✅ <strong>Green "data"</strong> = Actual layer data loading
- </div>
- </div>
-
- <div class="side with-warmup">
- <h4 data-no-toc>✅ With Warmup</h4>
- <div class="timing" id="warmupTime">0.00s</div>
- <div class="layer-counter" id="warmupCounter">Layers loaded: 0/10</div>
- <div class="phase-indicator" id="warmupPhase"></div>
- <div class="memory-area" id="warmupArea">
- <div class="warmup-container" id="warmupContainer">
- <div class="warmup-fill" id="warmupFill"></div>
- </div>
- <div id="warmupLayers"></div>
- </div>
- <div class="description">
- <strong>Pre-allocated Pool:</strong><br>
- The warmup function calculates total memory needed and makes ONE large allocation. Subsequent layers load directly into this pool, eliminating malloc overhead.
- <br><br>
- 🔵 <strong>Blue container</strong> = Single large malloc (warmup)<br>
- 🟢 <strong>Green progress bar</strong> = Layer data loading (no malloc needed)
- </div>
- </div>
- </div>
- </div>
- </div>
-
- <script>
- let animationSpeed = 1 / 2.4;
- let isRunning = false;
- const totalLayers = 10;
-
- function startDemo() {
- if (isRunning) return;
- isRunning = true;
-
- document.getElementById('startBtn').disabled = true;
- document.getElementById('resetBtn').disabled = true;
-
- Promise.all([
- animateNoWarmup(),
- animateWithWarmup()
- ]).then(() => {
- isRunning = false;
- document.getElementById('startBtn').disabled = false;
- document.getElementById('resetBtn').disabled = false;
- });
- }
-
- function resetDemo() {
- if (isRunning) return;
-
- document.getElementById('noWarmupArea').innerHTML = '';
- document.getElementById('warmupLayers').innerHTML = '';
- document.getElementById('warmupFill').style.width = '0%';
- document.getElementById('warmupContainer').classList.remove('allocated');
-
- document.getElementById('noWarmupTime').textContent = '0.00s';
- document.getElementById('warmupTime').textContent = '0.00s';
-
- document.getElementById('noWarmupCounter').textContent = 'Layers loaded: 0/10';
- document.getElementById('warmupCounter').textContent = 'Layers loaded: 0/10';
-
- document.getElementById('noWarmupPhase').textContent = '';
- document.getElementById('warmupPhase').textContent = '';
- }
-
- async function animateNoWarmup() {
- const container = document.getElementById('noWarmupArea');
- const timeEl = document.getElementById('noWarmupTime');
- const counterEl = document.getElementById('noWarmupCounter');
- const phaseEl = document.getElementById('noWarmupPhase');
-
- let currentTime = 0;
- const baseDelay = 200 / animationSpeed;
-
- phaseEl.textContent = 'Loading model layers...';
-
- for (let i = 0; i < totalLayers; i++) {
- const layerBox = document.createElement('div');
- layerBox.className = 'layer-box';
- container.appendChild(layerBox);
-
- await sleep(baseDelay * 0.3);
- layerBox.classList.add('allocating');
- currentTime += 0.08;
- timeEl.textContent = currentTime.toFixed(2) + 's';
-
- await sleep(baseDelay * 0.7);
- layerBox.classList.remove('allocating');
- layerBox.classList.add('loaded');
- currentTime += 0.12;
- timeEl.textContent = currentTime.toFixed(2) + 's';
-
- counterEl.textContent = `Layers loaded: ${i + 1}/${totalLayers}`;
- }
-
- phaseEl.textContent = 'Complete!';
- }
-
- async function animateWithWarmup() {
- const container = document.getElementById('warmupLayers');
- const timeEl = document.getElementById('warmupTime');
- const counterEl = document.getElementById('warmupCounter');
- const phaseEl = document.getElementById('warmupPhase');
- const warmupContainer = document.getElementById('warmupContainer');
- const warmupFill = document.getElementById('warmupFill');
-
- let currentTime = 0;
- const baseDelay = 200 / animationSpeed;
-
- phaseEl.textContent = 'Warming up allocator...';
- await sleep(baseDelay * 2);
- warmupContainer.classList.add('allocated');
- currentTime += 0.3;
- timeEl.textContent = currentTime.toFixed(2) + 's';
-
- phaseEl.textContent = 'Loading model layers...';
-
- for (let i = 0; i < totalLayers; i++) {
- const layerBox = document.createElement('div');
- layerBox.className = 'layer-box loaded';
- layerBox.style.width = '40px';
- layerBox.style.height = '20px';
- container.appendChild(layerBox);
-
- const progress = ((i + 1) / totalLayers) * 100;
- warmupFill.style.width = progress + '%';
-
- await sleep(baseDelay * 0.5);
- currentTime += 0.08;
- timeEl.textContent = currentTime.toFixed(2) + 's';
-
- counterEl.textContent = `Layers loaded: ${i + 1}/${totalLayers}`;
- }
-
- phaseEl.textContent = 'Complete!';
- }
-
- function sleep(ms) {
- return new Promise(resolve => setTimeout(resolve, ms));
- }
- </script>
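
The deleted demo's two panels contrast per-layer allocation (one cudaMalloc per layer) with a warmup-style pre-allocated pool. A minimal sketch of that pattern in PyTorch-flavored Python — illustrative only, with hypothetical function names and shapes; the device defaults to CPU so it runs without CUDA:

import torch

def load_layers_individually(shapes, device="cpu"):
    # "Without warmup": one allocator call per layer, so every layer
    # pays its own allocation overhead and risks fragmentation.
    return [torch.empty(*s, device=device) for s in shapes]

def load_layers_with_warmup(shapes, device="cpu"):
    # "With warmup": total the memory needed, make ONE large allocation,
    # then hand out views into the pool; later layer loads write into
    # already-owned memory instead of triggering new allocations.
    sizes = [torch.Size(s).numel() for s in shapes]
    pool = torch.empty(sum(sizes), device=device)  # the single large malloc
    views, offset = [], 0
    for s, n in zip(shapes, sizes):
        views.append(pool.narrow(0, offset, n).view(*s))
        offset += n
    return pool, views

# Ten identical layers, mirroring the 10-layer animation above.
pool, layers = load_layers_with_warmup([(1024, 1024)] * 10)
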
dist/hf-logo.svg DELETED
dist/index.html DELETED
The diff for this file is too large to render. See raw diff
 
dist/main.bundle.js DELETED
@@ -1,2028 +0,0 @@
- /******/ (() => { // webpackBootstrap
- /******/ "use strict";
- /******/ var __webpack_modules__ = ({
-
- /***/ 56:
- /***/ ((module, __unused_webpack_exports, __webpack_require__) => {
-
-
-
- /* istanbul ignore next */
- function setAttributesWithoutAttributes(styleElement) {
- var nonce = true ? __webpack_require__.nc : 0;
- if (nonce) {
- styleElement.setAttribute("nonce", nonce);
- }
- }
- module.exports = setAttributesWithoutAttributes;
-
- /***/ }),
-
- /***/ 72:
- /***/ ((module) => {
-
-
-
- var stylesInDOM = [];
- function getIndexByIdentifier(identifier) {
- var result = -1;
- for (var i = 0; i < stylesInDOM.length; i++) {
- if (stylesInDOM[i].identifier === identifier) {
- result = i;
- break;
- }
- }
- return result;
- }
- function modulesToDom(list, options) {
- var idCountMap = {};
- var identifiers = [];
- for (var i = 0; i < list.length; i++) {
- var item = list[i];
- var id = options.base ? item[0] + options.base : item[0];
- var count = idCountMap[id] || 0;
- var identifier = "".concat(id, " ").concat(count);
- idCountMap[id] = count + 1;
- var indexByIdentifier = getIndexByIdentifier(identifier);
- var obj = {
- css: item[1],
- media: item[2],
- sourceMap: item[3],
- supports: item[4],
- layer: item[5]
- };
- if (indexByIdentifier !== -1) {
- stylesInDOM[indexByIdentifier].references++;
- stylesInDOM[indexByIdentifier].updater(obj);
- } else {
- var updater = addElementStyle(obj, options);
- options.byIndex = i;
- stylesInDOM.splice(i, 0, {
- identifier: identifier,
- updater: updater,
- references: 1
- });
- }
- identifiers.push(identifier);
- }
- return identifiers;
- }
- function addElementStyle(obj, options) {
- var api = options.domAPI(options);
- api.update(obj);
- var updater = function updater(newObj) {
- if (newObj) {
- if (newObj.css === obj.css && newObj.media === obj.media && newObj.sourceMap === obj.sourceMap && newObj.supports === obj.supports && newObj.layer === obj.layer) {
- return;
- }
- api.update(obj = newObj);
- } else {
- api.remove();
- }
- };
- return updater;
- }
- module.exports = function (list, options) {
- options = options || {};
- list = list || [];
- var lastIdentifiers = modulesToDom(list, options);
- return function update(newList) {
- newList = newList || [];
- for (var i = 0; i < lastIdentifiers.length; i++) {
- var identifier = lastIdentifiers[i];
- var index = getIndexByIdentifier(identifier);
- stylesInDOM[index].references--;
- }
- var newLastIdentifiers = modulesToDom(newList, options);
- for (var _i = 0; _i < lastIdentifiers.length; _i++) {
- var _identifier = lastIdentifiers[_i];
- var _index = getIndexByIdentifier(_identifier);
- if (stylesInDOM[_index].references === 0) {
- stylesInDOM[_index].updater();
- stylesInDOM.splice(_index, 1);
- }
- }
- lastIdentifiers = newLastIdentifiers;
- };
- };
-
- /***/ }),
-
- /***/ 113:
- /***/ ((module) => {
-
-
-
- /* istanbul ignore next */
- function styleTagTransform(css, styleElement) {
- if (styleElement.styleSheet) {
- styleElement.styleSheet.cssText = css;
- } else {
- while (styleElement.firstChild) {
- styleElement.removeChild(styleElement.firstChild);
- }
- styleElement.appendChild(document.createTextNode(css));
- }
- }
- module.exports = styleTagTransform;
-
- /***/ }),
-
- /***/ 208:
- /***/ ((module, __webpack_exports__, __webpack_require__) => {
-
- /* harmony export */ __webpack_require__.d(__webpack_exports__, {
- /* harmony export */ A: () => (__WEBPACK_DEFAULT_EXPORT__)
- /* harmony export */ });
- /* harmony import */ var _node_modules_css_loader_dist_runtime_sourceMaps_js__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(354);
- /* harmony import */ var _node_modules_css_loader_dist_runtime_sourceMaps_js__WEBPACK_IMPORTED_MODULE_0___default = /*#__PURE__*/__webpack_require__.n(_node_modules_css_loader_dist_runtime_sourceMaps_js__WEBPACK_IMPORTED_MODULE_0__);
- /* harmony import */ var _node_modules_css_loader_dist_runtime_api_js__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(314);
- /* harmony import */ var _node_modules_css_loader_dist_runtime_api_js__WEBPACK_IMPORTED_MODULE_1___default = /*#__PURE__*/__webpack_require__.n(_node_modules_css_loader_dist_runtime_api_js__WEBPACK_IMPORTED_MODULE_1__);
- /* harmony import */ var _node_modules_css_loader_dist_cjs_js_transformers_custom_css__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(995);
- // Imports
-
-
-
- var ___CSS_LOADER_EXPORT___ = _node_modules_css_loader_dist_runtime_api_js__WEBPACK_IMPORTED_MODULE_1___default()((_node_modules_css_loader_dist_runtime_sourceMaps_js__WEBPACK_IMPORTED_MODULE_0___default()));
- ___CSS_LOADER_EXPORT___.i(_node_modules_css_loader_dist_cjs_js_transformers_custom_css__WEBPACK_IMPORTED_MODULE_2__/* ["default"] */ .A);
- // Module
- ___CSS_LOADER_EXPORT___.push([module.id, `/* style.css - Scaling Insanity */
-
- /* Import ultrascale-playbook base styles and add transformers-specific styling */
- /* Define colors */
- :root {
- --distill-gray: rgb(107, 114, 128);
- --distill-gray-light: rgb(185, 185, 185);
- --distill-gray-lighter: rgb(228, 228, 228);
- --distill-gray-lightest: rgb(245, 245, 245);
- --distill-blue: #007BFF;
- }
-
- /* Container for the controls */
- [id^="plot-"] {
- display: flex;
- flex-direction: column;
- align-items: center;
- gap: 15px; /* Adjust the gap between controls as needed */
- }
- [id^="plot-"] figure {
- margin-bottom: 0px;
- margin-top: 0px;
- padding: 0px;
- }
- .plotly_caption {
- font-style: italic;
- margin-top: 10px;
- }
-
- .plotly_controls {
- display: flex;
- flex-wrap: wrap;
- flex-direction: row;
- justify-content: center;
- align-items: flex-start;
- gap: 30px;
- }
-
-
- .plotly_input_container {
- display: flex;
- align-items: center;
- flex-direction: column;
- gap: 10px;
- }
-
- /* Style for the select dropdown */
- .plotly_input_container > select {
- padding: 2px 4px;
- /* border: 1px solid #ccc; */
- line-height: 1.5em;
- text-align: center;
- border-radius: 4px;
- font-size: 12px;
- background-color: var(--distill-gray-lightest);
- outline: none;
- }
-
- /* Style for the range input */
-
- .plotly_slider {
- display: flex;
- align-items: center;
- gap: 10px;
- }
-
- .plotly_slider > input[type="range"] {
- -webkit-appearance: none;
- height: 2px;
- background: var(--distill-gray-light);
- border-radius: 5px;
- outline: none;
- }
-
- .plotly_slider > span {
- font-size: 14px;
- line-height: 1.6em;
- min-width: 16px;
- }
-
- .plotly_slider > input[type="range"]::-webkit-slider-thumb {
- -webkit-appearance: none;
- appearance: none;
- width: 18px;
- height: 18px;
- border-radius: 50%;
- background: var(--distill-blue);
- cursor: pointer;
- }
-
- .plotly_slider > input[type="range"]::-moz-range-thumb {
- width: 18px;
- height: 18px;
- border-radius: 50%;
- background: var(--distill-blue);
- cursor: pointer;
- }
-
- /* Style for the labels */
- .plotly_input_container > label {
- font-size: 14px;
- font-weight: bold;
- }
-
- .main-plot-container {
- margin-top: 21px;
- margin-bottom: 35px;
- }
-
- .main-plot-container > figure {
- display: block !important;
- /* Let this be handled by graph-container */
- margin-bottom: 0px;
- margin-top: 0px;
- }
- .main-plot-container > div {
- display: none !important;
- }
-
-
- @media (min-width: 768px) {
- .main-plot-container > figure {
- display: none !important;
- }
- .main-plot-container > div {
- display: flex !important;
- }
- }
-
- d-byline .byline {
- grid-template-columns: 1fr;
- grid-column: text;
- font-size: 0.9rem;
- line-height: 1.8em;
- }
-
- @media (min-width: 768px) {
- d-byline .byline {
- grid-template-columns: 5fr 1fr 1fr;
- }
- }
-
- #title-plot {
- margin-top: 0px;
- margin-bottom: 0px;
- }
-
- d-contents > nav a.active {
- text-decoration: underline;
- }
-
- @media (max-width: 1199px) {
- d-contents {
- display: none;
- background: white;
- justify-self: start;
- align-self: start;
- padding-bottom: 0.5em;
- margin-bottom: 1em;
- padding-left: 0.25em;
- border-bottom: 1px solid rgba(0, 0, 0, 0.1);
- border-bottom-width: 1px;
- border-bottom-style: solid;
- border-bottom-color: rgba(0, 0, 0, 0.1);
- overflow-y: scroll;
- height: calc(100vh - 40px);
- scrollbar-width: none;
- z-index: -100;
- }
- }
-
- d-contents a:hover {
- border-bottom: none;
- }
-
- toc-title {
- font-weight: bold;
- font-size: 1.2em;
- color: #333;
- }
-
- toggle-icon {
- transition: transform 0.3s;
- }
-
- toggle-icon.collapsed {
- transform: rotate(90deg);
- }
-
- .toc-content {
- margin-top: 15px;
- overflow: hidden;
- /* max-height: 1000px; */
- transition: max-height 0.3s ease-out;
- }
-
- .toc-content.collapsed {
- max-height: 0;
- margin-top: 0;
- }
-
- @media (min-width: 1200px) {
- d-article {
- /* Ensure d-article does not prevent sticky positioning */
- overflow: visible;
- }
-
- d-contents {
- align-self: start;
- background: white;
- grid-column-start: 1 !important;
- grid-column-end: 4 !important;
- grid-row: auto / span 6;
- justify-self: end;
- margin-top: 0em;
- padding-right: 3em;
- padding-left: 2em;
- /* border-right: 1px solid rgba(0, 0, 0, 0.1);
- border-right-width: 1px;
- border-right-style: solid;
- border-right-color: rgba(0, 0, 0, 0.1); */
- position: -webkit-sticky; /* For Safari */
- position: sticky;
- top: 10px; /* Adjust this value if needed */
- overflow-y: auto;
- height: calc(100vh - 40px);
- scrollbar-width: none;
- transition: max-height 0.3s ease-out;
- z-index: -100;
- }
- }
-
- d-contents nav h3 {
- margin-top: 0;
- margin-bottom: 1em;
- }
-
- d-contents nav div div {
- color: rgba(0, 0, 0, 0.8);
- font-weight: bold;
- }
-
- d-contents nav a {
- color: rgba(0, 0, 0, 0.8);
- border-bottom: none;
- text-decoration: none;
- }
-
- d-contents li {
- list-style-type: none;
- }
-
- d-contents ul, d-article d-contents ul {
- padding-left: 1em;
- }
-
- d-contents nav ul li {
- margin-bottom: .25em;
- }
-
- d-contents nav a:hover {
- text-decoration: underline solid rgba(0, 0, 0, 0.6);
- }
-
- d-contents nav ul {
- margin-top: 0;
- margin-bottom: 6px;
- }
-
-
- d-contents nav > div {
- display: block;
- outline: none;
- margin-bottom: 0.5em;
- }
-
- d-contents nav > div > a {
- font-size: 13px;
- font-weight: 600;
- }
-
- d-article aside {
- margin-bottom: 1em;
- }
-
- d-article img {
- max-width: 100%;
- }
-
- @media (min-width: 768px) {
- d-article aside {
- margin-bottom: 0;
- }
- }
-
- d-contents nav > div > a:hover,
- d-contents nav > ul > li > a:hover {
- text-decoration: none;
- }
-
- .note-box {
- background-color: #f6f8fa;
- border-left: 4px solid #444444;
- padding: 1rem;
- margin: 1rem 0; /* Keep this modest margin */
- border-radius: 6px;
- /* Add this to ensure the box only takes up needed space */
- display: inline-block;
- }
-
- .note-box-title {
- margin: 0;
- color: #444444;
- font-weight: 600;
- font-size: 1em;
- }
-
- .note-box-content {
- margin-top: 0.5rem;
- margin-bottom: 0; /* Ensure no bottom margin */
- color: #24292f;
- font-size: 0.9em;
- line-height: 1.5em;
- }
-
- /* For dark mode support */
- @media (prefers-color-scheme: dark) {
- .note-box {
- background-color: #1c1c1c;
- border-left-color: #888888;
- }
- .note-box-title {
- color: #888888;
- }
- .note-box-content {
- color: #d4d4d4;
- }
- }
-
- d-article {
- font-size: 1.0em;
- }
-
- .figure-legend {
- font-size: 0.9em;
- font-style: italic;
- color: var(--distill-gray);
- line-height: 1.5em;
- }
-
- d-code {
- font-size: 12px;
- }
-
- .large-image-background {
- width: 100vw;
- padding-top: 10px;
- padding-bottom: 10px;
- margin-left: calc(-50vw + 50%);
- margin-right: calc(-50vw + 50%);
- background: white;
- height: fit-content; /* This will make it match the image height */
- display: flex;
- justify-content: center; /* This will center your image */
- }
-
- .large-image-background-transparent {
- /* width: 100vw; */
- padding-top: 10px;
- padding-bottom: 10px;
- /* margin-left: calc(-50vw + 50%); */
- margin-left:-100px;
- margin-right: -100px;
- /* margin-right: calc(-50vw + 50%); */
- /* background: white; */
- height: fit-content; /* This will make it match the image height */
- display: flex;
- justify-content: center; /* This will center your image */
- }
-
- .boxed-image {
- padding: 0.5rem;
- background: white;
- border-radius: 12px;
- border: 1px solid #e5e7eb;
- box-shadow: 0 4px 6px rgba(0, 0, 0, 0.1);
- }
-
- d-article li {
- margin-bottom: 0.0em;
- }
-
- d-article ul ul {
- margin-bottom: 0.0em;
- }
-
- d-article ol ol {
- margin-bottom: 0.0em;
- }
-
- d-article hr {
- grid-column: text;
- }
-
- /* Memory visualization */
- #graph-all {
- min-width: 500px;
- margin-right: 10px;
- margin-bottom: 2rem;
- padding: 0.5rem;
- background: #f9fafb;
- border-radius: 12px;
- border: 1px solid #e5e7eb;
- box-shadow: 0 4px 6px rgba(0, 0, 0, 0.1);
- }
-
-
- /* Main container styles */
- #controls {
- max-width: 1200px;
- /* margin: 2rem auto; */
- margin-bottom: 2rem;
- margin-left: 10px;
- padding: 0.6rem;
- background: #f9fafb;
- border-radius: 12px;
- border: 1px solid #e5e7eb;
- box-shadow: 0 4px 6px rgba(0, 0, 0, 0.1);
- }
-
- /* Grid layout */
- #controls {
- display: grid;
- grid-template-columns: 1fr 1fr;
- /* gap: 2rem; */
- }
-
- /* Cell styles */
- .cell {
- margin-bottom: 0.2rem;
- }
-
- /* Label styles */
- label {
- display: block;
- /* margin-bottom: 0.5rem; */
- font-size: 0.8rem;
- font-weight: 500;
- color: #374151;
- }
-
- /* Input container for range + number combination */
- .input-container {
- display: flex;
- gap: 1rem;
- align-items: center;
- }
-
- /* Range input styling */
- input[type="range"] {
- flex: 1;
- height: 6px;
- background: #e5e7eb;
- border-radius: 3px;
- appearance: none;
- outline: none;
- }
-
- input[type="range"]::-webkit-slider-thumb {
- appearance: none;
- width: 16px;
- height: 16px;
- background: #3b82f6;
- border-radius: 50%;
- cursor: pointer;
- transition: background 0.15s ease;
- }
-
- input[type="range"]::-webkit-slider-thumb:hover {
- background: #2563eb;
- }
-
- /* Number input styling */
- input[type="number"] {
- width: 80px;
- padding: 0.5rem;
- border: 1px solid #e5e7eb;
- border-radius: 6px;
- font-size: 0.9rem;
- color: #374151;
- }
-
- /* Select styling */
- select {
- width: 100%;
- padding: 0.5rem;
- border: 1px solid #e5e7eb;
- border-radius: 6px;
- background: white;
- font-size: 0.9rem;
- color: #374151;
- cursor: pointer;
- }
-
- /* Checkbox styling */
- input[type="checkbox"] {
- width: 1.2rem;
- height: 1.2rem;
- margin-right: 0.5rem;
- border: 2px solid #e5e7eb;
- border-radius: 4px;
- cursor: pointer;
- }
-
- /* Column specific styles */
- .column-1 {
- padding-right: 0.5rem;
- }
-
- .column-2 {
- padding-left: 0.5rem;
- }
-
- /* Checkbox container */
- .checkbox-container {
- display: flex;
- align-items: center;
- margin-bottom: 1rem;
- }
-
- /* Memory visualization styles */
- .memory-block {
- background: #fff;
- border-radius: 8px;
- padding: 1rem;
- margin-bottom: 1rem;
- box-shadow: 0 2px 4px rgba(0, 0, 0, 0.05);
- }
-
- .memory-title {
- font-size: 1.1rem;
- font-weight: 500;
- color: #374151;
- margin-bottom: 0.5rem;
- }
-
- .memory-value {
- font-size: 1.5rem;
- font-weight: 600;
- color: #3b82f6;
- }
-
- /* Responsive adjustments */
- @media (max-width: 768px) {
- #controls {
- grid-template-columns: 1fr;
- padding: 1rem;
- }
-
- .column-1, .column-2 {
- padding: 0;
- }
- }
-
- /* Hover states and transitions */
- input:hover, select:hover {
- border-color: #3b82f6;
- }
-
- input:focus, select:focus {
- border-color: #2563eb;
- outline: none;
- box-shadow: 0 0 0 2px rgba(59, 130, 246, 0.1);
- }
-
- /* Add smooth transitions */
- input, select, button {
- transition: all 0.15s ease;
- }
-
- /* Preset dropdown special styling */
- select[name="presets"] {
- background-color: #f3f4f6;
- font-weight: 500;
- }
-
- /* Memory graph enhancements */
- .activation-memory {
- background: #dbeafe;
- padding: 1rem;
- border-radius: 8px;
- margin-bottom: 1rem;
- }
-
- .gradient-memory {
- background: #ede9fe;
- padding: 1rem;
- border-radius: 8px;
- }
-
- .order-button-second {
- background: linear-gradient(135deg, #6DB4C4, #D4A5B8);
- color: white;
- font-size: 18px;
- font-weight: 600;
- padding: 20px 20px;
- border: none;
- border-radius: 12px;
- cursor: pointer;
- text-transform: uppercase;
- letter-spacing: 1px;
- box-shadow: 0 4px 15px rgba(0, 0, 0, 0.2);
- transition: all 0.3s ease;
- position: relative;
- overflow: hidden;
- }
- .order-button-second:hover {
- transform: translateY(-2px);
- box-shadow: 0 6px 20px rgba(0, 0, 0, 0.25);
- }
-
- .order-button:active {
- transform: translateY(0);
- box-shadow: 0 2px 10px rgba(0, 0, 0, 0.2);
- }
-
- .order-button-second::before {
- content: '';
- position: absolute;
- top: 0;
- left: -100%;
- width: 100%;
- height: 100%;
- background: linear-gradient(135deg, rgba(255, 255, 255, 0.2), rgba(255, 255, 255, 0));
- transition: left 0.5s ease;
- }
-
- .order-button-second:hover::before {
- left: 100%;
- }
-
- .order-button {
- background: linear-gradient(135deg, #6DB4C4, #D4A5B8);
- color: white;
- font-size: 18px;
- font-weight: 600;
- padding: 16px 32px;
- border: none;
- border-radius: 12px;
- cursor: pointer;
- text-transform: uppercase;
- letter-spacing: 1px;
- box-shadow: 0 4px 15px rgba(0, 0, 0, 0.2);
- transition: all 0.3s ease;
- position: relative;
- overflow: hidden;
- }
-
- .order-button:hover {
- transform: translateY(-2px);
- box-shadow: 0 6px 20px rgba(0, 0, 0, 0.25);
- }
-
- .order-button:active {
- transform: translateY(0);
- box-shadow: 0 2px 10px rgba(0, 0, 0, 0.2);
- }
-
- .order-button::before {
- content: '';
- position: absolute;
- top: 0;
- left: -100%;
- width: 100%;
- height: 100%;
- background: linear-gradient(135deg, rgba(255, 255, 255, 0.2), rgba(255, 255, 255, 0));
- transition: left 0.5s ease;
- }
-
- .order-button:hover::before {
- left: 100%;
- }
- .order-button-container-second {
- /* display: flex; */
- justify-content: center;
- margin: 0px 0;
- }
-
- .order-button-container {
- display: flex;
- justify-content: center;
- margin: 0px 0 40px 0;
- }
-
- d-article img {
- width: 100%!important;
- }
-
-
- iframe, .js-plotly-plot {
- width: 100%!important;
- margin-bottom: 20px;
- }
-
- .modebar-container {
- display: none;
- }
-
- #graph-container {
- display: grid; grid-template-columns: 1fr 1fr; align-items: center;
- }
-
- @media (max-width: 768px) {
- #graph-container {
- grid-template-columns: 1fr;
- }
- }
-
- @media (max-width: 1024px) {
- #graph-container {
- grid-template-columns: 1fr;
- }
- #graph-all {
- margin-right: 0px;
- }
- #controls {
- margin-left: 0px;
- }
- }
-
- .main-plot-container svg {
- background: transparent !important;
- }
-
- .large-image-background-transparent {
- margin-left: 0px;
- margin-right: 0px;
- }
-
- /* Import transformers-specific styles */`, "",{"version":3,"sources":["webpack://./src/style.css"],"names":[],"mappings":"...","sourcesContent":["..."],"sourceRoot":""}]);
- // Exports
- /* harmony default export */ const __WEBPACK_DEFAULT_EXPORT__ = (___CSS_LOADER_EXPORT___);
-
-
- /***/ }),
-
- /***/ 314:
- /***/ ((module) => {
-
-
-
- /*
- MIT License http://www.opensource.org/licenses/mit-license.php
- Author Tobias Koppers @sokra
- */
- module.exports = function (cssWithMappingToString) {
- var list = [];
-
- // return the list of modules as css string
- list.toString = function toString() {
- return this.map(function (item) {
- var content = "";
- var needLayer = typeof item[5] !== "undefined";
- if (item[4]) {
- content += "@supports (".concat(item[4], ") {");
- }
- if (item[2]) {
- content += "@media ".concat(item[2], " {");
- }
- if (needLayer) {
- content += "@layer".concat(item[5].length > 0 ? " ".concat(item[5]) : "", " {");
- }
- content += cssWithMappingToString(item);
- if (needLayer) {
- content += "}";
- }
- if (item[2]) {
- content += "}";
- }
- if (item[4]) {
- content += "}";
- }
- return content;
- }).join("");
- };
-
- // import a list of modules into the list
- list.i = function i(modules, media, dedupe, supports, layer) {
- if (typeof modules === "string") {
- modules = [[null, modules, undefined]];
- }
- var alreadyImportedModules = {};
- if (dedupe) {
- for (var k = 0; k < this.length; k++) {
- var id = this[k][0];
- if (id != null) {
- alreadyImportedModules[id] = true;
- }
- }
- }
- for (var _k = 0; _k < modules.length; _k++) {
- var item = [].concat(modules[_k]);
- if (dedupe && alreadyImportedModules[item[0]]) {
- continue;
- }
- if (typeof layer !== "undefined") {
- if (typeof item[5] === "undefined") {
- item[5] = layer;
- } else {
- item[1] = "@layer".concat(item[5].length > 0 ? " ".concat(item[5]) : "", " {").concat(item[1], "}");
- item[5] = layer;
- }
- }
- if (media) {
- if (!item[2]) {
- item[2] = media;
- } else {
- item[1] = "@media ".concat(item[2], " {").concat(item[1], "}");
- item[2] = media;
- }
- }
- if (supports) {
- if (!item[4]) {
- item[4] = "".concat(supports);
- } else {
- item[1] = "@supports (".concat(item[4], ") {").concat(item[1], "}");
- item[4] = supports;
- }
- }
- list.push(item);
- }
- };
- return list;
- };
-
- /***/ }),
-
- /***/ 354:
- /***/ ((module) => {
-
-
-
- module.exports = function (item) {
- var content = item[1];
- var cssMapping = item[3];
- if (!cssMapping) {
- return content;
- }
- if (typeof btoa === "function") {
- var base64 = btoa(unescape(encodeURIComponent(JSON.stringify(cssMapping))));
- var data = "sourceMappingURL=data:application/json;charset=utf-8;base64,".concat(base64);
- var sourceMapping = "/*# ".concat(data, " */");
- return [content].concat([sourceMapping]).join("\n");
- }
- return [content].join("\n");
- };
-
- /***/ }),
-
- /***/ 540:
- /***/ ((module) => {
-
-
-
- /* istanbul ignore next */
- function insertStyleElement(options) {
- var element = document.createElement("style");
- options.setAttributes(element, options.attributes);
- options.insert(element, options.options);
- return element;
- }
- module.exports = insertStyleElement;
-
- /***/ }),
-
- /***/ 659:
- /***/ ((module) => {
-
-
-
- var memo = {};
-
- /* istanbul ignore next */
- function getTarget(target) {
- if (typeof memo[target] === "undefined") {
- var styleTarget = document.querySelector(target);
-
- // Special case to return head of iframe instead of iframe itself
- if (window.HTMLIFrameElement && styleTarget instanceof window.HTMLIFrameElement) {
- try {
- // This will throw an exception if access to iframe is blocked
- // due to cross-origin restrictions
- styleTarget = styleTarget.contentDocument.head;
- } catch (e) {
- // istanbul ignore next
- styleTarget = null;
- }
- }
- memo[target] = styleTarget;
- }
- return memo[target];
- }
-
- /* istanbul ignore next */
- function insertBySelector(insert, style) {
- var target = getTarget(insert);
- if (!target) {
- throw new Error("Couldn't find a style target. This probably means that the value for the 'insert' parameter is invalid.");
- }
- target.appendChild(style);
- }
- module.exports = insertBySelector;
-
- /***/ }),
-
- /***/ 825:
- /***/ ((module) => {
-
-
-
- /* istanbul ignore next */
- function apply(styleElement, options, obj) {
- var css = "";
- if (obj.supports) {
- css += "@supports (".concat(obj.supports, ") {");
- }
- if (obj.media) {
- css += "@media ".concat(obj.media, " {");
- }
- var needLayer = typeof obj.layer !== "undefined";
- if (needLayer) {
- css += "@layer".concat(obj.layer.length > 0 ? " ".concat(obj.layer) : "", " {");
- }
- css += obj.css;
- if (needLayer) {
- css += "}";
- }
- if (obj.media) {
- css += "}";
- }
- if (obj.supports) {
- css += "}";
- }
- var sourceMap = obj.sourceMap;
- if (sourceMap && typeof btoa !== "undefined") {
- css += "\n/*# sourceMappingURL=data:application/json;base64,".concat(btoa(unescape(encodeURIComponent(JSON.stringify(sourceMap)))), " */");
- }
-
- // For old IE
- /* istanbul ignore if */
- options.styleTagTransform(css, styleElement, options.options);
- }
- function removeStyleElement(styleElement) {
- // istanbul ignore if
- if (styleElement.parentNode === null) {
- return false;
- }
- styleElement.parentNode.removeChild(styleElement);
- }
-
- /* istanbul ignore next */
- function domAPI(options) {
- if (typeof document === "undefined") {
- return {
- update: function update() {},
- remove: function remove() {}
- };
- }
- var styleElement = options.insertStyleElement(options);
- return {
- update: function update(obj) {
- apply(styleElement, options, obj);
- },
- remove: function remove() {
- removeStyleElement(styleElement);
- }
- };
- }
- module.exports = domAPI;
-
- /***/ }),
-
- /***/ 995:
- /***/ ((module, __webpack_exports__, __webpack_require__) => {
-
- /* harmony export */ __webpack_require__.d(__webpack_exports__, {
- /* harmony export */ A: () => (__WEBPACK_DEFAULT_EXPORT__)
- /* harmony export */ });
- /* harmony import */ var _node_modules_css_loader_dist_runtime_sourceMaps_js__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(354);
- /* harmony import */ var _node_modules_css_loader_dist_runtime_sourceMaps_js__WEBPACK_IMPORTED_MODULE_0___default = /*#__PURE__*/__webpack_require__.n(_node_modules_css_loader_dist_runtime_sourceMaps_js__WEBPACK_IMPORTED_MODULE_0__);
- /* harmony import */ var _node_modules_css_loader_dist_runtime_api_js__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(314);
- /* harmony import */ var _node_modules_css_loader_dist_runtime_api_js__WEBPACK_IMPORTED_MODULE_1___default = /*#__PURE__*/__webpack_require__.n(_node_modules_css_loader_dist_runtime_api_js__WEBPACK_IMPORTED_MODULE_1__);
- // Imports
-
-
- var ___CSS_LOADER_EXPORT___ = _node_modules_css_loader_dist_runtime_api_js__WEBPACK_IMPORTED_MODULE_1___default()((_node_modules_css_loader_dist_runtime_sourceMaps_js__WEBPACK_IMPORTED_MODULE_0___default()));
- // Module
- ___CSS_LOADER_EXPORT___.push([module.id, `/* Transformers-specific styling additions */
-
- /* Code comparison layout */
- .code-compare {
- display: grid;
- grid-template-columns: 1fr 1fr;
- gap: 1.5rem;
- margin: 2rem 0;
- align-items: start;
- }
-
- .code-compare .code-column {
- background: #ffffff;
- border: 1px solid #e2e8f0;
- border-radius: 8px;
- overflow: hidden;
- box-shadow: 0 1px 3px rgba(0, 0, 0, 0.1);
- }
-
- .code-compare .code-header {
- background: #f8f9fa;
- padding: 0.75rem 1rem;
- font-weight: 600;
- color: #495057;
- border-bottom: 1px solid #e2e8f0;
- font-size: 0.9em;
- }
-
- .code-compare pre {
- margin: 0;
- padding: 1rem;
- background: #ffffff;
- overflow-x: auto;
- font-size: 0.85em;
- line-height: 1.4;
- }
-
- .code-compare pre code {
- color: #374151;
- }
-
- /* Mobile responsiveness for code comparison */
- @media (max-width: 768px) {
- .code-compare {
- grid-template-columns: 1fr;
- gap: 1rem;
- }
- }
-
- /* Tenet styling - special highlighting for design principles */
- .tenet-list {
- margin: 3rem 0;
- }
-
- .tenet-list ol {
- counter-reset: tenet-counter -1; /* Start from 0 */
- list-style: none;
- padding-left: 0;
- display: grid;
- grid-template-columns: 1fr;
- gap: 2.5rem;
- max-width: 900px;
- margin: 0 auto;
- }
-
- .tenet-list li.tenet {
- counter-increment: tenet-counter;
- background: linear-gradient(135deg, #ffffff 0%, #f8f9fa 100%);
- border: 2px solid #e2e8f0;
- border-radius: 16px;
1216
- padding: 2rem 2rem 2rem 4rem;
1217
- margin: 0;
1218
- position: relative;
1219
- box-shadow: 0 12px 35px rgba(0, 0, 0, 0.12);
1220
- transition: all 0.3s ease;
1221
- cursor: pointer;
1222
- }
1223
-
1224
- .tenet-list li.tenet:hover {
1225
- transform: translateY(-8px) scale(1.02);
1226
- box-shadow: 0 20px 50px rgba(0, 0, 0, 0.25);
1227
- border-color: rgba(0, 123, 255, 0.5);
1228
- background: linear-gradient(135deg, #ffffff 0%, #f0f8ff 100%);
1229
- }
1230
-
1231
- /* Colorful numbering system */
1232
- .tenet-list li.tenet:nth-child(1):before { background: linear-gradient(135deg, #667eea 0%, #764ba2 100%); }
1233
- .tenet-list li.tenet:nth-child(2):before { background: linear-gradient(135deg, #f093fb 0%, #f5576c 100%); }
1234
- .tenet-list li.tenet:nth-child(3):before { background: linear-gradient(135deg, #4facfe 0%, #00f2fe 100%); }
1235
- .tenet-list li.tenet:nth-child(4):before { background: linear-gradient(135deg, #43e97b 0%, #38f9d7 100%); }
1236
- .tenet-list li.tenet:nth-child(5):before { background: linear-gradient(135deg, #fa709a 0%, #fee140 100%); }
1237
- .tenet-list li.tenet:nth-child(6):before { background: linear-gradient(135deg, #a8edea 0%, #fed6e3 100%); }
1238
- .tenet-list li.tenet:nth-child(7):before { background: linear-gradient(135deg, #ff9a9e 0%, #fecfef 100%); }
1239
- .tenet-list li.tenet:nth-child(8):before { background: linear-gradient(135deg, #a18cd1 0%, #fbc2eb 100%); }
1240
- .tenet-list li.tenet:nth-child(9):before { background: linear-gradient(135deg, #ffecd2 0%, #fcb69f 100%); }
1241
-
1242
- .tenet-list li.tenet:before {
1243
- content: counter(tenet-counter);
1244
- position: absolute;
1245
- top: -12px;
1246
- left: -12px;
1247
- color: white;
1248
- width: 48px;
1249
- height: 48px;
1250
- border-radius: 50%;
1251
- display: flex;
1252
- align-items: center;
1253
- justify-content: center;
1254
- font-size: 1.2em;
1255
- font-weight: bold;
1256
- box-shadow: 0 4px 12px rgba(0, 0, 0, 0.15);
1257
- border: 3px solid white;
1258
- }
1259
-
1260
- .tenet-list li.tenet strong {
1261
- color: #1a202c;
1262
- font-size: 1.1em;
1263
- display: block;
1264
- margin-bottom: 0.5rem;
1265
- }
1266
-
1267
- .tenet-list li.tenet em {
1268
- color: #4a5568;
1269
- font-size: 0.95em;
1270
- font-style: italic;
1271
- display: block;
1272
- margin-top: 0.75rem;
1273
- padding: 1rem;
1274
- background: rgba(0, 0, 0, 0.03);
1275
- border-radius: 8px;
1276
- border-left: 3px solid #e2e8f0;
1277
- }
1278
-
1279
- .tenet-list li.tenet p {
1280
- color: #2d3748;
1281
- line-height: 1.6;
1282
- margin: 0.5rem 0;
1283
- }
1284
-
1285
- /* Add a subtle pulse animation for the numbers */
1286
- @keyframes pulse-glow {
1287
- 0% { box-shadow: 0 4px 12px rgba(0, 0, 0, 0.15); }
1288
- 50% { box-shadow: 0 4px 20px rgba(0, 0, 0, 0.25); }
1289
- 100% { box-shadow: 0 4px 12px rgba(0, 0, 0, 0.15); }
1290
- }
1291
-
1292
- .tenet-list li.tenet:hover:before {
1293
- animation: pulse-glow 2s ease-in-out infinite;
1294
- }
1295
-
1296
- /* Interactive component styling */
1297
- .interactive-demo {
1298
- border: 1px solid #e2e8f0;
1299
- border-radius: 12px;
1300
- background: #ffffff;
1301
- margin: 2rem 0;
1302
- overflow: hidden;
1303
- box-shadow: 0 4px 6px rgba(0, 0, 0, 0.07);
1304
- }
1305
-
1306
- /* Model visualization fragment styling */
1307
- [id*="plot-model-visualisation"] {
1308
- margin: 1rem -2rem !important;
1309
- width: calc(100% + 4rem) !important;
1310
- }
1311
-
1312
- .interactive-demo .demo-header {
1313
- background: linear-gradient(135deg, #667eea 0%, #764ba2 100%);
1314
- color: white;
1315
- padding: 1rem 1.5rem;
1316
- font-weight: 600;
1317
- }
1318
-
1319
- .interactive-demo .demo-content {
1320
- padding: 1.5rem;
1321
- }
1322
-
1323
- .interactive-demo .demo-footer {
1324
- background: #f8f9fa;
1325
- padding: 1rem 1.5rem;
1326
- border-top: 1px solid #e2e8f0;
1327
- color: #6c757d;
1328
- font-size: 0.9em;
1329
- }
1330
-
1331
- /* Button styling for interactive elements */
1332
- .btn-primary {
1333
- background: linear-gradient(135deg, #667eea 0%, #764ba2 100%);
1334
- border: none;
1335
- color: white;
1336
- padding: 0.75rem 1.5rem;
1337
- border-radius: 6px;
1338
- font-weight: 500;
1339
- cursor: pointer;
1340
- transition: transform 0.2s, box-shadow 0.2s;
1341
- }
1342
-
1343
- .btn-primary:hover {
1344
- transform: translateY(-1px);
1345
- box-shadow: 0 4px 12px rgba(102, 126, 234, 0.3);
1346
- }
1347
-
1348
- .btn-primary:disabled {
1349
- opacity: 0.6;
1350
- cursor: not-allowed;
1351
- transform: none;
1352
- box-shadow: none;
1353
- }
1354
-
1355
- /* Terminal styling */
1356
- .terminal-container {
1357
- background: #1a202c;
1358
- border-radius: 8px;
1359
- padding: 1rem;
1360
- color: #e2e8f0;
1361
- font-family: 'Monaco', 'Menlo', 'Ubuntu Mono', monospace;
1362
- font-size: 0.9em;
1363
- }
1364
-
1365
- .terminal-input {
1366
- background: #2d3748;
1367
- border: 1px solid #4a5568;
1368
- color: #e2e8f0;
1369
- padding: 0.5rem;
1370
- border-radius: 4px;
1371
- width: 100%;
1372
- font-family: inherit;
1373
- }
1374
-
1375
- .terminal-output {
1376
- background: #0a0e1a;
1377
- padding: 1rem;
1378
- border-radius: 4px;
1379
- white-space: pre-wrap;
1380
- word-break: break-all;
1381
- min-height: 100px;
1382
- max-height: 300px;
1383
- overflow-y: auto;
1384
- }
1385
-
1386
- /* Attention visualization styling */
1387
- .attention-matrix {
1388
- font-family: monospace;
1389
- font-size: 0.8em;
1390
- border-collapse: collapse;
1391
- margin: 1rem 0;
1392
- }
1393
-
1394
- .attention-matrix td {
1395
- border: 1px solid #ddd;
1396
- padding: 4px 8px;
1397
- text-align: center;
1398
- min-width: 50px;
1399
- }
1400
-
1401
- /* Memory chart styling */
1402
- .memory-chart-container {
1403
- background: #f8f9fa;
1404
- border: 2px solid #e9ecef;
1405
- border-radius: 8px;
1406
- padding: 1rem;
1407
- margin: 1rem 0;
1408
- }
1409
-
1410
- /* Image styling improvements */
1411
- img {
1412
- max-width: 100%;
1413
- height: auto;
1414
- border-radius: 8px;
1415
- box-shadow: 0 4px 12px rgba(0, 0, 0, 0.1);
1416
- margin: 1.5rem 0;
1417
- }
1418
-
1419
- /* Table of contents styling - Fixed positioning like ultrascale */
1420
- @media (min-width: 1200px) {
1421
- d-article {
1422
- overflow: visible !important;
1423
- }
1424
-
1425
- d-contents {
1426
- align-self: start !important;
1427
- background: white !important;
1428
- grid-column-start: 1 !important;
1429
- grid-column-end: 4 !important;
1430
- grid-row: auto / span 6 !important;
1431
- justify-self: end !important;
1432
- margin-top: 0em !important;
1433
- padding-right: 3em !important;
1434
- padding-left: 2em !important;
1435
- position: -webkit-sticky !important; /* For Safari */
1436
- position: sticky !important;
1437
- top: 10px !important;
1438
- overflow-y: auto !important;
1439
- height: calc(100vh - 40px) !important;
1440
- scrollbar-width: none !important;
1441
- transition: max-height 0.3s ease-out !important;
1442
- z-index: -100 !important;
1443
- display: block !important;
1444
- visibility: visible !important;
1445
- }
1446
- }
1447
-
1448
- @media (max-width: 1199px) {
1449
- d-contents {
1450
- display: none !important;
1451
- background: white !important;
1452
- justify-self: start !important;
1453
- align-self: start !important;
1454
- padding-bottom: 0.5em !important;
1455
- margin-bottom: 1em !important;
1456
- padding-left: 0.25em !important;
1457
- border-bottom: 1px solid rgba(0, 0, 0, 0.1) !important;
1458
- overflow-y: scroll !important;
1459
- height: calc(100vh - 40px) !important;
1460
- scrollbar-width: none !important;
1461
- z-index: -100 !important;
1462
- }
1463
- }
1464
-
1465
- /* Force TOC to be visible and override distill defaults */
1466
- d-contents {
1467
- display: block !important;
1468
- visibility: visible !important;
1469
- opacity: 1 !important;
1470
- }
1471
-
1472
- /* TOC Navigation styling */
1473
- d-contents .toc-header {
1474
- margin-bottom: 1.5rem;
1475
- border-bottom: 2px solid #007bff;
1476
- padding-bottom: 0.5rem;
1477
- }
1478
-
1479
- d-contents .toc-title {
1480
- font-weight: bold;
1481
- font-size: 1.2em;
1482
- color: #333;
1483
- }
1484
-
1485
- d-contents nav a {
1486
- color: rgba(0, 0, 0, 0.7);
1487
- text-decoration: none;
1488
- border-bottom: none;
1489
- display: block;
1490
- padding: 0.3rem 0;
1491
- font-size: 0.9em;
1492
- line-height: 1.4;
1493
- transition: color 0.2s ease;
1494
- }
1495
-
1496
- d-contents nav a:hover {
1497
- color: #007bff;
1498
- text-decoration: none;
1499
- }
1500
-
1501
- d-contents nav a.active {
1502
- color: #007bff;
1503
- font-weight: 600;
1504
- }
1505
-
1506
- d-contents nav div {
1507
- margin-bottom: 0.2rem;
1508
- }
1509
-
1510
- /* Smooth scrollbar */
1511
- d-contents {
1512
- scrollbar-width: thin;
1513
- scrollbar-color: rgba(0, 123, 255, 0.3) transparent;
1514
- }
1515
-
1516
- d-contents::-webkit-scrollbar {
1517
- width: 6px;
1518
- }
1519
-
1520
- d-contents::-webkit-scrollbar-track {
1521
- background: transparent;
1522
- }
1523
-
1524
- d-contents::-webkit-scrollbar-thumb {
1525
- background: rgba(0, 123, 255, 0.3);
1526
- border-radius: 3px;
1527
- }
1528
-
1529
- d-contents::-webkit-scrollbar-thumb:hover {
1530
- background: rgba(0, 123, 255, 0.5);
1531
- }
1532
-
1533
- /* Custom tooltip styling for tenet links */
1534
- d-contents nav a[title] {
1535
- position: relative;
1536
- cursor: help;
1537
- }
1538
-
1539
- d-contents nav a[title]:hover {
1540
- color: #667eea;
1541
- }
1542
-
1543
- /* Enhanced tooltip using CSS (fallback for title attribute) */
1544
- d-contents nav a[title]:after {
1545
- content: attr(title);
1546
- position: absolute;
1547
- left: 100%;
1548
- top: 50%;
1549
- transform: translateY(-50%);
1550
- background: #1a202c;
1551
- color: white;
1552
- padding: 0.75rem 1rem;
1553
- border-radius: 8px;
1554
- font-size: 0.85em;
1555
- white-space: normal;
1556
- width: 300px;
1557
- line-height: 1.4;
1558
- z-index: 1001;
1559
- opacity: 0;
1560
- visibility: hidden;
1561
- transition: opacity 0.3s ease, visibility 0.3s ease;
1562
- pointer-events: none;
1563
- box-shadow: 0 4px 12px rgba(0, 0, 0, 0.2);
1564
- }
1565
-
1566
- d-contents nav a[title]:before {
1567
- content: '';
1568
- position: absolute;
1569
- left: 100%;
1570
- top: 50%;
1571
- transform: translate(-8px, -50%);
1572
- border: 8px solid transparent;
1573
- border-right-color: #1a202c;
1574
- z-index: 1002;
1575
- opacity: 0;
1576
- visibility: hidden;
1577
- transition: opacity 0.3s ease, visibility 0.3s ease;
1578
- }
1579
-
1580
- d-contents nav a[title]:hover:after,
1581
- d-contents nav a[title]:hover:before {
1582
- opacity: 1;
1583
- visibility: visible;
1584
- }
1585
-
1586
- /* Adjust for smaller screens */
1587
- @media (max-width: 1400px) {
1588
- d-contents nav a[title]:after {
1589
- left: auto;
1590
- right: 100%;
1591
- margin-right: 1rem;
1592
- width: 250px;
1593
- }
1594
-
1595
- d-contents nav a[title]:before {
1596
- left: auto;
1597
- right: 100%;
1598
- transform: translate(8px, -50%);
1599
- border-right-color: transparent;
1600
- border-left-color: #1a202c;
1601
- }
1602
- }
1603
-
1604
- /* Improve code syntax highlighting with Prism */
1605
- pre[class*="language-"] {
1606
- background: #f8f9fa !important;
1607
- border: 1px solid #e9ecef !important;
1608
- border-radius: 8px !important;
1609
- padding: 1.5rem !important;
1610
- margin: 1.5rem 0 !important;
1611
- overflow-x: auto !important;
1612
- font-size: 0.9em !important;
1613
- line-height: 1.5 !important;
1614
- }
1615
-
1616
- code[class*="language-"] {
1617
- background: none !important;
1618
- font-family: 'Monaco', 'Menlo', 'Ubuntu Mono', 'Courier New', monospace !important;
1619
- color: #383a42 !important;
1620
- }
1621
-
1622
- /* Inline code */
1623
- p code, li code {
1624
- background: #f1f3f4 !important;
1625
- padding: 0.2em 0.4em !important;
1626
- border-radius: 3px !important;
1627
- font-size: 0.9em !important;
1628
- color: #d73a49 !important;
1629
- }
1630
-
1631
- /* Distill article improvements */
1632
- d-article {
1633
- max-width: none;
1634
- font-size: 19px;
1635
- line-height: 1.7 !important;
1636
- color: #1a1a1a;
1637
- padding-top: 1rem !important;
1638
- grid-row-gap: 0 !important;
1639
- }
1640
-
1641
- d-article > * {
1642
- grid-column: middle !important;
1643
- max-width: none;
1644
- }
1645
-
1646
- /* Adjust for TOC on larger screens */
1647
- @media (min-width: 1200px) {
1648
- d-article > * {
1649
- grid-column: text / page-end !important;
1650
- max-width: none;
1651
- }
1652
- }
1653
-
1654
- /* Improve paragraph readability */
1655
- d-article p {
1656
- font-size: 19px;
1657
- line-height: 1.5;
1658
- margin-top: 0 !important;
1659
- color: #1a1a1a;
1660
- }
1661
-
1662
- /* Improve heading sizes */
1663
- d-article h1 {
1664
- font-size: 3rem;
1665
- line-height: 1.2;
1666
- margin: 3rem 0 2rem 0;
1667
- color: #1a202c;
1668
- font-weight: 700;
1669
- }
1670
-
1671
- d-article h2 {
1672
- font-size: 2.5rem;
1673
- line-height: 1.3;
1674
- margin: 1.5rem 0 0.75rem 0 !important;
1675
- padding-bottom: 0.5rem !important;
1676
- color: #1a202c;
1677
- font-weight: 650;
1678
- }
1679
-
1680
- d-article h3 {
1681
- font-size: 2rem;
1682
- line-height: 1.4;
1683
- margin: 2rem 0 1rem 0;
1684
- color: #1a202c;
1685
- font-weight: 600;
1686
- }
1687
-
1688
- d-article h4 {
1689
- font-size: 1.5rem;
1690
- line-height: 1.4;
1691
- margin: 1.5rem 0 1rem 0;
1692
- color: #2d3748;
1693
- font-weight: 600;
1694
- }
1695
-
1696
- /* Improve list readability */
1697
- d-article ul li,
1698
- d-article ol li {
1699
- font-size: 18px;
1700
- line-height: 1.7;
1701
- margin-bottom: 0.5rem;
1702
- }
1703
-
1704
- /* Enhanced tenet reference styling with custom tooltips */
1705
- a[href^="#source-of-truth"],
1706
- a[href^="#one-model-one-file"],
1707
- a[href^="#code-is-product"],
1708
- a[href^="#standardize-dont-abstract"],
1709
- a[href^="#do-repeat-yourself"],
1710
- a[href^="#minimal-user-api"],
1711
- a[href^="#backwards-compatibility"],
1712
- a[href^="#consistent-public-surface"],
1713
- a[href^="#modular-toolbox"] {
1714
- position: relative;
1715
- color: #667eea;
1716
- font-weight: 600;
1717
- text-decoration: underline;
1718
- text-decoration-color: rgba(102, 126, 234, 0.3);
1719
- transition: all 0.3s ease;
1720
- }
1721
-
1722
- a[href^="#source-of-truth"]:hover,
1723
- a[href^="#one-model-one-file"]:hover,
1724
- a[href^="#code-is-product"]:hover,
1725
- a[href^="#standardize-dont-abstract"]:hover,
1726
- a[href^="#do-repeat-yourself"]:hover,
1727
- a[href^="#minimal-user-api"]:hover,
1728
- a[href^="#backwards-compatibility"]:hover,
1729
- a[href^="#consistent-public-surface"]:hover,
1730
- a[href^="#modular-toolbox"]:hover {
1731
- color: #4c51bf;
1732
- text-decoration-color: #4c51bf;
1733
- background: rgba(102, 126, 234, 0.1);
1734
- padding: 2px 4px;
1735
- border-radius: 4px;
1736
- }
1737
-
1738
- /* Custom tooltip using data-tooltip attribute */
1739
- a[data-tooltip]:after {
1740
- content: attr(data-tooltip);
1741
- position: absolute;
1742
- bottom: 100%;
1743
- left: 50%;
1744
- transform: translateX(-50%);
1745
- background: #1a202c;
1746
- color: white;
1747
- padding: 0.75rem 1rem;
1748
- border-radius: 8px;
1749
- font-size: 0.85em;
1750
- font-weight: 400;
1751
- white-space: normal;
1752
- width: 320px;
1753
- line-height: 1.4;
1754
- z-index: 1001;
1755
- opacity: 0;
1756
- visibility: hidden;
1757
- transition: opacity 0.3s ease, visibility 0.3s ease;
1758
- pointer-events: none;
1759
- box-shadow: 0 4px 12px rgba(0, 0, 0, 0.2);
1760
- margin-bottom: 8px;
1761
- }
1762
-
1763
- a[data-tooltip]:before {
1764
- content: '';
1765
- position: absolute;
1766
- bottom: 100%;
1767
- left: 50%;
1768
- transform: translateX(-50%);
1769
- border: 8px solid transparent;
1770
- border-top-color: #1a202c;
1771
- z-index: 1002;
1772
- opacity: 0;
1773
- visibility: hidden;
1774
- transition: opacity 0.3s ease, visibility 0.3s ease;
1775
- }
1776
-
1777
- a[data-tooltip]:hover:after,
1778
- a[data-tooltip]:hover:before {
1779
- opacity: 1;
1780
- visibility: visible;
1781
- }
1782
-
1783
- /* Breadcrumb navigation styling */
1784
- .crumbs {
1785
- background: linear-gradient(135deg, #f0f4ff 0%, #e6eeff 100%);
1786
- border-left: 5px solid #667eea;
1787
- padding: 1.25rem 1.75rem;
1788
- margin: 2.5rem 0;
1789
- border-radius: 0 8px 8px 0;
1790
- box-shadow: 0 2px 8px rgba(102, 126, 234, 0.12);
1791
- font-size: 0.95em;
1792
- line-height: 1.6;
1793
- color: #4a5568;
1794
- }
1795
-
1796
- .crumbs strong {
1797
- color: #667eea;
1798
- font-weight: 700;
1799
- }
1800
-
1801
- .crumbs code {
1802
- background: rgba(102, 126, 234, 0.1);
1803
- padding: 0.15em 0.4em;
1804
- border-radius: 3px;
1805
- font-size: 0.9em;
1806
- color: #4c51bf;
1807
- }
1808
-
1809
- .crumbs a {
1810
- color: #667eea;
1811
- font-weight: 500;
1812
- }
1813
-
1814
- /* Improve blockquote styling */
1815
- d-article blockquote {
1816
- font-size: 19px;
1817
- line-height: 1.8;
1818
- padding: 1.5rem 2rem;
1819
- margin: 2rem 0;
1820
- border-left: 4px solid #667eea;
1821
- background: linear-gradient(135deg, #f8f9fa 0%, #e9ecef 50%);
1822
- border-radius: 0 8px 8px 0;
1823
- font-style: italic;
1824
- color: #4a5568;
1825
- }
1826
-
1827
- /* Link capsule styling - only for external HTTP(S) links */
1828
- d-article a[href^="http://"],
1829
- d-article a[href^="https://"] {
1830
- background: linear-gradient(135deg, #e3f2fd 0%, #bbdefb 100%);
1831
- color: #1565c0;
1832
- text-decoration: none;
1833
- padding: 0.15em 0.5em;
1834
- border-radius: 12px;
1835
- border: 1px solid #90caf9;
1836
- display: inline-block;
1837
- transition: all 0.3s ease;
1838
- font-weight: 500;
1839
- box-shadow: 0 1px 3px rgba(21, 101, 192, 0.15);
1840
- }
1841
-
1842
- d-article a[href^="http://"]:hover,
1843
- d-article a[href^="https://"]:hover {
1844
- background: linear-gradient(135deg, #2196f3 0%, #1976d2 100%);
1845
- color: white;
1846
- border-color: #1565c0;
1847
- transform: translateY(-1px);
1848
- box-shadow: 0 4px 12px rgba(21, 101, 192, 0.3);
1849
- }
1850
-
1851
- d-article a[href^="http://"]:active,
1852
- d-article a[href^="https://"]:active {
1853
- transform: translateY(0);
1854
- box-shadow: 0 1px 3px rgba(21, 101, 192, 0.2);
1855
- }
1856
-
1857
- /* Full width elements */
1858
- d-article .code-compare,
1859
- d-article .interactive-demo,
1860
- d-article .memory-chart-container {
1861
- max-width: none;
1862
- width: 100%;
1863
- margin-left: 0;
1864
- margin-right: 0;
1865
- }
1866
-
1867
- /* Responsive design improvements */
1868
- @media (max-width: 1200px) {
1869
- d-article .code-compare,
1870
- d-article .interactive-demo {
1871
- max-width: 95%;
1872
- margin-left: auto;
1873
- margin-right: auto;
1874
- }
1875
- }
1876
-
1877
- @media (max-width: 768px) {
1878
- .tenet-list li.tenet {
1879
- padding: 1rem;
1880
- }
1881
-
1882
- .interactive-demo .demo-content {
1883
- padding: 1rem;
1884
- }
1885
- }
1886
-
1887
- `, "",{"version":3,"sources":["webpack://./src/transformers-custom.css"],"names":[],"mappings":"AAAA,4CAA4C;;AAE5C,2BAA2B;AAC3B;IACI,aAAa;IACb,8BAA8B;IAC9B,WAAW;IACX,cAAc;IACd,kBAAkB;AACtB;;AAEA;IACI,mBAAmB;IACnB,yBAAyB;IACzB,kBAAkB;IAClB,gBAAgB;IAChB,wCAAwC;AAC5C;;AAEA;IACI,mBAAmB;IACnB,qBAAqB;IACrB,gBAAgB;IAChB,cAAc;IACd,gCAAgC;IAChC,gBAAgB;AACpB;;AAEA;IACI,SAAS;IACT,aAAa;IACb,mBAAmB;IACnB,gBAAgB;IAChB,iBAAiB;IACjB,gBAAgB;AACpB;;AAEA;IACI,cAAc;AAClB;;AAEA,8CAA8C;AAC9C;IACI;QACI,0BAA0B;QAC1B,SAAS;IACb;AACJ;;AAEA,+DAA+D;AAC/D;IACI,cAAc;AAClB;;AAEA;IACI,+BAA+B,EAAE,iBAAiB;IAClD,gBAAgB;IAChB,eAAe;IACf,aAAa;IACb,0BAA0B;IAC1B,WAAW;IACX,gBAAgB;IAChB,cAAc;AAClB;;AAEA;IACI,gCAAgC;IAChC,6DAA6D;IAC7D,yBAAyB;IACzB,mBAAmB;IACnB,4BAA4B;IAC5B,SAAS;IACT,kBAAkB;IAClB,2CAA2C;IAC3C,yBAAyB;IACzB,eAAe;AACnB;;AAEA;IACI,uCAAuC;IACvC,2CAA2C;IAC3C,oCAAoC;IACpC,6DAA6D;AACjE;;AAEA,8BAA8B;AAC9B,2CAA2C,6DAA6D,EAAE;AAC1G,2CAA2C,6DAA6D,EAAE;AAC1G,2CAA2C,6DAA6D,EAAE;AAC1G,2CAA2C,6DAA6D,EAAE;AAC1G,2CAA2C,6DAA6D,EAAE;AAC1G,2CAA2C,6DAA6D,EAAE;AAC1G,2CAA2C,6DAA6D,EAAE;AAC1G,2CAA2C,6DAA6D,EAAE;AAC1G,2CAA2C,6DAA6D,EAAE;;AAE1G;IACI,+BAA+B;IAC/B,kBAAkB;IAClB,UAAU;IACV,WAAW;IACX,YAAY;IACZ,WAAW;IACX,YAAY;IACZ,kBAAkB;IAClB,aAAa;IACb,mBAAmB;IACnB,uBAAuB;IACvB,gBAAgB;IAChB,iBAAiB;IACjB,0CAA0C;IAC1C,uBAAuB;AAC3B;;AAEA;IACI,cAAc;IACd,gBAAgB;IAChB,cAAc;IACd,qBAAqB;AACzB;;AAEA;IACI,cAAc;IACd,iBAAiB;IACjB,kBAAkB;IAClB,cAAc;IACd,mBAAmB;IACnB,aAAa;IACb,+BAA+B;IAC/B,kBAAkB;IAClB,8BAA8B;AAClC;;AAEA;IACI,cAAc;IACd,gBAAgB;IAChB,gBAAgB;AACpB;;AAEA,iDAAiD;AACjD;IACI,KAAK,0CAA0C,EAAE;IACjD,MAAM,0CAA0C,EAAE;IAClD,OAAO,0CAA0C,EAAE;AACvD;;AAEA;IACI,6CAA6C;AACjD;;AAEA,kCAAkC;AAClC;IACI,yBAAyB;IACzB,mBAAmB;IACnB,mBAAmB;IACnB,cAAc;IACd,gBAAgB;IAChB,yCAAyC;AAC7C;;AAEA,yCAAyC;AACzC;IACI,6BAA6B;IAC7B,mCAAmC;AACvC;;AAEA;IACI,6DAA6D;IAC7D,YAAY;IACZ,oBAAoB;IACpB,gBAAgB;AACpB;;AAEA;IACI,eAAe;AACnB;;AAEA;IACI,mBAAmB;IACnB,oBAAoB;IACpB,6BAA6B;IAC7B,cAAc;IACd,gBAAgB;AACpB;;AAEA,4CAA4C;AAC5C;IACI,6DAA6D;IAC7D,YAAY;IACZ,YAAY;IACZ,uBAAuB;IACvB,kBAAkB;IAClB,gBAAgB;IAChB,eAAe;IACf,2CAA2C;AAC/C;;AAEA;IACI,2BAA2B;IAC3B,+CAA+C;AACnD;;AAEA;IACI,YAAY;IACZ,mBAAmB;IACnB,eAAe;IACf,gBAAgB;AACpB;;AAEA,qBAAqB;AACrB;IACI,mBAAmB;IACnB,kBAAkB;IAClB,aAAa;IACb,cAAc;IACd,wDAAwD;IACxD,gBAAgB;AACpB;;AAEA;IACI,mBAAmB;IACnB,yBAAyB;IACzB,cAAc;IACd,eAAe;IACf,kBAAkB;IAClB,WAAW;IACX,oBAAoB;AACxB;;AAEA;IACI,mBAAmB;IACnB,aAAa;IACb,kBAAkB;IAClB,qBAAqB;IACrB,qBAAqB;IACrB,iBAAiB;IACjB,iBAAiB;IACjB,gBAAgB;AACpB;;AAEA,oCAAoC;AACpC;IACI,sBAAsB;IACtB,gBAAgB;IAChB,yBAAyB;IACzB,cAAc;AAClB;;AAEA;IACI,sBAAsB;IACtB,gBAAgB;IAChB,kBAAkB;IAClB,eAAe;AACnB;;AAEA,yBAAyB;AACzB;IACI,mBAAmB;IACnB,yBAAyB;IACzB,kBAAkB;IAClB,aAAa;IACb,cAAc;AAClB;;AAEA,+BAA+B;AAC/B;IACI,eAAe;IACf,YAAY;IACZ,kBAAkB;IAClB,yCAAyC;IACzC,gBAAgB;AACpB;;AAEA,kEAAkE;AAClE;IACI;QACI,4BAA4B;IAChC;;IAEA;QACI,4BAA4B;QAC5B,4BAA4B;QAC5B,+BAA+B;QAC/B,6BAA6B;QAC7B,kCAAkC;QAClC,4BAA4B;QAC5B,0BAA0B;QAC1B,6BAA6B;QAC7B,4BAA4B;QAC5B,mCAAmC,EAAE,eAAe;QACpD,2BAA2B;QAC3B,oBAAoB;QACpB,2BAA2B;QAC3B,qCAAqC;QACrC,gCAAgC;QAChC,+CAA+C;QAC/C,wBAAwB;QACxB,yBAAyB;QACzB,8BAA8B;IAClC;AACJ;;AAEA;IACI;QACI,wBAAwB;QACxB,4BAA4B;QAC5B,8BAA8B;QAC9B,4BAA4B;QAC5B,gCAAgC;QAChC,6BAA6B;QAC7B,+BAA+B;QAC/B,sDAAsD;QACtD,6BAA6B;QAC7B,qCAAqC;QACrC,gCAAgC;QAChC,wBAAwB;IAC5B;AACJ;;AAEA,0DAA0D;AAC1D;IACI,yBAAyB;IACzB,8BAA8B;IAC9B,qBAAqB;AACzB;;AAEA,2BAA2B;AAC3B;IACI,qBAAqB;IACrB,gCAAgC;IAChC,sBAAsB;AAC1B;;AAEA;IACI,iBAAiB;IACjB,gBAAgB;IAChB,WAAW;AACf;;AAEA;IACI,yBAAyB;IACzB,qBAAqB;IACrB,mBAAmB;IACnB,cAAc;IACd,iBAAiB;IACjB,gBAAgB;IAChB,gBAAgB;IAChB,2BAA2B;AAC/B;;AAEA;IACI,cAAc;IACd,qBAAq
B;AACzB;;AAEA;IACI,cAAc;IACd,gBAAgB;AACpB;;AAEA;IACI,qBAAqB;AACzB;;AAEA,qBAAqB;AACrB;IACI,qBAAqB;IACrB,mDAAmD;AACvD;;AAEA;IACI,UAAU;AACd;;AAEA;IACI,uBAAuB;AAC3B;;AAEA;IACI,kCAAkC;IAClC,kBAAkB;AACtB;;AAEA;IACI,kCAAkC;AACtC;;AAEA,2CAA2C;AAC3C;IACI,kBAAkB;IAClB,YAAY;AAChB;;AAEA;IACI,cAAc;AAClB;;AAEA,8DAA8D;AAC9D;IACI,oBAAoB;IACpB,kBAAkB;IAClB,UAAU;IACV,QAAQ;IACR,2BAA2B;IAC3B,mBAAmB;IACnB,YAAY;IACZ,qBAAqB;IACrB,kBAAkB;IAClB,iBAAiB;IACjB,mBAAmB;IACnB,YAAY;IACZ,gBAAgB;IAChB,aAAa;IACb,UAAU;IACV,kBAAkB;IAClB,mDAAmD;IACnD,oBAAoB;IACpB,yCAAyC;AAC7C;;AAEA;IACI,WAAW;IACX,kBAAkB;IAClB,UAAU;IACV,QAAQ;IACR,gCAAgC;IAChC,6BAA6B;IAC7B,2BAA2B;IAC3B,aAAa;IACb,UAAU;IACV,kBAAkB;IAClB,mDAAmD;AACvD;;AAEA;;IAEI,UAAU;IACV,mBAAmB;AACvB;;AAEA,+BAA+B;AAC/B;IACI;QACI,UAAU;QACV,WAAW;QACX,kBAAkB;QAClB,YAAY;IAChB;;IAEA;QACI,UAAU;QACV,WAAW;QACX,+BAA+B;QAC/B,+BAA+B;QAC/B,0BAA0B;IAC9B;AACJ;;AAEA,gDAAgD;AAChD;IACI,8BAA8B;IAC9B,oCAAoC;IACpC,6BAA6B;IAC7B,0BAA0B;IAC1B,2BAA2B;IAC3B,2BAA2B;IAC3B,2BAA2B;IAC3B,2BAA2B;AAC/B;;AAEA;IACI,2BAA2B;IAC3B,kFAAkF;IAClF,yBAAyB;AAC7B;;AAEA,gBAAgB;AAChB;IACI,8BAA8B;IAC9B,+BAA+B;IAC/B,6BAA6B;IAC7B,2BAA2B;IAC3B,yBAAyB;AAC7B;;AAEA,iCAAiC;AACjC;IACI,eAAe;IACf,eAAe;IACf,2BAA2B;IAC3B,cAAc;IACd,4BAA4B;IAC5B,0BAA0B;AAC9B;;AAEA;IACI,8BAA8B;IAC9B,eAAe;AACnB;;AAEA,qCAAqC;AACrC;IACI;QACI,uCAAuC;QACvC,eAAe;IACnB;AACJ;;AAEA,kCAAkC;AAClC;IACI,eAAe;IACf,gBAAgB;IAChB,wBAAwB;IACxB,cAAc;AAClB;;AAEA,0BAA0B;AAC1B;IACI,eAAe;IACf,gBAAgB;IAChB,qBAAqB;IACrB,cAAc;IACd,gBAAgB;AACpB;;AAEA;IACI,iBAAiB;IACjB,gBAAgB;IAChB,qCAAqC;IACrC,iCAAiC;IACjC,cAAc;IACd,gBAAgB;AACpB;;AAEA;IACI,eAAe;IACf,gBAAgB;IAChB,qBAAqB;IACrB,cAAc;IACd,gBAAgB;AACpB;;AAEA;IACI,iBAAiB;IACjB,gBAAgB;IAChB,uBAAuB;IACvB,cAAc;IACd,gBAAgB;AACpB;;AAEA,6BAA6B;AAC7B;;IAEI,eAAe;IACf,gBAAgB;IAChB,qBAAqB;AACzB;;AAEA,0DAA0D;AAC1D;;;;;;;;;IASI,kBAAkB;IAClB,cAAc;IACd,gBAAgB;IAChB,0BAA0B;IAC1B,+CAA+C;IAC/C,yBAAyB;AAC7B;;AAEA;;;;;;;;;IASI,cAAc;IACd,8BAA8B;IAC9B,oCAAoC;IACpC,gBAAgB;IAChB,kBAAkB;AACtB;;AAEA,gDAAgD;AAChD;IACI,2BAA2B;IAC3B,kBAAkB;IAClB,YAAY;IACZ,SAAS;IACT,2BAA2B;IAC3B,mBAAmB;IACnB,YAAY;IACZ,qBAAqB;IACrB,kBAAkB;IAClB,iBAAiB;IACjB,gBAAgB;IAChB,mBAAmB;IACnB,YAAY;IACZ,gBAAgB;IAChB,aAAa;IACb,UAAU;IACV,kBAAkB;IAClB,mDAAmD;IACnD,oBAAoB;IACpB,yCAAyC;IACzC,kBAAkB;AACtB;;AAEA;IACI,WAAW;IACX,kBAAkB;IAClB,YAAY;IACZ,SAAS;IACT,2BAA2B;IAC3B,6BAA6B;IAC7B,yBAAyB;IACzB,aAAa;IACb,UAAU;IACV,kBAAkB;IAClB,mDAAmD;AACvD;;AAEA;;IAEI,UAAU;IACV,mBAAmB;AACvB;;AAEA,kCAAkC;AAClC;IACI,6DAA6D;IAC7D,8BAA8B;IAC9B,wBAAwB;IACxB,gBAAgB;IAChB,0BAA0B;IAC1B,+CAA+C;IAC/C,iBAAiB;IACjB,gBAAgB;IAChB,cAAc;AAClB;;AAEA;IACI,cAAc;IACd,gBAAgB;AACpB;;AAEA;IACI,oCAAoC;IACpC,qBAAqB;IACrB,kBAAkB;IAClB,gBAAgB;IAChB,cAAc;AAClB;;AAEA;IACI,cAAc;IACd,gBAAgB;AACpB;;AAEA,+BAA+B;AAC/B;IACI,eAAe;IACf,gBAAgB;IAChB,oBAAoB;IACpB,cAAc;IACd,8BAA8B;IAC9B,4DAA4D;IAC5D,0BAA0B;IAC1B,kBAAkB;IAClB,cAAc;AAClB;;AAEA,2DAA2D;AAC3D;;IAEI,6DAA6D;IAC7D,cAAc;IACd,qBAAqB;IACrB,qBAAqB;IACrB,mBAAmB;IACnB,yBAAyB;IACzB,qBAAqB;IACrB,yBAAyB;IACzB,gBAAgB;IAChB,8CAA8C;AAClD;;AAEA;;IAEI,6DAA6D;IAC7D,YAAY;IACZ,qBAAqB;IACrB,2BAA2B;IAC3B,8CAA8C;AAClD;;AAEA;;IAEI,wBAAwB;IACxB,6CAA6C;AACjD;;AAEA,wBAAwB;AACxB;;;IAGI,eAAe;IACf,WAAW;IACX,cAAc;IACd,eAAe;AACnB;;AAEA,mCAAmC;AACnC;IACI;;QAEI,cAAc;QACd,iBAAiB;QACjB,kBAAkB;IACtB;AACJ;;AAEA;IACI;QACI,aAAa;IACjB;;IAEA;QACI,aAAa;IACjB;AACJ","sourcesContent":["/* Transformers-specific styling additions */\n\n/* Code comparison layout */\n.code-compare {\n display: grid;\n grid-template-columns: 1fr 1fr;\n gap: 1.5rem;\n margin: 2rem 0;\n align-items: start;\n}\n\n.code-compare .code-column {\n background: 
#ffffff;\n border: 1px solid #e2e8f0;\n border-radius: 8px;\n overflow: hidden;\n box-shadow: 0 1px 3px rgba(0, 0, 0, 0.1);\n}\n\n.code-compare .code-header {\n background: #f8f9fa;\n padding: 0.75rem 1rem;\n font-weight: 600;\n color: #495057;\n border-bottom: 1px solid #e2e8f0;\n font-size: 0.9em;\n}\n\n.code-compare pre {\n margin: 0;\n padding: 1rem;\n background: #ffffff;\n overflow-x: auto;\n font-size: 0.85em;\n line-height: 1.4;\n}\n\n.code-compare pre code {\n color: #374151;\n}\n\n/* Mobile responsiveness for code comparison */\n@media (max-width: 768px) {\n .code-compare {\n grid-template-columns: 1fr;\n gap: 1rem;\n }\n}\n\n/* Tenet styling - special highlighting for design principles */\n.tenet-list {\n margin: 3rem 0;\n}\n\n.tenet-list ol {\n counter-reset: tenet-counter -1; /* Start from 0 */\n list-style: none;\n padding-left: 0;\n display: grid;\n grid-template-columns: 1fr;\n gap: 2.5rem;\n max-width: 900px;\n margin: 0 auto;\n}\n\n.tenet-list li.tenet {\n counter-increment: tenet-counter;\n background: linear-gradient(135deg, #ffffff 0%, #f8f9fa 100%);\n border: 2px solid #e2e8f0;\n border-radius: 16px;\n padding: 2rem 2rem 2rem 4rem;\n margin: 0;\n position: relative;\n box-shadow: 0 12px 35px rgba(0, 0, 0, 0.12);\n transition: all 0.3s ease;\n cursor: pointer;\n}\n\n.tenet-list li.tenet:hover {\n transform: translateY(-8px) scale(1.02);\n box-shadow: 0 20px 50px rgba(0, 0, 0, 0.25);\n border-color: rgba(0, 123, 255, 0.5);\n background: linear-gradient(135deg, #ffffff 0%, #f0f8ff 100%);\n}\n\n/* Colorful numbering system */\n.tenet-list li.tenet:nth-child(1):before { background: linear-gradient(135deg, #667eea 0%, #764ba2 100%); }\n.tenet-list li.tenet:nth-child(2):before { background: linear-gradient(135deg, #f093fb 0%, #f5576c 100%); }\n.tenet-list li.tenet:nth-child(3):before { background: linear-gradient(135deg, #4facfe 0%, #00f2fe 100%); }\n.tenet-list li.tenet:nth-child(4):before { background: linear-gradient(135deg, #43e97b 0%, #38f9d7 100%); }\n.tenet-list li.tenet:nth-child(5):before { background: linear-gradient(135deg, #fa709a 0%, #fee140 100%); }\n.tenet-list li.tenet:nth-child(6):before { background: linear-gradient(135deg, #a8edea 0%, #fed6e3 100%); }\n.tenet-list li.tenet:nth-child(7):before { background: linear-gradient(135deg, #ff9a9e 0%, #fecfef 100%); }\n.tenet-list li.tenet:nth-child(8):before { background: linear-gradient(135deg, #a18cd1 0%, #fbc2eb 100%); }\n.tenet-list li.tenet:nth-child(9):before { background: linear-gradient(135deg, #ffecd2 0%, #fcb69f 100%); }\n\n.tenet-list li.tenet:before {\n content: counter(tenet-counter);\n position: absolute;\n top: -12px;\n left: -12px;\n color: white;\n width: 48px;\n height: 48px;\n border-radius: 50%;\n display: flex;\n align-items: center;\n justify-content: center;\n font-size: 1.2em;\n font-weight: bold;\n box-shadow: 0 4px 12px rgba(0, 0, 0, 0.15);\n border: 3px solid white;\n}\n\n.tenet-list li.tenet strong {\n color: #1a202c;\n font-size: 1.1em;\n display: block;\n margin-bottom: 0.5rem;\n}\n\n.tenet-list li.tenet em {\n color: #4a5568;\n font-size: 0.95em;\n font-style: italic;\n display: block;\n margin-top: 0.75rem;\n padding: 1rem;\n background: rgba(0, 0, 0, 0.03);\n border-radius: 8px;\n border-left: 3px solid #e2e8f0;\n}\n\n.tenet-list li.tenet p {\n color: #2d3748;\n line-height: 1.6;\n margin: 0.5rem 0;\n}\n\n/* Add a subtle pulse animation for the numbers */\n@keyframes pulse-glow {\n 0% { box-shadow: 0 4px 12px rgba(0, 0, 0, 0.15); }\n 50% { box-shadow: 0 4px 20px rgba(0, 0, 0, 0.25); 
}\n 100% { box-shadow: 0 4px 12px rgba(0, 0, 0, 0.15); }\n}\n\n.tenet-list li.tenet:hover:before {\n animation: pulse-glow 2s ease-in-out infinite;\n}\n\n/* Interactive component styling */\n.interactive-demo {\n border: 1px solid #e2e8f0;\n border-radius: 12px;\n background: #ffffff;\n margin: 2rem 0;\n overflow: hidden;\n box-shadow: 0 4px 6px rgba(0, 0, 0, 0.07);\n}\n\n/* Model visualization fragment styling */\n[id*=\"plot-model-visualisation\"] {\n margin: 1rem -2rem !important;\n width: calc(100% + 4rem) !important;\n}\n\n.interactive-demo .demo-header {\n background: linear-gradient(135deg, #667eea 0%, #764ba2 100%);\n color: white;\n padding: 1rem 1.5rem;\n font-weight: 600;\n}\n\n.interactive-demo .demo-content {\n padding: 1.5rem;\n}\n\n.interactive-demo .demo-footer {\n background: #f8f9fa;\n padding: 1rem 1.5rem;\n border-top: 1px solid #e2e8f0;\n color: #6c757d;\n font-size: 0.9em;\n}\n\n/* Button styling for interactive elements */\n.btn-primary {\n background: linear-gradient(135deg, #667eea 0%, #764ba2 100%);\n border: none;\n color: white;\n padding: 0.75rem 1.5rem;\n border-radius: 6px;\n font-weight: 500;\n cursor: pointer;\n transition: transform 0.2s, box-shadow 0.2s;\n}\n\n.btn-primary:hover {\n transform: translateY(-1px);\n box-shadow: 0 4px 12px rgba(102, 126, 234, 0.3);\n}\n\n.btn-primary:disabled {\n opacity: 0.6;\n cursor: not-allowed;\n transform: none;\n box-shadow: none;\n}\n\n/* Terminal styling */\n.terminal-container {\n background: #1a202c;\n border-radius: 8px;\n padding: 1rem;\n color: #e2e8f0;\n font-family: 'Monaco', 'Menlo', 'Ubuntu Mono', monospace;\n font-size: 0.9em;\n}\n\n.terminal-input {\n background: #2d3748;\n border: 1px solid #4a5568;\n color: #e2e8f0;\n padding: 0.5rem;\n border-radius: 4px;\n width: 100%;\n font-family: inherit;\n}\n\n.terminal-output {\n background: #0a0e1a;\n padding: 1rem;\n border-radius: 4px;\n white-space: pre-wrap;\n word-break: break-all;\n min-height: 100px;\n max-height: 300px;\n overflow-y: auto;\n}\n\n/* Attention visualization styling */\n.attention-matrix {\n font-family: monospace;\n font-size: 0.8em;\n border-collapse: collapse;\n margin: 1rem 0;\n}\n\n.attention-matrix td {\n border: 1px solid #ddd;\n padding: 4px 8px;\n text-align: center;\n min-width: 50px;\n}\n\n/* Memory chart styling */\n.memory-chart-container {\n background: #f8f9fa;\n border: 2px solid #e9ecef;\n border-radius: 8px;\n padding: 1rem;\n margin: 1rem 0;\n}\n\n/* Image styling improvements */\nimg {\n max-width: 100%;\n height: auto;\n border-radius: 8px;\n box-shadow: 0 4px 12px rgba(0, 0, 0, 0.1);\n margin: 1.5rem 0;\n}\n\n/* Table of contents styling - Fixed positioning like ultrascale */\n@media (min-width: 1200px) {\n d-article {\n overflow: visible !important;\n }\n \n d-contents {\n align-self: start !important;\n background: white !important;\n grid-column-start: 1 !important;\n grid-column-end: 4 !important;\n grid-row: auto / span 6 !important;\n justify-self: end !important;\n margin-top: 0em !important;\n padding-right: 3em !important;\n padding-left: 2em !important;\n position: -webkit-sticky !important; /* For Safari */\n position: sticky !important;\n top: 10px !important;\n overflow-y: auto !important;\n height: calc(100vh - 40px) !important;\n scrollbar-width: none !important;\n transition: max-height 0.3s ease-out !important;\n z-index: -100 !important;\n display: block !important;\n visibility: visible !important;\n }\n}\n\n@media (max-width: 1199px) {\n d-contents {\n display: none !important;\n background: white 
!important;\n justify-self: start !important;\n align-self: start !important;\n padding-bottom: 0.5em !important;\n margin-bottom: 1em !important;\n padding-left: 0.25em !important;\n border-bottom: 1px solid rgba(0, 0, 0, 0.1) !important;\n overflow-y: scroll !important;\n height: calc(100vh - 40px) !important;\n scrollbar-width: none !important;\n z-index: -100 !important;\n }\n}\n\n/* Force TOC to be visible and override distill defaults */\nd-contents {\n display: block !important;\n visibility: visible !important;\n opacity: 1 !important;\n}\n\n/* TOC Navigation styling */\nd-contents .toc-header {\n margin-bottom: 1.5rem;\n border-bottom: 2px solid #007bff;\n padding-bottom: 0.5rem;\n}\n\nd-contents .toc-title {\n font-weight: bold;\n font-size: 1.2em;\n color: #333;\n}\n\nd-contents nav a {\n color: rgba(0, 0, 0, 0.7);\n text-decoration: none;\n border-bottom: none;\n display: block;\n padding: 0.3rem 0;\n font-size: 0.9em;\n line-height: 1.4;\n transition: color 0.2s ease;\n}\n\nd-contents nav a:hover {\n color: #007bff;\n text-decoration: none;\n}\n\nd-contents nav a.active {\n color: #007bff;\n font-weight: 600;\n}\n\nd-contents nav div {\n margin-bottom: 0.2rem;\n}\n\n/* Smooth scrollbar */\nd-contents {\n scrollbar-width: thin;\n scrollbar-color: rgba(0, 123, 255, 0.3) transparent;\n}\n\nd-contents::-webkit-scrollbar {\n width: 6px;\n}\n\nd-contents::-webkit-scrollbar-track {\n background: transparent;\n}\n\nd-contents::-webkit-scrollbar-thumb {\n background: rgba(0, 123, 255, 0.3);\n border-radius: 3px;\n}\n\nd-contents::-webkit-scrollbar-thumb:hover {\n background: rgba(0, 123, 255, 0.5);\n}\n\n/* Custom tooltip styling for tenet links */\nd-contents nav a[title] {\n position: relative;\n cursor: help;\n}\n\nd-contents nav a[title]:hover {\n color: #667eea;\n}\n\n/* Enhanced tooltip using CSS (fallback for title attribute) */\nd-contents nav a[title]:after {\n content: attr(title);\n position: absolute;\n left: 100%;\n top: 50%;\n transform: translateY(-50%);\n background: #1a202c;\n color: white;\n padding: 0.75rem 1rem;\n border-radius: 8px;\n font-size: 0.85em;\n white-space: normal;\n width: 300px;\n line-height: 1.4;\n z-index: 1001;\n opacity: 0;\n visibility: hidden;\n transition: opacity 0.3s ease, visibility 0.3s ease;\n pointer-events: none;\n box-shadow: 0 4px 12px rgba(0, 0, 0, 0.2);\n}\n\nd-contents nav a[title]:before {\n content: '';\n position: absolute;\n left: 100%;\n top: 50%;\n transform: translate(-8px, -50%);\n border: 8px solid transparent;\n border-right-color: #1a202c;\n z-index: 1002;\n opacity: 0;\n visibility: hidden;\n transition: opacity 0.3s ease, visibility 0.3s ease;\n}\n\nd-contents nav a[title]:hover:after,\nd-contents nav a[title]:hover:before {\n opacity: 1;\n visibility: visible;\n}\n\n/* Adjust for smaller screens */\n@media (max-width: 1400px) {\n d-contents nav a[title]:after {\n left: auto;\n right: 100%;\n margin-right: 1rem;\n width: 250px;\n }\n \n d-contents nav a[title]:before {\n left: auto;\n right: 100%;\n transform: translate(8px, -50%);\n border-right-color: transparent;\n border-left-color: #1a202c;\n }\n}\n\n/* Improve code syntax highlighting with Prism */\npre[class*=\"language-\"] {\n background: #f8f9fa !important;\n border: 1px solid #e9ecef !important;\n border-radius: 8px !important;\n padding: 1.5rem !important;\n margin: 1.5rem 0 !important;\n overflow-x: auto !important;\n font-size: 0.9em !important;\n line-height: 1.5 !important;\n}\n\ncode[class*=\"language-\"] {\n background: none !important;\n font-family: 
'Monaco', 'Menlo', 'Ubuntu Mono', 'Courier New', monospace !important;\n color: #383a42 !important;\n}\n\n/* Inline code */\np code, li code {\n background: #f1f3f4 !important;\n padding: 0.2em 0.4em !important;\n border-radius: 3px !important;\n font-size: 0.9em !important;\n color: #d73a49 !important;\n}\n\n/* Distill article improvements */\nd-article {\n max-width: none;\n font-size: 19px;\n line-height: 1.7 !important;\n color: #1a1a1a;\n padding-top: 1rem !important;\n grid-row-gap: 0 !important;\n}\n\nd-article > * {\n grid-column: middle !important;\n max-width: none;\n}\n\n/* Adjust for TOC on larger screens */\n@media (min-width: 1200px) {\n d-article > * {\n grid-column: text / page-end !important;\n max-width: none;\n }\n}\n\n/* Improve paragraph readability */\nd-article p {\n font-size: 19px;\n line-height: 1.5;\n margin-top: 0 !important;\n color: #1a1a1a;\n}\n\n/* Improve heading sizes */\nd-article h1 {\n font-size: 3rem;\n line-height: 1.2;\n margin: 3rem 0 2rem 0;\n color: #1a202c;\n font-weight: 700;\n}\n\nd-article h2 {\n font-size: 2.5rem;\n line-height: 1.3;\n margin: 1.5rem 0 0.75rem 0 !important;\n padding-bottom: 0.5rem !important;\n color: #1a202c;\n font-weight: 650;\n}\n\nd-article h3 {\n font-size: 2rem;\n line-height: 1.4;\n margin: 2rem 0 1rem 0;\n color: #1a202c;\n font-weight: 600;\n}\n\nd-article h4 {\n font-size: 1.5rem;\n line-height: 1.4;\n margin: 1.5rem 0 1rem 0;\n color: #2d3748;\n font-weight: 600;\n}\n\n/* Improve list readability */\nd-article ul li,\nd-article ol li {\n font-size: 18px;\n line-height: 1.7;\n margin-bottom: 0.5rem;\n}\n\n/* Enhanced tenet reference styling with custom tooltips */\na[href^=\"#source-of-truth\"],\na[href^=\"#one-model-one-file\"],\na[href^=\"#code-is-product\"],\na[href^=\"#standardize-dont-abstract\"],\na[href^=\"#do-repeat-yourself\"],\na[href^=\"#minimal-user-api\"],\na[href^=\"#backwards-compatibility\"],\na[href^=\"#consistent-public-surface\"],\na[href^=\"#modular-toolbox\"] {\n position: relative;\n color: #667eea;\n font-weight: 600;\n text-decoration: underline;\n text-decoration-color: rgba(102, 126, 234, 0.3);\n transition: all 0.3s ease;\n}\n\na[href^=\"#source-of-truth\"]:hover,\na[href^=\"#one-model-one-file\"]:hover,\na[href^=\"#code-is-product\"]:hover,\na[href^=\"#standardize-dont-abstract\"]:hover,\na[href^=\"#do-repeat-yourself\"]:hover,\na[href^=\"#minimal-user-api\"]:hover,\na[href^=\"#backwards-compatibility\"]:hover,\na[href^=\"#consistent-public-surface\"]:hover,\na[href^=\"#modular-toolbox\"]:hover {\n color: #4c51bf;\n text-decoration-color: #4c51bf;\n background: rgba(102, 126, 234, 0.1);\n padding: 2px 4px;\n border-radius: 4px;\n}\n\n/* Custom tooltip using data-tooltip attribute */\na[data-tooltip]:after {\n content: attr(data-tooltip);\n position: absolute;\n bottom: 100%;\n left: 50%;\n transform: translateX(-50%);\n background: #1a202c;\n color: white;\n padding: 0.75rem 1rem;\n border-radius: 8px;\n font-size: 0.85em;\n font-weight: 400;\n white-space: normal;\n width: 320px;\n line-height: 1.4;\n z-index: 1001;\n opacity: 0;\n visibility: hidden;\n transition: opacity 0.3s ease, visibility 0.3s ease;\n pointer-events: none;\n box-shadow: 0 4px 12px rgba(0, 0, 0, 0.2);\n margin-bottom: 8px;\n}\n\na[data-tooltip]:before {\n content: '';\n position: absolute;\n bottom: 100%;\n left: 50%;\n transform: translateX(-50%);\n border: 8px solid transparent;\n border-top-color: #1a202c;\n z-index: 1002;\n opacity: 0;\n visibility: hidden;\n transition: opacity 0.3s ease, visibility 0.3s 
ease;\n}\n\na[data-tooltip]:hover:after,\na[data-tooltip]:hover:before {\n opacity: 1;\n visibility: visible;\n}\n\n/* Breadcrumb navigation styling */\n.crumbs {\n background: linear-gradient(135deg, #f0f4ff 0%, #e6eeff 100%);\n border-left: 5px solid #667eea;\n padding: 1.25rem 1.75rem;\n margin: 2.5rem 0;\n border-radius: 0 8px 8px 0;\n box-shadow: 0 2px 8px rgba(102, 126, 234, 0.12);\n font-size: 0.95em;\n line-height: 1.6;\n color: #4a5568;\n}\n\n.crumbs strong {\n color: #667eea;\n font-weight: 700;\n}\n\n.crumbs code {\n background: rgba(102, 126, 234, 0.1);\n padding: 0.15em 0.4em;\n border-radius: 3px;\n font-size: 0.9em;\n color: #4c51bf;\n}\n\n.crumbs a {\n color: #667eea;\n font-weight: 500;\n}\n\n/* Improve blockquote styling */\nd-article blockquote {\n font-size: 19px;\n line-height: 1.8;\n padding: 1.5rem 2rem;\n margin: 2rem 0;\n border-left: 4px solid #667eea;\n background: linear-gradient(135deg, #f8f9fa 0%, #e9ecef 50%);\n border-radius: 0 8px 8px 0;\n font-style: italic;\n color: #4a5568;\n}\n\n/* Link capsule styling - only for external HTTP(S) links */\nd-article a[href^=\"http://\"],\nd-article a[href^=\"https://\"] {\n background: linear-gradient(135deg, #e3f2fd 0%, #bbdefb 100%);\n color: #1565c0;\n text-decoration: none;\n padding: 0.15em 0.5em;\n border-radius: 12px;\n border: 1px solid #90caf9;\n display: inline-block;\n transition: all 0.3s ease;\n font-weight: 500;\n box-shadow: 0 1px 3px rgba(21, 101, 192, 0.15);\n}\n\nd-article a[href^=\"http://\"]:hover,\nd-article a[href^=\"https://\"]:hover {\n background: linear-gradient(135deg, #2196f3 0%, #1976d2 100%);\n color: white;\n border-color: #1565c0;\n transform: translateY(-1px);\n box-shadow: 0 4px 12px rgba(21, 101, 192, 0.3);\n}\n\nd-article a[href^=\"http://\"]:active,\nd-article a[href^=\"https://\"]:active {\n transform: translateY(0);\n box-shadow: 0 1px 3px rgba(21, 101, 192, 0.2);\n}\n\n/* Full width elements */\nd-article .code-compare,\nd-article .interactive-demo,\nd-article .memory-chart-container {\n max-width: none;\n width: 100%;\n margin-left: 0;\n margin-right: 0;\n}\n\n/* Responsive design improvements */\n@media (max-width: 1200px) {\n d-article .code-compare,\n d-article .interactive-demo {\n max-width: 95%;\n margin-left: auto;\n margin-right: auto;\n }\n}\n\n@media (max-width: 768px) {\n .tenet-list li.tenet {\n padding: 1rem;\n }\n\n .interactive-demo .demo-content {\n padding: 1rem;\n }\n}\n\n"],"sourceRoot":""}]);
1888
- // Exports
1889
- /* harmony default export */ const __WEBPACK_DEFAULT_EXPORT__ = (___CSS_LOADER_EXPORT___);
1890
-
1891
-
1892
- /***/ })
1893
-
1894
- /******/ });
1895
- /************************************************************************/
1896
- /******/ // The module cache
1897
- /******/ var __webpack_module_cache__ = {};
1898
- /******/
1899
- /******/ // The require function
1900
- /******/ function __webpack_require__(moduleId) {
1901
- /******/ // Check if module is in cache
1902
- /******/ var cachedModule = __webpack_module_cache__[moduleId];
1903
- /******/ if (cachedModule !== undefined) {
1904
- /******/ return cachedModule.exports;
1905
- /******/ }
1906
- /******/ // Create a new module (and put it into the cache)
1907
- /******/ var module = __webpack_module_cache__[moduleId] = {
1908
- /******/ id: moduleId,
1909
- /******/ // no module.loaded needed
1910
- /******/ exports: {}
1911
- /******/ };
1912
- /******/
1913
- /******/ // Execute the module function
1914
- /******/ __webpack_modules__[moduleId](module, module.exports, __webpack_require__);
1915
- /******/
1916
- /******/ // Return the exports of the module
1917
- /******/ return module.exports;
1918
- /******/ }
1919
- /******/
1920
- /************************************************************************/
1921
- /******/ /* webpack/runtime/compat get default export */
1922
- /******/ (() => {
1923
- /******/ // getDefaultExport function for compatibility with non-harmony modules
1924
- /******/ __webpack_require__.n = (module) => {
1925
- /******/ var getter = module && module.__esModule ?
1926
- /******/ () => (module['default']) :
1927
- /******/ () => (module);
1928
- /******/ __webpack_require__.d(getter, { a: getter });
1929
- /******/ return getter;
1930
- /******/ };
1931
- /******/ })();
1932
- /******/
1933
- /******/ /* webpack/runtime/define property getters */
1934
- /******/ (() => {
1935
- /******/ // define getter functions for harmony exports
1936
- /******/ __webpack_require__.d = (exports, definition) => {
1937
- /******/ for(var key in definition) {
1938
- /******/ if(__webpack_require__.o(definition, key) && !__webpack_require__.o(exports, key)) {
1939
- /******/ Object.defineProperty(exports, key, { enumerable: true, get: definition[key] });
1940
- /******/ }
1941
- /******/ }
1942
- /******/ };
1943
- /******/ })();
1944
- /******/
1945
- /******/ /* webpack/runtime/hasOwnProperty shorthand */
1946
- /******/ (() => {
1947
- /******/ __webpack_require__.o = (obj, prop) => (Object.prototype.hasOwnProperty.call(obj, prop))
1948
- /******/ })();
1949
- /******/
1950
- /******/ /* webpack/runtime/nonce */
1951
- /******/ (() => {
1952
- /******/ __webpack_require__.nc = undefined;
1953
- /******/ })();
1954
- /******/
1955
- /************************************************************************/
1956
- var __webpack_exports__ = {};
1957
-
1958
- // EXTERNAL MODULE: ./node_modules/style-loader/dist/runtime/injectStylesIntoStyleTag.js
1959
- var injectStylesIntoStyleTag = __webpack_require__(72);
1960
- var injectStylesIntoStyleTag_default = /*#__PURE__*/__webpack_require__.n(injectStylesIntoStyleTag);
1961
- // EXTERNAL MODULE: ./node_modules/style-loader/dist/runtime/styleDomAPI.js
1962
- var styleDomAPI = __webpack_require__(825);
1963
- var styleDomAPI_default = /*#__PURE__*/__webpack_require__.n(styleDomAPI);
1964
- // EXTERNAL MODULE: ./node_modules/style-loader/dist/runtime/insertBySelector.js
1965
- var insertBySelector = __webpack_require__(659);
1966
- var insertBySelector_default = /*#__PURE__*/__webpack_require__.n(insertBySelector);
1967
- // EXTERNAL MODULE: ./node_modules/style-loader/dist/runtime/setAttributesWithoutAttributes.js
1968
- var setAttributesWithoutAttributes = __webpack_require__(56);
1969
- var setAttributesWithoutAttributes_default = /*#__PURE__*/__webpack_require__.n(setAttributesWithoutAttributes);
1970
- // EXTERNAL MODULE: ./node_modules/style-loader/dist/runtime/insertStyleElement.js
1971
- var insertStyleElement = __webpack_require__(540);
1972
- var insertStyleElement_default = /*#__PURE__*/__webpack_require__.n(insertStyleElement);
1973
- // EXTERNAL MODULE: ./node_modules/style-loader/dist/runtime/styleTagTransform.js
1974
- var styleTagTransform = __webpack_require__(113);
1975
- var styleTagTransform_default = /*#__PURE__*/__webpack_require__.n(styleTagTransform);
1976
- // EXTERNAL MODULE: ./node_modules/css-loader/dist/cjs.js!./src/style.css
1977
- var style = __webpack_require__(208);
1978
- ;// ./src/style.css
1979
-
1980
-
1981
-
1982
-
1983
-
1984
-
1985
-
1986
-
1987
-
1988
-
1989
-
1990
- var options = {};
1991
-
1992
- options.styleTagTransform = (styleTagTransform_default());
1993
- options.setAttributes = (setAttributesWithoutAttributes_default());
1994
- options.insert = insertBySelector_default().bind(null, "head");
1995
- options.domAPI = (styleDomAPI_default());
1996
- options.insertStyleElement = (insertStyleElement_default());
1997
-
1998
- var update = injectStylesIntoStyleTag_default()(style/* default */.A, options);
1999
-
2000
-
2001
-
2002
-
2003
- /* harmony default export */ const src_style = (style/* default */.A && style/* default */.A.locals ? style/* default */.A.locals : undefined);
2004
-
2005
- ;// ./src/index.js
2006
- // Main JavaScript file for Scaling Insanity
2007
-
2008
-
2009
- // Import any additional functionality
2010
- console.log('blog loaded');
2011
-
2012
- // Add any custom JavaScript functionality here
2013
- document.addEventListener('DOMContentLoaded', function () {
2014
- // Initialize syntax highlighting for code blocks
2015
- if (window.hljs) {
2016
- hljs.highlightAll();
2017
- }
2018
-
2019
- // Initialize any interactive components
2020
- initializeInteractiveComponents();
2021
- });
2022
- function initializeInteractiveComponents() {
2023
- // This will be expanded as we add interactive components
2024
- console.log('Interactive components initialized');
2025
- }
2026
- /******/ })()
2027
- ;
2028
- //# sourceMappingURL=main.bundle.js.map
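The module cache and __webpack_require__ function in the deleted bundle above are standard webpack 5 runtime output: each module factory runs at most once, its exports object is cached, and the record is placed in the cache before the factory executes so that circular requires resolve against the partial exports. A minimal sketch of that pattern (illustrative only; the ids and names here are hypothetical, not the generated ones):

// Minimal module-cache sketch (hypothetical ids; simplified from the runtime above).
var modules = {
  1: function (module, exports, req) {
    exports.greet = function () { return 'hello from ' + req(2).name; };
  },
  2: function (module) {
    module.exports = { name: 'module 2' };
  }
};
var cache = {};
function req(id) {
  var cached = cache[id];
  if (cached !== undefined) return cached.exports;    // cache hit: reuse exports
  var module = (cache[id] = { id: id, exports: {} }); // cache *before* executing,
  modules[id](module, module.exports, req);           // so circular requires see
  return module.exports;                              // the partial exports object
}
console.log(req(1).greet()); // -> "hello from module 2"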
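Likewise, modules 540, 659 and 825 above compose style-loader's DOM contract: resolve an insertion target, create a <style> element there, and return update/remove handles. A simplified sketch of that contract, assuming a browser environment and omitting the media/@supports/@layer wrapping, attribute handling and source-map comments the real runtime adds:

// Simplified style-loader DOM API (sketch, not the exact runtime above).
function insertStyleElement(targetSelector) {
  var element = document.createElement('style');
  // Fall back to <head> if the selector matches nothing.
  var target = document.querySelector(targetSelector) || document.head;
  target.appendChild(element);
  return element;
}

function domAPI(targetSelector) {
  var styleElement = insertStyleElement(targetSelector);
  return {
    update: function (css) { styleElement.textContent = css; }, // swap CSS in place
    remove: function () {
      if (styleElement.parentNode) {
        styleElement.parentNode.removeChild(styleElement);
      }
    }
  };
}

// Usage: inject once, update on hot reload, remove on dispose.
var styles = domAPI('head');
styles.update('body { background: #f8f9fa; }');
// later: styles.remove();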
dist/main.bundle.js.map DELETED
The diff for this file is too large to render. See raw diff
 
dist/static/Bloatedness_visualizer.png DELETED

Git LFS Details

  • SHA256: 6e30ca37e88572e00b06651728b5667464837c69c18b22c97a04127367d8a500
  • Pointer size: 131 Bytes
  • Size of remote file: 118 kB
dist/static/Jaccard_similarity_plot.png DELETED

Git LFS Details

  • SHA256: 486fad0f93c66d7ccc9fb35bad4ef75a8e49fbe6d48e5adbab6eda6e9367b653
  • Pointer size: 130 Bytes
  • Size of remote file: 65.9 kB
dist/static/d3_dependency_graph.html DELETED
@@ -1,1902 +0,0 @@
-
- <!DOCTYPE html>
- <html lang="en">
- <head>
- <meta charset="UTF-8">
- <title>Transformers Modular Model Dependencies</title>
- <style>
- /* Google font – small fallback cost & optional */
- @import url('https://fonts.googleapis.com/css2?family=Inter:wght@400;600&display=swap');
-
- :root {
- --base-size: 60px; /* icon radius helper */
- }
-
- body { font-family: 'Inter', Arial, sans-serif; margin: 0; overflow: hidden; background-color: transparent; /* requested transparency */ }
- svg { width: 100vw; height: 100vh; }
- .link { stroke: #999; stroke-opacity: 0.6; }
- .node-label { fill: #333; pointer-events: none; text-anchor: middle; font-weight: 600; }
- .link-label { fill: #555; font-size: 10px; pointer-events: none; text-anchor: middle; }
- .node.base path { fill: #ffbe0b; }
- .node.derived circle { fill: #1f77b4; }
-
- /* Legend styling */
- #legend { position: fixed; top: 18px; left: 18px; font-size: 20px; background: rgba(255,255,255,0.92); padding: 18px 28px; border-radius: 10px; border: 1.5px solid #bbb; font-family: 'Inter', Arial, sans-serif; box-shadow: 0 2px 8px rgba(0,0,0,0.08); z-index: 1000; }
- </style>
- </head>
- <body>
- <div id="legend">🟡 base model (HF icon)<br>🔵 derived modular model<br>Edge label: #classes imported</div>
- <svg id="dependency-graph"></svg>
- <script src="https://d3js.org/d3.v7.min.js"></script>
- <script>
- const graphData = {
- "nodes": [
- {"id": "aimv2", "is_base": false, "size": 1.146341463414634},
- {"id": "arcee", "is_base": false, "size": 1.0975609756097562},
- {"id": "aria", "is_base": false, "size": 1.146341463414634},
- {"id": "auto", "is_base": true, "size": 1.0975609756097562},
- {"id": "aya_vision", "is_base": false, "size": 1.048780487804878},
- {"id": "bamba", "is_base": false, "size": 1.2439024390243902},
- {"id": "bart", "is_base": true, "size": 1.146341463414634},
- {"id": "beit", "is_base": true, "size": 1.048780487804878},
- {"id": "bigbird_pegasus", "is_base": true, "size": 1.048780487804878},
- {"id": "biogpt", "is_base": false, "size": 1.0975609756097562},
- {"id": "bitnet", "is_base": false, "size": 1.0975609756097562},
- {"id": "blip", "is_base": true, "size": 1.048780487804878},
- {"id": "blip_2", "is_base": true, "size": 1.048780487804878},
- {"id": "chameleon", "is_base": true, "size": 1.0975609756097562},
- {"id": "clip", "is_base": true, "size": 1.2439024390243902},
- {"id": "cohere", "is_base": false, "size": 1.1951219512195121},
- {"id": "cohere2", "is_base": false, "size": 1.0975609756097562},
- {"id": "colpali", "is_base": false, "size": 1.0975609756097562},
- {"id": "colqwen2", "is_base": false, "size": 1.048780487804878},
- {"id": "conditional_detr", "is_base": false, "size": 1.048780487804878},
- {"id": "csm", "is_base": false, "size": 1.048780487804878},
- {"id": "d_fine", "is_base": false, "size": 1.0975609756097562},
- {"id": "data2vec", "is_base": false, "size": 1.048780487804878},
- {"id": "deepseek_v2", "is_base": false, "size": 1.0975609756097562},
- {"id": "deepseek_v3", "is_base": false, "size": 1.146341463414634},
- {"id": "deepseek_vl", "is_base": false, "size": 1.146341463414634},
- {"id": "deepseek_vl_hybrid", "is_base": false, "size": 1.146341463414634},
- {"id": "deformable_detr", "is_base": false, "size": 1.048780487804878},
- {"id": "depth_anything", "is_base": true, "size": 1.048780487804878},
- {"id": "detr", "is_base": true, "size": 1.2439024390243902},
- {"id": "dia", "is_base": false, "size": 1.0975609756097562},
- {"id": "diffllama", "is_base": false, "size": 1.146341463414634},
- {"id": "dinov2", "is_base": true, "size": 1.0975609756097562},
- {"id": "dinov2_with_registers", "is_base": false, "size": 1.048780487804878},
- {"id": "doge", "is_base": false, "size": 1.0975609756097562},
- {"id": "dots1", "is_base": false, "size": 1.0975609756097562},
- {"id": "dpt", "is_base": false, "size": 1.0975609756097562},
- {"id": "emu3", "is_base": false, "size": 1.146341463414634},
- {"id": "eomt", "is_base": false, "size": 1.1951219512195121},
- {"id": "ernie4_5", "is_base": false, "size": 1.146341463414634},
- {"id": "ernie4_5_moe", "is_base": false, "size": 1.1951219512195121},
- {"id": "esm", "is_base": true, "size": 1.048780487804878},
- {"id": "evolla", "is_base": false, "size": 1.0975609756097562},
- {"id": "exaone4", "is_base": false, "size": 1.0975609756097562},
- {"id": "falcon_h1", "is_base": false, "size": 1.146341463414634},
- {"id": "falcon_mamba", "is_base": false, "size": 1.048780487804878},
- {"id": "gemma", "is_base": false, "size": 1.3414634146341464},
- {"id": "gemma2", "is_base": false, "size": 1.2439024390243902},
- {"id": "gemma3", "is_base": false, "size": 1.146341463414634},
- {"id": "gemma3n", "is_base": false, "size": 1.1951219512195121},
- {"id": "glm", "is_base": false, "size": 1.2439024390243902},
- {"id": "glm4", "is_base": false, "size": 1.146341463414634},
- {"id": "glm4_moe", "is_base": false, "size": 1.146341463414634},
- {"id": "glm4v", "is_base": false, "size": 1.0975609756097562},
- {"id": "got_ocr2", "is_base": false, "size": 1.0975609756097562},
- {"id": "gpt_neox", "is_base": false, "size": 1.0975609756097562},
- {"id": "granite", "is_base": false, "size": 1.0975609756097562},
- {"id": "granitemoe", "is_base": true, "size": 1.048780487804878},
- {"id": "granitemoehybrid", "is_base": false, "size": 1.0975609756097562},
- {"id": "granitemoeshared", "is_base": false, "size": 1.0975609756097562},
- {"id": "grounding_dino", "is_base": false, "size": 1.048780487804878},
- {"id": "helium", "is_base": false, "size": 1.146341463414634},
- {"id": "hgnet_v2", "is_base": false, "size": 1.048780487804878},
- {"id": "hubert", "is_base": false, "size": 1.048780487804878},
- {"id": "idefics", "is_base": true, "size": 1.146341463414634},
- {"id": "idefics3", "is_base": true, "size": 1.048780487804878},
- {"id": "ijepa", "is_base": false, "size": 1.048780487804878},
- {"id": "image_processing_base", "is_base": true, "size": 1.048780487804878},
- {"id": "informer", "is_base": false, "size": 1.0975609756097562},
- {"id": "instructblip", "is_base": true, "size": 1.048780487804878},
- {"id": "instructblipvideo", "is_base": false, "size": 1.0975609756097562},
- {"id": "internvl", "is_base": false, "size": 1.1951219512195121},
- {"id": "jamba", "is_base": true, "size": 1.0975609756097562},
- {"id": "janus", "is_base": false, "size": 1.3902439024390243},
- {"id": "kyutai_speech_to_text", "is_base": false, "size": 1.146341463414634},
- {"id": "lfm2", "is_base": false, "size": 1.0975609756097562},
- {"id": "lightglue", "is_base": false, "size": 1.2439024390243902},
- {"id": "llama", "is_base": true, "size": 3.0},
- {"id": "llama4", "is_base": true, "size": 1.048780487804878},
- {"id": "llava", "is_base": true, "size": 1.3414634146341464},
- {"id": "llava_next", "is_base": true, "size": 1.146341463414634},
- {"id": "llava_next_video", "is_base": false, "size": 1.0975609756097562},
- {"id": "llava_onevision", "is_base": false, "size": 1.0975609756097562},
- {"id": "mamba", "is_base": true, "size": 1.048780487804878},
- {"id": "mamba2", "is_base": true, "size": 1.146341463414634},
- {"id": "mask2former", "is_base": false, "size": 1.0975609756097562},
- {"id": "maskformer", "is_base": true, "size": 1.048780487804878},
- {"id": "mbart", "is_base": true, "size": 1.048780487804878},
- {"id": "mimi", "is_base": true, "size": 1.048780487804878},
- {"id": "minimax", "is_base": false, "size": 1.048780487804878},
- {"id": "mistral", "is_base": false, "size": 1.3414634146341464},
- {"id": "mistral3", "is_base": false, "size": 1.0975609756097562},
- {"id": "mixtral", "is_base": false, "size": 1.2439024390243902},
- {"id": "mlcd", "is_base": false, "size": 1.146341463414634},
- {"id": "modeling_outputs", "is_base": true, "size": 1.048780487804878},
- {"id": "modernbert", "is_base": false, "size": 1.0975609756097562},
- {"id": "modernbert_decoder", "is_base": false, "size": 1.048780487804878},
- {"id": "moonshine", "is_base": false, "size": 1.146341463414634},
- {"id": "moshi", "is_base": true, "size": 1.048780487804878},
- {"id": "nemotron", "is_base": true, "size": 1.048780487804878},
- {"id": "olmo", "is_base": false, "size": 1.0975609756097562},
- {"id": "olmo2", "is_base": false, "size": 1.146341463414634},
- {"id": "opt", "is_base": true, "size": 1.048780487804878},
- {"id": "owlv2", "is_base": false, "size": 1.048780487804878},
- {"id": "owlvit", "is_base": true, "size": 1.048780487804878},
- {"id": "paligemma", "is_base": true, "size": 1.146341463414634},
- {"id": "perception_lm", "is_base": false, "size": 1.048780487804878},
- {"id": "phi", "is_base": false, "size": 1.0975609756097562},
- {"id": "phi3", "is_base": false, "size": 1.2439024390243902},
- {"id": "phi4_multimodal", "is_base": false, "size": 1.146341463414634},
- {"id": "plbart", "is_base": false, "size": 1.146341463414634},
- {"id": "prompt_depth_anything", "is_base": false, "size": 1.048780487804878},
- {"id": "qwen2", "is_base": false, "size": 1.1951219512195121},
- {"id": "qwen2_5_omni", "is_base": false, "size": 1.1951219512195121},
- {"id": "qwen2_5_vl", "is_base": false, "size": 1.146341463414634},
- {"id": "qwen2_audio", "is_base": true, "size": 1.0975609756097562},
- {"id": "qwen2_moe", "is_base": true, "size": 1.048780487804878},
- {"id": "qwen2_vl", "is_base": true, "size": 1.146341463414634},
- {"id": "qwen3", "is_base": false, "size": 1.2439024390243902},
- {"id": "qwen3_moe", "is_base": false, "size": 1.2439024390243902},
- {"id": "rt_detr", "is_base": false, "size": 1.1951219512195121},
- {"id": "rt_detr_v2", "is_base": false, "size": 1.0975609756097562},
- {"id": "sam", "is_base": true, "size": 1.146341463414634},
- {"id": "sam_hq", "is_base": false, "size": 1.0975609756097562},
- {"id": "sew", "is_base": false, "size": 1.048780487804878},
- {"id": "siglip", "is_base": true, "size": 1.2926829268292683},
- {"id": "siglip2", "is_base": false, "size": 1.048780487804878},
- {"id": "smollm3", "is_base": false, "size": 1.0975609756097562},
- {"id": "smolvlm", "is_base": false, "size": 1.048780487804878},
- {"id": "starcoder2", "is_base": false, "size": 1.048780487804878},
- {"id": "superglue", "is_base": true, "size": 1.048780487804878},
- {"id": "t5gemma", "is_base": false, "size": 1.048780487804878},
- {"id": "time_series_transformer", "is_base": true, "size": 1.048780487804878},
- {"id": "timesfm", "is_base": false, "size": 1.0975609756097562},
- {"id": "timm_wrapper", "is_base": true, "size": 1.048780487804878},
- {"id": "unispeech", "is_base": false, "size": 1.048780487804878},
- {"id": "unispeech_sat", "is_base": false, "size": 1.048780487804878},
- {"id": "vipllava", "is_base": false, "size": 1.048780487804878},
- {"id": "vit", "is_base": true, "size": 1.0975609756097562},
- {"id": "voxtral", "is_base": false, "size": 1.048780487804878},
- {"id": "wav2vec2", "is_base": true, "size": 1.3902439024390243},
- {"id": "wav2vec2_bert", "is_base": false, "size": 1.0975609756097562},
- {"id": "wav2vec2_conformer", "is_base": false, "size": 1.0975609756097562},
- {"id": "wavlm", "is_base": false, "size": 1.048780487804878},
- {"id": "whisper", "is_base": true, "size": 1.048780487804878},
- {"id": "yolos", "is_base": false, "size": 1.048780487804878},
- {"id": "zamba", "is_base": true, "size": 1.048780487804878},
- {"id": "zamba2", "is_base": false, "size": 1.146341463414634}
- ],
- "links": [
- {"source": "llama", "target": "doge", "label": "8 classes"},
- {"source": "mixtral", "target": "doge", "label": "2 classes"},
- {"source": "mixtral", "target": "minimax", "label": "11 classes"},
- {"source": "clip", "target": "phi", "label": "1 classes"},
- {"source": "llama", "target": "phi", "label": "8 classes"},
- {"source": "qwen2_vl", "target": "qwen2_5_vl", "label": "15 classes"},
- {"source": "ernie4_5", "target": "ernie4_5_moe", "label": "3 classes"},
- {"source": "llama", "target": "ernie4_5_moe", "label": "2 classes"},
- {"source": "mixtral", "target": "ernie4_5_moe", "label": "2 classes"},
- {"source": "qwen3_moe", "target": "ernie4_5_moe", "label": "2 classes"},
- {"source": "llama", "target": "mistral", "label": "10 classes"},
- {"source": "llama", "target": "gpt_neox", "label": "4 classes"},
- {"source": "mistral", "target": "phi3", "label": "7 classes"},
- {"source": "wav2vec2", "target": "unispeech", "label": "9 classes"},
- {"source": "llama", "target": "olmo", "label": "8 classes"},
- {"source": "gemma", "target": "helium", "label": "3 classes"},
- {"source": "granite", "target": "helium", "label": "1 classes"},
- {"source": "llama", "target": "helium", "label": "5 classes"},
- {"source": "gemma", "target": "bitnet", "label": "1 classes"},
- {"source": "llama", "target": "bitnet", "label": "7 classes"},
- {"source": "maskformer", "target": "mask2former", "label": "1 classes"},
- {"source": "jamba", "target": "falcon_h1", "label": "1 classes"},
- {"source": "llama", "target": "falcon_h1", "label": "7 classes"},
- {"source": "mamba2", "target": "falcon_h1", "label": "4 classes"},
- {"source": "llava", "target": "got_ocr2", "label": "6 classes"},
- {"source": "sam", "target": "got_ocr2", "label": "5 classes"},
- {"source": "esm", "target": "evolla", "label": "9 classes"},
- {"source": "llama", "target": "evolla", "label": "6 classes"},
- {"source": "gemma2", "target": "gemma3n", "label": "6 classes"},
- {"source": "gemma3", "target": "gemma3n", "label": "6 classes"},
- {"source": "paligemma", "target": "gemma3n", "label": "4 classes"},
- {"source": "timm_wrapper", "target": "gemma3n", "label": "1 classes"},
- {"source": "llama", "target": "csm", "label": "8 classes"},
- {"source": "owlvit", "target": "owlv2", "label": "1 classes"},
- {"source": "llama", "target": "zamba2", "label": "2 classes"},
- {"source": "mamba2", "target": "zamba2", "label": "3 classes"},
- {"source": "zamba", "target": "zamba2", "label": "10 classes"},
- {"source": "blip", "target": "janus", "label": "1 classes"},
- {"source": "blip_2", "target": "janus", "label": "1 classes"},
- {"source": "chameleon", "target": "janus", "label": "6 classes"},
- {"source": "idefics", "target": "janus", "label": "2 classes"},
- {"source": "llama", "target": "janus", "label": "1 classes"},
- {"source": "siglip", "target": "janus", "label": "4 classes"},
- {"source": "wav2vec2", "target": "wav2vec2_conformer", "label": "13 classes"},
- {"source": "clip", "target": "mlcd", "label": "7 classes"},
- {"source": "llama", "target": "mlcd", "label": "1 classes"},
- {"source": "qwen2_vl", "target": "mlcd", "label": "2 classes"},
- {"source": "gemma2", "target": "gemma3", "label": "10 classes"},
- {"source": "paligemma", "target": "gemma3", "label": "4 classes"},
- {"source": "instructblip", "target": "instructblipvideo", "label": "9 classes"},
- {"source": "auto", "target": "instructblipvideo", "label": "1 classes"},
- {"source": "glm4", "target": "glm4v", "label": "3 classes"},
- {"source": "qwen2_5_vl", "target": "glm4v", "label": "16 classes"},
- {"source": "llama", "target": "exaone4", "label": "10 classes"},
- {"source": "olmo2", "target": "exaone4", "label": "2 classes"},
- {"source": "cohere", "target": "glm4_moe", "label": "1 classes"},
- {"source": "deepseek_v3", "target": "glm4_moe", "label": "7 classes"},
- {"source": "gpt_neox", "target": "glm4_moe", "label": "1 classes"},
- {"source": "detr", "target": "conditional_detr", "label": "1 classes"},
- {"source": "detr", "target": "grounding_dino", "label": "1 classes"},
- {"source": "gemma", "target": "qwen3", "label": "1 classes"},
- {"source": "llama", "target": "qwen3", "label": "1 classes"},
- {"source": "qwen2", "target": "qwen3", "label": "10 classes"},
- {"source": "llava_next", "target": "llava_onevision", "label": "1 classes"},
- {"source": "llava_next_video", "target": "llava_onevision", "label": "9 classes"},
- {"source": "llava", "target": "vipllava", "label": "5 classes"},
- {"source": "detr", "target": "deformable_detr", "label": "1 classes"},
- {"source": "llava", "target": "perception_lm", "label": "5 classes"},
- {"source": "wav2vec2", "target": "wavlm", "label": "9 classes"},
- {"source": "llama", "target": "glm", "label": "4 classes"},
- {"source": "phi3", "target": "glm", "label": "1 classes"},
- {"source": "llama", "target": "timesfm", "label": "1 classes"},
- {"source": "phi4_multimodal", "target": "timesfm", "label": "1 classes"},
- {"source": "image_processing_base", "target": "dpt", "label": "1 classes"},
- {"source": "beit", "target": "dpt", "label": "1 classes"},
- {"source": "llama", "target": "gemma", "label": "5 classes"},
- {"source": "llama", "target": "kyutai_speech_to_text", "label": "1 classes"},
- {"source": "mimi", "target": "kyutai_speech_to_text", "label": "1 classes"},
- {"source": "moshi", "target": "kyutai_speech_to_text", "label": "2 classes"},
- {"source": "llama", "target": "granite", "label": "5 classes"},
- {"source": "idefics3", "target": "smolvlm", "label": "9 classes"},
- {"source": "granitemoe", "target": "granitemoeshared", "label": "4 classes"},
- {"source": "glm", "target": "moonshine", "label": "3 classes"},
- {"source": "llama", "target": "moonshine", "label": "3 classes"},
- {"source": "whisper", "target": "moonshine", "label": "2 classes"},
- {"source": "llava", "target": "aya_vision", "label": "6 classes"},
- {"source": "deepseek_v3", "target": "dots1", "label": "5 classes"},
- {"source": "qwen3", "target": "dots1", "label": "6 classes"},
- {"source": "mistral", "target": "starcoder2", "label": "9 classes"},
- {"source": "modeling_outputs", "target": "sam_hq", "label": "1 classes"},
- {"source": "sam", "target": "sam_hq", "label": "15 classes"},
- {"source": "wav2vec2", "target": "wav2vec2_bert", "label": "3 classes"},
- {"source": "wav2vec2_conformer", "target": "wav2vec2_bert", "label": "6 classes"},
- {"source": "mistral", "target": "mixtral", "label": "9 classes"},
- {"source": "chameleon", "target": "emu3", "label": "2 classes"},
- {"source": "llama", "target": "emu3", "label": "5 classes"},
- {"source": "siglip", "target": "emu3", "label": "1 classes"},
- {"source": "paligemma", "target": "colpali", "label": "3 classes"},
- {"source": "phi3", "target": "phi4_multimodal", "label": "7 classes"},
- {"source": "siglip", "target": "phi4_multimodal", "label": "9 classes"},
- {"source": "qwen2_audio", "target": "voxtral", "label": "4 classes"},
- {"source": "idefics", "target": "deepseek_vl", "label": "2 classes"},
- {"source": "janus", "target": "deepseek_vl", "label": "4 classes"},
- {"source": "glm", "target": "glm4", "label": "4 classes"},
- {"source": "phi3", "target": "glm4", "label": "1 classes"},
- {"source": "gemma2", "target": "t5gemma", "label": "9 classes"},
- {"source": "auto", "target": "lightglue", "label": "1 classes"},
- {"source": "clip", "target": "lightglue", "label": "1 classes"},
- {"source": "cohere", "target": "lightglue", "label": "1 classes"},
- {"source": "llama", "target": "lightglue", "label": "2 classes"},
- {"source": "superglue", "target": "lightglue", "label": "2 classes"},
- {"source": "llava_next", "target": "llava_next_video", "label": "7 classes"},
- {"source": "rt_detr", "target": "hgnet_v2", "label": "1 classes"},
- {"source": "deepseek_vl", "target": "deepseek_vl_hybrid", "label": "7 classes"},
- {"source": "idefics", "target": "deepseek_vl_hybrid", "label": "2 classes"},
- {"source": "sam", "target": "deepseek_vl_hybrid", "label": "2 classes"},
- {"source": "wav2vec2", "target": "data2vec", "label": "11 classes"},
- {"source": "depth_anything", "target": "prompt_depth_anything", "label": "7 classes"},
- {"source": "gemma", "target": "modernbert", "label": "2 classes"},
- {"source": "bamba", "target": "lfm2", "label": "1 classes"},
- {"source": "llama", "target": "lfm2", "label": "8 classes"},
- {"source": "wav2vec2", "target": "sew", "label": "11 classes"},
- {"source": "wav2vec2", "target": "hubert", "label": "7 classes"},
- {"source": "gemma", "target": "gemma2", "label": "9 classes"},
- {"source": "detr", "target": "rt_detr", "label": "2 classes"},
- {"source": "rt_detr", "target": "d_fine", "label": "12 classes"},
- {"source": "rt_detr_v2", "target": "d_fine", "label": "1 classes"},
- {"source": "llava", "target": "mistral3", "label": "6 classes"},
- {"source": "mistral", "target": "mistral3", "label": "1 classes"},
- {"source": "modernbert", "target": "modernbert_decoder", "label": "6 classes"},
- {"source": "llama", "target": "aria", "label": "8 classes"},
- {"source": "llava", "target": "aria", "label": "4 classes"},
- {"source": "llava_next", "target": "aria", "label": "1 classes"},
- {"source": "siglip", "target": "siglip2", "label": "16 classes"},
- {"source": "llama", "target": "arcee", "label": "5 classes"},
- {"source": "nemotron", "target": "arcee", "label": "1 classes"},
- {"source": "mamba", "target": "falcon_mamba", "label": "10 classes"},
- {"source": "llama", "target": "deepseek_v2", "label": "9 classes"},
- {"source": "llama4", "target": "deepseek_v2", "label": "1 classes"},
- {"source": "bart", "target": "informer", "label": "1 classes"},
- {"source": "time_series_transformer", "target": "informer", "label": "12 classes"},
- {"source": "colpali", "target": "colqwen2", "label": "3 classes"},
- {"source": "bamba", "target": "granitemoehybrid", "label": "4 classes"},
- {"source": "granitemoeshared", "target": "granitemoehybrid", "label": "7 classes"},
- {"source": "bart", "target": "plbart", "label": "5 classes"},
- {"source": "bigbird_pegasus", "target": "plbart", "label": "1 classes"},
- {"source": "mbart", "target": "plbart", "label": "1 classes"},
- {"source": "llama", "target": "qwen3_moe", "label": "4 classes"},
- {"source": "mixtral", "target": "qwen3_moe", "label": "3 classes"},
- {"source": "qwen2_moe", "target": "qwen3_moe", "label": "1 classes"},
- {"source": "qwen3", "target": "qwen3_moe", "label": "1 classes"},
- {"source": "clip", "target": "internvl", "label": "1 classes"},
- {"source": "janus", "target": "internvl", "label": "1 classes"},
- {"source": "llama", "target": "internvl", "label": "1 classes"},
- {"source": "llava", "target": "internvl", "label": "5 classes"},
- {"source": "glm", "target": "ernie4_5", "label": "1 classes"},
- {"source": "llama", "target": "ernie4_5", "label": "4 classes"},
- {"source": "dinov2", "target": "eomt", "label": "4 classes"},
- {"source": "mask2former", "target": "eomt", "label": "2 classes"},
- {"source": "siglip", "target": "eomt", "label": "1 classes"},
- {"source": "vit", "target": "eomt", "label": "1 classes"},
- {"source": "llama", "target": "dia", "label": "4 classes"},
- {"source": "phi3", "target": "dia", "label": "1 classes"},
- {"source": "llama", "target": "deepseek_v3", "label": "9 classes"},
- {"source": "jamba", "target": "bamba", "label": "2 classes"},
- {"source": "llama", "target": "bamba", "label": "6 classes"},
- {"source": "mamba2", "target": "bamba", "label": "4 classes"},
- {"source": "llama", "target": "olmo2", "label": "3 classes"},
- {"source": "olmo", "target": "olmo2", "label": "7 classes"},
- {"source": "clip", "target": "aimv2", "label": "3 classes"},
- {"source": "llama", "target": "aimv2", "label": "2 classes"},
- {"source": "siglip", "target": "aimv2", "label": "6 classes"},
- {"source": "gemma", "target": "diffllama", "label": "1 classes"},
- {"source": "llama", "target": "diffllama", "label": "8 classes"},
- {"source": "mistral", "target": "diffllama", "label": "1 classes"},
- {"source": "rt_detr", "target": "rt_detr_v2", "label": "6 classes"},
- {"source": "vit", "target": "ijepa", "label": "3 classes"},
- {"source": "llama", "target": "smollm3", "label": "9 classes"},
- {"source": "qwen2", "target": "smollm3", "label": "1 classes"},
- {"source": "cohere", "target": "cohere2", "label": "8 classes"},
- {"source": "gemma2", "target": "cohere2", "label": "1 classes"},
- {"source": "bart", "target": "biogpt", "label": "3 classes"},
- {"source": "opt", "target": "biogpt", "label": "1 classes"},
- {"source": "detr", "target": "yolos", "label": "1 classes"},
- {"source": "wav2vec2", "target": "unispeech_sat", "label": "11 classes"},
- {"source": "llama", "target": "qwen2", "label": "10 classes"},
- {"source": "mistral", "target": "qwen2", "label": "1 classes"},
- {"source": "llama", "target": "cohere", "label": "6 classes"},
- {"source": "llama", "target": "qwen2_5_omni", "label": "1 classes"},
- {"source": "qwen2_5_vl", "target": "qwen2_5_omni", "label": "8 classes"},
- {"source": "qwen2_audio", "target": "qwen2_5_omni", "label": "2 classes"},
- {"source": "qwen2_vl", "target": "qwen2_5_omni", "label": "1 classes"},
- {"source": "dinov2", "target": "dinov2_with_registers", "label": "6 classes"}
- ]
- };
- const hfLogoPath = "M21.2,6.7c-0.2-0.2-0.5-0.3-0.8-0.3H3.6C3.3,6.4,3,6.5,2.8,6.7s-0.3,0.5-0.3,0.8v10.8c0,0.3,0.1,0.5,0.3,0.8 c0.2,0.2,0.5,0.3,0.8,0.3h16.8c0.3,0,0.5-0.1,0.8-0.3c0.2-0.2,0.3-0.5,0.3-0.8V7.5C21.5,7.2,21.4,6.9,21.2,6.7z"; // kept for potential future use
-
- const width = window.innerWidth;
- const height = window.innerHeight;
-
- const svg = d3.select('#dependency-graph')
- .call(
- d3.zoom().on('zoom', (event) => {
- g.attr('transform', event.transform);
- })
- );
-
- const g = svg.append('g');
-
- // Forces – tweaked for tighter graph
- const simulation = d3.forceSimulation(graphData.nodes)
- .force('link', d3.forceLink(graphData.links).id(d => d.id).distance(500))
- .force('charge', d3.forceManyBody().strength(-500))
- .force('center', d3.forceCenter(width / 2, height / 2))
- .force('collide', d3.forceCollide(0.01 * parseFloat(getComputedStyle(document.documentElement).getPropertyValue('--base-size'))));
-
- // Links
- const link = g.append('g')
- .selectAll('line')
- .data(graphData.links)
- .join('line')
- .attr('class', 'link')
- .attr('stroke-width', 1.5);
-
- // Link labels (#classes)
- const linkLabel = g.append('g')
- .selectAll('text')
- .data(graphData.links)
- .join('text')
- .attr('class', 'link-label')
- .text(d => d.label);
-
- // Nodes (base vs derived)
- const node = g.append('g')
- .selectAll('g')
- .data(graphData.nodes)
- .join('g')
- .attr('class', d => d.is_base ? 'node base' : 'node derived')
- .call(d3.drag()
- .on('start', dragstarted)
- .on('drag', dragged)
- .on('end', dragended)
- );
-
- // Base-model icon (styled circle instead of external image)
- node.filter(d => d.is_base)
- .append('circle')
- .attr('r', parseFloat(getComputedStyle(document.documentElement).getPropertyValue('--base-size')) / 2)
- .attr('fill', '#FFD21E')
- .attr('stroke', '#FF9D00')
- .attr('stroke-width', 3);
-
- // Add 🤗 emoji as text for base models
- node.filter(d => d.is_base)
- .append('text')
- .attr('text-anchor', 'middle')
- .attr('dy', '0.35em')
- .style('font-size', '24px')
- .text('🤗');
-
- // Base-model label (below icon)
- node.filter(d => d.is_base)
- .append('text')
- .attr('class', 'node-label')
- .attr('y', parseFloat(getComputedStyle(document.documentElement).getPropertyValue('--base-size')) / 2 + 8)
- .style('font-size', '40px')
- .text(d => d.id);
-
- // Derived-model circle + label w/ background rect
- const derived = node.filter(d => !d.is_base);
-
- derived.append('circle')
- .attr('r', d => 20 * d.size); // scaled
-
- const labelGroup = derived.append('g').attr('class', 'label-group');
- labelGroup.append('rect')
- .attr('x', -45)
- .attr('y', -18)
- .attr('width', 90)
- .attr('height', 36)
- .attr('rx', 8)
- .attr('fill', '#fffbe6')
- .attr('stroke', '#ccc');
- labelGroup.append('text')
- .attr('class', 'node-label')
- .attr('dy', '0.35em')
- .style('font-size', '18px')
- .text(d => d.id);
-
- // Tick
- simulation.on('tick', () => {
- link.attr('x1', d => d.source.x)
- .attr('y1', d => d.source.y)
- .attr('x2', d => d.target.x)
- .attr('y2', d => d.target.y);
-
- linkLabel.attr('x', d => (d.source.x + d.target.x) / 2)
- .attr('y', d => (d.source.y + d.target.y) / 2);
-
- node.attr('transform', d => `translate(${d.x}, ${d.y})`);
- });
-
- // Drag helpers
- function dragstarted(event, d) {
- if (!event.active) simulation.alphaTarget(0.3).restart();
- d.fx = d.x; d.fy = d.y;
- }
- function dragged(event, d) {
- d.fx = event.x; d.fy = event.y;
- }
- function dragended(event, d) {
- if (!event.active) simulation.alphaTarget(0);
- d.fx = null; d.fy = null;
- }
- </script>
- </body>
- </html>
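
For reference, the deleted visualizer is a stock d3-force setup: one simulation with link, charge, and center forces, plus a tick handler that copies simulation coordinates back onto the SVG elements. A minimal sketch of the same pattern, assuming d3 v7 and a hypothetical two-node subset rather than the full graphData above:

  <svg id="mini-graph" width="400" height="300"></svg>
  <script src="https://d3js.org/d3.v7.min.js"></script>
  <script>
  // Hypothetical two-node subset, for illustration only.
  const nodes = [{ id: "llama" }, { id: "mistral" }];
  const links = [{ source: "llama", target: "mistral", label: "10 classes" }];
  const svg = d3.select("#mini-graph");
  // One SVG line per link, one circle per node.
  const link = svg.selectAll("line").data(links).join("line").attr("stroke", "#999");
  const node = svg.selectAll("circle").data(nodes).join("circle").attr("r", 10).attr("fill", "#1f77b4");
  // Same force recipe as the deleted file, with gentler constants for a small canvas.
  d3.forceSimulation(nodes)
    .force("link", d3.forceLink(links).id(d => d.id).distance(120))
    .force("charge", d3.forceManyBody().strength(-200))
    .force("center", d3.forceCenter(200, 150))
    .on("tick", () => {
      link.attr("x1", d => d.source.x).attr("y1", d => d.source.y)
          .attr("x2", d => d.target.x).attr("y2", d => d.target.y);
      node.attr("cx", d => d.x).attr("cy", d => d.y);
    });
  </script>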
 
dist/static/fast_image_processors.png DELETED

Git LFS Details

  • SHA256: fc95ff78646cc2ab036d486fb212b40c6fd07206c8506c92b0659c16dd766d31
  • Pointer size: 131 Bytes
  • Size of remote file: 127 kB
dist/static/graph_modular_related_models.png DELETED

Git LFS Details

  • SHA256: bd824e584f1a036c4e7f1118de90697582fd6c31131c1a5d2ebc0a9ea30b27ce
  • Pointer size: 131 Bytes
  • Size of remote file: 124 kB
dist/static/hf-logo.svg DELETED
dist/static/model_debugger.png DELETED

Git LFS Details

  • SHA256: 7f62f8f9b3dfdd62463cda84144e2ea144cdc883cfccd0c4b737ead173ecc7c6
  • Pointer size: 131 Bytes
  • Size of remote file: 142 kB
dist/static/modular_candidates.png DELETED

Git LFS Details

  • SHA256: b209e24b01c98890a00361cf49d80365afc30b7f469271a52aee92ca2f905027
  • Pointer size: 131 Bytes
  • Size of remote file: 171 kB
dist/style.css DELETED
@@ -1,741 +0,0 @@
1
- /* style.css - Scaling Insanity */
2
-
3
- /* Import ultrascale-playbook base styles and add transformers-specific styling */
4
- /* Define colors */
5
- :root {
6
- --distill-gray: rgb(107, 114, 128);
7
- --distill-gray-light: rgb(185, 185, 185);
8
- --distill-gray-lighter: rgb(228, 228, 228);
9
- --distill-gray-lightest: rgb(245, 245, 245);
10
- --distill-blue: #007BFF;
11
- }
12
-
13
- /* Container for the controls */
14
- [id^="plot-"] {
15
- display: flex;
16
- flex-direction: column;
17
- align-items: center;
18
- gap: 15px; /* Adjust the gap between controls as needed */
19
- }
20
- [id^="plot-"] figure {
21
- margin-bottom: 0px;
22
- margin-top: 0px;
23
- padding: 0px;
24
- }
25
- .plotly_caption {
26
- font-style: italic;
27
- margin-top: 10px;
28
- }
29
-
30
- .plotly_controls {
31
- display: flex;
32
- flex-wrap: wrap;
33
- flex-direction: row;
34
- justify-content: center;
35
- align-items: flex-start;
36
- gap: 30px;
37
- }
38
-
39
-
40
- .plotly_input_container {
41
- display: flex;
42
- align-items: center;
43
- flex-direction: column;
44
- gap: 10px;
45
- }
46
-
47
- /* Style for the select dropdown */
48
- .plotly_input_container > select {
49
- padding: 2px 4px;
50
- /* border: 1px solid #ccc; */
51
- line-height: 1.5em;
52
- text-align: center;
53
- border-radius: 4px;
54
- font-size: 12px;
55
- background-color: var(--distill-gray-lightest);
56
- outline: none;
57
- }
58
-
59
- /* Style for the range input */
60
-
61
- .plotly_slider {
62
- display: flex;
63
- align-items: center;
64
- gap: 10px;
65
- }
66
-
67
- .plotly_slider > input[type="range"] {
68
- -webkit-appearance: none;
69
- height: 2px;
70
- background: var(--distill-gray-light);
71
- border-radius: 5px;
72
- outline: none;
73
- }
74
-
75
- .plotly_slider > span {
76
- font-size: 14px;
77
- line-height: 1.6em;
78
- min-width: 16px;
79
- }
80
-
81
- .plotly_slider > input[type="range"]::-webkit-slider-thumb {
82
- -webkit-appearance: none;
83
- appearance: none;
84
- width: 18px;
85
- height: 18px;
86
- border-radius: 50%;
87
- background: var(--distill-blue);
88
- cursor: pointer;
89
- }
90
-
91
- .plotly_slider > input[type="range"]::-moz-range-thumb {
92
- width: 18px;
93
- height: 18px;
94
- border-radius: 50%;
95
- background: var(--distill-blue);
96
- cursor: pointer;
97
- }
98
-
99
- /* Style for the labels */
100
- .plotly_input_container > label {
101
- font-size: 14px;
102
- font-weight: bold;
103
- }
104
-
105
- .main-plot-container {
106
- margin-top: 21px;
107
- margin-bottom: 35px;
108
- }
109
-
110
- .main-plot-container > figure {
111
- display: block !important;
112
- /* Let this be handled by graph-container */
113
- margin-bottom: 0px;
114
- margin-top: 0px;
115
- }
116
- .main-plot-container > div {
117
- display: none !important;
118
- }
119
-
120
-
121
- @media (min-width: 768px) {
122
- .main-plot-container > figure {
123
- display: none !important;
124
- }
125
- .main-plot-container > div {
126
- display: flex !important;
127
- }
128
- }
129
-
130
- d-byline .byline {
131
- grid-template-columns: 1fr;
132
- grid-column: text;
133
- font-size: 0.9rem;
134
- line-height: 1.8em;
135
- }
136
-
137
- @media (min-width: 768px) {
138
- d-byline .byline {
139
- grid-template-columns: 5fr 1fr 1fr;
140
- }
141
- }
142
-
143
- #title-plot {
144
- margin-top: 0px;
145
- margin-bottom: 0px;
146
- }
147
-
148
- d-contents > nav a.active {
149
- text-decoration: underline;
150
- }
151
-
152
- @media (max-width: 1199px) {
153
- d-contents {
154
- display: none;
155
- background: white;
156
- justify-self: start;
157
- align-self: start;
158
- padding-bottom: 0.5em;
159
- margin-bottom: 1em;
160
- padding-left: 0.25em;
161
- border-bottom: 1px solid rgba(0, 0, 0, 0.1);
162
- border-bottom-width: 1px;
163
- border-bottom-style: solid;
164
- border-bottom-color: rgba(0, 0, 0, 0.1);
165
- overflow-y: scroll;
166
- height: calc(100vh - 40px);
167
- scrollbar-width: none;
168
- z-index: -100;
169
- }
170
- }
171
-
172
- d-contents a:hover {
173
- border-bottom: none;
174
- }
175
-
176
- toc-title {
177
- font-weight: bold;
178
- font-size: 1.2em;
179
- color: #333;
180
- }
181
-
182
- toggle-icon {
183
- transition: transform 0.3s;
184
- }
185
-
186
- toggle-icon.collapsed {
187
- transform: rotate(90deg);
188
- }
189
-
190
- .toc-content {
191
- margin-top: 15px;
192
- overflow: hidden;
193
- /* max-height: 1000px; */
194
- transition: max-height 0.3s ease-out;
195
- }
196
-
197
- .toc-content.collapsed {
198
- max-height: 0;
199
- margin-top: 0;
200
- }
201
-
202
- @media (min-width: 1200px) {
203
- d-article {
204
- /* Ensure d-article does not prevent sticky positioning */
205
- overflow: visible;
206
- }
207
-
208
- d-contents {
209
- align-self: start;
210
- background: white;
211
- grid-column-start: 1 !important;
212
- grid-column-end: 4 !important;
213
- grid-row: auto / span 6;
214
- justify-self: end;
215
- margin-top: 0em;
216
- padding-right: 3em;
217
- padding-left: 2em;
218
- /* border-right: 1px solid rgba(0, 0, 0, 0.1);
219
- border-right-width: 1px;
220
- border-right-style: solid;
221
- border-right-color: rgba(0, 0, 0, 0.1); */
222
- position: -webkit-sticky; /* For Safari */
223
- position: sticky;
224
- top: 10px; /* Adjust this value if needed */
225
- overflow-y: auto;
226
- height: calc(100vh - 40px);
227
- scrollbar-width: none;
228
- transition: max-height 0.3s ease-out;
229
- z-index: -100;
230
- }
231
- }
232
-
233
- d-contents nav h3 {
234
- margin-top: 0;
235
- margin-bottom: 1em;
236
- }
237
-
238
- d-contents nav div div {
239
- color: rgba(0, 0, 0, 0.8);
240
- font-weight: bold;
241
- }
242
-
243
- d-contents nav a {
244
- color: rgba(0, 0, 0, 0.8);
245
- border-bottom: none;
246
- text-decoration: none;
247
- }
248
-
249
- d-contents li {
250
- list-style-type: none;
251
- }
252
-
253
- d-contents ul, d-article d-contents ul {
254
- padding-left: 1em;
255
- }
256
-
257
- d-contents nav ul li {
258
- margin-bottom: .25em;
259
- }
260
-
261
- d-contents nav a:hover {
262
- text-decoration: underline solid rgba(0, 0, 0, 0.6);
263
- }
264
-
265
- d-contents nav ul {
266
- margin-top: 0;
267
- margin-bottom: 6px;
268
- }
269
-
270
-
271
- d-contents nav > div {
272
- display: block;
273
- outline: none;
274
- margin-bottom: 0.5em;
275
- }
276
-
277
- d-contents nav > div > a {
278
- font-size: 13px;
279
- font-weight: 600;
280
- }
281
-
282
- d-article aside {
283
- margin-bottom: 1em;
284
- }
285
-
286
- d-article img {
287
- max-width: 100%;
288
- }
289
-
290
- @media (min-width: 768px) {
291
- d-article aside {
292
- margin-bottom: 0;
293
- }
294
- }
295
-
296
- d-contents nav > div > a:hover,
297
- d-contents nav > ul > li > a:hover {
298
- text-decoration: none;
299
- }
300
-
301
- .note-box {
302
- background-color: #f6f8fa;
303
- border-left: 4px solid #444444;
304
- padding: 1rem;
305
- margin: 1rem 0; /* Keep this modest margin */
306
- border-radius: 6px;
307
- /* Add this to ensure the box only takes up needed space */
308
- display: inline-block;
309
- }
310
-
311
- .note-box-title {
312
- margin: 0;
313
- color: #444444;
314
- font-weight: 600;
315
- font-size: 1em;
316
- }
317
-
318
- .note-box-content {
319
- margin-top: 0.5rem;
320
- margin-bottom: 0; /* Ensure no bottom margin */
321
- color: #24292f;
322
- font-size: 0.9em;
323
- line-height: 1.5em;
324
- }
325
-
326
- /* For dark mode support */
327
- @media (prefers-color-scheme: dark) {
328
- .note-box {
329
- background-color: #1c1c1c;
330
- border-left-color: #888888;
331
- }
332
- .note-box-title {
333
- color: #888888;
334
- }
335
- .note-box-content {
336
- color: #d4d4d4;
337
- }
338
- }
339
-
340
- d-article {
341
- font-size: 1.0em;
342
- }
343
-
344
- .figure-legend {
345
- font-size: 0.9em;
346
- font-style: italic;
347
- color: var(--distill-gray);
348
- line-height: 1.5em;
349
- }
350
-
351
- d-code {
352
- font-size: 12px;
353
- }
354
-
355
- .large-image-background {
356
- width: 100vw;
357
- padding-top: 10px;
358
- padding-bottom: 10px;
359
- margin-left: calc(-50vw + 50%);
360
- margin-right: calc(-50vw + 50%);
361
- background: white;
362
- height: fit-content; /* This will make it match the image height */
363
- display: flex;
364
- justify-content: center; /* This will center your image */
365
- }
366
-
367
- .large-image-background-transparent {
368
- /* width: 100vw; */
369
- padding-top: 10px;
370
- padding-bottom: 10px;
371
- /* margin-left: calc(-50vw + 50%); */
372
- margin-left:-100px;
373
- margin-right: -100px;
374
- /* margin-right: calc(-50vw + 50%); */
375
- /* background: white; */
376
- height: fit-content; /* This will make it match the image height */
377
- display: flex;
378
- justify-content: center; /* This will center your image */
379
- }
380
-
381
- .boxed-image {
382
- padding: 0.5rem;
383
- background: white;
384
- border-radius: 12px;
385
- border: 1px solid #e5e7eb;
386
- box-shadow: 0 4px 6px rgba(0, 0, 0, 0.1);
387
- }
388
-
389
- d-article li {
390
- margin-bottom: 0.0em;
391
- }
392
-
393
- d-article ul ul {
394
- margin-bottom: 0.0em;
395
- }
396
-
397
- d-article ol ol {
398
- margin-bottom: 0.0em;
399
- }
400
-
401
- d-article hr {
402
- grid-column: text;
403
- }
404
-
405
- /* Memory visualization */
406
- #graph-all {
407
- min-width: 500px;
408
- margin-right: 10px;
409
- margin-bottom: 2rem;
410
- padding: 0.5rem;
411
- background: #f9fafb;
412
- border-radius: 12px;
413
- border: 1px solid #e5e7eb;
414
- box-shadow: 0 4px 6px rgba(0, 0, 0, 0.1);
415
- }
-
-
-/* Main container styles */
-#controls {
-  max-width: 1200px;
-  /* margin: 2rem auto; */
-  margin-bottom: 2rem;
-  margin-left: 10px;
-  padding: 0.6rem;
-  background: #f9fafb;
-  border-radius: 12px;
-  border: 1px solid #e5e7eb;
-  box-shadow: 0 4px 6px rgba(0, 0, 0, 0.1);
-}
-
-/* Grid layout */
-#controls {
-  display: grid;
-  grid-template-columns: 1fr 1fr;
-  /* gap: 2rem; */
-}
-
-/* Cell styles */
-.cell {
-  margin-bottom: 0.2rem;
-}
-
-/* Label styles */
-label {
-  display: block;
-  /* margin-bottom: 0.5rem; */
-  font-size: 0.8rem;
-  font-weight: 500;
-  color: #374151;
-}
-
-/* Input container for range + number combination */
-.input-container {
-  display: flex;
-  gap: 1rem;
-  align-items: center;
-}
-
-/* Range input styling */
-input[type="range"] {
-  flex: 1;
-  height: 6px;
-  background: #e5e7eb;
-  border-radius: 3px;
-  appearance: none;
-  outline: none;
-}
-
-input[type="range"]::-webkit-slider-thumb {
-  appearance: none;
-  width: 16px;
-  height: 16px;
-  background: #3b82f6;
-  border-radius: 50%;
-  cursor: pointer;
-  transition: background 0.15s ease;
-}
-
-input[type="range"]::-webkit-slider-thumb:hover {
-  background: #2563eb;
-}
-
-/* Number input styling */
-input[type="number"] {
-  width: 80px;
-  padding: 0.5rem;
-  border: 1px solid #e5e7eb;
-  border-radius: 6px;
-  font-size: 0.9rem;
-  color: #374151;
-}
-
-/* Select styling */
-select {
-  width: 100%;
-  padding: 0.5rem;
-  border: 1px solid #e5e7eb;
-  border-radius: 6px;
-  background: white;
-  font-size: 0.9rem;
-  color: #374151;
-  cursor: pointer;
-}
-
-/* Checkbox styling */
-input[type="checkbox"] {
-  width: 1.2rem;
-  height: 1.2rem;
-  margin-right: 0.5rem;
-  border: 2px solid #e5e7eb;
-  border-radius: 4px;
-  cursor: pointer;
-}
-
-/* Column specific styles */
-.column-1 {
-  padding-right: 0.5rem;
-}
-
-.column-2 {
-  padding-left: 0.5rem;
-}
-
-/* Checkbox container */
-.checkbox-container {
-  display: flex;
-  align-items: center;
-  margin-bottom: 1rem;
-}
-
-/* Memory visualization styles */
-.memory-block {
-  background: #fff;
-  border-radius: 8px;
-  padding: 1rem;
-  margin-bottom: 1rem;
-  box-shadow: 0 2px 4px rgba(0, 0, 0, 0.05);
-}
-
-.memory-title {
-  font-size: 1.1rem;
-  font-weight: 500;
-  color: #374151;
-  margin-bottom: 0.5rem;
-}
-
-.memory-value {
-  font-size: 1.5rem;
-  font-weight: 600;
-  color: #3b82f6;
-}
-
-/* Responsive adjustments */
-@media (max-width: 768px) {
-  #controls {
-    grid-template-columns: 1fr;
-    padding: 1rem;
-  }
-
-  .column-1, .column-2 {
-    padding: 0;
-  }
-}
-
-/* Hover states and transitions */
-input:hover, select:hover {
-  border-color: #3b82f6;
-}
-
-input:focus, select:focus {
-  border-color: #2563eb;
-  outline: none;
-  box-shadow: 0 0 0 2px rgba(59, 130, 246, 0.1);
-}
-
-/* Add smooth transitions */
-input, select, button {
-  transition: all 0.15s ease;
-}
-
-/* Preset dropdown special styling */
-select[name="presets"] {
-  background-color: #f3f4f6;
-  font-weight: 500;
-}
-
-/* Memory graph enhancements */
-.activation-memory {
-  background: #dbeafe;
-  padding: 1rem;
-  border-radius: 8px;
-  margin-bottom: 1rem;
-}
-
-.gradient-memory {
-  background: #ede9fe;
-  padding: 1rem;
-  border-radius: 8px;
-}
-
-.order-button-second {
-  background: linear-gradient(135deg, #6DB4C4, #D4A5B8);
-  color: white;
-  font-size: 18px;
-  font-weight: 600;
-  padding: 20px 20px;
-  border: none;
-  border-radius: 12px;
-  cursor: pointer;
-  text-transform: uppercase;
-  letter-spacing: 1px;
-  box-shadow: 0 4px 15px rgba(0, 0, 0, 0.2);
-  transition: all 0.3s ease;
-  position: relative;
-  overflow: hidden;
-}
-.order-button-second:hover {
-  transform: translateY(-2px);
-  box-shadow: 0 6px 20px rgba(0, 0, 0, 0.25);
-}
-
-.order-button:active {
-  transform: translateY(0);
-  box-shadow: 0 2px 10px rgba(0, 0, 0, 0.2);
-}
-
-.order-button-second::before {
-  content: '';
-  position: absolute;
-  top: 0;
-  left: -100%;
-  width: 100%;
-  height: 100%;
-  background: linear-gradient(135deg, rgba(255, 255, 255, 0.2), rgba(255, 255, 255, 0));
-  transition: left 0.5s ease;
-}
-
-.order-button-second:hover::before {
-  left: 100%;
-}
-
-.order-button {
-  background: linear-gradient(135deg, #6DB4C4, #D4A5B8);
-  color: white;
-  font-size: 18px;
-  font-weight: 600;
-  padding: 16px 32px;
-  border: none;
-  border-radius: 12px;
-  cursor: pointer;
-  text-transform: uppercase;
-  letter-spacing: 1px;
-  box-shadow: 0 4px 15px rgba(0, 0, 0, 0.2);
-  transition: all 0.3s ease;
-  position: relative;
-  overflow: hidden;
-}
-
-.order-button:hover {
-  transform: translateY(-2px);
-  box-shadow: 0 6px 20px rgba(0, 0, 0, 0.25);
-}
-
-.order-button:active {
-  transform: translateY(0);
-  box-shadow: 0 2px 10px rgba(0, 0, 0, 0.2);
-}
-
-.order-button::before {
-  content: '';
-  position: absolute;
-  top: 0;
-  left: -100%;
-  width: 100%;
-  height: 100%;
-  background: linear-gradient(135deg, rgba(255, 255, 255, 0.2), rgba(255, 255, 255, 0));
-  transition: left 0.5s ease;
-}
-
-.order-button:hover::before {
-  left: 100%;
-}
-.order-button-container-second {
-  /* display: flex; */
-  justify-content: center;
-  margin: 0px 0;
-}
-
-.order-button-container {
-  display: flex;
-  justify-content: center;
-  margin: 0px 0 40px 0;
-}
-
-d-article img {
-  width: 100%!important;
-}
-
-
-iframe, .js-plotly-plot {
-  width: 100%!important;
-  margin-bottom: 20px;
-}
-
-.modebar-container {
-  display: none;
-}
-
-#graph-container {
-  display: grid; grid-template-columns: 1fr 1fr; align-items: center;
-}
-
-@media (max-width: 768px) {
-  #graph-container {
-    grid-template-columns: 1fr;
-  }
-}
-
-@media (max-width: 1024px) {
-  #graph-container {
-    grid-template-columns: 1fr;
-  }
-  #graph-all {
-    margin-right: 0px;
-  }
-  #controls {
-    margin-left: 0px;
-  }
-}
-
-.main-plot-container svg {
-  background: transparent !important;
-}
-
-.large-image-background-transparent {
-  margin-left: 0px;
-  margin-right: 0px;
-}
-
-/* Import transformers-specific styles */
-@import url('./transformers-custom.css');