diff --git a/.gitattributes b/.gitattributes index 7a96d7f968f63860f9d645770b75866b567cdae0..3ea99e41c0dc3d629a543c645ad9674e0414aa96 100644 --- a/.gitattributes +++ b/.gitattributes @@ -5,6 +5,7 @@ *.ckpt filter=lfs diff=lfs merge=lfs -text *.ftz filter=lfs diff=lfs merge=lfs -text *.gz filter=lfs diff=lfs merge=lfs -text +dist/**/*.gz -filter -diff -merge *.h5 filter=lfs diff=lfs merge=lfs -text *.joblib filter=lfs diff=lfs merge=lfs -text *.lfs.* filter=lfs diff=lfs merge=lfs -text diff --git a/.gitignore b/.gitignore index 3c3629e647f5ddf82548912e337bea9826b434af..9f7937398f448b6e79aae26988c60c6157af4784 100644 --- a/.gitignore +++ b/.gitignore @@ -1 +1,39 @@ node_modules +# Python +__pycache__ +*.py[cod] +*.so +.Python +env/ +venv/ +*.egg-info/ +dist/ +build/ +*.egg +.idea/ +.vscode/ +.astro/ +.claude/ +*.swp +.DS_Store +# Node +node_modules/ +*.log +*.env +*.cache + +app/scripts/latex-to-mdx/output/ +app/src/content/embeds/typography/generated + +# PDF export +app/public/*.pdf +app/public/*.png +app/public/*.jpg +app/public/data/**/* + +.astro/ + +# Template sync temporary directories +.template-sync/ +.temp-*/ +.backup-*/ diff --git a/CHANGELOG.md b/CHANGELOG.md new file mode 100644 index 0000000000000000000000000000000000000000..5837b2b57b8d319f7a12c1b0ff413044b7792f33 --- /dev/null +++ b/CHANGELOG.md @@ -0,0 +1,118 @@ +# Changelog + +All notable changes to the Research Article Template will be documented in this file. + +The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), +and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). 
+ +## [Unreleased] + +### Added +- Initial open source release +- Comprehensive documentation +- Contributing guidelines +- License file + +## [1.0.0] - 2024-12-19 + +### Added +- **Core Features**: + - Markdown/MDX-based writing system + - KaTeX mathematical notation support + - Syntax highlighting for code blocks + - Academic citations with BibTeX integration + - Footnotes and sidenotes system + - Auto-generated table of contents + - Interactive Mermaid diagrams + - Plotly.js and D3.js integration + - HTML embed support + - Gradio app embedding + - Dataviz color palettes + - Image optimization + - SEO-friendly structure + - Automatic PDF export + - Dark/light theme toggle + - Mobile-responsive design + - LaTeX import functionality + - Template synchronization system + +- **Components**: + - Figure component with captions + - MultiFigure for image galleries + - Note component with variants + - Quote component + - Accordion for collapsible content + - Sidenote component + - Table of Contents + - Theme Toggle + - HTML Embed + - Raw HTML support + - SEO component + - Hero section + - Footer + - Full-width and wide layouts + +- **Build System**: + - Astro 4.10.0 integration + - PostCSS with custom media queries + - Automatic compression + - Docker support + - Nginx configuration + - Git LFS support + +- **Scripts**: + - PDF export functionality + - LaTeX to MDX conversion + - Template synchronization + - Font SVG generation + - TrackIO data generation + +- **Documentation**: + - Getting started guide + - Writing best practices + - Component reference + - LaTeX conversion guide + - Interactive examples + +### Technical Details +- **Framework**: Astro 4.10.0 +- **Styling**: PostCSS with custom properties +- **Math**: KaTeX 0.16.22 +- **Charts**: Plotly.js 3.1.0, D3.js 7.9.0 +- **Diagrams**: Mermaid 11.10.1 +- **Node.js**: >=20.0.0 +- **License**: CC-BY-4.0 + +### Browser Support +- Chrome (latest) +- Firefox (latest) +- Safari (latest) +- Edge (latest) + +--- + +## 
Version History + +- **1.0.0**: Initial stable release with full feature set +- **0.0.1**: Development version (pre-release) + +## Migration Guide + +### From 0.0.1 to 1.0.0 + +This is the first stable release. No breaking changes from the development version. + +### Updating Your Project + +Use the template synchronization system to update: + +```bash +npm run sync:template -- --dry-run # Preview changes +npm run sync:template # Apply updates +``` + +## Support + +- **Documentation**: [Hugging Face Space](https://huggingface.co/spaces/tfrere/research-article-template) +- **Issues**: [Community Discussions](https://huggingface.co/spaces/tfrere/research-article-template/discussions) +- **Contact**: [@tfrere](https://huggingface.co/tfrere) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 0000000000000000000000000000000000000000..a4573b5d9abcd9e9ba35095677d0443b157298ec --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,196 @@ +# Contributing to Research Article Template + +Thank you for your interest in contributing to the Research Article Template! This document provides guidelines and information for contributors. + +## 🤝 How to Contribute + +### Reporting Issues + +Before creating an issue, please: +1. **Search existing issues** to avoid duplicates +2. **Use the issue template** when available +3. **Provide detailed information**: + - Clear description of the problem + - Steps to reproduce + - Expected vs actual behavior + - Environment details (OS, Node.js version, browser) + - Screenshots if applicable + +### Suggesting Features + +We welcome feature suggestions! Please: +1. **Check existing discussions** first +2. **Describe the use case** clearly +3. **Explain the benefits** for the community +4. **Consider implementation complexity** + +### Code Contributions + +#### Getting Started + +1. **Fork the repository** on Hugging Face +2. 
**Clone your fork**: + ```bash + git clone git@hf.co:spaces/<your-username>/research-article-template + cd research-article-template + ``` +3. **Install dependencies**: + ```bash + cd app + npm install + ``` +4. **Create a feature branch**: + ```bash + git checkout -b feature/your-feature-name + ``` + +#### Development Workflow + +1. **Make your changes** following our coding standards +2. **Test thoroughly**: + ```bash + npm run dev # Test locally + npm run build # Ensure build works + ``` +3. **Update documentation** if needed +4. **Commit with clear messages**: + ```bash + git commit -m "feat: add new component for interactive charts" + ``` + +#### Pull Request Process + +1. **Push your branch**: + ```bash + git push origin feature/your-feature-name + ``` +2. **Create a Pull Request** with: + - Clear title and description + - Reference related issues + - Screenshots for UI changes + - Testing instructions + +## 📋 Coding Standards + +### Code Style + +- **Use Prettier** for consistent formatting +- **Follow existing patterns** in the codebase +- **Write clear, self-documenting code** +- **Add comments** for complex logic +- **Use meaningful variable names** + +### File Organization + +- **Components**: Place in `src/components/` +- **Styles**: Use CSS modules or component-scoped styles +- **Assets**: Organize in `src/content/assets/` +- **Documentation**: Update relevant `.mdx` files + +### Commit Message Format + +We follow [Conventional Commits](https://www.conventionalcommits.org/): + +``` +type(scope): description + +feat: add new interactive chart component +fix: resolve mobile layout issues +docs: update installation instructions +style: improve button hover states +refactor: simplify component structure +test: add unit tests for utility functions +``` + +**Types**: `feat`, `fix`, `docs`, `style`, `refactor`, `test`, `chore` + +## 🧪 Testing + +### Manual Testing + +Before submitting: +- [ ] Test on different screen sizes +- [ ] Verify dark/light theme compatibility +- [ ] 
Check browser compatibility (Chrome, Firefox, Safari) +- [ ] Test with different content types +- [ ] Ensure accessibility standards + +### Automated Testing + +```bash +# Run build to catch errors +npm run build + +# Test PDF export +npm run export:pdf + +# Test LaTeX conversion +npm run latex:convert +``` + +## 📚 Documentation + +### Writing Guidelines + +- **Use clear, concise language** +- **Provide examples** for complex features +- **Include screenshots** for UI changes +- **Update both English content and code comments** + +### Documentation Structure + +- **README.md**: Project overview and quick start +- **CONTRIBUTING.md**: This file +- **Content files**: In `src/content/chapters/demo/` +- **Component docs**: Inline comments and examples + +## 🎯 Areas for Contribution + +### High Priority + +- **Bug fixes** and stability improvements +- **Accessibility enhancements** +- **Mobile responsiveness** +- **Performance optimizations** +- **Documentation improvements** + +### Feature Ideas + +- **New interactive components** +- **Additional export formats** +- **Enhanced LaTeX import** +- **Theme customization** +- **Plugin system** + +### Community + +- **Answer questions** in discussions +- **Share examples** of your work +- **Write tutorials** and guides +- **Help with translations** + +## 🚫 What Not to Contribute + +- **Breaking changes** without discussion +- **Major architectural changes** without approval +- **Dependencies** that significantly increase bundle size +- **Features** that don't align with the project's goals + +## 📞 Getting Help + +- **Discussions**: [Community tab](https://huggingface.co/spaces/tfrere/research-article-template/discussions) +- **Issues**: [Report bugs](https://huggingface.co/spaces/tfrere/research-article-template/discussions?status=open&type=issue) +- **Contact**: [@tfrere](https://huggingface.co/tfrere) on Hugging Face + +## 📄 License + +By contributing, you agree that your contributions will be licensed under the same 
[CC-BY-4.0 license](LICENSE) that covers the project. + +## 🙏 Recognition + +Contributors will be: +- **Listed in acknowledgments** (if desired) +- **Mentioned in release notes** for significant contributions +- **Credited** in relevant documentation + +Thank you for helping make scientific writing more accessible and interactive! 🎉 diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 0000000000000000000000000000000000000000..2022d150c921dda4c7dfe5804d0a0e4bf64cc946 --- /dev/null +++ b/Dockerfile @@ -0,0 +1,71 @@ +# Use an official Node runtime as the base image for building the application +# Build with Playwright (browsers and deps ready) +FROM mcr.microsoft.com/playwright:v1.55.0-jammy AS build + +# Install git, git-lfs, and dependencies for Pandoc (only if ENABLE_LATEX_CONVERSION=true) +RUN apt-get update && apt-get install -y git git-lfs wget && apt-get clean + +# Install latest Pandoc from GitHub releases (only installed if needed later) +RUN wget -qO- https://github.com/jgm/pandoc/releases/download/3.8/pandoc-3.8-linux-amd64.tar.gz | tar xzf - -C /tmp && \ + cp /tmp/pandoc-3.8/bin/pandoc /usr/local/bin/ && \ + cp /tmp/pandoc-3.8/bin/pandoc-lua /usr/local/bin/ && \ + rm -rf /tmp/pandoc-3.8 + +# Set the working directory in the container +WORKDIR /app + +# Copy package.json and package-lock.json +COPY app/package*.json ./ + +# Install dependencies +RUN npm install + +# Copy the rest of the application code +COPY app/ . + +# Conditionally convert LaTeX to MDX if ENABLE_LATEX_CONVERSION=true +ARG ENABLE_LATEX_CONVERSION=false +RUN if [ "$ENABLE_LATEX_CONVERSION" = "true" ]; then \ + echo "🔄 LaTeX importer enabled - running latex:convert..."; \ + npm run latex:convert; \ + else \ + echo "⏭️ LaTeX importer disabled - skipping..."; \ + fi + +# Ensure `public/data` is a real directory with real files (not a symlink) +# This handles the case where `public/data` is a symlink in the repo, which +# would be broken inside the container after COPY. 
+RUN set -e; \ + if [ -e public ] && [ ! -d public ]; then rm -f public; fi; \ + mkdir -p public; \ + if [ -L public/data ] || { [ -e public/data ] && [ ! -d public/data ]; }; then rm -f public/data; fi; \ + mkdir -p public/data; \ + cp -a src/content/assets/data/. public/data/ + +# Build the application +RUN npm run build + +# Generate the PDF (light theme, full wait) +RUN npm run export:pdf -- --theme=light --wait=full + +# Use an official Nginx runtime as the base image for serving the application +FROM nginx:alpine + +# Copy the built application from the build stage +COPY --from=build /app/dist /usr/share/nginx/html + +# Copy a custom Nginx configuration file +COPY nginx.conf /etc/nginx/nginx.conf + +# Create necessary directories and set permissions +RUN mkdir -p /var/cache/nginx /var/run /var/log/nginx && \ + chmod -R 777 /var/cache/nginx /var/run /var/log/nginx /etc/nginx/nginx.conf + +# Switch to non-root user +USER nginx + +# Expose port 8080 +EXPOSE 8080 + +# Command to run the application +CMD ["nginx", "-g", "daemon off;"] diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..b267a53137822114e4c0bcef2e6383aaf52a70f1 --- /dev/null +++ b/LICENSE @@ -0,0 +1,33 @@ +Creative Commons Attribution 4.0 International License + +Copyright (c) 2024 Thibaud Frere + +This work is licensed under the Creative Commons Attribution 4.0 International License. +To view a copy of this license, visit http://creativecommons.org/licenses/by/4.0/ +or send a letter to Creative Commons, PO Box 1866, Mountain View, CA 94042, USA. + +You are free to: + + Share — copy and redistribute the material in any medium or format + Adapt — remix, transform, and build upon the material for any purpose, even commercially. + +The licensor cannot revoke these freedoms as long as you follow the license terms. 
+ +Under the following terms: + + Attribution — You must give appropriate credit, provide a link to the license, and indicate if changes were made. You may do so in any reasonable manner, but not in any way that suggests the licensor endorses you or your use. + + No additional restrictions — You may not apply legal terms or technological measures that legally restrict others from doing anything the license permits. + +Notices: + + You do not have to comply with the license for elements of the material in the public domain or where your use is permitted by an applicable exception or limitation. + + No warranties are given. The license may not give you all of the permissions necessary for your intended use. For example, other rights such as publicity, privacy, or moral rights may limit how you use the material. + +--- + +For the source code and technical implementation: +- The source code is available at: https://huggingface.co/spaces/tfrere/research-article-template +- Third-party figures and assets are excluded from this license and marked in their captions +- Dependencies and third-party libraries maintain their respective licenses diff --git a/README.md b/README.md index 8bbefb170d33faf0f2780f00b4dcb3eee9d332c7..a94fd96962f629a1c6139c62b95e9b898616f053 100644 --- a/README.md +++ b/README.md @@ -1,11 +1,121 @@ --- -title: Maintain the unmaintainable +title: 'Maintain the unmaintainable' emoji: 📚 -colorFrom: pink +colorFrom: blue colorTo: indigo -sdk: static -app_file: app/dist/index.html +sdk: docker pinned: false +header: mini +app_port: 8080 +tags: + - research-article-template + - research paper + - scientific paper + - data visualization +thumbnail: https://huggingface.co/spaces/tfrere/research-paper-template/thumb.jpg --- +
-Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference +# Research Article Template + +[![License: CC BY 4.0](https://img.shields.io/badge/License-CC%20BY%204.0-lightgrey.svg)](https://creativecommons.org/licenses/by/4.0/) +[![Node.js Version](https://img.shields.io/badge/node-%3E%3D20.0.0-brightgreen.svg)](https://nodejs.org/) +[![Astro](https://img.shields.io/badge/Astro-4.10.0-orange.svg)](https://astro.build/) +[![Hugging Face Spaces](https://img.shields.io/badge/%F0%9F%A4%97%20Hugging%20Face-Spaces-blue)](https://huggingface.co/spaces/tfrere/research-article-template) + + +**A modern, interactive template for scientific writing** that brings papers to life with web-native features. The web offers what static PDFs can't: **interactive diagrams**, **progressive notation**, and **exploratory views** that show how ideas behave. This template treats interactive artifacts—figures, math, code, and inspectable experiments—as **first-class** alongside prose, helping readers **build intuition** instead of skimming results—all with **minimal setup** and no web knowledge required. + +**[Try the live demo & documentation →](https://huggingface.co/spaces/tfrere/research-article-template)** + +
+ +## 🚀 Quick Start + +### Option 1: Duplicate on Hugging Face (Recommended) + +1. Visit **[🤗 Research Article Template](https://huggingface.co/spaces/tfrere/research-article-template)** +2. Click **"Duplicate this Space"** +3. Clone your new repository: + ```bash + git clone git@hf.co:spaces/<your-username>/<your-space-name> + cd <your-space-name> + ``` + +### Option 2: Clone Directly + +```bash +git clone https://github.com/tfrere/research-article-template.git +cd research-article-template +``` + +### Installation + +```bash +# Install Node.js 20+ (use nvm for version management) +nvm install 20 +nvm use 20 + +# Install Git LFS and pull assets +git lfs install +git lfs pull + +# Install dependencies +cd app +npm install + +# Start development server +npm run dev +``` + +Visit `http://localhost:4321` to see your site! + +## 🎯 Who This Is For + +- **Scientists** writing modern, web-native research papers +- **Educators** creating interactive, explorable lessons +- **Researchers** who want to focus on ideas, not infrastructure +- **Anyone** who values clear, engaging technical communication + +## 🌟 Inspired by Distill + +This template carries forward the spirit of [Distill](https://distill.pub/) (2016–2021), pushing interactive scientific writing even further with: +- Accessible, high-quality explanations +- Reproducible, production-ready demos +- Modern web technologies and best practices + +## 🤝 Contributing + +We welcome contributions! Please see our [Contributing Guidelines](CONTRIBUTING.md) for details. + +### Ways to Contribute + +- **Report bugs** - Open an issue with detailed information +- **Suggest features** - Share ideas for improvements +- **Improve documentation** - Help others get started +- **Submit code** - Fix bugs or add features +- **Join discussions** - Share feedback and ideas + +## 📄 License + +This project is licensed under the [Creative Commons Attribution 4.0 International License](https://creativecommons.org/licenses/by/4.0/). 
+ +- **Diagrams and text**: CC-BY 4.0 +- **Source code**: Available on [Hugging Face](https://huggingface.co/spaces/tfrere/research-article-template) +- **Third-party figures**: Excluded and marked in captions + +## 🙏 Acknowledgments + +- Inspired by [Distill](https://distill.pub/) and the interactive scientific writing movement +- Built with [Astro](https://astro.build/), [MDX](https://mdxjs.com/), and modern web technologies +- Community feedback and contributions from researchers worldwide + +## 📞 Support + +- **[Community Discussions](https://huggingface.co/spaces/tfrere/research-article-template/discussions)** - Ask questions and share ideas +- **[Report Issues](https://huggingface.co/spaces/tfrere/research-article-template/discussions?status=open&type=issue)** - Bug reports and feature requests +- **Contact**: [@tfrere](https://huggingface.co/tfrere) on Hugging Face + +--- + +**Made with ❤️ for the scientific community** diff --git a/app/.astro/astro/content.d.ts b/app/.astro/astro/content.d.ts index 579098f4486eea05796e6546ad36d694cfd1de08..e5be5a92014f57f2efba8362f3706f319b32678e 100644 --- a/app/.astro/astro/content.d.ts +++ b/app/.astro/astro/content.d.ts @@ -151,13 +151,22 @@ declare module 'astro:content' { >; type ContentEntryMap = { - + "embeds": { +"demo/vibe-code-d3-embeds-directives.md": { + id: "demo/vibe-code-d3-embeds-directives.md"; + slug: "demo/vibe-code-d3-embeds-directives"; + body: string; + collection: "embeds"; + data: any +} & { render(): Render[".md"] }; +}; + }; type DataEntryMap = { - "embeds": Record; diff --git a/app/astro.config.mjs b/app/astro.config.mjs index 10ceabec05b41c6e0eb64aea23f8dac6caa3c357..a00150600b0ca51f142971b8611048f559bc41f9 100644 --- a/app/astro.config.mjs +++ b/app/astro.config.mjs @@ -5,11 +5,16 @@ import mermaid from 'astro-mermaid'; import compressor from 'astro-compressor'; import remarkMath from 'remark-math'; import rehypeKatex from 'rehype-katex'; +import remarkFootnotes from 'remark-footnotes'; import 
rehypeSlug from 'rehype-slug'; import rehypeAutolinkHeadings from 'rehype-autolink-headings'; +import rehypeCitation from 'rehype-citation'; import rehypeCodeCopy from './plugins/rehype/code-copy.mjs'; +import rehypeReferencesAndFootnotes from './plugins/rehype/post-citation.mjs'; +import remarkIgnoreCitationsInCode from './plugins/remark/ignore-citations-in-code.mjs'; import remarkDirective from 'remark-directive'; import remarkOutputContainer from './plugins/remark/output-container.mjs'; +import rehypeRestoreAtInCode from './plugins/rehype/restore-at-in-code.mjs'; import rehypeWrapTables from './plugins/rehype/wrap-tables.mjs'; import rehypeWrapOutput from './plugins/rehype/wrap-outputs.mjs'; // Built-in Shiki (dual themes) — no rehype-pretty-code @@ -42,7 +47,9 @@ export default defineConfig({ } }, remarkPlugins: [ + remarkIgnoreCitationsInCode, remarkMath, + [remarkFootnotes, { inlineNotes: true }], remarkDirective, remarkOutputContainer ], @@ -52,6 +59,13 @@ export default defineConfig({ [rehypeKatex, { trust: true, }], + [rehypeCitation, { + bibliography: 'src/content/bibliography.bib', + linkCitations: true, + csl: "apa", + }], + rehypeReferencesAndFootnotes, + rehypeRestoreAtInCode, rehypeCodeCopy, rehypeWrapOutput, rehypeWrapTables diff --git a/app/dist/_astro/index.BzKj3Iki.css b/app/dist/_astro/index.BzKj3Iki.css deleted file mode 100644 index e870e3b7991738e821ec123ee96ed599850596a0..0000000000000000000000000000000000000000 --- a/app/dist/_astro/index.BzKj3Iki.css +++ /dev/null @@ -1 +0,0 @@ -@import"https://fonts.googleapis.com/css2?family=Source+Sans+Pro:ital,wght@0,200..900;1,200..900&display=swap";.html-embed{margin:0 0 var(--block-spacing-y);z-index:var(--z-elevated);position:relative;width:min(1100px,100vw - var(--content-padding-x) * 
2);margin-left:50%;transform:translate(-50%)}.html-embed__title{text-align:left;font-weight:600;font-size:.95rem;color:var(--text-color);margin:0;padding:0;padding-bottom:var(--spacing-1);position:relative;display:block;width:100%;background:var(--page-bg);z-index:var(--z-elevated)}.html-embed__card{background:var(--code-bg);border:1px solid var(--border-color);border-radius:10px;padding:12px;z-index:calc(var(--z-elevated) + 1);position:relative}.html-embed__card.is-frameless{background:transparent;border-color:transparent;padding:0}.html-embed__desc{text-align:left;font-size:.9rem;color:var(--muted-color);margin:0;padding:0;padding-top:var(--spacing-1);position:relative;z-index:var(--z-elevated);display:block;width:100%;background:var(--page-bg)}.html-embed__card svg text{fill:var(--text-color)}.html-embed__card label{color:var(--text-color)}.plotly-graph-div{width:100%;min-height:320px}@media (max-width: 768px){.plotly-graph-div{min-height:260px}}[id^=plot-]{display:flex;flex-direction:column;align-items:center;gap:15px}.plotly_caption{font-style:italic;margin-top:10px}.plotly_controls{display:flex;flex-wrap:wrap;justify-content:center;gap:30px}.plotly_input_container{display:flex;align-items:center;flex-direction:column;gap:10px}.plotly_input_container>select{padding:2px 4px;line-height:1.5em;text-align:center;border-radius:4px;font-size:12px;background-color:var(--neutral-200);outline:none;border:1px solid 
var(--neutral-300)}.plotly_slider{display:flex;align-items:center;gap:10px}.plotly_slider>input[type=range]{-webkit-appearance:none;-moz-appearance:none;appearance:none;height:2px;background:var(--neutral-400);border-radius:5px;outline:none}.plotly_slider>input[type=range]::-webkit-slider-thumb{-webkit-appearance:none;width:18px;height:18px;border-radius:50%;background:var(--primary-color);cursor:pointer}.plotly_slider>input[type=range]::-moz-range-thumb{width:18px;height:18px;border-radius:50%;background:var(--primary-color);cursor:pointer}.plotly_slider>span{font-size:14px;line-height:1.6em;min-width:16px}[data-theme=dark] .html-embed__card:not(.is-frameless){background:#12151b;border-color:#ffffff26}[data-theme=dark] .html-embed__card .xaxislayer-above text,[data-theme=dark] .html-embed__card .yaxislayer-above text,[data-theme=dark] .html-embed__card .infolayer text,[data-theme=dark] .html-embed__card .legend text,[data-theme=dark] .html-embed__card .annotation text,[data-theme=dark] .html-embed__card .colorbar text,[data-theme=dark] .html-embed__card .hoverlayer text{fill:#fff!important}[data-theme=dark] .html-embed__card .xaxislayer-above path,[data-theme=dark] .html-embed__card .yaxislayer-above path,[data-theme=dark] .html-embed__card .xlines-above,[data-theme=dark] .html-embed__card .ylines-above{stroke:#ffffff59!important}[data-theme=dark] .html-embed__card .gridlayer path{stroke:#ffffff26!important}[data-theme=dark] .html-embed__card .legend rect.bg{fill:#00000040!important;stroke:#fff3!important}[data-theme=dark] .html-embed__card .hoverlayer .bg{fill:#000c!important;stroke:#fff3!important}[data-theme=dark] .html-embed__card .colorbar .cbbg{fill:#00000040!important;stroke:#fff3!important}.force-light-mode{filter:invert(0);--csstools-color-scheme--light: initial;color-scheme:light;background:#fff;padding:20px;border-radius:10px}[data-theme=dark] .force-light-mode .html-embed__card{background:#fff!important;border-color:#ddd!important}[data-theme=dark] 
.force-light-mode *{color:#333!important}@media (max-width: 1024px){.html-embed{width:100%;margin-left:0;transform:none}}@media print{.html-embed,.html-embed__card{max-width:100%!important;width:100%!important;margin-left:0!important;margin-right:0!important}.html-embed__card{padding:6px}.html-embed__card.is-frameless{padding:0}.html-embed__card svg,.html-embed__card canvas,.html-embed__card img{max-width:100%!important;height:auto!important}.html-embed__card>div[id^=frag-]{width:100%!important}}@media print{.html-embed,.html-embed__card{-moz-column-break-inside:avoid;break-inside:avoid;page-break-inside:avoid}.html-embed,.html-embed__card{max-width:100%!important;width:100%!important}.html-embed__card{padding:6px}.html-embed__card.is-frameless{padding:0}.html-embed__card svg,.html-embed__card canvas,.html-embed__card img,.html-embed__card video,.html-embed__card iframe{max-width:100%!important;height:auto!important}.html-embed__card>div[id^=frag-]{width:100%!important;max-width:100%!important}.html-embed .d3-galaxy{width:100%!important;max-width:980px!important;margin-left:auto!important;margin-right:auto!important}}.hero[data-astro-cid-bbe6dxrz]{width:100%;padding:0;text-align:center}.hero-title[data-astro-cid-bbe6dxrz]{font-size:max(28px,min(4vw,48px));font-weight:800;line-height:1.1;max-width:100%;margin:auto}.hero-banner[data-astro-cid-bbe6dxrz]{max-width:980px;margin:0 auto}.hero-desc[data-astro-cid-bbe6dxrz]{color:var(--muted-color);font-style:italic;margin:0 0 16px}.meta[data-astro-cid-bbe6dxrz]{border-top:1px solid var(--border-color);border-bottom:1px solid var(--border-color);padding:1rem 0;font-size:.9rem}.meta-container[data-astro-cid-bbe6dxrz]{max-width:760px;display:flex;flex-direction:row;justify-content:space-between;margin:0 auto;padding:0 var(--content-padding-x);gap:8px}.meta-container[data-astro-cid-bbe6dxrz] 
a[data-astro-cid-bbe6dxrz]:not(.button){color:var(--primary-color);-webkit-text-decoration:underline;text-decoration:underline;text-underline-offset:2px;text-decoration-thickness:.06em;text-decoration-color:var(--link-underline);transition:text-decoration-color .15s ease-in-out}.meta-container[data-astro-cid-bbe6dxrz] a[data-astro-cid-bbe6dxrz]:hover{text-decoration-color:var(--link-underline-hover)}.meta-container[data-astro-cid-bbe6dxrz] a[data-astro-cid-bbe6dxrz].button,.meta-container[data-astro-cid-bbe6dxrz] .button[data-astro-cid-bbe6dxrz]{-webkit-text-decoration:none;text-decoration:none}.meta-container-cell[data-astro-cid-bbe6dxrz]{display:flex;flex-direction:column;gap:8px;max-width:250px}.meta-container-cell[data-astro-cid-bbe6dxrz] h3[data-astro-cid-bbe6dxrz]{margin:0;font-size:12px;font-weight:400;color:var(--muted-color);text-transform:uppercase;letter-spacing:.02em}.meta-container-cell[data-astro-cid-bbe6dxrz] p[data-astro-cid-bbe6dxrz]{margin:0}.authors[data-astro-cid-bbe6dxrz]{margin:0;list-style-type:none;padding-left:0;display:flex;flex-wrap:wrap}.authors[data-astro-cid-bbe6dxrz] li[data-astro-cid-bbe6dxrz]{white-space:nowrap;margin-right:4px}.affiliations[data-astro-cid-bbe6dxrz]{margin:0;padding-left:1.25em}.affiliations[data-astro-cid-bbe6dxrz] li[data-astro-cid-bbe6dxrz]{margin:0}header[data-astro-cid-bbe6dxrz].meta .meta-container[data-astro-cid-bbe6dxrz]{flex-wrap:wrap;row-gap:12px}@media (max-width: 768px){.meta-container-cell--affiliations[data-astro-cid-bbe6dxrz],.meta-container-cell--pdf[data-astro-cid-bbe6dxrz]{text-align:right}}@media print{.meta-container-cell--pdf[data-astro-cid-bbe6dxrz]{display:none!important}}.footer{contain:layout style;font-size:.8em;line-height:1.7em;margin-top:60px;margin-bottom:0;border-top:1px solid rgba(0,0,0,.1);color:#00000080}.footer-inner{max-width:1280px;margin:0 auto;padding:60px 16px 48px;display:grid;grid-template-columns:220px minmax(0,680px) 
260px;grid-gap:32px;gap:32px;align-items:start}.citation-block,.acknowledgements-block,.references-block,.reuse-block,.doi-block{display:contents}.citation-block>h3,.acknowledgements-block>h3,.references-block>h3,.reuse-block>h3,.doi-block>h3{grid-column:1;font-size:15px;margin:0;text-align:right;padding-right:30px}.citation-block>:not(h3),.acknowledgements-block>:not(h3),.references-block>:not(h3),.reuse-block>:not(h3),.doi-block>:not(h3){grid-column:2}.citation-block h3{margin:0 0 8px}.citation-block h4{margin:16px 0 8px;font-size:14px;text-transform:uppercase;color:var(--muted-color)}.citation-block p,.acknowledgements-block p,.reuse-block p,.doi-block p,.footnotes ol,.footnotes ol p,.references{margin-top:0}.citation{font-family:ui-monospace,SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier New,monospace;font-size:11px;line-height:15px;border-left:1px solid rgba(0,0,0,.1);border:1px solid rgba(0,0,0,.1);background:#00000005;padding:10px 18px;border-radius:3px;color:#969696;overflow:hidden;margin-top:-12px;white-space:pre-wrap;word-wrap:break-word}.citation a{color:#0009;-webkit-text-decoration:underline;text-decoration:underline}.citation.short{margin-top:-4px}.references-block h3{margin:0}.references-block ol{padding:0 0 0 15px}@media (min-width: 768px){.references-block ol{padding:0 0 0 30px;margin-left:-30px}}.references-block li{margin-bottom:1em}.references-block a{color:var(--text-color)}[data-theme=dark] .footer{border-top-color:#ffffff26;color:#c8c8c8cc}[data-theme=dark] .citation{background:#ffffff0a;border-color:#ffffff26;color:#c8c8c8}[data-theme=dark] .citation a{color:#ffffffbf}.footer a{color:var(--primary-color);border-bottom:1px solid var(--link-underline);-webkit-text-decoration:none;text-decoration:none}.footer a:hover{color:var(--primary-color-hover);border-bottom-color:var(--link-underline-hover)}[data-theme=dark] .footer 
a{color:var(--primary-color)}#theme-toggle[data-astro-cid-x3pjskd3]{display:inline-flex;align-items:center;gap:8px;border:none;background:transparent;padding:6px 10px;border-radius:8px;cursor:pointer;color:var(--text-color)!important}#theme-toggle[data-astro-cid-x3pjskd3] .icon[data-astro-cid-x3pjskd3].dark,[data-astro-cid-x3pjskd3][data-theme=dark] #theme-toggle[data-astro-cid-x3pjskd3] .icon[data-astro-cid-x3pjskd3].light{display:none}[data-astro-cid-x3pjskd3][data-theme=dark] #theme-toggle[data-astro-cid-x3pjskd3] .icon[data-astro-cid-x3pjskd3].dark{display:inline}#theme-toggle[data-astro-cid-x3pjskd3] .icon[data-astro-cid-x3pjskd3]{filter:none!important}.table-of-contents{position:sticky;top:32px;margin-top:12px}.table-of-contents nav{border-left:1px solid var(--border-color);padding-left:16px;font-size:13px}.table-of-contents .title{font-weight:600;font-size:14px;margin-bottom:8px}.table-of-contents nav ul{margin:0 0 6px;padding-left:1em}.table-of-contents nav li{list-style:none;margin:.25em 0}.table-of-contents nav a,.table-of-contents nav a:link,.table-of-contents nav a:visited{color:var(--text-color);-webkit-text-decoration:none;text-decoration:none;border-bottom:none}.table-of-contents nav>ul>li>a{font-weight:700}.table-of-contents nav a:hover{-webkit-text-decoration:underline solid var(--muted-color);text-decoration:underline solid var(--muted-color)}.table-of-contents nav a.active{-webkit-text-decoration:underline;text-decoration:underline}.table-of-contents-mobile{display:none;margin:8px 0 16px}.table-of-contents-mobile>summary{cursor:pointer;list-style:none;padding:var(--spacing-3) var(--spacing-4);border:1px solid var(--border-color);border-radius:8px;color:var(--text-color);font-weight:600;position:relative}.table-of-contents-mobile[open]>summary{border-bottom-left-radius:0;border-bottom-right-radius:0}.table-of-contents-mobile>summary:after{content:"";position:absolute;right:var(--spacing-4);top:50%;width:8px;height:8px;border-right:2px solid 
currentColor;border-bottom:2px solid currentColor;transform:translateY(-70%) rotate(45deg);transition:transform .15s ease;opacity:.7}.table-of-contents-mobile[open]>summary:after{transform:translateY(-30%) rotate(-135deg)}.table-of-contents-mobile nav{border-left:none;padding:10px 12px;font-size:14px;border:1px solid var(--border-color);border-top:none;border-bottom-left-radius:8px;border-bottom-right-radius:8px}.table-of-contents-mobile nav ul{margin:0 0 6px;padding-left:1em}.table-of-contents-mobile nav li{list-style:none;margin:.25em 0}.table-of-contents-mobile nav a,.table-of-contents-mobile nav a:link,.table-of-contents-mobile nav a:visited{color:var(--text-color);-webkit-text-decoration:none;text-decoration:none;border-bottom:none}.table-of-contents-mobile nav>ul>li>a{font-weight:700}.table-of-contents-mobile nav a:hover{-webkit-text-decoration:underline solid var(--muted-color);text-decoration:underline solid var(--muted-color)}.table-of-contents-mobile nav a.active{-webkit-text-decoration:underline;text-decoration:underline}@font-face{font-family:KaTeX_AMS;font-style:normal;font-weight:400;src:url(/_astro/KaTeX_AMS-Regular.BQhdFMY1.woff2) format("woff2"),url(/_astro/KaTeX_AMS-Regular.DMm9YOAa.woff) format("woff"),url(/_astro/KaTeX_AMS-Regular.DRggAlZN.ttf) format("truetype")}@font-face{font-family:KaTeX_Caligraphic;font-style:normal;font-weight:700;src:url(/_astro/KaTeX_Caligraphic-Bold.Dq_IR9rO.woff2) format("woff2"),url(/_astro/KaTeX_Caligraphic-Bold.BEiXGLvX.woff) format("woff"),url(/_astro/KaTeX_Caligraphic-Bold.ATXxdsX0.ttf) format("truetype")}@font-face{font-family:KaTeX_Caligraphic;font-style:normal;font-weight:400;src:url(/_astro/KaTeX_Caligraphic-Regular.Di6jR-x-.woff2) format("woff2"),url(/_astro/KaTeX_Caligraphic-Regular.CTRA-rTL.woff) format("woff"),url(/_astro/KaTeX_Caligraphic-Regular.wX97UBjC.ttf) format("truetype")}@font-face{font-family:KaTeX_Fraktur;font-style:normal;font-weight:700;src:url(/_astro/KaTeX_Fraktur-Bold.CL6g_b3V.woff2) 
format("woff2"),url(/_astro/KaTeX_Fraktur-Bold.BsDP51OF.woff) format("woff"),url(/_astro/KaTeX_Fraktur-Bold.BdnERNNW.ttf) format("truetype")}@font-face{font-family:KaTeX_Fraktur;font-style:normal;font-weight:400;src:url(/_astro/KaTeX_Fraktur-Regular.CTYiF6lA.woff2) format("woff2"),url(/_astro/KaTeX_Fraktur-Regular.Dxdc4cR9.woff) format("woff"),url(/_astro/KaTeX_Fraktur-Regular.CB_wures.ttf) format("truetype")}@font-face{font-family:KaTeX_Main;font-style:normal;font-weight:700;src:url(/_astro/KaTeX_Main-Bold.Cx986IdX.woff2) format("woff2"),url(/_astro/KaTeX_Main-Bold.Jm3AIy58.woff) format("woff"),url(/_astro/KaTeX_Main-Bold.waoOVXN0.ttf) format("truetype")}@font-face{font-family:KaTeX_Main;font-style:italic;font-weight:700;src:url(/_astro/KaTeX_Main-BoldItalic.DxDJ3AOS.woff2) format("woff2"),url(/_astro/KaTeX_Main-BoldItalic.SpSLRI95.woff) format("woff"),url(/_astro/KaTeX_Main-BoldItalic.DzxPMmG6.ttf) format("truetype")}@font-face{font-family:KaTeX_Main;font-style:italic;font-weight:400;src:url(/_astro/KaTeX_Main-Italic.NWA7e6Wa.woff2) format("woff2"),url(/_astro/KaTeX_Main-Italic.BMLOBm91.woff) format("woff"),url(/_astro/KaTeX_Main-Italic.3WenGoN9.ttf) format("truetype")}@font-face{font-family:KaTeX_Main;font-style:normal;font-weight:400;src:url(/_astro/KaTeX_Main-Regular.B22Nviop.woff2) format("woff2"),url(/_astro/KaTeX_Main-Regular.Dr94JaBh.woff) format("woff"),url(/_astro/KaTeX_Main-Regular.ypZvNtVU.ttf) format("truetype")}@font-face{font-family:KaTeX_Math;font-style:italic;font-weight:700;src:url(/_astro/KaTeX_Math-BoldItalic.CZnvNsCZ.woff2) format("woff2"),url(/_astro/KaTeX_Math-BoldItalic.iY-2wyZ7.woff) format("woff"),url(/_astro/KaTeX_Math-BoldItalic.B3XSjfu4.ttf) format("truetype")}@font-face{font-family:KaTeX_Math;font-style:italic;font-weight:400;src:url(/_astro/KaTeX_Math-Italic.t53AETM-.woff2) format("woff2"),url(/_astro/KaTeX_Math-Italic.DA0__PXp.woff) format("woff"),url(/_astro/KaTeX_Math-Italic.flOr_0UB.ttf) 
format("truetype")}@font-face{font-family:KaTeX_SansSerif;font-style:normal;font-weight:700;src:url(/_astro/KaTeX_SansSerif-Bold.D1sUS0GD.woff2) format("woff2"),url(/_astro/KaTeX_SansSerif-Bold.DbIhKOiC.woff) format("woff"),url(/_astro/KaTeX_SansSerif-Bold.CFMepnvq.ttf) format("truetype")}@font-face{font-family:KaTeX_SansSerif;font-style:italic;font-weight:400;src:url(/_astro/KaTeX_SansSerif-Italic.C3H0VqGB.woff2) format("woff2"),url(/_astro/KaTeX_SansSerif-Italic.DN2j7dab.woff) format("woff"),url(/_astro/KaTeX_SansSerif-Italic.YYjJ1zSn.ttf) format("truetype")}@font-face{font-family:KaTeX_SansSerif;font-style:normal;font-weight:400;src:url(/_astro/KaTeX_SansSerif-Regular.DDBCnlJ7.woff2) format("woff2"),url(/_astro/KaTeX_SansSerif-Regular.CS6fqUqJ.woff) format("woff"),url(/_astro/KaTeX_SansSerif-Regular.BNo7hRIc.ttf) format("truetype")}@font-face{font-family:KaTeX_Script;font-style:normal;font-weight:400;src:url(/_astro/KaTeX_Script-Regular.D3wIWfF6.woff2) format("woff2"),url(/_astro/KaTeX_Script-Regular.D5yQViql.woff) format("woff"),url(/_astro/KaTeX_Script-Regular.C5JkGWo-.ttf) format("truetype")}@font-face{font-family:KaTeX_Size1;font-style:normal;font-weight:400;src:url(/_astro/KaTeX_Size1-Regular.mCD8mA8B.woff2) format("woff2"),url(/_astro/KaTeX_Size1-Regular.C195tn64.woff) format("woff"),url(/_astro/KaTeX_Size1-Regular.Dbsnue_I.ttf) format("truetype")}@font-face{font-family:KaTeX_Size2;font-style:normal;font-weight:400;src:url(/_astro/KaTeX_Size2-Regular.Dy4dx90m.woff2) format("woff2"),url(/_astro/KaTeX_Size2-Regular.oD1tc_U0.woff) format("woff"),url(/_astro/KaTeX_Size2-Regular.B7gKUWhC.ttf) 
format("truetype")}@font-face{font-family:KaTeX_Size3;font-style:normal;font-weight:400;src:url(data:font/woff2;base64,d09GMgABAAAAAA4oAA4AAAAAHbQAAA3TAAEAAAAAAAAAAAAAAAAAAAAAAAAAAAAABmAAgRQIDgmcDBEICo1oijYBNgIkA14LMgAEIAWJAAeBHAyBHBvbGiMRdnO0IkRRkiYDgr9KsJ1NUAf2kILNxgUmgqIgq1P89vcbIcmsQbRps3vCcXdYOKSWEPEKgZgQkprQQsxIXUgq0DqpGKmIvrgkeVGtEQD9DzAO29fM9jYhxZEsL2FeURH2JN4MIcTdO049NCVdxQ/w9NrSYFEBKTDKpLKfNkCGDc1RwjZLQcm3vqJ2UW9Xfa3tgAHz6ivp6vgC2yD4/6352ndnN0X0TL7seypkjZlMsjmZnf0Mm5Q+JykRWQBKCVCVPbARPXWyQtb5VgLB6Biq7/Uixcj2WGqdI8tGSgkuRG+t910GKP2D7AQH0DB9FMDW/obJZ8giFI3Wg8Cvevz0M+5m0rTh7XDBlvo9Y4vm13EXmfttwI4mBo1EG15fxJhUiCLbiiyCf/ZA6MFAhg3pGIZGdGIVjtPn6UcMk9A/UUr9PhoNsCENw1APAq0gpH73e+M+0ueyHbabc3vkbcdtzcf/fiy+NxQEjf9ud/ELBHAXJ0nk4z+MXH2Ev/kWyV4k7SkvpPc9Qr38F6RPWnM9cN6DJ0AdD1BhtgABtmoRoFCvPsBAumNm6soZG2Gk5GyVTo2sJncSyp0jQTYoR6WDvTwaaEcHsxHfvuWhHA3a6bN7twRKtcGok6NsCi7jYRrM2jExsUFMxMQYuJbMhuWNOumEJy9hi29Dmg5zMp/A5+hhPG19j1vBrq8JTLr8ki5VLPmG/PynJHVul440bxg5xuymHUFPBshC+nA9I1FmwbRBTNHAcik3Oae0cxKoI3MOriM42UrPe51nsaGxJ+WfXubAsP84aabUlQSJ1IiE0iPETLUU4CATgfXSCSpuRFRmCGbO+wSpAnzaeaCYW1VNEysRtuXCEL1kUFUbbtMv3Tilt/1c11jt3Q5bbMa84cpWipp8Elw3MZhOHsOlwwVUQM3lAR35JiFQbaYCRnMF2lxAWoOg2gyoIV4PouX8HytNIfLhqpJtXB4vjiViUI8IJ7bkC4ikkQvKksnOTKICwnqWSZ9YS5f0WCxmpgjbIq7EJcM4aI2nmhLNY2JIUgOjXZFWBHb+x5oh6cwb0Tv1ackHdKi0I9OO2wE9aogIOn540CCCziyhN+IaejtgAONKznHlHyutPrHGwCx9S6B8kfS4Mfi4Eyv7OU730bT1SCBjt834cXsf43zVjPUqqJjgrjeGnBxSG4aYAKFuVbeCfkDIjAqMb6yLNIbCuvXhMH2/+k2vkNpkORhR59N1CkzoOENvneIosjYmuTxlhUzaGEJQ/iWqx4dmwpmKjrwTiTGTCVozNAYqk/zXOndWxuWSmJkQpJw3pK5KX6QrLt5LATMqpmPAQhkhK6PUjzHUn7E0gHE0kPE0iKkolgkUx9SZmVAdDgpffdyJKg3k7VmzYGCwVXGz/tXmkOIp+vcWs+EMuhhvN0h9uhfzWJziBQmCREGSIFmQIkgVpAnSBRmC//6hkLZwaVhwxlrJSOdqlFtOYxlau9F2QN5Y98xmIAsiM1HVp2VFX+DHHGg6Ecjh3vmqtidX3qHI2qycTk/iwxSt5UzTmEP92ZBnEWTk4Mx8Mpl78ZDokxg/KWb+Q0QkvdKVmq3TMW+RXEgrsziSAfNXFMhDc60N5N9jQzjfO0kBKpUZl0ZmwJ41j/B9Hz6wmRaJB84niNmQrzp9eSlQCDDzazGDdVi3P36VZQ+Jy4f9UBNp+3zTjqI4abaFAm+GShVaXlsGdF3FYzZcDI6cori4kMxUECl9IjJZpzkvitAoxKue+90pDMvcK
RxLl53TmOKCmV/xRolNKSqqUxc6LStOETmFOiLZZptlZepcKiAzteG8PEdpnQpbOMNcMsR4RR2Bs0cKFEvSmIjAFcnarqwUL4lDhHmnVkwu1IwshbiCcgvOheZuYyOteufZZwlcTlLgnZ3o/WcYdzZHW/WGaqaVfmTZ1aWCceJjkbZqsfbkOtcFlUZM/jy+hXHDbaUobWqqXaeWobbLO99yG5N3U4wxco0rQGGcOLASFMXeJoham8M+/x6O2WywK2l4HGbq1CoUyC/IZikQhdq3SiuNrvAEj0AVu9x2x3lp/xWzahaxidezFVtdcb5uEnzyl0ZmYiuKI0exvCd4Xc9CV1KB0db00z92wDPde0kukbvZIWN6jUWFTmPIC/Y4UPCm8UfDTFZpZNon1qLFTkBhxzB+FjQRA2Q/YRJT8pQigslMaUpFyAG8TMlXigiqmAZX4xgijKjRlGpLE0GdplRfCaJo0JQaSxNBk6ZmMzcya0FmrcisDdn0Q3HI2sWSppYigmlM1XT/kLQZSNpMJG0WkjYbSZuDpM1F0uYhFc1HxU4m1QJjDK6iL0S5uSj5rgXc3RejEigtcRBtqYPQsiTskmO5vosV+q4VGIKbOkDg0jtRrq+Em1YloaTFar3EGr1EUC8R0kus1Uus00usL97ABr2BjXoDm/QGNhuWtMVBKOwg/i78lT7hBsAvDmwHc/ao3vmUbBmhjeYySZNWvGkfZAgISDSaDo1SVpzGDsAEkF8B+gEapViUoZgUWXcRIGFZNm6gWbAKk0bp0k1MHG9fLYtV4iS2SmLEQFARzRcnf9PUS0LVn05/J9MiRRBU3v2IrvW974v4N00L7ZMk0wXP1409CHo/an8zTRHD3eSJ6m8D4YMkZNl3M79sqeuAsr/m3f+8/yl7A50aiAEJgeBeMWzu7ui9UfUBCe2TIqZIoOd/3/udRBOQidQZUERzb2/VwZN1H/Sju82ew2H2Wfr6qvfVf3hqwDvAIpkQVFy4B9Pe9e4/XvPeceu7h3dvO56iJPf0+A6cqA2ip18ER+iFgggiuOkvj24bby0N9j2UHIkgqIt+sVgfodC4YghLSMjSZbH0VR/6dMDrYJeKHilKTemt6v6kvzvn3/RrdWtr0GoN/xL+Sex/cPYLUpepx9cz/D46UPU5KXgAQa+NDps1v6J3xP1i2HtaDB0M9aX2deA7SYff//+gUCovMmIK/qfsFcOk+4Y5ZN97XlG6zebqtMbKgeRFi51vnxTQYBUik2rS/Cn6PC8ADR8FGxsRPB82dzfND90gIcshOcYUkfjherBz53odpm6TP8txlwOZ71xmfHHOvq053qFF/MRlS3jP0ELudrf2OeN8DHvp6ZceLe8qKYvWz/7yp0u4dKPfli3CYq0O13Ih71mylJ80tOi10On8wi+F4+LWgDPeJ30msSQt9/vkmHq9/Lvo2b461mP801v3W4xTcs6CbvF9UDdrSt+A8OUbpSh55qAUFXWznBBfdeJ8a4d7ugT5tvxUza3h9m4H7ptTqiG4z0g5dc0X29OcGlhpGFMpQo9ytTS+NViZpNdvU4kWx+LKxNY10kQ1yqGXrhe4/1nvP7E+nd5A92TtaRplbHSqoIdOqtRWti+fkB5/n1+/VvCmz12pG1kpQWsfi1ftlBobm0bpngs16CHkbIwdLnParxtTV3QYRlfJ0KFskH7pdN/YDn+yRuSd7sNH3aO0DYPggk6uWuXrfOc+fa3VTxFVvKaNxHsiHmsXyCLIE5yuOeN3/Jdf8HBL/5M6shjyhxHx9BjB1O0+4NLOnjLLSxwO7ukN4jMbOIcD879KLSi6Pk61Oqm2377n8079PXEEQ7cy7OKEC9nbpet118fxweTafpt69x/Bt8UqGzNQt7aelpc44dn5cqhwf71+qKp/Zf/+a0zcizOUWpl/iBcSXip0pplkatCchoH5c5aUM8I7/dWxAej8WicPL1URFZ9BDJelUwEwTkGqUhgSlydVes95Y
dXvhh9Gfz/aeFWvgVb4tuLbcv4+wLdutVZv/cUonwBD/6eDlE0aSiKK/uoH3+J1wDE/jMVqY2ysGufN84oIXB0sPzy8ollX/LegY74DgJXJR57sn+VGza0x3DnuIgABFM15LmajjjsNlYj+JEZGbuRYcAMOWxFkPN2w6Wd46xo4gVWQR/X4lyI/R6K/YK0110GzudPRW7Y+UOBGTfNNzHeYT0fiH0taunBpq9HEW8OKSaBGj21L0MqenEmNRWBAWDWAk4CpNoEZJ2tTaPFgbQYj8HxtFilErs3BTRwT8uO1NXQaWfIotchmPkAF5mMBAliEmZiOGVgCG9LgRzpscMAOOwowlT3JhusdazXGSC/hxR3UlmWVwWHpOIKheqONvjyhSiTHIkVUco5bnji8m//zL7PKaT1Vl5I6UE609f+gkr6MZKVyKc7zJRmCahLsdlyA5fdQkRSan9LgnnLEyGSkaKJCJog0wAgvepWBt80+1yKln1bMVtCljfNWDueKLsWwaEbBSfSPTEmVRsUcYYMnEjcjeyCZzBXK9E9BYBXLKjOSpUDR+nEV3TFSUdQaz+ot98QxgXwx0GQ+EEUAKB2qZPkQQ0GqFD8UPFMqyaCHM24BZmSGic9EYMagKizOw9Hz50DMrDLrqqLkTAhplMictiCAx5S3BIUQdeJeLnBy2CNtMfz6cV4u8XKoFZQesbf9YZiIERiHjaNodDW6LgcirX/mPnJIkBGDUpTBhSa0EIr38D5hCIszhCM8URGBqImoWjpvpt1ebu/v3Gl3qJfMnNM+9V+kiRFyROTPHQWOcs1dNW94/ukKMPZBvDi55i5CttdeJz84DLngLqjcdwEZ87bFFR8CIG35OAkDVN6VRDZ7aq67NteYqZ2lpT8oYB2CytoBd6VuAx4WgiAsnuj3WohG+LugzXiQRDeM3XYXlULv4dp5VFYC) format("woff2"),url(/_astro/KaTeX_Size3-Regular.CTq5MqoE.woff) format("woff"),url(/_astro/KaTeX_Size3-Regular.DgpXs0kz.ttf) format("truetype")}@font-face{font-family:KaTeX_Size4;font-style:normal;font-weight:400;src:url(/_astro/KaTeX_Size4-Regular.Dl5lxZxV.woff2) format("woff2"),url(/_astro/KaTeX_Size4-Regular.BF-4gkZK.woff) format("woff"),url(/_astro/KaTeX_Size4-Regular.DWFBv043.ttf) format("truetype")}@font-face{font-family:KaTeX_Typewriter;font-style:normal;font-weight:400;src:url(/_astro/KaTeX_Typewriter-Regular.CO6r4hn1.woff2) format("woff2"),url(/_astro/KaTeX_Typewriter-Regular.C0xS9mPB.woff) format("woff"),url(/_astro/KaTeX_Typewriter-Regular.D3Ib7_Hf.ttf) format("truetype")}.katex{font: 1.21em KaTeX_Main,Times New Roman,serif;line-height:1.2;text-indent:0;text-rendering:auto}.katex *{-ms-high-contrast-adjust:none!important;border-color:currentColor}.katex .katex-version:after{content:"0.16.22"}.katex .katex-mathml{clip:rect(1px,1px,1px,1px);border:0;height:1px;overflow:hidden;padding:0;position:absolute;width:1px}.katex 
.katex-html>.newline{display:block}.katex .base{position:relative;white-space:nowrap;width:-moz-min-content;width:min-content}.katex .base,.katex .strut{display:inline-block}.katex .textbf{font-weight:700}.katex .textit{font-style:italic}.katex .textrm{font-family:KaTeX_Main}.katex .textsf{font-family:KaTeX_SansSerif}.katex .texttt{font-family:KaTeX_Typewriter}.katex .mathnormal{font-family:KaTeX_Math;font-style:italic}.katex .mathit{font-family:KaTeX_Main;font-style:italic}.katex .mathrm{font-style:normal}.katex .mathbf{font-family:KaTeX_Main;font-weight:700}.katex .boldsymbol{font-family:KaTeX_Math;font-style:italic;font-weight:700}.katex .amsrm,.katex .mathbb,.katex .textbb{font-family:KaTeX_AMS}.katex .mathcal{font-family:KaTeX_Caligraphic}.katex .mathfrak,.katex .textfrak{font-family:KaTeX_Fraktur}.katex .mathboldfrak,.katex .textboldfrak{font-family:KaTeX_Fraktur;font-weight:700}.katex .mathtt{font-family:KaTeX_Typewriter}.katex .mathscr,.katex .textscr{font-family:KaTeX_Script}.katex .mathsf,.katex .textsf{font-family:KaTeX_SansSerif}.katex .mathboldsf,.katex .textboldsf{font-family:KaTeX_SansSerif;font-weight:700}.katex .mathitsf,.katex .mathsfit,.katex .textitsf{font-family:KaTeX_SansSerif;font-style:italic}.katex .mainrm{font-family:KaTeX_Main;font-style:normal}.katex .vlist-t{border-collapse:collapse;display:inline-table;table-layout:fixed}.katex .vlist-r{display:table-row}.katex .vlist{display:table-cell;position:relative;vertical-align:bottom}.katex .vlist>span{display:block;height:0;position:relative}.katex .vlist>span>span{display:inline-block}.katex .vlist>span>.pstrut{overflow:hidden;width:0}.katex .vlist-t2{margin-right:-2px}.katex .vlist-s{display:table-cell;font-size:1px;min-width:2px;vertical-align:bottom;width:2px}.katex .vbox{align-items:baseline;display:inline-flex;flex-direction:column}.katex .hbox{width:100%}.katex .hbox,.katex .thinbox{display:inline-flex;flex-direction:row}.katex .thinbox{max-width:0;width:0}.katex 
.msupsub{text-align:left}.katex .mfrac>span>span{text-align:center}.katex .mfrac .frac-line{border-bottom-style:solid;display:inline-block;width:100%}.katex .hdashline,.katex .hline,.katex .mfrac .frac-line,.katex .overline .overline-line,.katex .rule,.katex .underline .underline-line{min-height:1px}.katex .mspace{display:inline-block}.katex .clap,.katex .llap,.katex .rlap{position:relative;width:0}.katex .clap>.inner,.katex .llap>.inner,.katex .rlap>.inner{position:absolute}.katex .clap>.fix,.katex .llap>.fix,.katex .rlap>.fix{display:inline-block}.katex .llap>.inner{right:0}.katex .clap>.inner,.katex .rlap>.inner{left:0}.katex .clap>.inner>span{margin-left:-50%;margin-right:50%}.katex .rule{border:0 solid;display:inline-block;position:relative}.katex .hline,.katex .overline .overline-line,.katex .underline .underline-line{border-bottom-style:solid;display:inline-block;width:100%}.katex .hdashline{border-bottom-style:dashed;display:inline-block;width:100%}.katex .sqrt>.root{margin-left:.2777777778em;margin-right:-.5555555556em}.katex .fontsize-ensurer.reset-size1.size1,.katex .sizing.reset-size1.size1{font-size:1em}.katex .fontsize-ensurer.reset-size1.size2,.katex .sizing.reset-size1.size2{font-size:1.2em}.katex .fontsize-ensurer.reset-size1.size3,.katex .sizing.reset-size1.size3{font-size:1.4em}.katex .fontsize-ensurer.reset-size1.size4,.katex .sizing.reset-size1.size4{font-size:1.6em}.katex .fontsize-ensurer.reset-size1.size5,.katex .sizing.reset-size1.size5{font-size:1.8em}.katex .fontsize-ensurer.reset-size1.size6,.katex .sizing.reset-size1.size6{font-size:2em}.katex .fontsize-ensurer.reset-size1.size7,.katex .sizing.reset-size1.size7{font-size:2.4em}.katex .fontsize-ensurer.reset-size1.size8,.katex .sizing.reset-size1.size8{font-size:2.88em}.katex .fontsize-ensurer.reset-size1.size9,.katex .sizing.reset-size1.size9{font-size:3.456em}.katex .fontsize-ensurer.reset-size1.size10,.katex .sizing.reset-size1.size10{font-size:4.148em}.katex 
.fontsize-ensurer.reset-size1.size11,.katex .sizing.reset-size1.size11{font-size:4.976em}.katex .fontsize-ensurer.reset-size2.size1,.katex .sizing.reset-size2.size1{font-size:.8333333333em}.katex .fontsize-ensurer.reset-size2.size2,.katex .sizing.reset-size2.size2{font-size:1em}.katex .fontsize-ensurer.reset-size2.size3,.katex .sizing.reset-size2.size3{font-size:1.1666666667em}.katex .fontsize-ensurer.reset-size2.size4,.katex .sizing.reset-size2.size4{font-size:1.3333333333em}.katex .fontsize-ensurer.reset-size2.size5,.katex .sizing.reset-size2.size5{font-size:1.5em}.katex .fontsize-ensurer.reset-size2.size6,.katex .sizing.reset-size2.size6{font-size:1.6666666667em}.katex .fontsize-ensurer.reset-size2.size7,.katex .sizing.reset-size2.size7{font-size:2em}.katex .fontsize-ensurer.reset-size2.size8,.katex .sizing.reset-size2.size8{font-size:2.4em}.katex .fontsize-ensurer.reset-size2.size9,.katex .sizing.reset-size2.size9{font-size:2.88em}.katex .fontsize-ensurer.reset-size2.size10,.katex .sizing.reset-size2.size10{font-size:3.4566666667em}.katex .fontsize-ensurer.reset-size2.size11,.katex .sizing.reset-size2.size11{font-size:4.1466666667em}.katex .fontsize-ensurer.reset-size3.size1,.katex .sizing.reset-size3.size1{font-size:.7142857143em}.katex .fontsize-ensurer.reset-size3.size2,.katex .sizing.reset-size3.size2{font-size:.8571428571em}.katex .fontsize-ensurer.reset-size3.size3,.katex .sizing.reset-size3.size3{font-size:1em}.katex .fontsize-ensurer.reset-size3.size4,.katex .sizing.reset-size3.size4{font-size:1.1428571429em}.katex .fontsize-ensurer.reset-size3.size5,.katex .sizing.reset-size3.size5{font-size:1.2857142857em}.katex .fontsize-ensurer.reset-size3.size6,.katex .sizing.reset-size3.size6{font-size:1.4285714286em}.katex .fontsize-ensurer.reset-size3.size7,.katex .sizing.reset-size3.size7{font-size:1.7142857143em}.katex .fontsize-ensurer.reset-size3.size8,.katex .sizing.reset-size3.size8{font-size:2.0571428571em}.katex .fontsize-ensurer.reset-size3.size9,.katex 
.sizing.reset-size3.size9{font-size:2.4685714286em}.katex .fontsize-ensurer.reset-size3.size10,.katex .sizing.reset-size3.size10{font-size:2.9628571429em}.katex .fontsize-ensurer.reset-size3.size11,.katex .sizing.reset-size3.size11{font-size:3.5542857143em}.katex .fontsize-ensurer.reset-size4.size1,.katex .sizing.reset-size4.size1{font-size:.625em}.katex .fontsize-ensurer.reset-size4.size2,.katex .sizing.reset-size4.size2{font-size:.75em}.katex .fontsize-ensurer.reset-size4.size3,.katex .sizing.reset-size4.size3{font-size:.875em}.katex .fontsize-ensurer.reset-size4.size4,.katex .sizing.reset-size4.size4{font-size:1em}.katex .fontsize-ensurer.reset-size4.size5,.katex .sizing.reset-size4.size5{font-size:1.125em}.katex .fontsize-ensurer.reset-size4.size6,.katex .sizing.reset-size4.size6{font-size:1.25em}.katex .fontsize-ensurer.reset-size4.size7,.katex .sizing.reset-size4.size7{font-size:1.5em}.katex .fontsize-ensurer.reset-size4.size8,.katex .sizing.reset-size4.size8{font-size:1.8em}.katex .fontsize-ensurer.reset-size4.size9,.katex .sizing.reset-size4.size9{font-size:2.16em}.katex .fontsize-ensurer.reset-size4.size10,.katex .sizing.reset-size4.size10{font-size:2.5925em}.katex .fontsize-ensurer.reset-size4.size11,.katex .sizing.reset-size4.size11{font-size:3.11em}.katex .fontsize-ensurer.reset-size5.size1,.katex .sizing.reset-size5.size1{font-size:.5555555556em}.katex .fontsize-ensurer.reset-size5.size2,.katex .sizing.reset-size5.size2{font-size:.6666666667em}.katex .fontsize-ensurer.reset-size5.size3,.katex .sizing.reset-size5.size3{font-size:.7777777778em}.katex .fontsize-ensurer.reset-size5.size4,.katex .sizing.reset-size5.size4{font-size:.8888888889em}.katex .fontsize-ensurer.reset-size5.size5,.katex .sizing.reset-size5.size5{font-size:1em}.katex .fontsize-ensurer.reset-size5.size6,.katex .sizing.reset-size5.size6{font-size:1.1111111111em}.katex .fontsize-ensurer.reset-size5.size7,.katex .sizing.reset-size5.size7{font-size:1.3333333333em}.katex 
.fontsize-ensurer.reset-size5.size8,.katex .sizing.reset-size5.size8{font-size:1.6em}.katex .fontsize-ensurer.reset-size5.size9,.katex .sizing.reset-size5.size9{font-size:1.92em}.katex .fontsize-ensurer.reset-size5.size10,.katex .sizing.reset-size5.size10{font-size:2.3044444444em}.katex .fontsize-ensurer.reset-size5.size11,.katex .sizing.reset-size5.size11{font-size:2.7644444444em}.katex .fontsize-ensurer.reset-size6.size1,.katex .sizing.reset-size6.size1{font-size:.5em}.katex .fontsize-ensurer.reset-size6.size2,.katex .sizing.reset-size6.size2{font-size:.6em}.katex .fontsize-ensurer.reset-size6.size3,.katex .sizing.reset-size6.size3{font-size:.7em}.katex .fontsize-ensurer.reset-size6.size4,.katex .sizing.reset-size6.size4{font-size:.8em}.katex .fontsize-ensurer.reset-size6.size5,.katex .sizing.reset-size6.size5{font-size:.9em}.katex .fontsize-ensurer.reset-size6.size6,.katex .sizing.reset-size6.size6{font-size:1em}.katex .fontsize-ensurer.reset-size6.size7,.katex .sizing.reset-size6.size7{font-size:1.2em}.katex .fontsize-ensurer.reset-size6.size8,.katex .sizing.reset-size6.size8{font-size:1.44em}.katex .fontsize-ensurer.reset-size6.size9,.katex .sizing.reset-size6.size9{font-size:1.728em}.katex .fontsize-ensurer.reset-size6.size10,.katex .sizing.reset-size6.size10{font-size:2.074em}.katex .fontsize-ensurer.reset-size6.size11,.katex .sizing.reset-size6.size11{font-size:2.488em}.katex .fontsize-ensurer.reset-size7.size1,.katex .sizing.reset-size7.size1{font-size:.4166666667em}.katex .fontsize-ensurer.reset-size7.size2,.katex .sizing.reset-size7.size2{font-size:.5em}.katex .fontsize-ensurer.reset-size7.size3,.katex .sizing.reset-size7.size3{font-size:.5833333333em}.katex .fontsize-ensurer.reset-size7.size4,.katex .sizing.reset-size7.size4{font-size:.6666666667em}.katex .fontsize-ensurer.reset-size7.size5,.katex .sizing.reset-size7.size5{font-size:.75em}.katex .fontsize-ensurer.reset-size7.size6,.katex .sizing.reset-size7.size6{font-size:.8333333333em}.katex 
.fontsize-ensurer.reset-size7.size7,.katex .sizing.reset-size7.size7{font-size:1em}.katex .fontsize-ensurer.reset-size7.size8,.katex .sizing.reset-size7.size8{font-size:1.2em}.katex .fontsize-ensurer.reset-size7.size9,.katex .sizing.reset-size7.size9{font-size:1.44em}.katex .fontsize-ensurer.reset-size7.size10,.katex .sizing.reset-size7.size10{font-size:1.7283333333em}.katex .fontsize-ensurer.reset-size7.size11,.katex .sizing.reset-size7.size11{font-size:2.0733333333em}.katex .fontsize-ensurer.reset-size8.size1,.katex .sizing.reset-size8.size1{font-size:.3472222222em}.katex .fontsize-ensurer.reset-size8.size2,.katex .sizing.reset-size8.size2{font-size:.4166666667em}.katex .fontsize-ensurer.reset-size8.size3,.katex .sizing.reset-size8.size3{font-size:.4861111111em}.katex .fontsize-ensurer.reset-size8.size4,.katex .sizing.reset-size8.size4{font-size:.5555555556em}.katex .fontsize-ensurer.reset-size8.size5,.katex .sizing.reset-size8.size5{font-size:.625em}.katex .fontsize-ensurer.reset-size8.size6,.katex .sizing.reset-size8.size6{font-size:.6944444444em}.katex .fontsize-ensurer.reset-size8.size7,.katex .sizing.reset-size8.size7{font-size:.8333333333em}.katex .fontsize-ensurer.reset-size8.size8,.katex .sizing.reset-size8.size8{font-size:1em}.katex .fontsize-ensurer.reset-size8.size9,.katex .sizing.reset-size8.size9{font-size:1.2em}.katex .fontsize-ensurer.reset-size8.size10,.katex .sizing.reset-size8.size10{font-size:1.4402777778em}.katex .fontsize-ensurer.reset-size8.size11,.katex .sizing.reset-size8.size11{font-size:1.7277777778em}.katex .fontsize-ensurer.reset-size9.size1,.katex .sizing.reset-size9.size1{font-size:.2893518519em}.katex .fontsize-ensurer.reset-size9.size2,.katex .sizing.reset-size9.size2{font-size:.3472222222em}.katex .fontsize-ensurer.reset-size9.size3,.katex .sizing.reset-size9.size3{font-size:.4050925926em}.katex .fontsize-ensurer.reset-size9.size4,.katex .sizing.reset-size9.size4{font-size:.462962963em}.katex 
.fontsize-ensurer.reset-size9.size5,.katex .sizing.reset-size9.size5{font-size:.5208333333em}.katex .fontsize-ensurer.reset-size9.size6,.katex .sizing.reset-size9.size6{font-size:.5787037037em}.katex .fontsize-ensurer.reset-size9.size7,.katex .sizing.reset-size9.size7{font-size:.6944444444em}.katex .fontsize-ensurer.reset-size9.size8,.katex .sizing.reset-size9.size8{font-size:.8333333333em}.katex .fontsize-ensurer.reset-size9.size9,.katex .sizing.reset-size9.size9{font-size:1em}.katex .fontsize-ensurer.reset-size9.size10,.katex .sizing.reset-size9.size10{font-size:1.2002314815em}.katex .fontsize-ensurer.reset-size9.size11,.katex .sizing.reset-size9.size11{font-size:1.4398148148em}.katex .fontsize-ensurer.reset-size10.size1,.katex .sizing.reset-size10.size1{font-size:.2410800386em}.katex .fontsize-ensurer.reset-size10.size2,.katex .sizing.reset-size10.size2{font-size:.2892960463em}.katex .fontsize-ensurer.reset-size10.size3,.katex .sizing.reset-size10.size3{font-size:.337512054em}.katex .fontsize-ensurer.reset-size10.size4,.katex .sizing.reset-size10.size4{font-size:.3857280617em}.katex .fontsize-ensurer.reset-size10.size5,.katex .sizing.reset-size10.size5{font-size:.4339440694em}.katex .fontsize-ensurer.reset-size10.size6,.katex .sizing.reset-size10.size6{font-size:.4821600771em}.katex .fontsize-ensurer.reset-size10.size7,.katex .sizing.reset-size10.size7{font-size:.5785920926em}.katex .fontsize-ensurer.reset-size10.size8,.katex .sizing.reset-size10.size8{font-size:.6943105111em}.katex .fontsize-ensurer.reset-size10.size9,.katex .sizing.reset-size10.size9{font-size:.8331726133em}.katex .fontsize-ensurer.reset-size10.size10,.katex .sizing.reset-size10.size10{font-size:1em}.katex .fontsize-ensurer.reset-size10.size11,.katex .sizing.reset-size10.size11{font-size:1.1996142719em}.katex .fontsize-ensurer.reset-size11.size1,.katex .sizing.reset-size11.size1{font-size:.2009646302em}.katex .fontsize-ensurer.reset-size11.size2,.katex 
.sizing.reset-size11.size2{font-size:.2411575563em}.katex .fontsize-ensurer.reset-size11.size3,.katex .sizing.reset-size11.size3{font-size:.2813504823em}.katex .fontsize-ensurer.reset-size11.size4,.katex .sizing.reset-size11.size4{font-size:.3215434084em}.katex .fontsize-ensurer.reset-size11.size5,.katex .sizing.reset-size11.size5{font-size:.3617363344em}.katex .fontsize-ensurer.reset-size11.size6,.katex .sizing.reset-size11.size6{font-size:.4019292605em}.katex .fontsize-ensurer.reset-size11.size7,.katex .sizing.reset-size11.size7{font-size:.4823151125em}.katex .fontsize-ensurer.reset-size11.size8,.katex .sizing.reset-size11.size8{font-size:.578778135em}.katex .fontsize-ensurer.reset-size11.size9,.katex .sizing.reset-size11.size9{font-size:.6945337621em}.katex .fontsize-ensurer.reset-size11.size10,.katex .sizing.reset-size11.size10{font-size:.8336012862em}.katex .fontsize-ensurer.reset-size11.size11,.katex .sizing.reset-size11.size11{font-size:1em}.katex .delimsizing.size1{font-family:KaTeX_Size1}.katex .delimsizing.size2{font-family:KaTeX_Size2}.katex .delimsizing.size3{font-family:KaTeX_Size3}.katex .delimsizing.size4{font-family:KaTeX_Size4}.katex .delimsizing.mult .delim-size1>span{font-family:KaTeX_Size1}.katex .delimsizing.mult .delim-size4>span{font-family:KaTeX_Size4}.katex .nulldelimiter{display:inline-block;width:.12em}.katex .delimcenter,.katex .op-symbol{position:relative}.katex .op-symbol.small-op{font-family:KaTeX_Size1}.katex .op-symbol.large-op{font-family:KaTeX_Size2}.katex .accent>.vlist-t,.katex .op-limits>.vlist-t{text-align:center}.katex .accent .accent-body{position:relative}.katex .accent .accent-body:not(.accent-full){width:0}.katex .overlay{display:block}.katex .mtable .vertical-separator{display:inline-block;min-width:1px}.katex .mtable .arraycolsep{display:inline-block}.katex .mtable .col-align-c>.vlist-t{text-align:center}.katex .mtable .col-align-l>.vlist-t{text-align:left}.katex .mtable .col-align-r>.vlist-t{text-align:right}.katex 
.svg-align{text-align:left}.katex svg{fill:currentColor;stroke:currentColor;fill-rule:nonzero;fill-opacity:1;stroke-width:1;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1;display:block;height:inherit;position:absolute;width:100%}.katex svg path{stroke:none}.katex img{border-style:none;max-height:none;max-width:none;min-height:0;min-width:0}.katex .stretchy{display:block;overflow:hidden;position:relative;width:100%}.katex .stretchy:after,.katex .stretchy:before{content:""}.katex .hide-tail{overflow:hidden;position:relative;width:100%}.katex .halfarrow-left{left:0;overflow:hidden;position:absolute;width:50.2%}.katex .halfarrow-right{overflow:hidden;position:absolute;right:0;width:50.2%}.katex .brace-left{left:0;overflow:hidden;position:absolute;width:25.1%}.katex .brace-center{left:25%;overflow:hidden;position:absolute;width:50%}.katex .brace-right{overflow:hidden;position:absolute;right:0;width:25.1%}.katex .x-arrow-pad{padding:0 .5em}.katex .cd-arrow-pad{padding:0 .55556em 0 .27778em}.katex .mover,.katex .munder,.katex .x-arrow{text-align:center}.katex .boxpad{padding:0 .3em}.katex .fbox,.katex .fcolorbox{border:.04em solid;box-sizing:border-box}.katex .cancel-pad{padding:0 .2em}.katex .cancel-lap{margin-left:-.2em;margin-right:-.2em}.katex .sout{border-bottom-style:solid;border-bottom-width:.08em}.katex .angl{border-right:.049em solid;border-top:.049em solid;box-sizing:border-box;margin-right:.03889em}.katex .anglpad{padding:0 .03889em}.katex .eqn-num:before{content:"(" counter(katexEqnNo) ")";counter-increment:katexEqnNo}.katex .mml-eqn-num:before{content:"(" counter(mmlEqnNo) ")";counter-increment:mmlEqnNo}.katex .mtr-glue{width:50%}.katex .cd-vert-arrow{display:inline-block;position:relative}.katex .cd-label-left{display:inline-block;position:absolute;right:calc(50% + .3em);text-align:left}.katex .cd-label-right{display:inline-block;left:calc(50% + 
.3em);position:absolute;text-align:right}.katex-display{display:block;margin:1em 0;text-align:center}.katex-display>.katex{display:block;text-align:center;white-space:nowrap}.katex-display>.katex>.katex-html{display:block;position:relative}.katex-display>.katex>.katex-html>.tag{position:absolute;right:0}.katex-display.leqno>.katex>.katex-html>.tag{left:0;right:auto}.katex-display.fleqn>.katex{padding-left:2em;text-align:left}body{counter-reset:katexEqnNo mmlEqnNo}:root{--neutral-600: rgb(107, 114, 128);--neutral-400: rgb(185, 185, 185);--neutral-300: rgb(228, 228, 228);--neutral-200: rgb(245, 245, 245);--default-font-family: Source Sans Pro, ui-sans-serif, system-ui, -apple-system, Segoe UI, Roboto, Ubuntu, Cantarell, Noto Sans, sans-serif, "Apple Color Emoji", "Segoe UI Emoji", "Segoe UI Symbol", "Noto Color Emoji";--primary-base: rgb(222, 144, 202);--primary-color: var(--primary-base);--primary-color-hover: oklch(from var(--primary-color) calc(l - .05) c h);--primary-color-active: oklch(from var(--primary-color) calc(l - .1) c h);--on-primary: #ffffff;--page-bg: #ffffff;--text-color: rgba(0, 0, 0, .85);--transparent-page-contrast: rgba(255, 255, 255, .85);--muted-color: rgba(0, 0, 0, .6);--border-color: rgba(0, 0, 0, .1);--surface-bg: #fafafa;--code-bg: #f6f8fa;--link-underline: var(--primary-color);--link-underline-hover: var(--primary-color-hover);--spacing-1: 8px;--spacing-2: 12px;--spacing-3: 16px;--spacing-4: 24px;--spacing-5: 32px;--spacing-6: 40px;--spacing-7: 48px;--spacing-8: 56px;--spacing-9: 64px;--spacing-10: 72px;--content-padding-x: 16px;--block-spacing-y: var(--spacing-4);--palette-count: 8;--button-radius: 6px;--button-padding-x: 12px;--button-padding-y: 8px;--button-font-size: 14px;--button-icon-padding: 8px;--button-big-padding-x: 16px;--button-big-padding-y: 12px;--button-big-font-size: 16px;--button-big-icon-padding: 12px;--table-border-radius: 8px;--table-header-bg: oklch(from var(--surface-bg) calc(l - .02) c h);--table-row-odd-bg: 
oklch(from var(--surface-bg) calc(l - .01) c h);--z-base: 0;--z-content: 1;--z-elevated: 10;--z-overlay: 1000;--z-modal: 1100;--z-tooltip: 1200;--axis-color: var(--muted-color);--tick-color: var(--text-color);--grid-color: rgba(0, 0, 0, .08)}[data-theme=dark]{--page-bg: #0f1115;--text-color: rgba(255, 255, 255, .9);--muted-color: rgba(255, 255, 255, .7);--border-color: rgba(255, 255, 255, .15);--surface-bg: #12151b;--code-bg: #12151b;--transparent-page-contrast: rgba(0, 0, 0, .85);--axis-color: var(--muted-color);--tick-color: var(--muted-color);--grid-color: rgba(255, 255, 255, .1);--primary-color-hover: oklch(from var(--primary-color) calc(l - .05) c h);--primary-color-active: oklch(from var(--primary-color) calc(l - .1) c h);--on-primary: #0f1115;--csstools-color-scheme--light: ;color-scheme:dark}html{box-sizing:border-box;background:#fff;background:var(--page-bg);color:#000000d9;color:var(--text-color)}*,*:before,*:after{box-sizing:inherit}body{margin:0;font-family:Source Sans Pro,ui-sans-serif,system-ui,-apple-system,Segoe UI,Roboto,Ubuntu,Cantarell,Noto Sans,sans-serif,"Apple Color Emoji","Segoe UI Emoji",Segoe UI Symbol,"Noto Color Emoji";font-family:var(--default-font-family);background:#fff;background:var(--page-bg);color:#000000d9;color:var(--text-color)}audio{display:block;width:100%}img,picture{max-width:100%;height:auto;display:block;position:relative;z-index:10;z-index:var(--z-elevated)}html{font-size:16px;line-height:1.6}.content-grid main{color:#000000d9;color:var(--text-color)}.content-grid main p{margin:0 0 16px;margin:0 0 var(--spacing-3)}.content-grid main h2{font-weight:600;font-size:max(22px,min(2.6vw,32px));line-height:1.2;margin:72px 0 32px;margin:var(--spacing-10) 0 var(--spacing-5);padding-bottom:12px;padding-bottom:var(--spacing-2);border-bottom:1px solid rgba(0,0,0,.1);border-bottom:1px solid var(--border-color)}.content-grid main h3{font-weight:700;font-size:max(18px,min(2.1vw,22px));line-height:1.25;margin:56px 0 
24px;margin:var(--spacing-8) 0 var(--spacing-4)}.content-grid main h4{font-weight:600;text-transform:uppercase;font-size:14px;line-height:1.2;margin:56px 0 24px;margin:var(--spacing-8) 0 var(--spacing-4)}.content-grid main a{color:#de90ca;color:var(--primary-color);-webkit-text-decoration:none;text-decoration:none;background:var(--sufrace-bg);border-bottom:1px solid rgba(222,144,202,.4)}@supports (color: color-mix(in lch,red,blue)){.content-grid main a{border-bottom:1px solid color-mix(in srgb,var(--primary-color, #007AFF) 40%,transparent)}}.content-grid main a:hover{color:#ce80ba;color:var(--primary-color-hover);border-bottom:1px solid rgba(222,144,202,.4)}@supports (color: color-mix(in lch,red,blue)){.content-grid main a:hover{border-bottom:1px solid color-mix(in srgb,var(--primary-color, #007AFF) 40%,transparent)}}.content-grid main h2 a,.content-grid main h3 a,.content-grid main h4 a,.content-grid main h5 a,.content-grid main h6 a{color:inherit;border-bottom:none;-webkit-text-decoration:none;text-decoration:none}.content-grid main h2 a:hover,.content-grid main h3 a:hover,.content-grid main h4 a:hover,.content-grid main h5 a:hover,.content-grid main h6 a:hover{color:inherit;border-bottom:none;-webkit-text-decoration:none;text-decoration:none}.content-grid main ul,.content-grid main ol{padding-left:24px;margin:0 0 16px;margin:0 0 var(--spacing-3)}.content-grid main li{margin-bottom:12px;margin-bottom:var(--spacing-2)}.content-grid main li:last-child{margin-bottom:0}.content-grid main blockquote{border-left:2px solid rgba(0,0,0,.1);border-left:2px solid var(--border-color);padding-left:24px;padding-left:var(--spacing-4);font-style:italic;color:#0009;color:var(--muted-color);margin:24px 0;margin:var(--spacing-4) 0}.muted{color:#0009;color:var(--muted-color)}[data-footnote-ref]{margin-left:4px}.content-grid main mark{background-color:#de90ca08;border:1px solid rgba(222,144,202,.05);color:inherit;padding:1px 
3px;border-radius:2px;font-weight:500;box-decoration-break:clone;-webkit-box-decoration-break:clone}@supports (color: color-mix(in lch,red,blue)){.content-grid main mark{background-color:color-mix(in srgb,var(--primary-color, #007AFF) 3%,transparent);border:1px solid color-mix(in srgb,var(--primary-color) 5%,transparent)}}.feature-grid{display:grid;grid-template-columns:repeat(auto-fit,minmax(200px,1fr));grid-gap:12px;gap:12px;margin:46px 0}.feature-card{display:flex;flex-direction:column;padding:16px;border:1px solid rgba(222,144,202,.4);background:#de90ca0d!important;border-radius:8px;-webkit-text-decoration:none;text-decoration:none;color:inherit;transition:all .2s ease}@supports (color: color-mix(in lch,red,blue)){.feature-card{border:1px solid color-mix(in srgb,var(--primary-color) 40%,transparent);background:color-mix(in srgb,var(--primary-color, #007AFF) 05%,transparent)!important}}.feature-card:hover{transform:translateY(-2px);box-shadow:0 2px 8px #00000014}.feature-card strong{font-size:14px;font-weight:600;color:#000000d9;color:var(--text-color);color:#de90ca!important;color:var(--primary-color)!important;margin-bottom:0!important}.feature-card span{font-size:12px;color:#0009;color:var(--muted-color);color:#de90ca!important;color:var(--primary-color)!important;margin-bottom:0!important;opacity:1}.katex .tag{background:none;border:none;opacity:.4}.content-grid{max-width:1280px;margin:40px auto 0;padding:0 16px;padding:0 var(--content-padding-x);display:grid;grid-template-columns:260px minmax(0,680px) 260px;grid-gap:32px;gap:32px;align-items:start}.content-grid>main{max-width:100%;margin:0;padding:0}.content-grid>main>*:first-child{margin-top:0}@media (max-width: 
1100px){.content-grid{overflow:hidden;display:block;margin-top:12px;margin-top:var(--spacing-2)}.content-grid{grid-template-columns:1fr}.table-of-contents{position:static;display:none}.table-of-contents-mobile{display:block}.footer-inner{grid-template-columns:1fr;gap:16px}.footer-inner>h3{grid-column:auto;margin-top:16px}.footer-inner{display:block;padding:40px 16px}}.wide,.full-width{box-sizing:border-box;position:relative;z-index:10;z-index:var(--z-elevated);background-color:var(--background-color)}.wide{width:min(1100px,100vw - 16px * 2);width:min(1100px,100vw - var(--content-padding-x) * 2);margin-left:50%;transform:translate(-50%);padding:16px;padding:var(--content-padding-x);border-radius:6px;border-radius:var(--button-radius);background-color:#fff;background-color:var(--page-bg)}.full-width{width:100vw;margin-left:calc(50% - 50vw);margin-right:calc(50% - 50vw)}@media (max-width: 1100px){.wide,.full-width{width:100%;margin-left:0;margin-right:0;padding:0;transform:none}}#theme-toggle{position:fixed;top:24px;top:calc(var(--spacing-4) + var(--hf-spaces-topbar, 0px));right:16px;right:var(--spacing-3);margin:0;z-index:1000;z-index:var(--z-overlay)}@media (max-width: 640px){header.meta .meta-container{display:flex;flex-wrap:wrap;row-gap:12px;-moz-column-gap:8px;column-gap:8px;max-width:100%;padding:0 24px;padding:0 var(--spacing-4)}header.meta .meta-container .meta-container-cell{flex:1 1 calc(50% - 8px);min-width:0}}@media (max-width: 320px){header.meta .meta-container .meta-container-cell{flex-basis:100%;text-align:center}header.meta .affiliations{list-style-position:inside;padding-left:0;margin-left:0}header.meta .affiliations li{text-align:center}}@media (max-width: 768px){.d3-neural .panel{flex-direction:column}.d3-neural .panel .left{flex:0 0 auto;width:100%}.d3-neural .panel .right{flex:0 0 auto;width:100%;min-width:0}}@media print{html,body{background:#fff}body{margin:0}#theme-toggle{display:none!important}.content-grid main 
a{-webkit-text-decoration:none;text-decoration:none;border-bottom:1px solid rgba(0,0,0,.2)}.content-grid main pre,.content-grid main blockquote,.content-grid main table,.content-grid main figure{-moz-column-break-inside:avoid;break-inside:avoid;page-break-inside:avoid}.content-grid main h2{page-break-before:auto;page-break-after:avoid;-moz-column-break-after:avoid;break-after:avoid-page}.code-lang-chip{display:none!important}:root{--border-color: rgba(0,0,0,.2);--link-underline: rgba(0,0,0,.3);--link-underline-hover: rgba(0,0,0,.4)}.content-grid{grid-template-columns:1fr!important}.table-of-contents,.right-aside,.table-of-contents-mobile{display:none!important}main>nav:first-of-type{display:none!important}.hero,.hero-banner,.d3-banner,.d3-banner svg,.html-embed__card,.js-plotly-plot,figure,pre,table,blockquote,.wide,.full-width{-moz-column-break-inside:avoid;break-inside:avoid;page-break-inside:avoid}.hero{page-break-after:avoid}}@media print{.meta-container-cell--pdf{display:none!important}}code{font-size:14px;font-family:ui-monospace,SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier New,monospace;background-color:#f6f8fa;background-color:var(--code-bg);border-radius:.3em;border:1px solid rgba(0,0,0,.1);border:1px solid var(--border-color);color:#000000d9;color:var(--text-color);font-weight:400;line-height:1.5}p code,.note code{white-space:nowrap;padding:calc(8px/3) 4px;padding:calc(var(--spacing-1)/3) calc(var(--spacing-1)/2)}.astro-code{position:relative;border:1px solid rgba(0,0,0,.1);border:1px solid var(--border-color);border-radius:6px;padding:0;font-size:14px;--code-gutter-width: 2.5em}.astro-code,section.content-grid pre{width:100%;max-width:100%;box-sizing:border-box;-webkit-overflow-scrolling:touch;padding:0;margin-bottom:24px!important;margin-bottom:var(--block-spacing-y)!important;overflow-x:auto}section.content-grid pre.astro-code{margin:0;padding:8px 0;padding:var(--spacing-1) 0}section.content-grid pre 
code{display:inline-block;min-width:100%}@media (max-width: 1100px){.astro-code,section.content-grid pre{white-space:pre-wrap;word-wrap:anywhere;word-break:break-word}section.content-grid pre code{white-space:pre-wrap;display:block;min-width:0}}[data-theme=light] .astro-code{background-color:#f6f8fa;background-color:var(--code-bg)}[data-theme=light] .astro-code span{color:var(--shiki-light)!important}[data-theme=dark] .astro-code span{color:var(--shiki-dark)!important}[data-theme=light] .astro-code{--shiki-foreground: #24292f;--shiki-background: #ffffff}.astro-code code{counter-reset:astro-code-line;display:block;background:none;border:none}.astro-code .line{display:inline-block;position:relative;padding-left:calc(var(--code-gutter-width) + 8px);padding-left:calc(var(--code-gutter-width) + var(--spacing-1));min-height:1.25em}.astro-code .line:before{counter-increment:astro-code-line;content:counter(astro-code-line);position:absolute;left:0;top:0;bottom:0;width:calc(var(--code-gutter-width));text-align:right;color:#0009;color:var(--muted-color);opacity:.3;-webkit-user-select:none;-moz-user-select:none;user-select:none;padding-right:12px;padding-right:var(--spacing-2);border-right:1px solid rgba(0,0,0,.1);border-right:1px solid var(--border-color)}.astro-code .line:empty:after{content:" "}.astro-code code>.line:last-child:empty{display:none}.code-card{position:relative}.code-card .code-copy{position:absolute;top:12px;top:var(--spacing-2);right:12px;right:var(--spacing-2);z-index:3;display:none}.code-card:hover .code-copy{display:block}.code-card .code-copy svg{width:16px;height:16px;display:block;fill:currentColor}.code-card pre{margin:0 0 8px;margin:0 0 var(--spacing-1)}.code-card.no-copy:after{top:8px;right:8px}.accordion .astro-code{padding:0;border:none}.accordion .astro-code{margin-bottom:0!important}.accordion .code-output{border:none;border-top:1px solid rgba(0,0,0,.1)!important;border-top:1px solid var(--border-color)!important}.accordion 
pre{margin-bottom:0!important}.accordion .code-card pre{margin:0!important}.accordion .astro-code:after{right:0;bottom:0}.code-output{position:relative;background:#f4f6f8;border:1px solid rgba(0,0,0,.1);border:1px solid var(--border-color);border-radius:6px;margin-top:0;margin-bottom:24px;margin-bottom:var(--block-spacing-y);padding:0!important}@supports (color: lab(from red l 1 1% / calc(alpha + .1))){.code-output{background:oklch(from var(--code-bg) calc(l - .005) c h)}}.code-output pre{padding:22px 16px 16px!important;padding:calc(var(--spacing-3) + 6px) var(--spacing-3) var(--spacing-3) var(--spacing-3)!important}.code-card+.code-output,.astro-code+.code-output,section.content-grid pre+.code-output{margin-top:0;border-top:none;border-top-left-radius:0;border-top-right-radius:0;box-shadow:inset 0 8px 12px -12px #00000026}.astro-code:has(+.code-output){margin-bottom:0!important}.code-card:has(+.code-output) .astro-code{margin-bottom:0!important}section.content-grid pre:has(+.code-output){margin-bottom:0!important}.astro-code:has(+.code-output){border-bottom-left-radius:0;border-bottom-right-radius:0}.code-card:has(+.code-output) .astro-code{border-bottom-left-radius:0;border-bottom-right-radius:0}section.content-grid pre:has(+.code-output){border-bottom-left-radius:0;border-bottom-right-radius:0}.code-output:before{content:"Output";position:absolute;top:0;right:0;font-size:10px;line-height:1;color:#0009;color:var(--muted-color);text-transform:uppercase;letter-spacing:.04em;border-top:none;border-right:none;border-radius:0 0 0 6px;padding:10px}.code-output>:where(*):first-child{margin-top:0!important}.code-output>:where(*):last-child{margin-bottom:0!important}.code-filename{display:inline-block;font-size:12px;line-height:1;color:#0009;color:var(--muted-color);background:#fafafa;background:var(--surface-bg);border:1px solid rgba(0,0,0,.1);border:1px solid var(--border-color);border-bottom:none;border-radius:6px 6px 0 0;padding:4px 
8px;margin:0}.code-filename+.code-card .astro-code,.code-filename+.astro-code,.code-filename+section.content-grid pre{border-top-left-radius:0;border-top-right-radius:6px}button,.button{-webkit-appearance:none;-moz-appearance:none;appearance:none;background:linear-gradient(15deg,#de90ca,#ce80ba 35%);background:linear-gradient(15deg,var(--primary-color) 0%,var(--primary-color-hover) 35%);color:#fff;border:1px solid transparent;border-radius:6px;border-radius:var(--button-radius);padding:8px 12px;padding:var(--button-padding-y) var(--button-padding-x);font-size:14px;font-size:var(--button-font-size);line-height:1;cursor:pointer;display:inline-block;-webkit-text-decoration:none;text-decoration:none;transition:background-color .15s ease,border-color .15s ease,box-shadow .15s ease,transform .02s ease}button:has(>svg:only-child),.button:has(>svg:only-child){padding:8px;padding:var(--button-icon-padding)}button:hover,.button:hover{filter:brightness(96%)}button:active,.button:active{transform:translateY(1px)}button:focus-visible,.button:focus-visible{outline:none}button:disabled,.button:disabled{opacity:.6;cursor:not-allowed}.button--ghost{background:transparent!important;color:#de90ca!important;color:var(--primary-color)!important;border-color:#de90ca!important;border-color:var(--primary-color)!important}.button--ghost:hover{color:#ce80ba!important;color:var(--primary-color-hover)!important;border-color:#ce80ba!important;border-color:var(--primary-color-hover)!important;filter:none}.button.button--big{padding:12px 16px;padding:var(--button-big-padding-y) var(--button-big-padding-x);font-size:16px;font-size:var(--button-big-font-size)}.button.button--big:has(>svg:only-child){padding:12px;padding:var(--button-big-icon-padding)}.button-group .button{margin:5px}.content-grid main table{border-collapse:collapse;table-layout:auto;margin:0}.content-grid main th,.content-grid main td{border-bottom:1px solid rgba(0,0,0,.1);border-bottom:1px solid var(--border-color);padding:6px 
8px;font-size:15px;white-space:nowrap;word-break:auto-phrase;white-space:break-spaces;vertical-align:top}.content-grid main thead th{border-bottom:1px solid rgba(0,0,0,.1);border-bottom:1px solid var(--border-color)}.content-grid main thead th{background:#f3f3f3;background:var(--table-header-bg);padding-top:10px;padding-bottom:10px;font-weight:600}.content-grid main hr{border:none;border-bottom:1px solid rgba(0,0,0,.1);border-bottom:1px solid var(--border-color);margin:32px 0;margin:var(--spacing-5) 0}.content-grid main .table-scroll{width:100%;overflow-x:auto;-webkit-overflow-scrolling:touch;border:1px solid rgba(0,0,0,.1);border:1px solid var(--border-color);border-radius:8px;border-radius:var(--table-border-radius);background:#fafafa;background:var(--surface-bg);margin:0 0 24px;margin:0 0 var(--block-spacing-y)}.content-grid main .table-scroll>table{width:-moz-fit-content;width:fit-content;min-width:100%;max-width:none}.content-grid main .table-scroll>table th,.content-grid main .table-scroll>table td{border-right:1px solid rgba(0,0,0,.1);border-right:1px solid var(--border-color)}.content-grid main .table-scroll>table th:last-child,.content-grid main .table-scroll>table td:last-child{border-right:none}.content-grid main .table-scroll>table thead th:first-child{border-top-left-radius:8px;border-top-left-radius:var(--table-border-radius)}.content-grid main .table-scroll>table thead th:last-child{border-top-right-radius:8px;border-top-right-radius:var(--table-border-radius)}.content-grid main .table-scroll>table tbody tr:last-child td:first-child{border-bottom-left-radius:8px;border-bottom-left-radius:var(--table-border-radius)}.content-grid main .table-scroll>table tbody tr:last-child td:last-child{border-bottom-right-radius:8px;border-bottom-right-radius:var(--table-border-radius)}.content-grid main .table-scroll>table tbody tr:nth-child(odd) td{background:#f7f7f7;background:var(--table-row-odd-bg)}.content-grid main .table-scroll>table tbody tr:last-child 
td{border-bottom:none}.accordion .accordion__content .table-scroll{border:none;border-radius:0;margin:0;margin-bottom:0!important}.accordion .accordion__content table{margin:0!important}.accordion .accordion__content .table-scroll>table thead th:first-child,.accordion .accordion__content .table-scroll>table thead th:last-child,.accordion .accordion__content .table-scroll>table tbody tr:last-child td:first-child,.accordion .accordion__content .table-scroll>table tbody tr:last-child td:last-child{border-radius:0}@supports not ((width: -moz-fit-content) or (width: fit-content)){.content-grid main .table-scroll>table{width:-moz-max-content;width:max-content;min-width:100%}}.tag-list{display:flex;flex-wrap:wrap;gap:8px;margin:8px 0 16px}.tag{display:inline-flex;align-items:center;gap:6px;padding:8px 12px;font-size:12px;line-height:1;border-radius:6px;border-radius:var(--button-radius);background:#fafafa;background:var(--surface-bg);border:1px solid rgba(0,0,0,.1);border:1px solid var(--border-color);color:#000000d9;color:var(--text-color)}.card{background:#fafafa;background:var(--surface-bg);border:1px solid rgba(0,0,0,.1);border:1px solid var(--border-color);border-radius:10px;padding:12px;padding:var(--spacing-2);z-index:11;z-index:calc(var(--z-elevated) + 1);position:relative;margin-bottom:24px;margin-bottom:var(--block-spacing-y)}select{background-color:#fff;background-color:var(--page-bg);border:1px solid rgba(202,131,183,.55);border-radius:6px;border-radius:var(--button-radius);padding:8px 12px;padding:var(--button-padding-y) var(--button-padding-x) var(--button-padding-y) var(--button-padding-x);font-family:Source Sans Pro,ui-sans-serif,system-ui,-apple-system,Segoe UI,Roboto,Ubuntu,Cantarell,Noto Sans,sans-serif,"Apple Color Emoji","Segoe UI Emoji",Segoe UI Symbol,"Noto Color 
Emoji";font-family:var(--default-font-family);font-size:14px;font-size:var(--button-font-size);color:#000000d9;color:var(--text-color);background-image:url("data:image/svg+xml;charset=utf-8,%3Csvg xmlns='http://www.w3.org/2000/svg' width='12' height='12' viewBox='0 0 12 12'%3E%3Cpath fill='%23666' d='M6 8.825L1.175 4 2.35 2.825 6 6.475 9.65 2.825 10.825 4z'/%3E%3C/svg%3E");background-repeat:no-repeat;background-position:right 14px center;background-position:right calc(var(--button-padding-x) + 2px) center;background-size:12px;cursor:pointer;transition:border-color .2s ease,box-shadow .2s ease;-webkit-appearance:none;-moz-appearance:none;appearance:none}@supports (color: color-mix(in lch,red,blue)){select{border:1px solid color-mix(in srgb,var(--primary-color) 50%,var(--border-color))}}select:hover,select:focus,select:active{border-color:#de90ca;border-color:var(--primary-color)}select:focus{outline:none;border-color:#de90ca;border-color:var(--primary-color);box-shadow:0 0 0 2px #de90ca1a}@supports (color: lab(from red l 1 1% / calc(alpha + .1))){select:focus{box-shadow:0 0 0 2px rgba(from var(--primary-color) r g b / .1)}}select:disabled{opacity:.6;cursor:not-allowed;background-color:#fafafa;background-color:var(--surface-bg)}[data-theme=dark] select{background-image:url("data:image/svg+xml;charset=utf-8,%3Csvg xmlns='http://www.w3.org/2000/svg' width='12' height='12' viewBox='0 0 12 12'%3E%3Cpath fill='%23bbb' d='M6 8.825L1.175 4 2.35 2.825 6 6.475 9.65 2.825 10.825 4z'/%3E%3C/svg%3E")}input[type=checkbox]{-webkit-appearance:none;-moz-appearance:none;appearance:none;width:16px;height:16px;border:2px solid rgba(0,0,0,.1);border:2px solid var(--border-color);border-radius:3px;background-color:#fff;background-color:var(--page-bg);cursor:pointer;position:relative;transition:all .2s 
ease;margin-right:12px;margin-right:var(--spacing-2)}input[type=checkbox]:hover{border-color:#de90ca;border-color:var(--primary-color)}input[type=checkbox]:focus{outline:none;border-color:#de90ca;border-color:var(--primary-color);box-shadow:0 0 0 2px #de90ca1a}@supports (color: lab(from red l 1 1% / calc(alpha + .1))){input[type=checkbox]:focus{box-shadow:0 0 0 2px rgba(from var(--primary-color) r g b / .1)}}input[type=checkbox]:checked{background-color:#de90ca;background-color:var(--primary-color);border-color:#de90ca;border-color:var(--primary-color)}input[type=checkbox]:checked:before{content:"";position:absolute;top:1px;left:4px;width:4px;height:8px;border:solid #ffffff;border:solid var(--on-primary);border-width:0 2px 2px 0;transform:rotate(45deg)}input[type=checkbox]:disabled{opacity:.6;cursor:not-allowed}input[type=radio]{-webkit-appearance:none;-moz-appearance:none;appearance:none;width:16px;height:16px;border:2px solid rgba(0,0,0,.1);border:2px solid var(--border-color);border-radius:50%;background-color:#fff;background-color:var(--page-bg);cursor:pointer;position:relative;transition:all .2s ease;margin-right:12px;margin-right:var(--spacing-2)}input[type=radio]:hover{border-color:#de90ca;border-color:var(--primary-color)}input[type=radio]:focus{outline:none;border-color:#de90ca;border-color:var(--primary-color);box-shadow:0 0 0 2px #de90ca1a}@supports (color: lab(from red l 1 1% / calc(alpha + .1))){input[type=radio]:focus{box-shadow:0 0 0 2px rgba(from var(--primary-color) r g b / 
.1)}}input[type=radio]:checked{border-color:#de90ca;border-color:var(--primary-color)}input[type=radio]:checked:before{content:"";position:absolute;top:2px;left:2px;width:8px;height:8px;border-radius:50%;background-color:#de90ca;background-color:var(--primary-color)}input[type=radio]:disabled{opacity:.6;cursor:not-allowed}input[type=text],input[type=email],input[type=password],input[type=number],input[type=url],input[type=search],textarea{-webkit-appearance:none;-moz-appearance:none;appearance:none;background-color:#fff;background-color:var(--page-bg);border:1px solid rgba(0,0,0,.1);border:1px solid var(--border-color);border-radius:6px;border-radius:var(--button-radius);padding:8px 12px;padding:var(--button-padding-y) var(--button-padding-x);font-family:Source Sans Pro,ui-sans-serif,system-ui,-apple-system,Segoe UI,Roboto,Ubuntu,Cantarell,Noto Sans,sans-serif,"Apple Color Emoji","Segoe UI Emoji",Segoe UI Symbol,"Noto Color Emoji";font-family:var(--default-font-family);font-size:14px;font-size:var(--button-font-size);color:#000000d9;color:var(--text-color);transition:border-color .2s ease,box-shadow .2s ease;width:100%}input[type=text]:hover,input[type=email]:hover,input[type=password]:hover,input[type=number]:hover,input[type=url]:hover,input[type=search]:hover,textarea:hover{border-color:#de90ca;border-color:var(--primary-color)}input[type=text]:focus,input[type=email]:focus,input[type=password]:focus,input[type=number]:focus,input[type=url]:focus,input[type=search]:focus,textarea:focus{outline:none;border-color:#de90ca;border-color:var(--primary-color);box-shadow:0 0 0 2px #de90ca1a}@supports (color: lab(from red l 1 1% / calc(alpha + .1))){input[type=text]:focus,input[type=email]:focus,input[type=password]:focus,input[type=number]:focus,input[type=url]:focus,input[type=search]:focus,textarea:focus{box-shadow:0 0 0 2px rgba(from var(--primary-color) r g b / 
.1)}}input[type=text]:disabled,input[type=email]:disabled,input[type=password]:disabled,input[type=number]:disabled,input[type=url]:disabled,input[type=search]:disabled,textarea:disabled{opacity:.6;cursor:not-allowed;background-color:#fafafa;background-color:var(--surface-bg)}label{display:flex;align-items:center;font-size:14px;font-size:var(--button-font-size);color:#000000d9;color:var(--text-color);cursor:pointer;margin-bottom:0;line-height:1.4;-webkit-user-select:none;-moz-user-select:none;user-select:none}.form-group{margin-bottom:24px;margin-bottom:var(--spacing-4);display:flex;align-items:center;gap:12px;gap:var(--spacing-2)}.form-group label{margin-bottom:0}.form-group.vertical{flex-direction:column;align-items:flex-start}.form-group.vertical label{margin-bottom:8px;margin-bottom:var(--spacing-1)}.form-inline{display:flex;align-items:center;gap:12px;gap:var(--spacing-2);margin-bottom:16px;margin-bottom:var(--spacing-3)}.form-inline label{margin-bottom:0}div[style*="display: flex"] label,div[class*=flex] label,.trackio-controls label,.scale-controls label,.theme-selector label{margin-bottom:0!important;align-self:center}.tenet-list{margin:3rem 0}.tenet-list ol{counter-reset:tenet-counter 0;list-style:none;padding-left:0;display:grid;grid-template-columns:1fr;grid-gap:2.5rem;gap:2.5rem;max-width:900px;margin:0 auto}.tenet-list li.tenet{counter-increment:tenet-counter;background:linear-gradient(135deg,#fff,#f8f9fa);border:2px solid #e2e8f0;border-radius:16px;padding:2rem 2rem 2rem 4rem;margin:0;position:relative;box-shadow:0 12px 35px #0000001f;transition:all .3s ease;cursor:pointer}.tenet-list li.tenet:hover{transform:translateY(-8px) scale(1.02);box-shadow:0 20px 50px #00000040;border-color:#007bff80;background:linear-gradient(135deg,#fff,#f0f8ff)}.tenet-list li.tenet:nth-child(1):before{background:linear-gradient(135deg,#667eea,#764ba2)}.tenet-list li.tenet:nth-child(2):before{background:linear-gradient(135deg,#f093fb,#f5576c)}.tenet-list 
li.tenet:nth-child(3):before{background:linear-gradient(135deg,#4facfe,#00f2fe)}.tenet-list li.tenet:nth-child(4):before{background:linear-gradient(135deg,#43e97b,#38f9d7)}.tenet-list li.tenet:nth-child(5):before{background:linear-gradient(135deg,#fa709a,#fee140)}.tenet-list li.tenet:nth-child(6):before{background:linear-gradient(135deg,#a8edea,#fed6e3)}.tenet-list li.tenet:nth-child(7):before{background:linear-gradient(135deg,#ff9a9e,#fecfef)}.tenet-list li.tenet:nth-child(8):before{background:linear-gradient(135deg,#a18cd1,#fbc2eb)}.tenet-list li.tenet:nth-child(9):before{background:linear-gradient(135deg,#ffecd2,#fcb69f)}.tenet-list li.tenet:before{content:counter(tenet-counter);position:absolute;top:-12px;left:-12px;color:#fff;width:48px;height:48px;border-radius:50%;display:flex;align-items:center;justify-content:center;font-size:1.2em;font-weight:700;box-shadow:0 4px 12px #00000026;border:3px solid white}.tenet-list li.tenet strong{color:#1a202c;font-size:1.1em;display:block;margin-bottom:.5rem}.tenet-list li.tenet em{color:#4a5568;font-size:.95em;font-style:italic;display:block;margin-top:.75rem;padding:1rem;background:#00000008;border-radius:8px;border-left:3px solid #e2e8f0}.tenet-list li.tenet p{color:#2d3748;line-height:1.6;margin:.5rem 0}@keyframes pulse-glow{0%{box-shadow:0 4px 12px #00000026}50%{box-shadow:0 4px 20px #00000040}to{box-shadow:0 4px 12px #00000026}}.tenet-list li.tenet:hover:before{animation:pulse-glow 2s ease-in-out infinite}[data-theme=dark] .tenet-list li.tenet{background:linear-gradient(135deg,#1a202c,#2d3748);border-color:#4a5568}[data-theme=dark] .tenet-list li.tenet:hover{background:linear-gradient(135deg,#2d3748,#374151);border-color:#667eea80}[data-theme=dark] .tenet-list li.tenet strong{color:#e2e8f0}[data-theme=dark] .tenet-list li.tenet p{color:#cbd5e0}[data-theme=dark] .tenet-list li.tenet em{color:#a0aec0;background:#ffffff0d;border-left-color:#4a5568}@media (max-width: 768px){.tenet-list 
li.tenet{padding:1.5rem}}.crumbs{background:linear-gradient(135deg,#f0f4ff,#e6eeff);border-left:5px solid #667eea;padding:1.25rem 1.75rem;margin:2.5rem 0;border-radius:0 8px 8px 0;box-shadow:0 2px 8px #667eea1f;font-size:.95em;line-height:1.6;color:#4a5568}.crumbs strong{color:#667eea;font-weight:700}.crumbs code{background:#667eea1a;padding:.15em .4em;border-radius:3px;font-size:.9em;color:#4c51bf}.crumbs a{color:#667eea;font-weight:500}[data-theme=dark] .crumbs{background:linear-gradient(135deg,#1e293b,#334155);border-left-color:#818cf8;color:#cbd5e0}[data-theme=dark] .crumbs strong{color:#a5b4fc}[data-theme=dark] .crumbs code{background:#818cf833;color:#c7d2fe}[data-theme=dark] .crumbs a{color:#a5b4fc}main a[href^="http://"],main a[href^="https://"]{background:linear-gradient(135deg,#e3f2fd,#bbdefb);color:#1565c0;-webkit-text-decoration:none;text-decoration:none;padding:.15em .5em;border-radius:12px;border:1px solid #90caf9;display:inline-block;transition:all .3s ease;font-weight:500;box-shadow:0 1px 3px #1565c026}main a[href^="http://"]:hover,main a[href^="https://"]:hover{background:linear-gradient(135deg,#2196f3,#1976d2);color:#fff;border-color:#1565c0;transform:translateY(-1px);box-shadow:0 4px 12px #1565c04d}main a[href^="http://"]:active,main a[href^="https://"]:active{transform:translateY(0);box-shadow:0 1px 3px #1565c033}a[href^="#source-of-truth"],a[href^="#one-model-one-file"],a[href^="#code-is-product"],a[href^="#standardize-dont-abstract"],a[href^="#do-repeat-yourself"],a[href^="#minimal-user-api"],a[href^="#backwards-compatibility"],a[href^="#consistent-public-surface"],a[href^="#modular"]{position:relative;color:#667eea;font-weight:600;-webkit-text-decoration:underline;text-decoration:underline;text-decoration-color:#667eea4d;transition:all .3s 
ease}a[href^="#source-of-truth"]:hover,a[href^="#one-model-one-file"]:hover,a[href^="#code-is-product"]:hover,a[href^="#standardize-dont-abstract"]:hover,a[href^="#do-repeat-yourself"]:hover,a[href^="#minimal-user-api"]:hover,a[href^="#backwards-compatibility"]:hover,a[href^="#consistent-public-surface"]:hover,a[href^="#modular"]:hover{color:#4c51bf;text-decoration-color:#4c51bf;background:#667eea1a;padding:2px 4px;border-radius:4px}a[href^="#source-of-truth"]:after{content:"Model implementations should be reliable, reproducible, and faithful to original performances."}a[href^="#one-model-one-file"]:after{content:"All inference and training core logic visible, top‑to‑bottom, in a single file."}a[href^="#code-is-product"]:after{content:"Optimize for reading, diffing, and tweaking. Code quality matters as much as functionality."}a[href^="#standardize-dont-abstract"]:after{content:"Model-specific logic belongs in the model file, not hidden behind abstractions."}a[href^="#do-repeat-yourself"]:after{content:"Strategic duplication can improve readability and maintainability when done thoughtfully."}a[href^="#minimal-user-api"]:after{content:"Config, model, preprocessing; from_pretrained, save_pretrained, push_to_hub. Least amount of codepaths."}a[href^="#backwards-compatibility"]:after{content:"Any artifact once on the hub must remain loadable. 
Breaking changes are unacceptable."}a[href^="#consistent-public-surface"]:after{content:"Uniform naming, signatures, and conventions across all models for predictability."}a[href^="#modular"]:after{content:"Architecture components shared via modular system, removing boilerplate while keeping expanded files visible."}a[href^="#source-of-truth"]:after,a[href^="#one-model-one-file"]:after,a[href^="#code-is-product"]:after,a[href^="#standardize-dont-abstract"]:after,a[href^="#do-repeat-yourself"]:after,a[href^="#minimal-user-api"]:after,a[href^="#backwards-compatibility"]:after,a[href^="#consistent-public-surface"]:after,a[href^="#modular"]:after{position:absolute;bottom:100%;left:50%;transform:translate(-50%);background:#1a202c;color:#fff;padding:.75rem 1rem;border-radius:8px;font-size:.85em;font-weight:400;white-space:normal;width:300px;line-height:1.4;z-index:1001;opacity:0;visibility:hidden;transition:opacity .3s ease,visibility .3s ease;pointer-events:none;box-shadow:0 4px 12px #0003;margin-bottom:.5rem}a[href^="#source-of-truth"]:hover:after,a[href^="#one-model-one-file"]:hover:after,a[href^="#code-is-product"]:hover:after,a[href^="#standardize-dont-abstract"]:hover:after,a[href^="#do-repeat-yourself"]:hover:after,a[href^="#minimal-user-api"]:hover:after,a[href^="#backwards-compatibility"]:hover:after,a[href^="#consistent-public-surface"]:hover:after,a[href^="#modular"]:hover:after{opacity:1;visibility:visible}[data-theme=dark] main a[href^="http://"],[data-theme=dark] main a[href^="https://"]{background:linear-gradient(135deg,#1e3a5f,#2563eb);color:#bfdbfe;border-color:#3b82f6}[data-theme=dark] main a[href^="http://"]:hover,[data-theme=dark] main a[href^="https://"]:hover{background:linear-gradient(135deg,#2563eb,#1d4ed8);color:#fff;border-color:#60a5fa}[data-theme=dark] a[href^="#source-of-truth"]:after,[data-theme=dark] a[href^="#one-model-one-file"]:after,[data-theme=dark] a[href^="#code-is-product"]:after,[data-theme=dark] 
a[href^="#standardize-dont-abstract"]:after,[data-theme=dark] a[href^="#do-repeat-yourself"]:after,[data-theme=dark] a[href^="#minimal-user-api"]:after,[data-theme=dark] a[href^="#backwards-compatibility"]:after,[data-theme=dark] a[href^="#consistent-public-surface"]:after,[data-theme=dark] a[href^="#modular"]:after{background:#2d3748;color:#e2e8f0}[data-theme=dark] a[href^="#source-of-truth"],[data-theme=dark] a[href^="#one-model-one-file"],[data-theme=dark] a[href^="#code-is-product"],[data-theme=dark] a[href^="#standardize-dont-abstract"],[data-theme=dark] a[href^="#do-repeat-yourself"],[data-theme=dark] a[href^="#minimal-user-api"],[data-theme=dark] a[href^="#backwards-compatibility"],[data-theme=dark] a[href^="#consistent-public-surface"],[data-theme=dark] a[href^="#modular"]{color:#a5b4fc;text-decoration-color:#a5b4fc4d}[data-theme=dark] a[href^="#source-of-truth"]:hover,[data-theme=dark] a[href^="#one-model-one-file"]:hover,[data-theme=dark] a[href^="#code-is-product"]:hover,[data-theme=dark] a[href^="#standardize-dont-abstract"]:hover,[data-theme=dark] a[href^="#do-repeat-yourself"]:hover,[data-theme=dark] a[href^="#minimal-user-api"]:hover,[data-theme=dark] a[href^="#backwards-compatibility"]:hover,[data-theme=dark] a[href^="#consistent-public-surface"]:hover,[data-theme=dark] a[href^="#modular"]:hover{color:#c7d2fe;background:#a5b4fc26}.demo-wide,.demo-full-width{display:flex;flex-direction:column;align-items:center;justify-content:center;width:100%;min-height:150px;color:#0009;color:var(--muted-color);font-size:12px;border:2px dashed rgba(0,0,0,.1);border:2px dashed var(--border-color);border-radius:8px;background:#fafafa;background:var(--surface-bg);margin-bottom:24px;margin-bottom:var(--block-spacing-y)}.mermaid{background:none!important;margin-bottom:24px!important;margin-bottom:var(--block-spacing-y)!important}.content-grid main img{max-width:100%;height:auto;width:min(1100px,100vw - 16px * 2);width:min(1100px,100vw - var(--content-padding-x) * 
2);margin-left:50%;transform:translate(-50%);display:block}.content-grid main .figure-legend{text-align:center;font-size:.9rem;color:#0009;color:var(--muted-color);font-style:italic;margin:12px 0 24px;margin:var(--spacing-2) 0 var(--spacing-4);width:min(1100px,100vw - 16px * 2);width:min(1100px,100vw - var(--content-padding-x) * 2);margin-left:50%;transform:translate(-50%)}@media (max-width: 1024px){.content-grid main img,.content-grid main .figure-legend{width:100%;margin-left:0;transform:none}} diff --git a/app/dist/_astro/index.BzKj3Iki.css.gz b/app/dist/_astro/index.BzKj3Iki.css.gz deleted file mode 100644 index f38bb76701ff88228371c5fe0f87cbf8a67dcbcc..0000000000000000000000000000000000000000 --- a/app/dist/_astro/index.BzKj3Iki.css.gz +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:f24edb1dda097271f639fc0ed71e61996174a8ac9f816dc40504c1730734a66e -size 18501 diff --git a/app/dist/index.html b/app/dist/index.html index 367bb0a506acce31ab45b0a924af585b36b6b49a..cd5c2d358b4a06af3952fda1f345d0cda1d9410a 100644 --- a/app/dist/index.html +++ b/app/dist/index.html @@ -12,35 +12,37 @@ document.documentElement.setAttribute("data-theme", theme); } catch {} })(); - -

Maintain the unmaintainable:
1M python loc, 400+ models

+ -

A peek into software engineering for the transformers library

Affiliation

Hugging Face

Published

October 2, 2025

Table of Contents
will have their summary available on hover.

External links to articles will help you solidify your knowledge.

Several interactive visualisations are available as you go - scroll, zoom, drag away to explore them.

-
    -
  • Breadcrumb boxes summarize what you just learned, connect it to the tenets, and point to what’s coming Next. Think of them as narrative signposts to help you keep track.
  • -
+

Breadcrumb boxes summarize what you just learned, connect it to the tenets, and point to what’s coming Next. Think of them as narrative signposts to help you keep track.

We will get started by enumerating the tenets. Then we’ll look at concrete examples that show how they shape our decision-making. These examples are necessarily detailed, and sometimes complex, because they illustrate the challenges to maintain and grow a large codebase that caters to multiple collectives, has millions of users, hundreds of contributors, and always strives for simplicity and consistency.

The core tenets of transformers

We summarize the foundations on which we’ve built everything, and write the “tenets” of the library. They behave like software interfaces, hence it is crucial that they are explicitly written down. However opinionated they are, they have evolved over time.

These principles were not decided in a vacuum. The library evolved towards them, and once they emerged, they were recognized as critical.

-
  1. Source of Truth

    We aim to be a source of truth for all model definitions. This is more of a goal than a tenet, but it strongly guides our decisions. Model implementations should be reliable, reproducible, and faithful to the original implementations. If we are successful, they should become reference baselines for the ecosystem, so they’ll be easily adopted by downstream libraries and projects. It’s much easier for a project to always refer to the transformers implementation, than to learn a different research codebase every time a new architecture is released.

    This overarching guideline ensures quality and reproducibility across all models in the library, and aspires to make the community work easier.
  2. One Model, One File

    All inference and training core logic has to be visible, top‑to‑bottom, to maximize each model’s hackability.

    Every model should be completely understandable and hackable by reading a single file from top to bottom.
  3. Code is Product

    Optimize for reading, diffing, and tweaking, our users are power users. Variables should be explicit, full words, even several words, readability is primordial.

    Code quality matters as much as functionality - optimize for human readers, not just computers.
  4. Standardize, Don’t Abstract

    If it’s model behavior, keep it in the file; use abstractions only for generic infra.

    Model-specific logic belongs in the model file, not hidden behind abstractions.
  5. DRY* (DO Repeat Yourself)

    Copy when it helps users; keep successors in sync without centralizing behavior.

    Evolution: With the introduction and global adoption of modular transformers, we do not repeat any logic in the modular files, but end user files remain faithful to the original tenet.

    Strategic duplication can improve readability and maintainability when done thoughtfully.
  6. Minimal User API

    Config, model, preprocessing; from_pretrained, save_pretrained, push_to_hub. We want the least amount of codepaths. Reading should be obvious, configurations should be obvious.

    Keep the public interface simple and predictable, users should know what to expect.
  7. Backwards Compatibility

    Evolve by additive standardization, never break public APIs.

    Any artifact that was once on the hub and loadable with transformers should be usable indefinitely with the same interface. Further, public methods should not change to avoid breaking dependencies. If we do deprecate something, it’s with very long cycles beforehand.

    Once something is public, it stays public, evolution through addition, not breaking changes.
  8. Consistent Public Surface

    Same argument names, same outputs, hidden states and attentions exposed, enforced by tests. This is a goal as well as a tenet.

    All models should feel familiar - consistent interfaces reduce cognitive load.
+
  1. Source of Truth

    We aim to be a source of truth for all model definitions. This is more of a goal than a tenet, but it strongly guides our decisions. Model implementations should be reliable, reproducible, and faithful to the original implementations. If we are successful, they should become reference baselines for the ecosystem, so they’ll be easily adopted by downstream libraries and projects. It’s much easier for a project to always refer to the transformers implementation, than to learn a different research codebase every time a new architecture is released.

    This overarching guideline ensures quality and reproducibility across all models in the library, and aspires to make the community work easier.
  2. One Model, One File

    All inference and training core logic has to be visible, top‑to‑bottom, to maximize each model’s hackability.

    Every model should be completely understandable and hackable by reading a single file from top to bottom.
  3. Code is Product

    Optimize for reading, diffing, and tweaking, our users are power users. Variables should be explicit, full words, even several words, readability is primordial.

    Code quality matters as much as functionality - optimize for human readers, not just computers.
  4. Standardize, Don’t Abstract

    If it’s model behavior, keep it in the file; use abstractions only for generic infra.

    Model-specific logic belongs in the model file, not hidden behind abstractions.
  5. DRY* (DO Repeat Yourself)

    Copy when it helps users; keep successors in sync without centralizing behavior.

    Evolution:

    With the introduction and global adoption of modular transformers, we do not repeat any logic in the modular files, but end user files remain faithful to the original tenet.

    Strategic duplication can improve readability and maintainability when done thoughtfully.
  6. Minimal User API

    Config, model, preprocessing; from_pretrained, save_pretrained, push_to_hub. We want the least amount of codepaths. Reading should be obvious, configurations should be obvious.

    Keep the public interface simple and predictable, users should know what to expect.
  7. Backwards Compatibility

    Evolve by additive standardization, never break public APIs.

    Any artifact that was once on the hub and loadable with transformers should be usable indefinitely with the same interface. Further, public methods should not change to avoid breaking dependencies. If we do deprecate something, it’s with very long cycles beforehand.

    Once something is public, it stays public, evolution through addition, not breaking changes.
  8. Consistent Public Surface

    Same argument names, same outputs, hidden states and attentions exposed, enforced by tests. This is a goal as well as a tenet.

    All models should feel familiar - consistent interfaces reduce cognitive load.

When a PR is merged, it is because the contribution is worthwhile, and because the transformers team finds the design of the contribution to be aligned with the tenets.

-

Does all the code in the library strictly follow these tenets? No. The library is a gigantic house with connected nooks, corridors, crannies everywhere, built by thousands of different workers. We try to make it so all the code added is compliant, because if we fail and merge it, we cannot change it lest we break backwards compatibility.

+

Does all the code in the library strictly follow these tenets? No. The library is a gigantic house with connected nooks, corridors, crannies everywhere, built by thousands of different workers. We try to make it so all the code added is compliant, because if we fail and merge it, we cannot change it lest we break backwards compatibility

backwards-compatibility
Any artifact once on the hub must remain loadable. Breaking changes are unacceptable.
.

To see what constitutes adherence to the tenets, let’s take the example of code repetition.

-

The following function, essential to the implementation of Rotary Positional Embeddings can be found in more than 70 modeling_<file>.py across src/transformers/models/. Why keep it? Because we want all the model logic to be contained in the modeling file. In order to do that, we do repeat ourselves.

+

The following function, essential to the implementation of Rotary Positional Embeddings can be found in more than 70 modeling_<file>.py across src/transformers/models/. Why keep it? Because we want all the model logic to be contained in the modeling file

one-model-one-file
All inference and training core logic visible, top‑to‑bottom, in a single file.
. In order to do that, we do repeat ourselves
do-repeat-yourself
Strategic duplication can improve readability and maintainability when done thoughtfully.
.

def rotate_half(x):
     """Rotates half the hidden dims of the input."""
     x1 = x[..., : x.shape[-1] // 2]
@@ -439,205 +564,983 @@ We continue to support all new models and expect to do so for the foreseeable fu
 

We want all models to have self-contained modeling code. Every core functionality must be in the modeling code, every non-core functionality can be outside of it.

This comes at a great cost. For years, we have used what we call the #Copied from... mechanism: we added comments of a specific format documenting that some code was copied from another model, saving time both for the reviewers and for the CI: we had tooling to ensure that the copied blocks remained in sync.

But the LOC count kept creeping up. Each new model copied over hundreds of lines that we considered largely boilerplate, yet, we could not remove them.

-

We needed to separate two principles that were so far intertwined, repetition and hackability.

+

We needed to separate two principles that were so far intertwined, repetition

do-repeat-yourself
Strategic duplication can improve readability and maintainability when done thoughtfully.
and hackability
one-model-one-file
All inference and training core logic visible, top‑to‑bottom, in a single file.
.

What was the solution to this? Let’s talk about modular transformers.

-

TL;DR: Read the code in one place (One Model, One File). Keep semantics local (Standardize, Don’t Abstract). Allow strategic duplication for end users (DRY*). Keep the public surface minimal and stable (Minimal API, Backwards Compatibility, Consistent Surface).

Next: how modular transformers honor these while removing boilerplate.

+

TL;DR: Read the code in one place, one model, one file.

one-model-one-file
All inference and training core logic visible, top‑to‑bottom, in a single file.
. Keep semantics local (Standardize, Don’t Abstract). Allow strategic duplication for end users (DRY*). Keep the public surface minimal and stable (Minimal API, Backwards Compatibility, Consistent Surface).

Next: how modular transformers honor these while removing boilerplate.

Modular transformers

-

Transformers is an opinionated library. The previous philosophy page, and the blog post were already pointing at the drawbacks mentioned just above, which have been iteratively addressed. modular transformers was introduced to allow a form of inheritance without breaking One model, One file.

-

We amended the principle of DRY* by progressively removing all pieces of code that were “copied from” another file.

+

Transformers is an opinionated library. The previous philosophy page, and the blog post were already pointing at the drawbacks mentioned just above, which have been iteratively addressed. modular transformers was introduced to allow a form of inheritance without breaking the one model, one file rule.

one-model-one-file
All inference and training core logic visible, top‑to‑bottom, in a single file.

+

We amended the principle of DRY*

do-repeat-yourself
Strategic duplication can improve readability and maintainability when done thoughtfully.
by progressively removing all pieces of code that were “copied from” another file.

It works as follows. In order to contribute a model, GLM for instance, we define a modular_ file that can inherit from any function across all other modeling, configuration and processor files already existing in the library. The modular file can use inheritance across models: and then, it will be unravelled into a fully functional modeling file.

-Auto-generated modeling code -
-
-
- modular_glm.py -
-
class GlmMLP(Phi3MLP):
-    pass
-
-class GlmAttention(LlamaAttention):
-    def __init__(self, config, layer_idx=None):
-        super().__init__(config, layer_idx)
-        self.o_proj = nn.Linear(
-            config.num_attention_heads * self.head_dim, 
-            config.hidden_size, 
-            bias=False
-        )
-
-class GlmForCausalLM(LlamaForCausalLM):
-    pass
-
- -
-
- modeling_glm.py (auto-expanded) -
-
class GlmMLP(nn.Module):
-    def __init__(self, config):
-        super().__init__()
-        self.config = config
-        self.gate_up_proj = nn.Linear(
-            config.hidden_size, 
-            2 * config.intermediate_size, 
-            bias=False
-        )
-        self.down_proj = nn.Linear(
-            config.intermediate_size, 
-            config.hidden_size, 
-            bias=False
-        )
-        self.activation_fn = ACT2FN[config.hidden_act]
-
-    def forward(self, hidden_states: torch.FloatTensor) -> torch.FloatTensor:
-        up_states = self.gate_up_proj(hidden_states)
-        gate, up_states = up_states.chunk(2, dim=-1)
-        up_states = up_states * self.activation_fn(gate)
-        return self.down_proj(up_states)
-
-
-class GlmAttention(nn.Module):
-    """Multi-headed attention from 'Attention Is All You Need' paper"""
-
-    def __init__(self, config: GlmConfig, layer_idx: Optional[int] = None):
-        super().__init__()
-        self.config = config
-        self.layer_idx = layer_idx
-        self.head_dim = getattr(
-            config, "head_dim", 
-            config.hidden_size // config.num_attention_heads
-        )
-        self.num_key_value_groups = (
-            config.num_attention_heads // config.num_key_value_heads
-        )
-        self.scaling = self.head_dim**-0.5
-        self.attention_dropout = config.attention_dropout
-        self.is_causal = True
-
-        self.q_proj = nn.Linear(
-            config.hidden_size, 
-            config.num_attention_heads * self.head_dim, 
-            bias=config.attention_bias
-        )
-        self.k_proj = nn.Linear(
-            config.hidden_size, 
-            config.num_key_value_heads * self.head_dim, 
-            bias=config.attention_bias
-        )
-        self.v_proj = nn.Linear(
-            config.hidden_size, 
-            config.num_key_value_heads * self.head_dim, 
-            bias=config.attention_bias
-        )
-        self.o_proj = nn.Linear(
-            config.num_attention_heads * self.head_dim, 
-            config.hidden_size, 
-            bias=False
-        )
-
-    def forward(
-        self,
-        hidden_states: torch.Tensor,
-        position_embeddings: Tuple[torch.Tensor, torch.Tensor],
-        attention_mask: Optional[torch.Tensor],
-        past_key_value: Optional[Cache] = None,
-        cache_position: Optional[torch.LongTensor] = None,
-        **kwargs: Unpack[FlashAttentionKwargs],
-    ) -> Tuple[torch.Tensor, Optional[torch.Tensor], Optional[Tuple[torch.Tensor]]]:
-        input_shape = hidden_states.shape[:-1]
-        hidden_shape = (*input_shape, -1, self.head_dim)
-
-        query_states = self.q_proj(hidden_states).view(hidden_shape).transpose(1, 2)
-        key_states = self.k_proj(hidden_states).view(hidden_shape).transpose(1, 2)
-        value_states = self.v_proj(hidden_states).view(hidden_shape).transpose(1, 2)
-
-        cos, sin = position_embeddings
-        query_states, key_states = apply_rotary_pos_emb(query_states, key_states, cos, sin)
-
-        if past_key_value is not None:
-            cache_kwargs = {"sin": sin, "cos": cos, "cache_position": cache_position}
-            key_states, value_states = past_key_value.update(
-                key_states, value_states, self.layer_idx, cache_kwargs
-            )
-
-        attention_interface: Callable = eager_attention_forward
-        if self.config._attn_implementation != "eager":
-            attention_interface = ALL_ATTENTION_FUNCTIONS[self.config._attn_implementation]
-
-        attn_output, attn_weights = attention_interface(
-            self, query_states, key_states, value_states,
-            attention_mask, dropout=0.0 if not self.training else self.attention_dropout,
-            scaling=self.scaling, **kwargs,
-        )
-
-        attn_output = attn_output.reshape(*input_shape, -1).contiguous()
-        attn_output = self.o_proj(attn_output)
-        return attn_output, attn_weights
-
-
-@use_kernel_forward_from_hub("RMSNorm")
-class GlmRMSNorm(nn.Module):
-    def __init__(self, hidden_size, eps=1e-6):
-        super().__init__()
-        self.weight = nn.Parameter(torch.ones(hidden_size))
-        self.variance_epsilon = eps
-
-    def forward(self, hidden_states):
-        input_dtype = hidden_states.dtype
-        hidden_states = hidden_states.to(torch.float32)
-        variance = hidden_states.pow(2).mean(-1, keepdim=True)
-        hidden_states = hidden_states * torch.rsqrt(variance + self.variance_epsilon)
-        return self.weight * hidden_states.to(input_dtype)
-
-# ... (many more classes and functions would follow)
-
-
- -

- Left: Clean modular definition with inheritance. - Right: Auto-expanded version with all inherited functionality visible. -

+

modular_glm.py

class GlmMLP(Phi3MLP):
+    pass
+
+class GlmAttention(LlamaAttention):
+    def __init__(self, config, layer_idx=None):
+        super().__init__(config, layer_idx)
+        self.o_proj = nn.Linear(
+            config.num_attention_heads * self.head_dim, 
+            config.hidden_size, 
+            bias=False
+        )
+
+class GlmForCausalLM(LlamaForCausalLM):
+    pass
+

modeling_glm.py (auto-expanded)

class GlmMLP(nn.Module):
+    def __init__(self, config):
+        super().__init__()
+        self.config = config
+        self.gate_up_proj = nn.Linear(
+            config.hidden_size, 
+            2 * config.intermediate_size, 
+            bias=False
+        )
+        self.down_proj = nn.Linear(
+            config.intermediate_size, 
+            config.hidden_size, 
+            bias=False
+        )
+        self.activation_fn = ACT2FN[config.hidden_act]
+
+    def forward(self, hidden_states: torch.FloatTensor) -> torch.FloatTensor:
+        up_states = self.gate_up_proj(hidden_states)
+        gate, up_states = up_states.chunk(2, dim=-1)
+        up_states = up_states * self.activation_fn(gate)
+        return self.down_proj(up_states)
+
+
+class GlmAttention(nn.Module):
+    """Multi-headed attention from 'Attention Is All You Need' paper"""
+
+    def __init__(self, config: GlmConfig, layer_idx: Optional[int] = None):
+        super().__init__()
+        self.config = config
+        self.layer_idx = layer_idx
+        self.head_dim = getattr(
+            config, "head_dim", 
+            config.hidden_size // config.num_attention_heads
+        )
+        self.num_key_value_groups = (
+            config.num_attention_heads // config.num_key_value_heads
+        )
+        self.scaling = self.head_dim**-0.5
+        self.attention_dropout = config.attention_dropout
+        self.is_causal = True
+
+        self.q_proj = nn.Linear(
+            config.hidden_size, 
+            config.num_attention_heads * self.head_dim, 
+            bias=config.attention_bias
+        )
+        self.k_proj = nn.Linear(
+            config.hidden_size, 
+            config.num_key_value_heads * self.head_dim, 
+            bias=config.attention_bias
+        )
+        self.v_proj = nn.Linear(
+            config.hidden_size, 
+            config.num_key_value_heads * self.head_dim, 
+            bias=config.attention_bias
+        )
+        self.o_proj = nn.Linear(
+            config.num_attention_heads * self.head_dim, 
+            config.hidden_size, 
+            bias=False
+        )
+
+    def forward(
+        self,
+        hidden_states: torch.Tensor,
+        position_embeddings: Tuple[torch.Tensor, torch.Tensor],
+        attention_mask: Optional[torch.Tensor],
+        past_key_value: Optional[Cache] = None,
+        cache_position: Optional[torch.LongTensor] = None,
+        **kwargs: Unpack[FlashAttentionKwargs],
+    ) -> Tuple[torch.Tensor, Optional[torch.Tensor], Optional[Tuple[torch.Tensor]]]:
+        input_shape = hidden_states.shape[:-1]
+        hidden_shape = (*input_shape, -1, self.head_dim)
+
+        query_states = self.q_proj(hidden_states).view(hidden_shape).transpose(1, 2)
+        key_states = self.k_proj(hidden_states).view(hidden_shape).transpose(1, 2)
+        value_states = self.v_proj(hidden_states).view(hidden_shape).transpose(1, 2)
+
+        cos, sin = position_embeddings
+        query_states, key_states = apply_rotary_pos_emb(query_states, key_states, cos, sin)
+
+        if past_key_value is not None:
+            cache_kwargs = {"sin": sin, "cos": cos, "cache_position": cache_position}
+            key_states, value_states = past_key_value.update(
+                key_states, value_states, self.layer_idx, cache_kwargs
+            )
+
+        attention_interface: Callable = eager_attention_forward
+        if self.config._attn_implementation != "eager":
+            attention_interface = ALL_ATTENTION_FUNCTIONS[self.config._attn_implementation]
+
+        attn_output, attn_weights = attention_interface(
+            self, query_states, key_states, value_states,
+            attention_mask, dropout=0.0 if not self.training else self.attention_dropout,
+            scaling=self.scaling, **kwargs,
+        )
+
+        attn_output = attn_output.reshape(*input_shape, -1).contiguous()
+        attn_output = self.o_proj(attn_output)
+        return attn_output, attn_weights
+
+
+@use_kernel_forward_from_hub("RMSNorm")
+class GlmRMSNorm(nn.Module):
+    def __init__(self, hidden_size, eps=1e-6):
+        super().__init__()
+        self.weight = nn.Parameter(torch.ones(hidden_size))
+        self.variance_epsilon = eps
+
+    def forward(self, hidden_states):
+        input_dtype = hidden_states.dtype
+        hidden_states = hidden_states.to(torch.float32)
+        variance = hidden_states.pow(2).mean(-1, keepdim=True)
+        hidden_states = hidden_states * torch.rsqrt(variance + self.variance_epsilon)
+        return self.weight * hidden_states.to(input_dtype)
+
+# ... (many more classes and functions would follow)
+
Left: Clean modular definition with inheritance. Right: Auto-expanded version with all inherited functionality visible.

As you can see, we can define a new model as a modular combination of fragments taken from others.

-

You might think “well that’s just how inheritance works”. The crucial difference is that we do visibly what is essentially the compiler’s job: by unrolling the inheritances, we make visible all of the modeling code, keeping it all in one piece.

+

You might think “well that’s just how inheritance works”. The crucial difference is that we do visibly what is essentially the compiler’s job: by unrolling the inheritances, we make visible all of the modeling code, keeping it all in one piece.

one-model-one-file
All inference and training core logic visible, top‑to‑bottom, in a single file.

You can see below the difference between GlmAttention and LlamaAttention, with the latter having been copied with minimal changes.

-

Llama vs GLM

+
Llama vs GLM
Figure 1: Comparison of attention implementations between Llama and GLM, showing code reuse with minimal modifications.

What is the consequence? When adding a model, we do not need to go over the entire modeling file. The modular (left side above) is enough.

When AutoModel.from_pretrained(...) is called, it is indeed the modeling (right side) that is run, and all the tests run on the modeling code.

More importantly, the auto-generated modeling file is what users read to understand the code, what they step through in their debuggers and what they hack for their needs.

What does that give us?

-

TL;DR: A small modular_*.py declares reuse; the expanded modeling file stays visible (One Model, One File tenet preserved). Reviewers and contributors maintain the shard, not the repetition.

Next: the measurable effect on effective LOC and maintenance cost.

+

TL;DR: A small modular_*.py declares reuse; the expanded modeling file stays visible and unique

one-model-one-file
All inference and training core logic visible, top‑to‑bottom, in a single file.
. Reviewers and contributors maintain the shard, not the repetition.

Next: the measurable effect on effective LOC and maintenance cost.

A maintainable control surface

The effect of modular can be measured in lines of code (LOC). If a model only has a modeling file, we add its LOC count. However, if a model has a modular_*.py and a corresponding automatically generated modeling_*.py, we only count the LOC under the modular file. The modeling code has no maintenance cost as it is strictly dependent on the modular file.

That gives an “effective LOC” curve: the 𝗺𝗮𝗶𝗻𝘁𝗲𝗻𝗮𝗻𝗰𝗲 𝘀𝘂𝗿𝗳𝗮𝗰𝗲.

Measured on git history, raw modeling_*.py grew at ~362 LOC/day before modular; counting only modular shards yields ~25 LOC/day after — about 15× lower. The effective curve (blue line below) represents the maintenance surface today: what maintainers actually read and review.

Less code to hand-maintain means fewer places to break. Naturally LOC is not a direct measure of complexity, but they correlate in review effort and change risk.

-
+
+ + +

The blue line (effective) is the sum of the red + green, whereas the yellow would have been the progression without modular. We can see that the maintenance surface is essentially constant (in LOC) since the implementation of modular. If you zoom in, you’ll notice there’s a sharp drop near the end; it’s essentially due to us removing support for Jax and TensorFlow library-wide.

But this was not the only effort that allowed us to reduce maintenance load.

We recently underwent a deep refactor of the attention implementation. You’ve likely heard about flash attention and its several variants.

The attention computation itself happens at a lower level of abstraction than the model itself.

-

However, we were adding specific torch operations for each backend (sdpa, the several flash-attention iterations, flex attention) but it wasn’t a minimal user api. Next section explains what we did.

+

However, we were adding specific torch operations for each backend (sdpa, the several flash-attention iterations, flex attention) but it wasn’t a minimal user api

minimal-user-api
Config, model, preprocessing; from_pretrained, save_pretrained, push_to_hub. Least amount of codepaths.
. Next section explains what we did.

Evidence: effective (i.e., maintainable) LOC growth drops ~15× when counting shards instead of expanded modeling files. Less code to read, fewer places to break.

Next: how the attention interface stays standard without hiding semantics.

External Attention classes

The solution for the “attention abstraction problem” was to move to a standard attention interface that allows the following:

The naive implementation of attention, called “eager”, is available by default. We use a Callable called eager_attention_forward, which can run as long as the user has PyTorch installed – which is a requirement any way.

Instead of using a class interface and a class hierarchy, we just moved to a function interface. When a more complex attention implementation is needed, we use other Callables, including much faster kernel bindings when available. The decision to use a different attention implementation is based on the model configuration file we download from the Hub, and it can also be overridden by the user.

-

This is a clear example that that we prefer an interface that is standard, but not abstract. To be completely precise, this is what the interface selection looks like in transformers code:

+

This is a clear example that we prefer an interface that is standard, but not abstract

standardize-dont-abstract
Model-specific logic belongs in the model file, not hidden behind abstractions.
. To be completely precise, this is what the interface selection looks like in transformers code:

attention_interface: Callable = eager_attention_forward
 if self.config._attn_implementation != "eager":
     attention_interface = ALL_ATTENTION_FUNCTIONS[self.config._attn_implementation]
 
-

Having the attention interfaces functionalized allows to do dynamic switching of attentions as well, increasing their hackability. +

Having the attention interfaces functionalized allows us to do dynamic switching of attentions as well, increasing their hackability

code-is-product
Optimize for reading, diffing, and tweaking. Code quality matters as much as functionality.
. Another strength of the new attention interface is the possibility to enforce specific kwargs, which are needed by kernel providers and other dependencies.

Backend integrations sometimes require specific kwargs.

-

We know that kwargs are often a necessary evil that plagues tools with widespread compatibility; and it is something we have aimed to reduce, and will continue reduce in order to improve readability - with them, the current system is a minimal user api.

+

We know that kwargs are often a necessary evil that plagues tools with widespread compatibility; and it is something we have aimed to reduce, and will continue to reduce in order to improve readability - with them, the current system is a minimal user api

minimal-user-api
Config, model, preprocessing; from_pretrained, save_pretrained, push_to_hub. Least amount of codepaths.
.

We reduce that surface and document expectations; where flexibility is necessary, we plan to use typing.Annotated to convey shapes and invariants without constraining integrations. Such an implementation could look like this in the future:

from typing import Annotated
 
@@ -650,30 +1553,51 @@ Another strength of the new attention interface is the possibility to enforce sp
 

Why does it matter?

Because we want to avoid code modifications that are unrelated to the model.

We choose to place the level of abstraction higher than the device placement: a matrix multiplication - a nn.Linear layer - should be always expressed in the same way, regardless of how it is placed.

-

Hence, we want to touch the modeling code minimally, and only modify it when architectural changes are involved – not depending on the way you run it. For tensor parallelism, we simply specify a tp_plan:

-
# In the model's config (example: ERNIE 4.5-style decoder blocks)
-    base_model_tp_plan = {
-        "layers.*.self_attn.q_proj": "colwise",
-        "layers.*.self_attn.k_proj": "colwise",
-        "layers.*.self_attn.v_proj": "colwise",
-        "layers.*.self_attn.o_proj": "rowwise",
-        "layers.*.mlp.gate_proj": "colwise",
-        "layers.*.mlp.up_proj":   "colwise",
-        "layers.*.mlp.down_proj": "rowwise",
-    }
-    
-    # Runtime
-    import torch
-    from transformers import AutoModelForCausalLM, AutoTokenizer
-    
-    model_id = "your/model-or-local-checkpoint"
-    model = AutoModelForCausalLM.from_pretrained( # <-- will automatically map to the plan defined above
-        model_id, 
-        dtype=torch.bfloat16,
-    )  
-    tok = AutoTokenizer.from_pretrained(model_id)
-    inputs = tok("Hello", return_tensors="pt").to(model.device)
-    out = model(**inputs)
+

Hence, we want to touch the modeling code as little as possible

minimal-user-api
Config, model, preprocessing; from_pretrained, save_pretrained, push_to_hub. Least amount of codepaths.
, and only modify it when architectural changes are involved – not depending on the way you run it. For tensor parallelism, we simply specify a tp_plan:

+
# In the model's config (example: ERNIE 4.5-style decoder blocks)
+base_model_tp_plan = {
+    "layers.*.self_attn.q_proj": "colwise",
+    "layers.*.self_attn.k_proj": "colwise",
+    "layers.*.self_attn.v_proj": "colwise",
+    "layers.*.self_attn.o_proj": "rowwise",
+    "layers.*.mlp.gate_proj": "colwise",
+    "layers.*.mlp.up_proj":   "colwise",
+    "layers.*.mlp.down_proj": "rowwise",
+}
+
+# Runtime
+import torch
+from transformers import AutoModelForCausalLM, AutoTokenizer
+
+model_id = "your/model-or-local-checkpoint"
+model = AutoModelForCausalLM.from_pretrained( # <-- will automatically map to the plan defined above
+    model_id, 
+    dtype=torch.bfloat16,
+)  
+tok = AutoTokenizer.from_pretrained(model_id)
+inputs = tok("Hello", return_tensors="pt").to(model.device)
+out = model(**inputs)
+

The plan is written once, saved as part of the config and passed to .from_pretrained(). It maps module name patterns to partitioning strategies. Strategies are resolved by the internal ParallelInterface, which wires to sharding implementations ColwiseParallel, RowwiseParallel, packed variants, and so on.

The alternative would be to modify classes depending on supported types of parallelism.

The tp_plan solution allows users to run the same model on a single GPU, or distribute it using multiple processes per node, e.g. 4 GPUs:

@@ -699,11 +1623,51 @@ Another strength of the new attention interface is the possibility to enforce sp "full_attention" ],
-

This is minimal to implement on the user side, and allows us to keep the modeling code untouched. It is also easy to tweak.

+

This is minimal

minimal-user-api
Config, model, preprocessing; from_pretrained, save_pretrained, push_to_hub. Least amount of codepaths.
to implement on the user side, and allows us to keep the modeling code untouched. It is also easy to tweak.

Allowed layer types are explicit; schedules (e.g., sliding/full alternation) live in config. This keeps the file readable and easy to tweak.

Next: speedups come from kernels that don’t change semantics.

Community Kernels

-

The same principle extends to normalization, activation, and other code paths. The model defines semantics; a kernel defines how to execute them faster. We annotate the module to borrow a community‑provided forward, keeping a consistent public surface

-
@use_kernel_forward_from_hub("RMSNorm")
+

The same principle extends to normalization, activation, and other code paths. The model defines semantics; a kernel defines how to execute them faster. We annotate the module to borrow a community‑provided forward, keeping a consistent public surface

consistent-public-surface
Uniform naming, signatures, and conventions across all models for predictability.

+
@use_kernel_forward_from_hub("RMSNorm")
 class GlmRMSNorm(nn.Module):
     ...
 
@@ -723,55 +1687,1286 @@ So I wanted to take a look at the current state of modularity a

So what do we see?

(Graph reading guide: nodes are models; edges are modular imports).

Check out the full viewer here (tab “dependency graph”, hit “build graph”) for better manipulation and exploration.

-
+>

Let’s walk through some sections of this graph together. First, Llama is a basis and an influence for many models, and it is very visible.

-

Llama in the center

+
Llama in the center
Figure 2: Llama as a central model influencing many other models in the dependency graph.

The models linked sometimes pull components from models other than llama, of course. Radically different architectures such as mamba have spawned their own dependency subgraph.

Audio models form sparser archipelagos, see for instance wav2vec2 which is a significant basis for a dozen of them.

-

Wav2vec2 influence

-

In the case of VLMs which have massively grown in popularity since 2024, there are far too many vision-based architectures that are not yet defined as modulars of other existing archs. In other words, there is no strong reference point in terms of software for vision models.

-

As you can see, there is a small DETR island: -DETR archipelago

-

There is also a little llava pocket, and so on, but it’s not comparable to the centrality observed for llama.

-

Another problem is, this visualization only shows modular models. Several models still do NOT have a modular file. If we zoom out significantly, we can see them, the red nodes are models that do not have a modular file yet.

-

Red nodes

-

Hence the next question: how do we identify modularisable models?

-

Llama-lineage is a hub; several VLMs remain islands — engineering opportunity for shared parents. Next: timeline + similarity signals to spot modularisable candidates.

-

Many models, but not enough yet, are alike

-

I looked into Jaccard similarity, which we use to measure set differences, to find similarities across models. I know that code is more than a set of characters strung together. We also tried code-embedding models that ranked candidates better in practice, but for this post we stick to the deterministic Jaccard index.

-

It is interesting, for our comparison, to look at when we deployed the modular logic and what was its rippling effect on the library. Looking at the timeline makes it obvious: adding modular allowed us to connect more and more models to solid reference points.

-

Yet, we still have a lot of gaps to fill.

-

Zoom out below - it’s full of models. You can click on a node to see its connections better, or use the text box to search for a model. You can use the full viewer (tab “timeline”, hit “build timeline”) for better exploration.

-
-

Let’s look at a few highly connected models. Let’s start by the foundational work of Llava.

-

Llava in its timeline

-

You see that llava_video is a red node, connected by a red edge to llava: it’s a candidate, something that we can likely remodularize, not touching the actual model but being much more readable with DRY*.

-

The same can be identified with the classical encoders family, centered on BERT:

-

Here roberta, xlm_roberta, ernie are modulars of BERT, while models like mobilebert are likely candidates. -Classical encoders

-

Similarity metrics (Jaccard index or embeddings) surface likely parents; the timeline shows consolidation after modular landed. Red nodes/edges = candidates (e.g., llava_video → llava) for refactors that preserve behavior.

Next: concrete VLM choices that avoid leaky abstractions.

-

VLM improvements, avoiding abstraction

-

We don’t yet have a cookbook for common VLM patterns (image token scatter, multi‑tower encoders, cross‑attention bridges). This is one of the main improvement points where we can work.

+
Wav2vec2 influence
Figure 3: Cluster of audio architectures based on wav2vec2, forming a specialized archipelago.
+

In the case of VLMs which have massively grown in popularity since 2024, there are far too many vision-based architectures that are not yet defined as modulars of other existing archs. In other words, there is no strong reference point in terms of software for vision models.

+

As you can see, there is a small DETR island:

+
DETR archipelago
Figure 4: Small DETR archipelago for vision models, less centralized than Llama for text.
+

There is also a little llava pocket, and so on, but it’s not comparable to the centrality observed for llama.

+

Another problem is, this visualization only shows modular models. Several models still do NOT have a modular file. If we zoom out significantly, we can see them, the red nodes are models that do not have a modular file yet.

+
Red nodes
Figure 5: Overview showing red nodes (models without modular files) to be modularized.
+

Hence the next question: how do we identify modularisable models?

+

Llama-lineage is a hub; several VLMs remain islands — engineering opportunity for shared parents. Next: timeline + similarity signals to spot modularisable candidates.

+

Many models, but not enough yet, are alike

+

I looked into Jaccard similarity, which we use to measure set differences, to find similarities across models. I know that code is more than a set of characters strung together. We also tried code-embedding models that ranked candidates better in practice, but for this post we stick to the deterministic Jaccard index.

+

It is interesting, for our comparison, to look at when we deployed the modular logic and what was its rippling effect on the library. Looking at the timeline makes it obvious: adding modular allowed us to connect more and more models to solid reference points.

+

Yet, we still have a lot of gaps to fill.

+

Zoom out below - it’s full of models. You can click on a node to see its connections better, or use the text box to search for a model. You can use the full viewer (tab “timeline”, hit “build timeline”) for better exploration.

+
+

Let’s look at a few highly connected models. Let’s start by the foundational work of Llava.

+

Llava in its timeline

+

You see that llava_video is a red node, connected by a red edge to llava: it’s a candidate, something that we can likely remodularize, not touching the actual model but being much more readable with DRY*.

+
+

Let’s look at a few highly connected models. Let’s start by the foundational work of Llava.

+
Llava in its timeline
Figure 6: LLaVA and its variants in the timeline, with llava_video as a candidate for modularization.
+

You see that llava_video is a red node, connected by a red edge to llava: it’s a candidate, something that we can likely remodularize, not touching the actual model

backwards-compatibility
Any artifact once on the hub must remain loadable. Breaking changes are unacceptable.
but being much more readable with DRY*
do-repeat-yourself
Strategic duplication can improve readability and maintainability when done thoughtfully.
.

+

The same can be identified with the classical encoders family, centered on BERT:

+

Here roberta, xlm_roberta, ernie are modulars of BERT, while models like mobilebert are likely candidates.

+
Classical encoders
Figure 7: Family of classical encoders centered on BERT, with several models already modularized.
+

Similarity metrics (Jaccard index or embeddings) surface likely parents; the timeline shows consolidation after modular landed. Red nodes/edges = candidates (e.g., llava_video → llava) for refactors that preserve behavior.

Next: concrete VLM choices that avoid leaky abstractions.

+

VLM improvements, avoiding abstraction

+

We don’t yet have a cookbook for common VLM patterns (image token scatter, multi‑tower encoders, cross‑attention bridges). This is one of the main improvement points where we can work.

For instance, we thought of abstracting away the mixing of inputs_embeds, the tensor fed into an LLM decoder in 95% of the existing VLMs. It would have looked something like this:

class InputsEmbeddingMixerMixin(nn.Module):
     #
 
-

But this is abstracting away an important component of the modeling. Embedding mixin is part of the model, removing it would break it. A user opening modeling_qwen2.5_vl (check out the Qwen2.5VL collection) should not have to go to another file to understand how it works.

+

But this is not an abstraction

standardize-dont-abstract
Model-specific logic belongs in the model file, not hidden behind abstractions.
. Embedding mixin is part of the model, removing it would break it. A user opening modeling_qwen2.5_vl (check out the Qwen2.5VL collection) should not have to go to another file to understand how it works.

What is the current state of these “abstractions” across the codebase? You will see all the imports around a modeling file, here Gemma3n.

-

Gemma3n graph

+
Gemma3n graph
Figure 8: Gemma3n import graph showing dependency complexity, with GenerationMixin very central.

As you can see, the GenerationMixin node is already very heavy. It encompasses all of the utilities around .generate, it is second only to nn.Module. That means every decision we make to abstract something else has to be extremely careful.

The following Pull request to standardize placeholder masking is a good example of what kind of changes are acceptable. In a VLM, we always need to insert embeddings from various encoders at various positions, so we can have a function to do it. For Qwen2 VL, for instance, it will look like this:

@@ -815,14 +3010,200 @@ That means every decision we make to abstract something else has to be extremely return special_image_mask, special_video_mask
-

But this is within the modeling file, not in the PreTrainedModel base class. It will not move away from it, because it’d break the self-contained logic of the model.

+

But this is within the modeling file, not in the PreTrainedModel base class. It will not move away from it, because it’d break the One model, one file tenet.

one-model-one-file
All inference and training core logic visible, top‑to‑bottom, in a single file.

What do we conclude? Going forward, we should aim for VLMs to have a form of centrality similar to that of Llama for text-only models. This centrality should not be achieved at the cost of abstracting and hiding away crucial inner workings of said models.

Keep VLM embedding mix in the modeling file (semantics), standardize safe helpers (e.g., placeholder masking), don’t migrate behavior to PreTrainedModel. Next: pipeline-level wins that came from PyTorch-first choices (fast processors).

On image processing and processors

Deciding to become a torch-first library meant dropping a tremendous amount of support for jax and TensorFlow, and it also meant that we could be more lenient about the amount of torch-dependent utilities that we were able to accept. One of these is the fast processing of images. Where inputs were once minimally assumed to be ndarrays, enforcing native torch and torchvision inputs allowed us to massively improve processing speed for each model.

The gains in performance are immense, up to 20x speedup for most models when using compiled torchvision ops. Furthermore, it lets us run the whole pipeline solely on GPU.

-

Fast Image Processors Performance

+
Fast Image Processors Performance
Figure 9: Performance gains of fast image processors, up to 20x acceleration with compiled torchvision.

PyTorch-first lets processors assume torch/torchvision and run the whole pipeline on GPU; big per-model speedups.

Next: how this lowers friction for contributors and downstream users.

Reduce barrier to entry/contribution

This is an overall objective: there’s no transformers without its community.

@@ -833,3896 +3214,603 @@ That means every decision we make to abstract something else has to be extremely

The shape of a contribution: add a model (or variant) with a small modular shard; the community and serving stacks pick it up immediately. Popularity trends (encoders/embeddings) guide where we invest.

Next: power tools enabled by a consistent API.

Models popularity

Talking about dependencies, we can take a look at the number of downloads as a measure of popularity. One thing we see is the prominence of encoders, despite the apparent prevalence of decoder LLMs. The reason is that encoders are used to generate embeddings, which have multiple downstream uses. Just check out EmbeddingGemma for a modern recap. Hence, it is vital to keep the encoders portion of the library viable, usable, fine-tunable.

-
- - -
-
- -
+ if (document.readyState === 'loading') { + document.addEventListener('DOMContentLoaded', () => ensureD3(bootstrap), { once: true }); + } else { + ensureD3(bootstrap); + } + })(); + +

As the codebase grows, we need to maintain it in coordination with our friend Sentence Transformers codebase. Retrieval use-cases, smart databases, FAISS-based indexing rely on it, and thus indirectly on transformers.

-

In that regard, we DO want to be a modular toolbox, being minimal enough and well documented enough so any ML/AI developer can use transformers without having to think about it. We aim to reduce the cognitive load brought about by model development, not increase it.

+

In that regard, we DO want to be a modular toolbox, being minimal

minimal-user-api
Config, model, preprocessing; from_pretrained, save_pretrained, push_to_hub. Least amount of codepaths.
enough and well documented enough so any ML/AI developer can use transformers without having to think about it. We aim to reduce the cognitive load brought about by model development, not increase it.

So, how do these design choices, these “tenets” influence development of models and overall usage of transformers?

Encoders remain critical for embeddings and retrieval; maintaining them well benefits the broader ecosystem (e.g., Sentence Transformers, FAISS).

Next: dev tools that leverage unified attention APIs and PyTorch-only internals.

A surgical toolbox for model development

@@ -4731,7 +3819,7 @@ return Plotly;

All models have the same API for attention computation, thanks to the externalisation of attention classes.

This uniformity allows us to build cool tools to visualize the inner workings of the attention mechanism.

One particular piece of machinery is the attention mask. Here you see the famous bidirectional attention pattern for the whole prefix (text + image) in PaliGemma and all Gemma2+ models, contrasting with the usual “causal-only” models.

-
+
@@ -4779,202 +3867,689 @@ return Plotly;

Uniform attention APIs enable cross-model diagnostics (e.g., PaliGemma prefix bidirectionality vs causal).

Next: whole-model tracing for ports and regressions.

Logging entire model activations

Because everything is PyTorch, we can easily debug any model when we want to add it to transformers. We now have a power-user tool for porting or adding models, that wraps a forward pass, intercepts every submodule call, and logs shapes, dtypes, and sample statistics of inputs/outputs to nested JSON.

-

It just works with PyTorch models and is especially useful when aligning outputs with a reference implementation, to match our Source of Truth guideline.

-

Model debugger interface

+

It just works with PyTorch models and is especially useful when aligning outputs with a reference implementation, to match our Source of Truth guideline

source-of-truth
Model implementations should be reliable, reproducible, and faithful to original performances.
.

+
Model debugger interface
Figure 10: Model debugger interface intercepting calls and logging statistics in nested JSON.

Forward interception and nested JSON logging align ports to reference implementations, reinforcing “Source of Truth.”

Next: CUDA warmup reduces load-time without touching modeling semantics.

Cooking faster CUDA warmups

-

Having a clean external API allows us to work on the true inner workings of transformers. One of a few recent additions was the CUDA warmup via caching_allocator_warmup, which dramatically improved loading times by pre-allocating GPU memory to avoid malloc bottlenecks during model loading. It can achieve a 7x speedup factor for an 8B model, or 6x for a 32B one, as you can check in the PR!

-
-
-
-

Mem allocation patterns during model loading

+
+ })(); + +

It’s hard to overstate how much of a lifesaver that is when you’re trying to load a model as fast as possible, as it’s the narrowest bottleneck for your iteration speed.

Pre-allocating GPU memory removes malloc spikes (e.g., 7× for 8B, 6× for 32B in the referenced PR).

Next: consistent interfaces allow transformers-serve.

Transformers-serve and continuous batching

@@ -5112,7 +4731,27 @@ return Plotly;
  • having it immediately usable in vLLM, SGLang, and so on without additional code. In the case of vLLM, transformers was added as a backend to run models on vLLM, which optimizes throughput/latency on top of existing transformers architectures as seen in this great vLLM x HF blog post.
  • being the reference code for implementations in MLX, llama.cpp and other libraries.
  • -

    This further cements the need for a consistent public surface: we are a backend and a reference, and there’s more software than us to handle serving. At the time of writing, more effort is being made in that direction. We already have compatible configs for VLMs for vLLM (say that three times fast), check here for GLM4 video support, and here for MoE support, for instance.

    +

    This further cements the need for a consistent public surface

    consistent-public-surface
    Uniform naming, signatures, and conventions across all models for predictability.
    : we are a backend and a reference, and there’s more software than us to handle serving. At the time of writing, more effort is being made in that direction. We already have compatible configs for VLMs for vLLM (say that three times fast), check here for GLM4 video support, and here for MoE support, for instance.

    Being a good backend consumer requires a consistent public surface; modular shards and configs make that stability practical.

    Next: what changes in v5 without breaking the promise of visible semantics.

    What is coming next

    The next major version of transformers is just around the corner (and will have another blog post to its name when it comes out). When v5 is released, we aim to keep backwards compatibility as solid as possible. The changes we make now are in service of that goal.

    @@ -5121,7 +4760,7 @@ return Plotly; author={Pablo Montalvo and Lysandre Debut and Pedro Cuenca and Yoni Gozlan}, year={2025}, -}

    Acknowledgements

    Special thanks to all the reviewers on this! Vaibhav Srivastav, Cyril Vallez, Yoni Gozlan also for his excellent work on fast image processors, Arthur Zucker for his guidance, and Thibaud Frere for designing this template and helping me out with it! +}

    Acknowledgements

    Special thanks to all the reviewers on this! Vaibhav Srivastav, Cyril Vallez, Yoni Gozlan also for his excellent work on fast image processors, Arthur Zucker for his guidance, and of course the wonderful Thibaud Frere for designing this template and helping me out with it! Most importantly: thanks to the entire Open-Source community, sincerely.

    + + + + diff --git a/app/src/components/Glossary.astro b/app/src/components/Glossary.astro new file mode 100644 index 0000000000000000000000000000000000000000..af6a683a49dfd5322dfc651f9433f296e4cf4b38 --- /dev/null +++ b/app/src/components/Glossary.astro @@ -0,0 +1,135 @@ +--- +interface Props { + /** The word or term to define */ + term: string; + /** The definition of the term */ + definition: string; + /** Optional CSS class to apply to the term */ + class?: string; + /** Optional style to apply to the term */ + style?: string; + /** Tooltip position (top, bottom, left, right) */ + position?: 'top' | 'bottom' | 'left' | 'right'; + /** Delay before showing tooltip in ms */ + delay?: number; + /** Disable tooltip on mobile */ + disableOnMobile?: boolean; +} + +const { + term, + definition, + class: className = '', + style: inlineStyle = '', + position = 'top', + delay = 300, + disableOnMobile = false, +} = Astro.props as Props; + +// Generate a unique ID for this component +const tooltipId = `glossary-${Math.random().toString(36).slice(2)}`; +--- + +
    + + {term} + + + +
    + + + diff --git a/app/src/components/Hero.astro b/app/src/components/Hero.astro index c1c50781acf95349a08f5bcb840be84168c3dbc2..337d79f5957584c2b67c5684e500580e40c43795 100644 --- a/app/src/components/Hero.astro +++ b/app/src/components/Hero.astro @@ -105,7 +105,7 @@ const pdfFilename = `${slugify(pdfBase)}.pdf`; normalizedAuthors.length > 0 && (

    Author{normalizedAuthors.length > 1 ? "s" : ""}

    -
      +
      {normalizedAuthors.map((a, i) => { const supers = shouldShowAffiliationSupers && @@ -114,14 +114,16 @@ const pdfFilename = `${slugify(pdfBase)}.pdf`; {a.affiliationIndices.join(",")} ) : null; return ( -
    • - {a.url ? {a.name} : a.name} - {supers} - {i < normalizedAuthors.length - 1 && ", "} -
    • + <> + + {a.url ? {a.name} : a.name} + {supers} + + {i < normalizedAuthors.length - 1 && } + ); })} -
    +
    ) } @@ -183,6 +185,19 @@ const pdfFilename = `${slugify(pdfBase)}.pdf`;

    {doi}

    )} --> +
    +

    PDF

    +

    + + Download PDF + +

    +
    @@ -190,7 +205,7 @@ const pdfFilename = `${slugify(pdfBase)}.pdf`; /* Hero (full-width) */ .hero { width: 100%; - padding: 0px 0px 0px; + padding: 48px 16px 16px; text-align: center; } .hero-title { @@ -262,14 +277,11 @@ const pdfFilename = `${slugify(pdfBase)}.pdf`; } .authors { margin: 0; - list-style-type: none; - padding-left: 0; display: flex; flex-wrap: wrap; } - .authors li { + .authors span { white-space: nowrap; - margin-right: 4px; } .affiliations { margin: 0; diff --git a/app/src/components/HfUser.astro b/app/src/components/HfUser.astro new file mode 100644 index 0000000000000000000000000000000000000000..7d2e596ae988cb1102024f390b375a712d56ea8b --- /dev/null +++ b/app/src/components/HfUser.astro @@ -0,0 +1,96 @@ +--- +interface Props { + username: string; + name?: string; + url?: string; + avatarUrl?: string; +} + +const { username, name, url, avatarUrl } = Astro.props as Props; +const profileUrl = url ?? `https://huggingface.co/${encodeURIComponent(username)}`; +const displayName = name ?? username; +const imgSrc = avatarUrl ?? `https://huggingface.co/api/users/${encodeURIComponent(username)}/avatar`; +--- +
    +
    + {`${displayName} + + {displayName} + + @{username} + + +
    +
    + + + + + diff --git a/app/src/components/HtmlEmbed.astro b/app/src/components/HtmlEmbed.astro index 8f41e5d297a03609af189e2ac108de62a82fb814..91d4762c51e9d8e50eeda957e9787656aee7f59a 100644 --- a/app/src/components/HtmlEmbed.astro +++ b/app/src/components/HtmlEmbed.astro @@ -70,13 +70,9 @@ const htmlWithId = id && html ? html.replace(/
    ]*>/, `
    diff --git a/app/src/components/MultiImage.astro b/app/src/components/MultiImage.astro new file mode 100644 index 0000000000000000000000000000000000000000..ba9375a1fa0413856b8078207b50fbd356eba2c0 --- /dev/null +++ b/app/src/components/MultiImage.astro @@ -0,0 +1,342 @@ +--- +// @ts-ignore - types provided by Astro at runtime +import Image from "./Image.astro"; + +interface ImageItem { + /** Source image imported via astro:assets */ + src: any; + /** Alt text for accessibility */ + alt: string; + /** Individual caption for this image */ + caption?: string; + /** Optional individual image ID for referencing */ + id?: string; + /** Enable zoom on this specific image (defaults to parent zoomable setting) */ + zoomable?: boolean; + /** Enable download on this specific image (defaults to parent downloadable setting) */ + downloadable?: boolean; +} + +interface Props { + /** Array of images to display */ + images: ImageItem[]; + /** Global caption for the entire figure */ + caption?: string; + /** Layout mode: number of columns or 'auto' for responsive */ + layout?: "2-column" | "3-column" | "4-column" | "auto"; + /** Enable medium-zoom behavior on all images (can be overridden per image) */ + zoomable?: boolean; + /** Show download buttons on all images (can be overridden per image) */ + downloadable?: boolean; + /** Optional class to apply on the wrapper */ + class?: string; + /** Optional global ID for the multi-image figure */ + id?: string; +} + +const { + images, + caption, + layout = "3-column", + zoomable = false, + downloadable = false, + class: className, + id, +} = Astro.props as Props; + +const hasCaptionSlot = Astro.slots.has("caption"); +const hasCaption = + hasCaptionSlot || (typeof caption === "string" && caption.length > 0); +const uid = `mi_${Math.random().toString(36).slice(2)}`; + +// Generate CSS grid columns based on layout +const getGridColumns = () => { + switch (layout) { + case "2-column": + return "repeat(2, 1fr)"; + case "3-column": + 
return "repeat(3, 1fr)"; + case "4-column": + return "repeat(4, 1fr)"; + case "auto": + return "repeat(auto-fit, minmax(200px, 1fr))"; + default: + return "repeat(3, 1fr)"; + } +}; + +const gridColumns = getGridColumns(); +--- + +
    + { + hasCaption ? ( +
    +
    + {images.map((image, index) => ( +
    + {image.alt} + {image.caption && ( +
    + {image.caption} +
    + )} + {image.id && ( + + )} +
    + ))} +
    +
    + {hasCaptionSlot ? ( + + ) : ( + caption && + )} +
    +
    + ) : ( +
    + {images.map((image, index) => ( +
    + {image.alt} + {image.caption && ( +
    + {image.caption} +
    + )} + {image.id && ( + + )} +
    + ))} +
    + ) + } +
    + + + + diff --git a/app/src/components/Note.astro b/app/src/components/Note.astro new file mode 100644 index 0000000000000000000000000000000000000000..b7e31f8d1ac54ef8a78e4233d2a330281286728c --- /dev/null +++ b/app/src/components/Note.astro @@ -0,0 +1,43 @@ +--- +interface Props { + title?: string; + emoji?: string; + class?: string; + variant?: 'neutral' | 'info' | 'success' | 'danger'; +} +const { title, emoji, class: className, variant = 'neutral', ...props } = Astro.props as Props; +const wrapperClass = ["note", `note--${variant}`, className].filter(Boolean).join(" "); +const hasHeader = (emoji && String(emoji).length > 0) || (title && String(title).length > 0); +--- +
    +
    + {emoji && } +
    + {title &&
    {title}
    } +
    + +
    +
    +
    +
    + + + diff --git a/app/src/components/Quote.astro b/app/src/components/Quote.astro new file mode 100644 index 0000000000000000000000000000000000000000..f75c29e93ce0319da6aa20535eaad95e0bf7dff1 --- /dev/null +++ b/app/src/components/Quote.astro @@ -0,0 +1,124 @@ +--- +interface Props { + source?: string; +} + +const { source } = Astro.props; +--- + +
    +
    + +
    + { + source && ( +
    + +
    + ) + } +
    + + diff --git a/app/src/components/RawHtml.astro b/app/src/components/RawHtml.astro new file mode 100644 index 0000000000000000000000000000000000000000..45eee0dbdf4567b7392757d62460996c35ff1efe --- /dev/null +++ b/app/src/components/RawHtml.astro @@ -0,0 +1,6 @@ +--- +const { html, class: className, ariaLabel } = Astro.props; +--- +