commit files to HF hub

- .gitattributes +1 -0
- README.md +214 -0
- added_tokens.json +25 -0
- config.json +29 -0
- merges.txt +0 -0
- model-00001-of-00002.safetensors +3 -0
- model-00002-of-00002.safetensors +3 -0
- model.safetensors.index.json +443 -0
- model_list.json +132 -0
- special_tokens_map.json +38 -0
- tokenizer.json +3 -0
- tokenizer_config.json +217 -0
- training_args.bin +3 -0
- training_config.json +21 -0
- vocab.json +0 -0
 
    	
.gitattributes CHANGED

```diff
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
 *.zip filter=lfs diff=lfs merge=lfs -text
 *.zst filter=lfs diff=lfs merge=lfs -text
 *tfevents* filter=lfs diff=lfs merge=lfs -text
+tokenizer.json filter=lfs diff=lfs merge=lfs -text
```
README.md ADDED
@@ -0,0 +1,214 @@
# lmarena-ai/p2l-3b-bt-01132025

Large language model (LLM) evaluations typically rely on aggregated metrics like accuracy or human preference, averaging across users and prompts. This averaging obscures user- and prompt-specific variations in model performance.
To address this, we propose Prompt-to-Leaderboard (P2L), a method that produces leaderboards specific to a prompt.
The core idea is to train an LLM that takes natural language prompts as input and outputs a vector of coefficients, which are then used to predict the human preference vote.
The resulting prompt-dependent leaderboards allow for unsupervised task-specific evaluation, optimal routing of queries to models, personalization, and automated evaluation of model strengths and weaknesses.
Data from Chatbot Arena suggest that P2L better captures the nuanced landscape of language model performance than the averaged leaderboard.

**Paper**: [Prompt-to-Leaderboard](https://arxiv.org/abs/2502.14855)

**Code**: [lmarena/p2l](https://github.com/lmarena/p2l)

This particular P2L model has a *Bradley-Terry* regression head, which we define below:

$$
\begin{equation}
    g_{\theta(z)}(y; x) = \begin{cases}
        \sigma(x^\top \theta^*(z)) & y = 1, \\
        1-\sigma(x^\top \theta^*(z)) & y = 0.
    \end{cases}
\end{equation}
$$

More simply, given a prompt, P2L will output a vector of coefficients $\vec{\beta}$. Then the probability that model $i$ beats model $j$ is $P(i \succ j) = \sigma(\vec{\beta}_i - \vec{\beta}_j)$.

See section 2.2 in our paper for more details on various regression heads.
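
As a quick illustration of this win probability (a minimal sketch, not taken from the codebase; the coefficient values below are made up):

```python
import torch

# Hypothetical P2L coefficients for three models on one prompt.
coefs = torch.tensor([1.2, 0.4, -0.3])

def win_prob(coefs: torch.Tensor, i: int, j: int) -> float:
    # P(model i beats model j) = sigmoid(beta_i - beta_j)
    return torch.sigmoid(coefs[i] - coefs[j]).item()

print(win_prob(coefs, 0, 1))  # ~0.69: model 0 is favored over model 1
```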

## Serving

To serve a P2L model, please see our documentation on GitHub: [Serving P2L](https://github.com/lmarena/p2l?tab=readme-ov-file#serving-p2l).

Note: the P2L model produces outputs with this structure:

```python
class P2LOutputs(ModelOutput):
    coefs: torch.FloatTensor = None  # "betas" as described above
    eta: Optional[torch.FloatTensor] = None  # tie coefficient (not used for BT head)
    last_hidden_state: torch.FloatTensor = None  # last hidden state from the transformer
```

To understand which coefficient index corresponds with which model, see the [`model_list.json`](./model_list.json) found in the repo of each P2L model. As a general rule, the models will always be in sorted order.

The easiest way to get this list from inside code is with the following:

```python
import json

from huggingface_hub import hf_hub_download

fname = hf_hub_download(
    repo_id="lmarena-ai/p2l-3b-bt-01132025", filename="model_list.json", repo_type="model"
)

with open(fname) as fin:
    model_list = json.load(fin)
```
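
Since `model_list` shares its ordering with the coefficient indices, pairing the two yields a prompt-specific leaderboard. A minimal sketch (the `coefs` tensor here is a stand-in for a real model output):

```python
import torch

# Stand-in for a real output; in practice, take model(...).coefs[0].
coefs = torch.randn(len(model_list))

# A higher Bradley-Terry coefficient means a stronger model on this prompt.
ranking = sorted(zip(model_list, coefs.tolist()), key=lambda pair: pair[1], reverse=True)
for name, beta in ranking[:5]:
    print(f"{name}: {beta:.3f}")
```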

### Loading from Pretrained

To define and load the model:

```python
import json
from dataclasses import dataclass
from typing import Dict, Tuple, Callable, Optional

import torch
import torch.nn as nn
import torch.nn.functional as F
from huggingface_hub import hf_hub_download
from transformers import (
    Qwen2Model,
    Qwen2PreTrainedModel,
    LlamaModel,
    LlamaPreTrainedModel,
    PreTrainedModel,
    AutoTokenizer,
)
from transformers.utils import ModelOutput


@dataclass
class HeadOutputs(ModelOutput):
    coefs: torch.FloatTensor = None
    eta: Optional[torch.FloatTensor] = None
    gamma: Optional[torch.FloatTensor] = None


@dataclass
class P2LOutputs(ModelOutput):
    coefs: torch.FloatTensor = None
    eta: Optional[torch.FloatTensor] = None
    gamma: Optional[torch.FloatTensor] = None
    loss: Optional[torch.FloatTensor] = None
    last_hidden_state: torch.FloatTensor = None


class BTHead(nn.Module):
    """Linear head mapping a hidden state to one Bradley-Terry coefficient per model."""

    def __init__(
        self, input_dim, output_dim, linear_head_downsize_factor=None, **kwargs
    ) -> None:
        super().__init__()

        if linear_head_downsize_factor:
            # Optionally factor the projection through a smaller bottleneck.
            inner_dim = int(output_dim // linear_head_downsize_factor)
            self.head = nn.Sequential(
                nn.Linear(in_features=input_dim, out_features=inner_dim, bias=True),
                nn.Linear(in_features=inner_dim, out_features=output_dim, bias=True),
            )
        else:
            self.head = nn.Linear(
                in_features=input_dim, out_features=output_dim, bias=True
            )

    def forward(self, last_hidden_dim: torch.Tensor):
        coefs = self.head(last_hidden_dim)
        return HeadOutputs(coefs=coefs)


class P2LModel(Qwen2PreTrainedModel):
    def __init__(
        self,
        config,
        CLS_id,
        num_models,
        head_kwargs={},
        **kwargs,
    ):
        super().__init__(config)

        self.num_models = num_models
        self.cls_token_id = CLS_id

        self.model = Qwen2Model(config)

        self.head = BTHead(
            input_dim=config.hidden_size,
            output_dim=self.num_models,
            **head_kwargs,
        )

        self.post_init()

    def freeze_transformer(self):
        for param in self.model.parameters():
            param.requires_grad = False

    def get_input_embeddings(self):
        return self.model.embed_tokens

    def set_input_embeddings(self, value):
        self.model.embed_tokens = value

    def forward(self, input_ids, attention_mask, labels=None, weights=None):
        batch_size = input_ids.shape[0]

        hidden_outputs = self.model(
            input_ids=input_ids,
            attention_mask=attention_mask,
            output_hidden_states=False,
        ).last_hidden_state  # (bs, num_token, embed_dim)

        # Select the hidden state at the CLS token position of each sequence;
        # every input is expected to contain exactly one CLS token.
        cls_mask = input_ids == self.cls_token_id
        cls_hidden_dim = hidden_outputs[cls_mask]

        assert (
            cls_hidden_dim.shape[0] == batch_size
        ), f"input ids {input_ids.shape}, cls_mask {cls_mask.shape}, cls_logit {cls_hidden_dim.shape}"

        head_output = self.head(cls_hidden_dim)

        outputs = P2LOutputs(
            coefs=head_output.coefs,
            last_hidden_state=cls_hidden_dim,
            eta=head_output.eta,
            gamma=head_output.gamma,
        )

        return outputs


fname = hf_hub_download(
    repo_id="lmarena-ai/p2l-3b-bt-01132025", filename="model_list.json", repo_type="model"
)

with open(fname) as fin:
    model_list = json.load(fin)

tokenizer = AutoTokenizer.from_pretrained("lmarena-ai/p2l-3b-bt-01132025")
model = P2LModel.from_pretrained(
    "lmarena-ai/p2l-3b-bt-01132025",
    CLS_id=tokenizer.cls_token_id,
    num_models=len(model_list),
    torch_dtype=torch.bfloat16,
)
```
     | 
| 201 | 
         
            +
             
     | 
| 202 | 
         
            +
            ## Citation
         
     | 
| 203 | 
         
            +
             
     | 
| 204 | 
         
            +
            ```
         
     | 
| 205 | 
         
            +
            @misc{frick2025prompttoleaderboard,
         
     | 
| 206 | 
         
            +
                  title={Prompt-to-Leaderboard}, 
         
     | 
| 207 | 
         
            +
                  author={Evan Frick and Connor Chen and Joseph Tennyson and Tianle Li and Wei-Lin Chiang and Anastasios N. Angelopoulos and Ion Stoica},
         
     | 
| 208 | 
         
            +
                  year={2025},
         
     | 
| 209 | 
         
            +
                  eprint={2502.14855},
         
     | 
| 210 | 
         
            +
                  archivePrefix={arXiv},
         
     | 
| 211 | 
         
            +
                  primaryClass={cs.LG},
         
     | 
| 212 | 
         
            +
                  url={https://arxiv.org/abs/2502.14855}, 
         
     | 
| 213 | 
         
            +
            }
         
     | 
| 214 | 
         
            +
            ```
         
     | 
    	
added_tokens.json ADDED
@@ -0,0 +1,25 @@

```json
{
  "</tool_call>": 151658,
  "<tool_call>": 151657,
  "<|box_end|>": 151649,
  "<|box_start|>": 151648,
  "<|cls|>": 151665,
  "<|endoftext|>": 151643,
  "<|file_sep|>": 151664,
  "<|fim_middle|>": 151660,
  "<|fim_pad|>": 151662,
  "<|fim_prefix|>": 151659,
  "<|fim_suffix|>": 151661,
  "<|im_end|>": 151645,
  "<|im_start|>": 151644,
  "<|image_pad|>": 151655,
  "<|object_ref_end|>": 151647,
  "<|object_ref_start|>": 151646,
  "<|quad_end|>": 151651,
  "<|quad_start|>": 151650,
  "<|repo_name|>": 151663,
  "<|video_pad|>": 151656,
  "<|vision_end|>": 151653,
  "<|vision_pad|>": 151654,
  "<|vision_start|>": 151652
}
```
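
Note that `<|cls|>` (id 151665) is the classification token whose hidden state the P2L head reads. Assuming the tokenizer loaded in the README snippet above registers it as the `cls_token`, a quick sanity check:

```python
# "<|cls|>" should be registered as the tokenizer's cls_token for this repo.
assert tokenizer.cls_token == "<|cls|>"
assert tokenizer.cls_token_id == 151665  # matches added_tokens.json
```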
    	
config.json ADDED
@@ -0,0 +1,29 @@

```json
{
  "_name_or_path": "Qwen/Qwen2.5-3B-Instruct",
  "architectures": [
    "P2LModel"
  ],
  "attention_dropout": 0.0,
  "bos_token_id": 151643,
  "eos_token_id": 151645,
  "hidden_act": "silu",
  "hidden_size": 2048,
  "initializer_range": 0.02,
  "intermediate_size": 11008,
  "max_position_embeddings": 32768,
  "max_window_layers": 70,
  "model_type": "qwen2",
  "num_attention_heads": 16,
  "num_hidden_layers": 36,
  "num_key_value_heads": 2,
  "rms_norm_eps": 1e-06,
  "rope_scaling": null,
  "rope_theta": 1000000.0,
  "sliding_window": null,
  "tie_word_embeddings": true,
  "torch_dtype": "bfloat16",
  "transformers_version": "4.47.1",
  "use_cache": true,
  "use_sliding_window": false,
  "vocab_size": 151936
}
```
    	
merges.txt ADDED
The diff for this file is too large to render.
    	
model-00001-of-00002.safetensors ADDED
@@ -0,0 +1,3 @@

```
version https://git-lfs.github.com/spec/v1
oid sha256:399fe3b1f9dee8335773b02fe426bc0feee93154469a8878d1748703cabde380
size 4957560304
```
    	
model-00002-of-00002.safetensors ADDED
@@ -0,0 +1,3 @@

```
version https://git-lfs.github.com/spec/v1
oid sha256:36a87a0c90baf4a1fed1401cfb3cccc46aeda2cae301b10c60e93dd86853f295
size 1214899596
```
    	
model.safetensors.index.json ADDED
@@ -0,0 +1,443 @@
| 1 | 
         
            +
            {
         
     | 
| 2 | 
         
            +
              "metadata": {
         
     | 
| 3 | 
         
            +
                "total_size": 6172410116
         
     | 
| 4 | 
         
            +
              },
         
     | 
| 5 | 
         
            +
              "weight_map": {
         
     | 
| 6 | 
         
            +
                "head.head.bias": "model-00002-of-00002.safetensors",
         
     | 
| 7 | 
         
            +
                "head.head.weight": "model-00002-of-00002.safetensors",
         
     | 
| 8 | 
         
            +
                "model.embed_tokens.weight": "model-00001-of-00002.safetensors",
         
     | 
| 9 | 
         
            +
                "model.layers.0.input_layernorm.weight": "model-00001-of-00002.safetensors",
         
     | 
| 10 | 
         
            +
                "model.layers.0.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 11 | 
         
            +
                "model.layers.0.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 12 | 
         
            +
                "model.layers.0.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 13 | 
         
            +
                "model.layers.0.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
         
     | 
| 14 | 
         
            +
                "model.layers.0.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
         
     | 
| 15 | 
         
            +
                "model.layers.0.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 16 | 
         
            +
                "model.layers.0.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 17 | 
         
            +
                "model.layers.0.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
         
     | 
| 18 | 
         
            +
                "model.layers.0.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 19 | 
         
            +
                "model.layers.0.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
         
     | 
| 20 | 
         
            +
                "model.layers.0.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 21 | 
         
            +
                "model.layers.1.input_layernorm.weight": "model-00001-of-00002.safetensors",
         
     | 
| 22 | 
         
            +
                "model.layers.1.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 23 | 
         
            +
                "model.layers.1.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 24 | 
         
            +
                "model.layers.1.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 25 | 
         
            +
                "model.layers.1.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
         
     | 
| 26 | 
         
            +
                "model.layers.1.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
         
     | 
| 27 | 
         
            +
                "model.layers.1.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 28 | 
         
            +
                "model.layers.1.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 29 | 
         
            +
                "model.layers.1.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
         
     | 
| 30 | 
         
            +
                "model.layers.1.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 31 | 
         
            +
                "model.layers.1.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
         
     | 
| 32 | 
         
            +
                "model.layers.1.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 33 | 
         
            +
                "model.layers.10.input_layernorm.weight": "model-00001-of-00002.safetensors",
         
     | 
| 34 | 
         
            +
                "model.layers.10.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 35 | 
         
            +
                "model.layers.10.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 36 | 
         
            +
                "model.layers.10.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 37 | 
         
            +
                "model.layers.10.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
         
     | 
| 38 | 
         
            +
                "model.layers.10.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
         
     | 
| 39 | 
         
            +
                "model.layers.10.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 40 | 
         
            +
                "model.layers.10.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 41 | 
         
            +
                "model.layers.10.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
         
     | 
| 42 | 
         
            +
                "model.layers.10.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 43 | 
         
            +
                "model.layers.10.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
         
     | 
| 44 | 
         
            +
                "model.layers.10.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 45 | 
         
            +
                "model.layers.11.input_layernorm.weight": "model-00001-of-00002.safetensors",
         
     | 
| 46 | 
         
            +
                "model.layers.11.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 47 | 
         
            +
                "model.layers.11.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 48 | 
         
            +
                "model.layers.11.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 49 | 
         
            +
                "model.layers.11.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
         
     | 
| 50 | 
         
            +
                "model.layers.11.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
         
     | 
| 51 | 
         
            +
                "model.layers.11.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 52 | 
         
            +
                "model.layers.11.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 53 | 
         
            +
                "model.layers.11.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
         
     | 
| 54 | 
         
            +
                "model.layers.11.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 55 | 
         
            +
                "model.layers.11.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
         
     | 
| 56 | 
         
            +
                "model.layers.11.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 57 | 
         
            +
                "model.layers.12.input_layernorm.weight": "model-00001-of-00002.safetensors",
         
     | 
| 58 | 
         
            +
                "model.layers.12.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 59 | 
         
            +
                "model.layers.12.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 60 | 
         
            +
                "model.layers.12.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 61 | 
         
            +
                "model.layers.12.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
         
     | 
| 62 | 
         
            +
                "model.layers.12.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
         
     | 
| 63 | 
         
            +
                "model.layers.12.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 64 | 
         
            +
                "model.layers.12.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 65 | 
         
            +
                "model.layers.12.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
         
     | 
| 66 | 
         
            +
                "model.layers.12.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 67 | 
         
            +
                "model.layers.12.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
         
     | 
| 68 | 
         
            +
                "model.layers.12.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 69 | 
         
            +
                "model.layers.13.input_layernorm.weight": "model-00001-of-00002.safetensors",
         
     | 
| 70 | 
         
            +
                "model.layers.13.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 71 | 
         
            +
                "model.layers.13.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 72 | 
         
            +
                "model.layers.13.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 73 | 
         
            +
                "model.layers.13.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
         
     | 
| 74 | 
         
            +
                "model.layers.13.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
         
     | 
| 75 | 
         
            +
                "model.layers.13.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 76 | 
         
            +
                "model.layers.13.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 77 | 
         
            +
                "model.layers.13.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
         
     | 
| 78 | 
         
            +
                "model.layers.13.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 79 | 
         
            +
                "model.layers.13.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
         
     | 
| 80 | 
         
            +
                "model.layers.13.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 81 | 
         
            +
                "model.layers.14.input_layernorm.weight": "model-00001-of-00002.safetensors",
         
     | 
| 82 | 
         
            +
                "model.layers.14.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 83 | 
         
            +
                "model.layers.14.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 84 | 
         
            +
                "model.layers.14.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 85 | 
         
            +
                "model.layers.14.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
         
     | 
| 86 | 
         
            +
                "model.layers.14.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
         
     | 
| 87 | 
         
            +
                "model.layers.14.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 88 | 
         
            +
                "model.layers.14.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 89 | 
         
            +
                "model.layers.14.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
         
     | 
| 90 | 
         
            +
                "model.layers.14.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 91 | 
         
            +
                "model.layers.14.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
         
     | 
| 92 | 
         
            +
                "model.layers.14.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 93 | 
         
            +
                "model.layers.15.input_layernorm.weight": "model-00001-of-00002.safetensors",
         
     | 
| 94 | 
         
            +
                "model.layers.15.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 95 | 
         
            +
                "model.layers.15.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 96 | 
         
            +
                "model.layers.15.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 97 | 
         
            +
                "model.layers.15.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
         
     | 
| 98 | 
         
            +
                "model.layers.15.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
         
     | 
| 99 | 
         
            +
                "model.layers.15.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 100 | 
         
            +
                "model.layers.15.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 101 | 
         
            +
                "model.layers.15.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
         
     | 
| 102 | 
         
            +
                "model.layers.15.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 103 | 
         
            +
                "model.layers.15.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
         
     | 
| 104 | 
         
            +
                "model.layers.15.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 105 | 
         
            +
                "model.layers.16.input_layernorm.weight": "model-00001-of-00002.safetensors",
         
     | 
| 106 | 
         
            +
                "model.layers.16.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 107 | 
         
            +
                "model.layers.16.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 108 | 
         
            +
                "model.layers.16.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 109 | 
         
            +
                "model.layers.16.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
         
     | 
| 110 | 
         
            +
                "model.layers.16.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
         
     | 
| 111 | 
         
            +
                "model.layers.16.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 112 | 
         
            +
                "model.layers.16.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 113 | 
         
            +
                "model.layers.16.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
         
     | 
| 114 | 
         
            +
                "model.layers.16.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 115 | 
         
            +
                "model.layers.16.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
         
     | 
| 116 | 
         
            +
                "model.layers.16.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 117 | 
         
            +
                "model.layers.17.input_layernorm.weight": "model-00001-of-00002.safetensors",
         
     | 
| 118 | 
         
            +
                "model.layers.17.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 119 | 
         
            +
                "model.layers.17.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 120 | 
         
            +
                "model.layers.17.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 121 | 
         
            +
                "model.layers.17.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
         
     | 
| 122 | 
         
            +
                "model.layers.17.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
         
     | 
| 123 | 
         
            +
                "model.layers.17.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 124 | 
         
            +
                "model.layers.17.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 125 | 
         
            +
                "model.layers.17.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
         
     | 
| 126 | 
         
            +
                "model.layers.17.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 127 | 
         
            +
                "model.layers.17.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
         
     | 
| 128 | 
         
            +
                "model.layers.17.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 129 | 
         
            +
                "model.layers.18.input_layernorm.weight": "model-00001-of-00002.safetensors",
         
     | 
| 130 | 
         
            +
                "model.layers.18.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 131 | 
         
            +
                "model.layers.18.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 132 | 
         
            +
                "model.layers.18.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 133 | 
         
            +
                "model.layers.18.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
         
     | 
| 134 | 
         
            +
                "model.layers.18.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
         
     | 
| 135 | 
         
            +
                "model.layers.18.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 136 | 
         
            +
                "model.layers.18.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 137 | 
         
            +
                "model.layers.18.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
         
     | 
| 138 | 
         
            +
                "model.layers.18.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 139 | 
         
            +
                "model.layers.18.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
         
     | 
| 140 | 
         
            +
                "model.layers.18.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 141 | 
         
            +
                "model.layers.19.input_layernorm.weight": "model-00001-of-00002.safetensors",
         
     | 
| 142 | 
         
            +
                "model.layers.19.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 143 | 
         
            +
                "model.layers.19.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 144 | 
         
            +
                "model.layers.19.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 145 | 
         
            +
                "model.layers.19.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
         
     | 
| 146 | 
         
            +
                "model.layers.19.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
         
     | 
| 147 | 
         
            +
                "model.layers.19.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 148 | 
         
            +
                "model.layers.19.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 149 | 
         
            +
                "model.layers.19.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
         
     | 
| 150 | 
         
            +
                "model.layers.19.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 151 | 
         
            +
                "model.layers.19.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
         
     | 
| 152 | 
         
            +
                "model.layers.19.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 153 | 
         
            +
                "model.layers.2.input_layernorm.weight": "model-00001-of-00002.safetensors",
         
     | 
| 154 | 
         
            +
                "model.layers.2.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 155 | 
         
            +
                "model.layers.2.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 156 | 
         
            +
                "model.layers.2.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 157 | 
         
            +
                "model.layers.2.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
         
     | 
| 158 | 
         
            +
                "model.layers.2.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
         
     | 
| 159 | 
         
            +
                "model.layers.2.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 160 | 
         
            +
                "model.layers.2.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 161 | 
         
            +
                "model.layers.2.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
         
     | 
| 162 | 
         
            +
                "model.layers.2.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 163 | 
         
            +
                "model.layers.2.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
         
     | 
| 164 | 
         
            +
                "model.layers.2.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 165 | 
         
            +
                "model.layers.20.input_layernorm.weight": "model-00001-of-00002.safetensors",
         
     | 
| 166 | 
         
            +
                "model.layers.20.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 167 | 
         
            +
                "model.layers.20.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 168 | 
         
            +
                "model.layers.20.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 169 | 
         
            +
                "model.layers.20.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
         
     | 
| 170 | 
         
            +
                "model.layers.20.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
         
     | 
| 171 | 
         
            +
                "model.layers.20.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 172 | 
         
            +
                "model.layers.20.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 173 | 
         
            +
                "model.layers.20.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
         
     | 
| 174 | 
         
            +
                "model.layers.20.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 175 | 
         
            +
                "model.layers.20.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
         
     | 
| 176 | 
         
            +
                "model.layers.20.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 177 | 
         
            +
                "model.layers.21.input_layernorm.weight": "model-00001-of-00002.safetensors",
         
     | 
| 178 | 
         
            +
                "model.layers.21.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 179 | 
         
            +
                "model.layers.21.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 180 | 
         
            +
                "model.layers.21.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 181 | 
         
            +
                "model.layers.21.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
         
     | 
| 182 | 
         
            +
                "model.layers.21.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
         
     | 
| 183 | 
         
            +
                "model.layers.21.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 184 | 
         
            +
                "model.layers.21.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 185 | 
         
            +
                "model.layers.21.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
         
     | 
| 186 | 
         
            +
                "model.layers.21.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 187 | 
         
            +
                "model.layers.21.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
         
     | 
| 188 | 
         
            +
                "model.layers.21.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 189 | 
         
            +
                "model.layers.22.input_layernorm.weight": "model-00001-of-00002.safetensors",
         
     | 
| 190 | 
         
            +
                "model.layers.22.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 191 | 
         
            +
                "model.layers.22.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 192 | 
         
            +
                "model.layers.22.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 193 | 
         
            +
                "model.layers.22.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
         
     | 
| 194 | 
         
            +
                "model.layers.22.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
         
     | 
| 195 | 
         
            +
                "model.layers.22.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 196 | 
         
            +
                "model.layers.22.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 197 | 
         
            +
                "model.layers.22.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
         
     | 
| 198 | 
         
            +
                "model.layers.22.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 199 | 
         
            +
                "model.layers.22.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
         
     | 
| 200 | 
         
            +
                "model.layers.22.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 201 | 
         
            +
                "model.layers.23.input_layernorm.weight": "model-00001-of-00002.safetensors",
         
     | 
| 202 | 
         
            +
                "model.layers.23.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 203 | 
         
            +
                "model.layers.23.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 204 | 
         
            +
                "model.layers.23.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 205 | 
         
            +
                "model.layers.23.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
         
     | 
| 206 | 
         
            +
                "model.layers.23.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
         
     | 
| 207 | 
         
            +
                "model.layers.23.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 208 | 
         
            +
                "model.layers.23.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 209 | 
         
            +
                "model.layers.23.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
         
     | 
| 210 | 
         
            +
                "model.layers.23.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 211 | 
         
            +
                "model.layers.23.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
         
     | 
| 212 | 
         
            +
                "model.layers.23.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 213 | 
         
            +
                "model.layers.24.input_layernorm.weight": "model-00001-of-00002.safetensors",
         
     | 
| 214 | 
         
            +
                "model.layers.24.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 215 | 
         
            +
                "model.layers.24.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 216 | 
         
            +
                "model.layers.24.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 217 | 
         
            +
                "model.layers.24.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
         
     | 
| 218 | 
         
            +
                "model.layers.24.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
         
     | 
| 219 | 
         
            +
                "model.layers.24.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 220 | 
         
            +
                "model.layers.24.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 221 | 
         
            +
                "model.layers.24.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
         
     | 
| 222 | 
         
            +
                "model.layers.24.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 223 | 
         
            +
                "model.layers.24.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
         
     | 
| 224 | 
         
            +
                "model.layers.24.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 225 | 
         
            +
                "model.layers.25.input_layernorm.weight": "model-00001-of-00002.safetensors",
         
     | 
| 226 | 
         
            +
                "model.layers.25.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 227 | 
         
            +
                "model.layers.25.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 228 | 
         
            +
                "model.layers.25.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 229 | 
         
            +
                "model.layers.25.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
         
     | 
| 230 | 
         
            +
                "model.layers.25.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
         
     | 
| 231 | 
         
            +
                "model.layers.25.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 232 | 
         
            +
                "model.layers.25.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 233 | 
         
            +
                "model.layers.25.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
         
     | 
| 234 | 
         
            +
                "model.layers.25.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 235 | 
         
            +
                "model.layers.25.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
         
     | 
| 236 | 
         
            +
                "model.layers.25.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 237 | 
         
            +
                "model.layers.26.input_layernorm.weight": "model-00001-of-00002.safetensors",
         
     | 
| 238 | 
         
            +
                "model.layers.26.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 239 | 
         
            +
                "model.layers.26.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 240 | 
         
            +
                "model.layers.26.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 241 | 
         
            +
                "model.layers.26.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
         
     | 
| 242 | 
         
            +
                "model.layers.26.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
         
     | 
| 243 | 
         
            +
                "model.layers.26.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 244 | 
         
            +
                "model.layers.26.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 245 | 
         
            +
                "model.layers.26.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
         
     | 
| 246 | 
         
            +
                "model.layers.26.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 247 | 
         
            +
                "model.layers.26.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
         
     | 
| 248 | 
         
            +
                "model.layers.26.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 249 | 
         
            +
                "model.layers.27.input_layernorm.weight": "model-00001-of-00002.safetensors",
         
     | 
| 250 | 
         
            +
                "model.layers.27.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 251 | 
         
            +
                "model.layers.27.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 252 | 
         
            +
                "model.layers.27.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 253 | 
         
            +
                "model.layers.27.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
         
     | 
| 254 | 
         
            +
                "model.layers.27.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
         
     | 
| 255 | 
         
            +
                "model.layers.27.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 256 | 
         
            +
                "model.layers.27.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 257 | 
         
            +
                "model.layers.27.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
         
     | 
| 258 | 
         
            +
                "model.layers.27.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 259 | 
         
            +
                "model.layers.27.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
         
     | 
| 260 | 
         
            +
                "model.layers.27.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 261 | 
         
            +
                "model.layers.28.input_layernorm.weight": "model-00002-of-00002.safetensors",
         
     | 
| 262 | 
         
            +
                "model.layers.28.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
         
     | 
| 263 | 
         
            +
                "model.layers.28.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
         
     | 
| 264 | 
         
            +
                "model.layers.28.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
         
     | 
| 265 | 
         
            +
                "model.layers.28.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
         
     | 
| 266 | 
         
            +
                "model.layers.28.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
         
     | 
| 267 | 
         
            +
                "model.layers.28.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 268 | 
         
            +
                "model.layers.28.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 269 | 
         
            +
                "model.layers.28.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
         
     | 
| 270 | 
         
            +
                "model.layers.28.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 271 | 
         
            +
                "model.layers.28.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
         
     | 
| 272 | 
         
            +
                "model.layers.28.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 273 | 
         
            +
                "model.layers.29.input_layernorm.weight": "model-00002-of-00002.safetensors",
         
     | 
| 274 | 
         
            +
                "model.layers.29.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
         
     | 
| 275 | 
         
            +
                "model.layers.29.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
         
     | 
| 276 | 
         
            +
                "model.layers.29.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
         
     | 
| 277 | 
         
            +
                "model.layers.29.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
         
     | 
| 278 | 
         
            +
                "model.layers.29.self_attn.k_proj.bias": "model-00002-of-00002.safetensors",
         
     | 
| 279 | 
         
            +
                "model.layers.29.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
         
     | 
| 280 | 
         
            +
                "model.layers.29.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
         
     | 
| 281 | 
         
            +
                "model.layers.29.self_attn.q_proj.bias": "model-00002-of-00002.safetensors",
         
     | 
| 282 | 
         
            +
                "model.layers.29.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
         
     | 
| 283 | 
         
            +
                "model.layers.29.self_attn.v_proj.bias": "model-00002-of-00002.safetensors",
         
     | 
| 284 | 
         
            +
                "model.layers.29.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
         
     | 
| 285 | 
         
            +
                "model.layers.3.input_layernorm.weight": "model-00001-of-00002.safetensors",
         
     | 
| 286 | 
         
            +
                "model.layers.3.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 287 | 
         
            +
                "model.layers.3.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 288 | 
         
            +
                "model.layers.3.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 289 | 
         
            +
                "model.layers.3.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
         
     | 
| 290 | 
         
            +
                "model.layers.3.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
         
     | 
| 291 | 
         
            +
                "model.layers.3.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 292 | 
         
            +
                "model.layers.3.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 293 | 
         
            +
                "model.layers.3.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
         
     | 
| 294 | 
         
            +
                "model.layers.3.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 295 | 
         
            +
                "model.layers.3.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
         
     | 
| 296 | 
         
            +
                "model.layers.3.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 297 | 
         
            +
                "model.layers.30.input_layernorm.weight": "model-00002-of-00002.safetensors",
         
     | 
| 298 | 
         
            +
                "model.layers.30.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
         
     | 
| 299 | 
         
            +
                "model.layers.30.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
         
     | 
| 300 | 
         
            +
                "model.layers.30.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
         
     | 
| 301 | 
         
            +
                "model.layers.30.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
         
     | 
| 302 | 
         
            +
                "model.layers.30.self_attn.k_proj.bias": "model-00002-of-00002.safetensors",
         
     | 
| 303 | 
         
            +
                "model.layers.30.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
         
     | 
| 304 | 
         
            +
                "model.layers.30.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
         
     | 
| 305 | 
         
            +
                "model.layers.30.self_attn.q_proj.bias": "model-00002-of-00002.safetensors",
         
     | 
| 306 | 
         
            +
                "model.layers.30.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
         
     | 
| 307 | 
         
            +
                "model.layers.30.self_attn.v_proj.bias": "model-00002-of-00002.safetensors",
         
     | 
| 308 | 
         
            +
                "model.layers.30.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
         
     | 
| 309 | 
         
            +
                "model.layers.31.input_layernorm.weight": "model-00002-of-00002.safetensors",
         
     | 
| 310 | 
         
            +
                "model.layers.31.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
         
     | 
| 311 | 
         
            +
                "model.layers.31.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
         
     | 
| 312 | 
         
            +
                "model.layers.31.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
         
     | 
| 313 | 
         
            +
                "model.layers.31.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
         
     | 
| 314 | 
         
            +
                "model.layers.31.self_attn.k_proj.bias": "model-00002-of-00002.safetensors",
         
     | 
| 315 | 
         
            +
                "model.layers.31.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
         
     | 
| 316 | 
         
            +
                "model.layers.31.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
         
     | 
| 317 | 
         
            +
                "model.layers.31.self_attn.q_proj.bias": "model-00002-of-00002.safetensors",
         
     | 
| 318 | 
         
            +
                "model.layers.31.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
         
     | 
| 319 | 
         
            +
                "model.layers.31.self_attn.v_proj.bias": "model-00002-of-00002.safetensors",
         
     | 
| 320 | 
         
            +
                "model.layers.31.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
         
     | 
| 321 | 
         
            +
                "model.layers.32.input_layernorm.weight": "model-00002-of-00002.safetensors",
         
     | 
| 322 | 
         
            +
                "model.layers.32.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
         
     | 
| 323 | 
         
            +
                "model.layers.32.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
         
     | 
| 324 | 
         
            +
                "model.layers.32.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
         
     | 
| 325 | 
         
            +
                "model.layers.32.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
         
     | 
| 326 | 
         
            +
                "model.layers.32.self_attn.k_proj.bias": "model-00002-of-00002.safetensors",
         
     | 
| 327 | 
         
            +
                "model.layers.32.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
         
     | 
| 328 | 
         
            +
                "model.layers.32.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
         
     | 
| 329 | 
         
            +
                "model.layers.32.self_attn.q_proj.bias": "model-00002-of-00002.safetensors",
         
     | 
| 330 | 
         
            +
                "model.layers.32.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
         
     | 
| 331 | 
         
            +
                "model.layers.32.self_attn.v_proj.bias": "model-00002-of-00002.safetensors",
         
     | 
| 332 | 
         
            +
                "model.layers.32.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
         
     | 
| 333 | 
         
            +
                "model.layers.33.input_layernorm.weight": "model-00002-of-00002.safetensors",
         
     | 
| 334 | 
         
            +
                "model.layers.33.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
         
     | 
| 335 | 
         
            +
                "model.layers.33.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
         
     | 
| 336 | 
         
            +
                "model.layers.33.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
         
     | 
| 337 | 
         
            +
                "model.layers.33.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
         
     | 
| 338 | 
         
            +
                "model.layers.33.self_attn.k_proj.bias": "model-00002-of-00002.safetensors",
         
     | 
| 339 | 
         
            +
                "model.layers.33.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
         
     | 
| 340 | 
         
            +
                "model.layers.33.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
         
     | 
| 341 | 
         
            +
                "model.layers.33.self_attn.q_proj.bias": "model-00002-of-00002.safetensors",
         
     | 
| 342 | 
         
            +
                "model.layers.33.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
         
     | 
| 343 | 
         
            +
                "model.layers.33.self_attn.v_proj.bias": "model-00002-of-00002.safetensors",
         
     | 
| 344 | 
         
            +
                "model.layers.33.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
         
     | 
| 345 | 
         
            +
                "model.layers.34.input_layernorm.weight": "model-00002-of-00002.safetensors",
         
     | 
| 346 | 
         
            +
                "model.layers.34.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
         
     | 
| 347 | 
         
            +
                "model.layers.34.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
         
     | 
| 348 | 
         
            +
                "model.layers.34.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
         
     | 
| 349 | 
         
            +
                "model.layers.34.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
         
     | 
| 350 | 
         
            +
                "model.layers.34.self_attn.k_proj.bias": "model-00002-of-00002.safetensors",
         
     | 
| 351 | 
         
            +
                "model.layers.34.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
         
     | 
| 352 | 
         
            +
                "model.layers.34.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
         
     | 
| 353 | 
         
            +
                "model.layers.34.self_attn.q_proj.bias": "model-00002-of-00002.safetensors",
         
     | 
| 354 | 
         
            +
                "model.layers.34.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
         
     | 
| 355 | 
         
            +
                "model.layers.34.self_attn.v_proj.bias": "model-00002-of-00002.safetensors",
         
     | 
| 356 | 
         
            +
                "model.layers.34.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
         
     | 
| 357 | 
         
            +
                "model.layers.35.input_layernorm.weight": "model-00002-of-00002.safetensors",
         
     | 
| 358 | 
         
            +
                "model.layers.35.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
         
     | 
| 359 | 
         
            +
                "model.layers.35.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
         
     | 
| 360 | 
         
            +
                "model.layers.35.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
         
     | 
| 361 | 
         
            +
                "model.layers.35.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
         
     | 
| 362 | 
         
            +
                "model.layers.35.self_attn.k_proj.bias": "model-00002-of-00002.safetensors",
         
     | 
| 363 | 
         
            +
                "model.layers.35.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
         
     | 
| 364 | 
         
            +
                "model.layers.35.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
         
     | 
| 365 | 
         
            +
                "model.layers.35.self_attn.q_proj.bias": "model-00002-of-00002.safetensors",
         
     | 
| 366 | 
         
            +
                "model.layers.35.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
         
     | 
| 367 | 
         
            +
                "model.layers.35.self_attn.v_proj.bias": "model-00002-of-00002.safetensors",
         
     | 
| 368 | 
         
            +
                "model.layers.35.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
         
     | 
| 369 | 
         
            +
                "model.layers.4.input_layernorm.weight": "model-00001-of-00002.safetensors",
         
     | 
| 370 | 
         
            +
                "model.layers.4.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 371 | 
         
            +
                "model.layers.4.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 372 | 
         
            +
                "model.layers.4.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 373 | 
         
            +
                "model.layers.4.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
         
     | 
| 374 | 
         
            +
                "model.layers.4.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
         
     | 
| 375 | 
         
            +
                "model.layers.4.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 376 | 
         
            +
                "model.layers.4.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 377 | 
         
            +
                "model.layers.4.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
         
     | 
| 378 | 
         
            +
                "model.layers.4.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 379 | 
         
            +
                "model.layers.4.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
         
     | 
| 380 | 
         
            +
                "model.layers.4.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 381 | 
         
            +
                "model.layers.5.input_layernorm.weight": "model-00001-of-00002.safetensors",
         
     | 
| 382 | 
         
            +
                "model.layers.5.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 383 | 
         
            +
                "model.layers.5.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 384 | 
         
            +
                "model.layers.5.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 385 | 
         
            +
                "model.layers.5.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
         
     | 
| 386 | 
         
            +
                "model.layers.5.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
         
     | 
| 387 | 
         
            +
                "model.layers.5.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 388 | 
         
            +
                "model.layers.5.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 389 | 
         
            +
                "model.layers.5.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
         
     | 
| 390 | 
         
            +
                "model.layers.5.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 391 | 
         
            +
                "model.layers.5.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
         
     | 
| 392 | 
         
            +
                "model.layers.5.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 393 | 
         
            +
                "model.layers.6.input_layernorm.weight": "model-00001-of-00002.safetensors",
         
     | 
| 394 | 
         
            +
                "model.layers.6.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 395 | 
         
            +
                "model.layers.6.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 396 | 
         
            +
                "model.layers.6.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 397 | 
         
            +
                "model.layers.6.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
         
     | 
| 398 | 
         
            +
                "model.layers.6.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
         
     | 
| 399 | 
         
            +
                "model.layers.6.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 400 | 
         
            +
                "model.layers.6.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 401 | 
         
            +
                "model.layers.6.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
         
     | 
| 402 | 
         
            +
                "model.layers.6.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 403 | 
         
            +
                "model.layers.6.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
         
     | 
| 404 | 
         
            +
                "model.layers.6.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 405 | 
         
            +
                "model.layers.7.input_layernorm.weight": "model-00001-of-00002.safetensors",
         
     | 
| 406 | 
         
            +
                "model.layers.7.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 407 | 
         
            +
                "model.layers.7.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 408 | 
         
            +
                "model.layers.7.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 409 | 
         
            +
                "model.layers.7.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
         
     | 
| 410 | 
         
            +
                "model.layers.7.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
         
     | 
| 411 | 
         
            +
                "model.layers.7.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 412 | 
         
            +
                "model.layers.7.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 413 | 
         
            +
                "model.layers.7.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
         
     | 
| 414 | 
         
            +
                "model.layers.7.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 415 | 
         
            +
                "model.layers.7.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
         
     | 
| 416 | 
         
            +
                "model.layers.7.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 417 | 
         
            +
                "model.layers.8.input_layernorm.weight": "model-00001-of-00002.safetensors",
         
     | 
| 418 | 
         
            +
                "model.layers.8.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 419 | 
         
            +
                "model.layers.8.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 420 | 
         
            +
                "model.layers.8.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 421 | 
         
            +
                "model.layers.8.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
         
     | 
| 422 | 
         
            +
                "model.layers.8.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
         
     | 
| 423 | 
         
            +
                "model.layers.8.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 424 | 
         
            +
                "model.layers.8.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 425 | 
         
            +
                "model.layers.8.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
         
     | 
| 426 | 
         
            +
                "model.layers.8.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 427 | 
         
            +
                "model.layers.8.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
         
     | 
| 428 | 
         
            +
                "model.layers.8.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 429 | 
         
            +
                "model.layers.9.input_layernorm.weight": "model-00001-of-00002.safetensors",
         
     | 
| 430 | 
         
            +
                "model.layers.9.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 431 | 
         
            +
                "model.layers.9.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 432 | 
         
            +
                "model.layers.9.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 433 | 
         
            +
                "model.layers.9.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
         
     | 
| 434 | 
         
            +
                "model.layers.9.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
         
     | 
| 435 | 
         
            +
                "model.layers.9.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 436 | 
         
            +
                "model.layers.9.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 437 | 
         
            +
                "model.layers.9.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
         
     | 
| 438 | 
         
            +
                "model.layers.9.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 439 | 
         
            +
                "model.layers.9.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
         
     | 
| 440 | 
         
            +
                "model.layers.9.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
         
     | 
| 441 | 
         
            +
                "model.norm.weight": "model-00002-of-00002.safetensors"
         
     | 
| 442 | 
         
            +
              }
         
     | 
| 443 | 
         
            +
            }
         
     | 
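Note on the weight map above: a sharded safetensors checkpoint indexes every parameter name to the shard file that stores it, and a layer can straddle shards — here layer 28's attention projections live in model-00001-of-00002.safetensors while its MLP and norm weights live in model-00002-of-00002.safetensors. `transformers.AutoModel.from_pretrained` resolves this index automatically; purely for illustration, a minimal sketch of reading it by hand with the `safetensors` library (the local `repo_dir` path is a placeholder; the file names match this commit):

```python
import json
from collections import defaultdict
from safetensors import safe_open

repo_dir = "."  # placeholder: path to a local clone of this repo

# weight_map: parameter name -> shard file, as reconstructed above.
with open(f"{repo_dir}/model.safetensors.index.json") as f:
    weight_map = json.load(f)["weight_map"]

# Group parameters by shard so each file is opened only once.
by_shard = defaultdict(list)
for name, shard in weight_map.items():
    by_shard[shard].append(name)

state_dict = {}
for shard, names in by_shard.items():
    # safe_open memory-maps the shard; tensors are read lazily on request.
    with safe_open(f"{repo_dir}/{shard}", framework="pt", device="cpu") as f:
        for name in names:
            state_dict[name] = f.get_tensor(name)

print(len(state_dict), "tensors loaded")
```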
    	
model_list.json
ADDED

@@ -0,0 +1,132 @@
| 1 | 
         
            +
            [
         
     | 
| 2 | 
         
            +
             "amazon-nova-lite-v1.0",
         
     | 
| 3 | 
         
            +
             "amazon-nova-micro-v1.0",
         
     | 
| 4 | 
         
            +
             "amazon-nova-pro-v1.0",
         
     | 
| 5 | 
         
            +
             "athene-70b-0725",
         
     | 
| 6 | 
         
            +
             "athene-v2-chat",
         
     | 
| 7 | 
         
            +
             "c4ai-aya-expanse-32b",
         
     | 
| 8 | 
         
            +
             "c4ai-aya-expanse-8b",
         
     | 
| 9 | 
         
            +
             "chatgpt-4o-latest-20240808",
         
     | 
| 10 | 
         
            +
             "chatgpt-4o-latest-20240903",
         
     | 
| 11 | 
         
            +
             "chatgpt-4o-latest-20241120",
         
     | 
| 12 | 
         
            +
             "claude-3-5-haiku-20241022",
         
     | 
| 13 | 
         
            +
             "claude-3-5-sonnet-20240620",
         
     | 
| 14 | 
         
            +
             "claude-3-5-sonnet-20241022",
         
     | 
| 15 | 
         
            +
             "claude-3-haiku-20240307",
         
     | 
| 16 | 
         
            +
             "claude-3-opus-20240229",
         
     | 
| 17 | 
         
            +
             "claude-3-sonnet-20240229",
         
     | 
| 18 | 
         
            +
             "codestral-2405",
         
     | 
| 19 | 
         
            +
             "command-r",
         
     | 
| 20 | 
         
            +
             "command-r-08-2024",
         
     | 
| 21 | 
         
            +
             "command-r-plus",
         
     | 
| 22 | 
         
            +
             "command-r-plus-08-2024",
         
     | 
| 23 | 
         
            +
             "dbrx-instruct-preview",
         
     | 
| 24 | 
         
            +
             "deepseek-coder-v2",
         
     | 
| 25 | 
         
            +
             "deepseek-coder-v2-0724",
         
     | 
| 26 | 
         
            +
             "deepseek-v2-api-0628",
         
     | 
| 27 | 
         
            +
             "deepseek-v2.5",
         
     | 
| 28 | 
         
            +
             "deepseek-v2.5-1210",
         
     | 
| 29 | 
         
            +
             "deepseek-v3",
         
     | 
| 30 | 
         
            +
             "gemini-1.5-flash-001",
         
     | 
| 31 | 
         
            +
             "gemini-1.5-flash-002",
         
     | 
| 32 | 
         
            +
             "gemini-1.5-flash-8b-001",
         
     | 
| 33 | 
         
            +
             "gemini-1.5-flash-8b-exp-0827",
         
     | 
| 34 | 
         
            +
             "gemini-1.5-flash-exp-0827",
         
     | 
| 35 | 
         
            +
             "gemini-1.5-pro-001",
         
     | 
| 36 | 
         
            +
             "gemini-1.5-pro-002",
         
     | 
| 37 | 
         
            +
             "gemini-1.5-pro-api-0409-preview",
         
     | 
| 38 | 
         
            +
             "gemini-1.5-pro-exp-0801",
         
     | 
| 39 | 
         
            +
             "gemini-1.5-pro-exp-0827",
         
     | 
| 40 | 
         
            +
             "gemini-2.0-flash-exp",
         
     | 
| 41 | 
         
            +
             "gemini-2.0-flash-thinking-exp-1219",
         
     | 
| 42 | 
         
            +
             "gemini-advanced-0514",
         
     | 
| 43 | 
         
            +
             "gemini-exp-1114",
         
     | 
| 44 | 
         
            +
             "gemini-exp-1121",
         
     | 
| 45 | 
         
            +
             "gemini-exp-1206",
         
     | 
| 46 | 
         
            +
             "gemma-1.1-2b-it",
         
     | 
| 47 | 
         
            +
             "gemma-1.1-7b-it",
         
     | 
| 48 | 
         
            +
             "gemma-2-27b-it",
         
     | 
| 49 | 
         
            +
             "gemma-2-2b-it",
         
     | 
| 50 | 
         
            +
             "gemma-2-9b-it",
         
     | 
| 51 | 
         
            +
             "gemma-2-9b-it-simpo",
         
     | 
| 52 | 
         
            +
             "glm-4-0116",
         
     | 
| 53 | 
         
            +
             "glm-4-0520",
         
     | 
| 54 | 
         
            +
             "glm-4-plus",
         
     | 
| 55 | 
         
            +
             "gpt-3.5-turbo-0125",
         
     | 
| 56 | 
         
            +
             "gpt-4-0125-preview",
         
     | 
| 57 | 
         
            +
             "gpt-4-0314",
         
     | 
| 58 | 
         
            +
             "gpt-4-0613",
         
+  "gpt-4-1106-preview",
+  "gpt-4-turbo-2024-04-09",
+  "gpt-4o-2024-05-13",
+  "gpt-4o-2024-08-06",
+  "gpt-4o-mini-2024-07-18",
+  "granite-3.0-2b-instruct",
+  "granite-3.0-8b-instruct",
+  "grok-2-2024-08-13",
+  "grok-2-mini-2024-08-13",
+  "hunyuan-standard-256k",
+  "internlm2_5-20b-chat",
+  "jamba-1.5-large",
+  "jamba-1.5-mini",
+  "llama-2-13b-chat",
+  "llama-2-70b-chat",
+  "llama-3-70b-instruct",
+  "llama-3-8b-instruct",
+  "llama-3.1-405b-instruct-bf16",
+  "llama-3.1-405b-instruct-fp8",
+  "llama-3.1-70b-instruct",
+  "llama-3.1-8b-instruct",
+  "llama-3.1-nemotron-51b-instruct",
+  "llama-3.1-nemotron-70b-instruct",
+  "llama-3.1-tulu-3-70b",
+  "llama-3.1-tulu-3-8b",
+  "llama-3.2-1b-instruct",
+  "llama-3.2-3b-instruct",
+  "llama-3.3-70b-instruct",
+  "ministral-8b-2410",
+  "mistral-7b-instruct-v0.2",
+  "mistral-large-2402",
+  "mistral-large-2407",
+  "mistral-large-2411",
+  "mistral-medium",
+  "mixtral-8x22b-instruct-v0.1",
+  "mixtral-8x7b-instruct-v0.1",
+  "nemotron-4-340b-instruct",
+  "o1-2024-12-17",
+  "o1-mini",
+  "o1-preview",
+  "phi-3-medium-4k-instruct",
+  "phi-3-mini-128k-instruct",
+  "phi-3-mini-4k-instruct",
+  "phi-3-mini-4k-instruct-june-2024",
+  "phi-3-small-8k-instruct",
+  "qwen-max-0428",
+  "qwen-max-0919",
+  "qwen-plus-0828",
+  "qwen1.5-110b-chat",
+  "qwen1.5-14b-chat",
+  "qwen1.5-32b-chat",
+  "qwen1.5-72b-chat",
+  "qwen2-72b-instruct",
+  "qwen2.5-72b-instruct",
+  "qwen2.5-coder-32b-instruct",
+  "qwen2.5-plus-1127",
+  "qwq-32b-preview",
+  "reka-core-20240501",
+  "reka-core-20240722",
+  "reka-core-20240904",
+  "reka-flash-20240722",
+  "reka-flash-20240904",
+  "reka-flash-21b-20240226",
+  "reka-flash-21b-20240226-online",
+  "reka-flash-preview-20240611",
+  "smollm2-1.7b-instruct",
+  "snowflake-arctic-instruct",
+  "yi-1.5-34b-chat",
+  "yi-34b-chat",
+  "yi-large",
+  "yi-large-preview",
+  "yi-lightning",
+  "yi-lightning-lite"
+]
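The lines above close model_list.json, the roster of arena models shipped with this checkpoint. A minimal sketch of reading it in Python; the name-to-index mapping is a hypothetical convention for illustration, not something this commit specifies:

    import json

    # Load the roster of models shipped with the checkpoint.
    with open("model_list.json") as f:
        model_list = json.load(f)

    # Hypothetical: map each model name to its position in the list,
    # e.g. to align names with per-model output coefficients.
    model_to_idx = {name: i for i, name in enumerate(model_list)}
    print(len(model_list))
    print(model_to_idx["qwen2.5-72b-instruct"])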
    	
special_tokens_map.json
ADDED
@@ -0,0 +1,38 @@
+{
+  "additional_special_tokens": [
+    "<|im_start|>",
+    "<|im_end|>",
+    "<|object_ref_start|>",
+    "<|object_ref_end|>",
+    "<|box_start|>",
+    "<|box_end|>",
+    "<|quad_start|>",
+    "<|quad_end|>",
+    "<|vision_start|>",
+    "<|vision_end|>",
+    "<|vision_pad|>",
+    "<|image_pad|>",
+    "<|video_pad|>"
+  ],
+  "cls_token": {
+    "content": "<|cls|>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "eos_token": {
+    "content": "<|im_end|>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "pad_token": {
+    "content": "<|endoftext|>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  }
+}
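A quick way to confirm these mappings after loading the tokenizer with transformers (a sketch; the repo id is a placeholder):

    from transformers import AutoTokenizer

    tok = AutoTokenizer.from_pretrained("path/to/this/repo")  # placeholder id

    # These attributes should mirror special_tokens_map.json above.
    print(tok.eos_token)  # "<|im_end|>"
    print(tok.pad_token)  # "<|endoftext|>"
    print(tok.cls_token)  # "<|cls|>"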
    	
tokenizer.json
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4ec927f7a1845161c33d3a6c42ae6aad8e4d597b1fe282ca37b5c794f2498d42
+size 11422346
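tokenizer.json is tracked with Git LFS, so the repository stores only the pointer above. A sketch for verifying a fetched copy against the pointer's oid and size (assumes the real file has been pulled, e.g. via git lfs pull):

    import hashlib
    import os

    path = "tokenizer.json"
    assert os.path.getsize(path) == 11422346
    digest = hashlib.sha256(open(path, "rb").read()).hexdigest()
    assert digest == "4ec927f7a1845161c33d3a6c42ae6aad8e4d597b1fe282ca37b5c794f2498d42"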
    	
tokenizer_config.json
ADDED
@@ -0,0 +1,217 @@
+{
+  "add_bos_token": false,
+  "add_prefix_space": false,
+  "added_tokens_decoder": {
+    "151643": {
+      "content": "<|endoftext|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151644": {
+      "content": "<|im_start|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151645": {
+      "content": "<|im_end|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151646": {
+      "content": "<|object_ref_start|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151647": {
+      "content": "<|object_ref_end|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151648": {
+      "content": "<|box_start|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151649": {
+      "content": "<|box_end|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151650": {
+      "content": "<|quad_start|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151651": {
+      "content": "<|quad_end|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151652": {
+      "content": "<|vision_start|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151653": {
+      "content": "<|vision_end|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151654": {
+      "content": "<|vision_pad|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151655": {
+      "content": "<|image_pad|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151656": {
+      "content": "<|video_pad|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151657": {
+      "content": "<tool_call>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "151658": {
+      "content": "</tool_call>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "151659": {
+      "content": "<|fim_prefix|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "151660": {
+      "content": "<|fim_middle|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "151661": {
+      "content": "<|fim_suffix|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "151662": {
+      "content": "<|fim_pad|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "151663": {
+      "content": "<|repo_name|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "151664": {
+      "content": "<|file_sep|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "151665": {
+      "content": "<|cls|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    }
+  },
+  "additional_special_tokens": [
+    "<|im_start|>",
+    "<|im_end|>",
+    "<|object_ref_start|>",
+    "<|object_ref_end|>",
+    "<|box_start|>",
+    "<|box_end|>",
+    "<|quad_start|>",
+    "<|quad_end|>",
+    "<|vision_start|>",
+    "<|vision_end|>",
+    "<|vision_pad|>",
+    "<|image_pad|>",
+    "<|video_pad|>"
+  ],
+  "bos_token": null,
+  "chat_template": "{%- if messages[0]['role'] == 'system' %}\n    {{- '<|im_start|>system\\n' + messages[0]['content'] + '<|im_end|>\\n' }}\n{%- endif %}\n{%- for message in messages %}\n    {%- if (message.role == \"user\") or (message.role == \"system\" and not loop.first) or (message.role == \"assistant\" and not message.tool_calls) %}\n        {{- '<|im_start|>' + message.role + '\\n' + message.content + '<|im_end|>' + '\\n' }}\n    {%- elif message.role == \"assistant\" %}\n        {{- '<|im_start|>' + message.role }}\n        {%- if message.content %}\n            {{- '\\n' + message.content }}\n        {%- endif %}\n        {%- for tool_call in message.tool_calls %}\n            {%- if tool_call.function is defined %}\n                {%- set tool_call = tool_call.function %}\n            {%- endif %}\n            {{- '\\n<tool_call>\\n{\"name\": \"' }}\n            {{- tool_call.name }}\n            {{- '\", \"arguments\": ' }}\n            {{- tool_call.arguments | tojson }}\n            {{- '}\\n</tool_call>' }}\n        {%- endfor %}\n        {{- '<|im_end|>\\n' }}\n    {%- elif message.role == \"tool\" %}\n        {%- if (loop.index0 == 0) or (messages[loop.index0 - 1].role != \"tool\") %}\n            {{- '<|im_start|>user' }}\n        {%- endif %}\n        {{- '\\n<tool_response>\\n' }}\n        {{- message.content }}\n        {{- '\\n</tool_response>' }}\n        {%- if loop.last or (messages[loop.index0 + 1].role != \"tool\") %}\n            {{- '<|im_end|>\\n' }}\n        {%- endif %}\n    {%- endif %}\n{%- endfor %}\n{%- if add_generation_prompt %}\n    {{- '<|im_start|>assistant\\n' }}\n{%- endif %}\n\n",
+  "clean_up_tokenization_spaces": false,
+  "cls_token": "<|cls|>",
+  "eos_token": "<|im_end|>",
+  "errors": "replace",
+  "extra_special_tokens": {},
+  "model_max_length": 131072,
+  "pad_token": "<|endoftext|>",
+  "split_special_tokens": false,
+  "tokenizer_class": "Qwen2Tokenizer",
+  "unk_token": null
+}
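The chat_template above is Qwen2.5's ChatML-style template with tool-call support. A minimal sketch of rendering a conversation through it (placeholder repo id):

    from transformers import AutoTokenizer

    tok = AutoTokenizer.from_pretrained("path/to/this/repo")  # placeholder id
    messages = [
        {"role": "system", "content": "You are a helpful assistant."},
        {"role": "user", "content": "Hello!"},
    ]
    text = tok.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
    # Yields "<|im_start|>system\n...<|im_end|>\n<|im_start|>user\n...<|im_end|>\n<|im_start|>assistant\n"
    print(text)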
    	
training_args.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:247e86583bbaade3df9fe6f1d2ee1b5d98bca41b5673d2aa22f9a03d4a2092ab
+size 7032
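training_args.bin is typically the pickled TrainingArguments object that the Hugging Face Trainer saves alongside checkpoints. A sketch of inspecting it; note that it unpickles arbitrary Python objects, so load it only from trusted sources:

    import torch

    # weights_only=False because this is a pickled dataclass, not a tensor file.
    args = torch.load("training_args.bin", weights_only=False)
    print(args)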
    	
training_config.json
ADDED
@@ -0,0 +1,21 @@
+{
+  "proj_name": "Qwen2.5-3B-Instruct-full-train",
+  "learning_rate": 4e-06,
+  "adam_epsilon": 1e-08,
+  "batch_size": 4,
+  "max_length": 8192,
+  "num_train_epochs": 1,
+  "train_data_path": "full-p2l-data",
+  "val_data_path": "p2el/canonical_bt_val_data_11092024",
+  "output_dir": "training_outputs",
+  "pretrain_model_name": "Qwen/Qwen2.5-3B-Instruct",
+  "gradient_accumulation_steps": 16,
+  "chat_template": "{%- if messages[0]['role'] == 'system' %}\n    {{- '<|im_start|>system\\n' + messages[0]['content'] + '<|im_end|>\\n' }}\n{%- endif %}\n{%- for message in messages %}\n    {%- if (message.role == \"user\") or (message.role == \"system\" and not loop.first) or (message.role == \"assistant\" and not message.tool_calls) %}\n        {{- '<|im_start|>' + message.role + '\\n' + message.content + '<|im_end|>' + '\\n' }}\n    {%- elif message.role == \"assistant\" %}\n        {{- '<|im_start|>' + message.role }}\n        {%- if message.content %}\n            {{- '\\n' + message.content }}\n        {%- endif %}\n        {%- for tool_call in message.tool_calls %}\n            {%- if tool_call.function is defined %}\n                {%- set tool_call = tool_call.function %}\n            {%- endif %}\n            {{- '\\n<tool_call>\\n{\"name\": \"' }}\n            {{- tool_call.name }}\n            {{- '\", \"arguments\": ' }}\n            {{- tool_call.arguments | tojson }}\n            {{- '}\\n</tool_call>' }}\n        {%- endfor %}\n        {{- '<|im_end|>\\n' }}\n    {%- elif message.role == \"tool\" %}\n        {%- if (loop.index0 == 0) or (messages[loop.index0 - 1].role != \"tool\") %}\n            {{- '<|im_start|>user' }}\n        {%- endif %}\n        {{- '\\n<tool_response>\\n' }}\n        {{- message.content }}\n        {{- '\\n</tool_response>' }}\n        {%- if loop.last or (messages[loop.index0 + 1].role != \"tool\") %}\n            {{- '<|im_end|>\\n' }}\n        {%- endif %}\n    {%- endif %}\n{%- endfor %}\n{%- if add_generation_prompt %}\n    {{- '<|im_start|>assistant\\n' }}\n{%- endif %}\n\n",
+  "model_type": "qwen2",
+  "head_type": "bt",
+  "loss_type": "bt_tie",
+  "weighted_loss": false,
+  "deepspeed_config_path": "deepspeed/zero1.json",
+  "init_type": "reset_params",
+  "load_train_data_from_disk": true
+}
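From the values above, each device processes batch_size × gradient_accumulation_steps = 4 × 16 = 64 samples per optimizer step; the global batch then scales with the number of data-parallel ranks under the DeepSpeed ZeRO-1 config. A sketch (the world size is an assumption, not recorded in this file):

    import json

    cfg = json.load(open("training_config.json"))
    per_device = cfg["batch_size"] * cfg["gradient_accumulation_steps"]  # 4 * 16 = 64
    world_size = 8  # assumption: GPU count is not stored in this config
    print("effective global batch size:", per_device * world_size)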
    	
vocab.json
ADDED
The diff for this file is too large to render. See raw diff.