Remove the `auto_map` as the model is integrated natively in transformers
Hey 👋
Congrats on the release, awesome to see!
The model has been integrated natively in `transformers` since version v4.51: https://github.com/huggingface/transformers/releases/tag/v4.51.0
If this R1 variant has no architectural change, you should be able to load it without passing the `trust_remote_code=True` option.
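For reference, a minimal loading sketch with `transformers` >= 4.51 is shown below; the repo id is a placeholder for this model and the prompt is purely illustrative:

```python
from transformers import AutoModelForCausalLM, AutoTokenizer

# Placeholder repo id -- substitute the actual id of this R1 variant.
model_id = "deepseek-ai/DeepSeek-R1"

# With native support in transformers >= 4.51 and the auto_map removed,
# no trust_remote_code=True should be needed here.
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(model_id)

# Quick sanity check that generation works with the native classes.
inputs = tokenizer("Hello!", return_tensors="pt")
outputs = model.generate(**inputs, max_new_tokens=20)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))
```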
- config.json  +0 -5

config.json CHANGED

@@ -4,11 +4,6 @@
   ],
   "attention_bias": false,
   "attention_dropout": 0.0,
-  "auto_map": {
-    "AutoConfig": "configuration_deepseek.DeepseekV3Config",
-    "AutoModel": "modeling_deepseek.DeepseekV3Model",
-    "AutoModelForCausalLM": "modeling_deepseek.DeepseekV3ForCausalLM"
-  },
   "bos_token_id": 0,
   "eos_token_id": 1,
   "ep_size": 1,