Upload config.json
config.json CHANGED (+3 -3)
@@ -6,7 +6,7 @@
   "attention_bias": false,
   "attention_dropout": 0.0,
   "bos_token_id": 128000,
-  "eos_token_id":
+  "eos_token_id": 128009,
   "head_dim": 128,
   "hidden_act": "silu",
   "hidden_size": 3072,
@@ -33,6 +33,6 @@
   "torch_dtype": "bfloat16",
   "transformers_version": "4.46.3",
   "unsloth_fixed": true,
-  "use_cache":
+  "use_cache": true,
   "vocab_size": 128256
-}
+}
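
For reference, a minimal sketch of how one might confirm the updated values after pulling this commit, assuming the file belongs to a Hugging Face model repo loadable with transformers; the repo id below is a placeholder, not taken from this diff.

from transformers import AutoConfig

# Placeholder repo id (assumption): substitute the actual repo that
# this config.json belongs to.
config = AutoConfig.from_pretrained("your-org/your-model")

# This commit sets the end-of-sequence token id to 128009 and turns the
# KV cache back on for generation.
assert config.eos_token_id == 128009
assert config.use_cache is True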

