Update config.json
config.json CHANGED (+3 -3)

@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "gradientai/llama3-
+  "_name_or_path": "gradientai/llama3-8b-stage262k-chat",
   "architectures": [
     "LlamaForCausalLM"
   ],
@@ -19,10 +19,10 @@
   "pretraining_tp": 1,
   "rms_norm_eps": 1e-05,
   "rope_scaling": null,
-  "rope_theta": 
+  "rope_theta": 3580165449.0,
   "tie_word_embeddings": false,
   "torch_dtype": "bfloat16",
-  "transformers_version": "4.
+  "transformers_version": "4.41.0.dev0",
   "use_cache": true,
   "vocab_size": 128256
 }
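The substantive change in this commit is the much larger `rope_theta`. As a minimal, hedged sketch (not part of this commit), the snippet below shows how that value enters the standard Llama-style RoPE inverse-frequency computation: raising the base stretches the rotary wavelengths, which is how long-context (262k) variants of this kind are typically tuned. The comparison value of 500000 and the head dimension of 128 are assumptions about the stock Llama-3-8B configuration, not values shown in this diff.

```python
# Sketch only: how rope_theta feeds RoPE inverse frequencies in a Llama-style model.
import torch

def rope_inv_freq(head_dim: int, rope_theta: float) -> torch.Tensor:
    # inv_freq_i = 1 / theta^(2i / d) for i = 0 .. d/2 - 1
    return 1.0 / (rope_theta ** (torch.arange(0, head_dim, 2, dtype=torch.float32) / head_dim))

# head_dim = 128 and base 500000 are assumed stock Llama-3-8B values (not from this diff).
default_freqs = rope_inv_freq(128, 500_000.0)
extended_freqs = rope_inv_freq(128, 3_580_165_449.0)  # rope_theta set in this commit

# The lowest frequency drops sharply with the larger base, so rotary phases
# repeat far less often and positions remain distinguishable at longer contexts.
print(default_freqs[-1].item(), extended_freqs[-1].item())
```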

