isLinXu committed

Commit eddee85 · Parent(s): 0fbac23

add .
Files changed:
- config.json +32 -0
- model.safetensors +3 -0
config.json ADDED

@@ -0,0 +1,32 @@
+{
+    "vit_hidden_dim": 768,
+    "vit_inter_dim": 3072,
+    "vit_patch_size": 16,
+    "vit_img_size": 224,
+    "vit_n_heads": 12,
+    "vit_dropout": 0.0,
+    "vit_n_blocks": 12,
+    "vit_ln_eps": 1e-06,
+    "vit_cls_flag": false,
+    "vit_model_type": "google/siglip-base-patch16-224",
+    "lm_hidden_dim": 576,
+    "lm_inter_dim": 1536,
+    "lm_rms_eps": 1e-05,
+    "lm_re_base": 100000,
+    "lm_max_position_embeddings": 8192,
+    "lm_vocab_size": 49152,
+    "lm_n_heads": 9,
+    "lm_n_kv_heads": 3,
+    "lm_dropout": 0.0,
+    "lm_n_blocks": 30,
+    "lm_attn_scaling": 1.0,
+    "lm_max_length": 79,
+    "lm_use_tokens": false,
+    "lm_tie_weights": true,
+    "lm_model_type": "HuggingFaceTB/SmolLM2-135M",
+    "lm_tokenizer": "HuggingFaceTB/cosmo2-tokenizer",
+    "lm_eos_token_id": 0,
+    "mp_pixel_shuffle_factor": 2,
+    "vlm_load_backbone_weights": true,
+    "vlm_checkpoint_path": "checkpoints/nanoVLM-222M"
+}
    	
model.safetensors ADDED

@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9a4ce1253ed8aa2d53b95142eb4930732e4af64bcebea53b04e41a707a9c07fe
+size 888372464
