littlebird13 committed
Commit 5a94164 (verified) · Parent(s): 835c92b

Add files using upload-large-folder tool
config.json ADDED
@@ -0,0 +1,68 @@
+ {
+   "architectures": [
+     "Qwen3VLMoeForConditionalGeneration"
+   ],
+   "image_token_id": 151655,
+   "model_type": "qwen3_vl_moe",
+   "text_config": {
+     "attention_bias": false,
+     "attention_dropout": 0.0,
+     "bos_token_id": 151643,
+     "decoder_sparse_step": 1,
+     "dtype": "bfloat16",
+     "eos_token_id": 151645,
+     "head_dim": 128,
+     "hidden_act": "silu",
+     "hidden_size": 4096,
+     "initializer_range": 0.02,
+     "intermediate_size": 12288,
+     "max_position_embeddings": 262144,
+     "mlp_only_layers": [],
+     "model_type": "qwen3_vl_moe_text",
+     "moe_intermediate_size": 1536,
+     "norm_topk_prob": true,
+     "num_attention_heads": 64,
+     "num_experts": 128,
+     "num_experts_per_tok": 8,
+     "num_hidden_layers": 94,
+     "num_key_value_heads": 4,
+     "rms_norm_eps": 1e-06,
+     "rope_scaling": {
+       "mrope_interleaved": true,
+       "mrope_section": [
+         24,
+         20,
+         20
+       ],
+       "rope_type": "default"
+     },
+     "rope_theta": 5000000,
+     "use_cache": true,
+     "vocab_size": 151936
+   },
+   "tie_word_embeddings": false,
+   "transformers_version": "4.57.0.dev0",
+   "video_token_id": 151656,
+   "vision_config": {
+     "deepstack_visual_indexes": [
+       8,
+       16,
+       24
+     ],
+     "depth": 27,
+     "hidden_act": "gelu_pytorch_tanh",
+     "hidden_size": 1152,
+     "in_channels": 3,
+     "initializer_range": 0.02,
+     "intermediate_size": 4304,
+     "model_type": "qwen3_vl_moe",
+     "num_heads": 16,
+     "num_position_embeddings": 2304,
+     "out_hidden_size": 4096,
+     "patch_size": 16,
+     "spatial_merge_size": 2,
+     "temporal_patch_size": 2
+   },
+   "vision_end_token_id": 151653,
+   "vision_start_token_id": 151652
+ }
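
The config above describes a mixture-of-experts text decoder (94 layers, 128 experts with 8 active per token, grouped-query attention) paired with a vision encoder whose features are injected at the layers listed in deepstack_visual_indexes. Below is a minimal sketch that reads these fields straight from the raw file; the local path "config.json" is an assumption, and nothing here depends on a particular transformers version.

import json

# Read the config shown above; "config.json" in the working directory is an assumed path.
with open("config.json") as f:
    cfg = json.load(f)

text = cfg["text_config"]
vision = cfg["vision_config"]

# MoE routing: each token is dispatched to num_experts_per_tok of num_experts experts.
print(text["num_hidden_layers"], "decoder layers,",
      text["num_experts_per_tok"], "of", text["num_experts"], "experts active per token")

# Grouped-query attention: 64 query heads share 4 key/value heads.
print("query heads per KV head:", text["num_attention_heads"] // text["num_key_value_heads"])

# Vision-encoder layers whose outputs are injected into the decoder (per the field name).
print("deepstack_visual_indexes:", vision["deepstack_visual_indexes"])
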
generation_config.json ADDED
@@ -0,0 +1,14 @@
+ {
+   "bos_token_id": 151643,
+   "pad_token_id": 151643,
+   "do_sample": true,
+   "eos_token_id": [
+     151645,
+     151643
+   ],
+   "top_p": 0.8,
+   "top_k": 20,
+   "temperature": 0.7,
+   "repetition_penalty": 1.0,
+   "transformers_version": "4.56.0"
+ }
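
These are the default sampling settings shipped with the checkpoint: top-p 0.8, top-k 20, temperature 0.7, no repetition penalty, and two accepted end-of-sequence ids. A minimal sketch of loading them with transformers' GenerationConfig; the "." path assumes a local checkout of this repository.

from transformers import GenerationConfig

# Load the defaults shown above from generation_config.json in a local checkout (assumed path).
gen_cfg = GenerationConfig.from_pretrained(".")

# Nucleus (top_p=0.8) + top_k=20 sampling at temperature 0.7; generation stops at
# either of the two eos_token_id values.
print(gen_cfg.do_sample, gen_cfg.top_p, gen_cfg.top_k, gen_cfg.temperature)
print("eos ids:", gen_cfg.eos_token_id)
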
merges.txt ADDED
The diff for this file is too large to render. See raw diff
 
model.safetensors.index.json ADDED
The diff for this file is too large to render. See raw diff
 
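model.safetensors.index.json is too large to render here, but it presumably follows the standard sharded-safetensors layout: a "metadata.total_size" byte count plus a "weight_map" from parameter name to shard file. A minimal sketch under that assumption, with the file available locally:

import json
from collections import Counter

# Assumes the standard sharded-safetensors index layout and a local copy of the file.
with open("model.safetensors.index.json") as f:
    index = json.load(f)

print("total bytes:", index["metadata"]["total_size"])

# Count how many tensors live in each shard file.
shard_counts = Counter(index["weight_map"].values())
for shard, n in sorted(shard_counts.items()):
    print(shard, n)
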
video_preprocessor_config.json ADDED
@@ -0,0 +1,21 @@
+ {
+   "size": {
+     "longest_edge": 25165824,
+     "shortest_edge": 4096
+   },
+   "patch_size": 16,
+   "temporal_patch_size": 2,
+   "merge_size": 2,
+   "image_mean": [
+     0.5,
+     0.5,
+     0.5
+   ],
+   "image_std": [
+     0.5,
+     0.5,
+     0.5
+   ],
+   "processor_class": "Qwen3VLProcessor",
+   "video_processor_type": "Qwen3VLVideoProcessor"
+ }
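
Frames are cut into 16x16 spatial patches, grouped two frames at a time (temporal_patch_size), and merged 2x2 spatially (merge_size) before reaching the language model; given their magnitudes, the size limits read like total-pixel budgets rather than edge lengths, though this diff does not confirm that. A rough token-count sketch using those values; the 448x448 frame size is an arbitrary example, not something this config prescribes.

patch_size = 16          # spatial patch edge in pixels
temporal_patch_size = 2  # frames grouped per temporal patch
merge_size = 2           # 2x2 spatial merge after the vision encoder

height, width = 448, 448  # hypothetical frame size, divisible by patch_size
patches_per_group = (height // patch_size) * (width // patch_size)  # 28 * 28 = 784
tokens = patches_per_group // (merge_size * merge_size)             # 784 / 4 = 196
print(tokens, "vision tokens per", temporal_patch_size, "frame group at", width, "x", height)
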
vocab.json ADDED
The diff for this file is too large to render. See raw diff
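
vocab.json and merges.txt (added above) are the byte-level BPE vocabulary and merge rules. As a minimal sanity check, the sketch below compares the vocabulary's entry count with the vocab_size declared in config.json; a mismatch is expected rather than an error, since special tokens are typically registered outside vocab.json and the embedding table may be padded. Paths are assumed local.

import json

with open("vocab.json") as f:
    vocab = json.load(f)
with open("config.json") as f:
    declared = json.load(f)["text_config"]["vocab_size"]

# The raw BPE vocabulary is usually smaller than the declared vocab_size (151936):
# special token ids live in the tokenizer's added-token metadata, not in vocab.json.
print(len(vocab), "BPE entries vs. declared vocab_size", declared)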