Xenova (HF Staff) committed
Commit 41a4dd4 · verified
1 Parent(s): 8ce3e0d

Upload optimized ONNX model

.gitattributes CHANGED
@@ -33,3 +33,19 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
  *.zip filter=lfs diff=lfs merge=lfs -text
  *.zst filter=lfs diff=lfs merge=lfs -text
  *tfevents* filter=lfs diff=lfs merge=lfs -text
+ onnx/model.onnx_data filter=lfs diff=lfs merge=lfs -text
+ onnx/model.onnx_data_1 filter=lfs diff=lfs merge=lfs -text
+ onnx/model.onnx_data_2 filter=lfs diff=lfs merge=lfs -text
+ onnx/model.onnx_data_3 filter=lfs diff=lfs merge=lfs -text
+ onnx/model.onnx_data_4 filter=lfs diff=lfs merge=lfs -text
+ onnx/model.onnx_data_5 filter=lfs diff=lfs merge=lfs -text
+ onnx/model.onnx_data_6 filter=lfs diff=lfs merge=lfs -text
+ onnx/model.onnx_data_7 filter=lfs diff=lfs merge=lfs -text
+ onnx/model_fp16.onnx_data filter=lfs diff=lfs merge=lfs -text
+ onnx/model_fp16.onnx_data_1 filter=lfs diff=lfs merge=lfs -text
+ onnx/model_fp16.onnx_data_2 filter=lfs diff=lfs merge=lfs -text
+ onnx/model_fp16.onnx_data_3 filter=lfs diff=lfs merge=lfs -text
+ onnx/model_q4.onnx_data filter=lfs diff=lfs merge=lfs -text
+ onnx/model_q4.onnx_data_1 filter=lfs diff=lfs merge=lfs -text
+ onnx/model_q4f16.onnx_data filter=lfs diff=lfs merge=lfs -text
+ onnx/model_q4f16.onnx_data_1 filter=lfs diff=lfs merge=lfs -text
chat_template.jinja ADDED
@@ -0,0 +1,61 @@
+ {%- if tools %}
+ {{- '<|im_start|>system\n' }}
+ {%- if messages[0].role == 'system' %}
+ {{- messages[0].content + '\n\n' }}
+ {%- endif %}
+ {{- "# Tools\n\nYou may call one or more functions to assist with the user query.\n\nYou are provided with function signatures within <tools></tools> XML tags:\n<tools>" }}
+ {%- for tool in tools %}
+ {{- "\n" }}
+ {{- tool | tojson }}
+ {%- endfor %}
+ {{- "\n</tools>\n\nFor each function call, return a json object with function name and arguments within <tool_call></tool_call> XML tags:\n<tool_call>\n{\"name\": <function-name>, \"arguments\": <args-json-object>}\n</tool_call><|im_end|>\n" }}
+ {%- else %}
+ {%- if messages[0].role == 'system' %}
+ {{- '<|im_start|>system\n' + messages[0].content + '<|im_end|>\n' }}
+ {%- endif %}
+ {%- endif %}
+ {%- for message in messages %}
+ {%- if message.content is string %}
+ {%- set content = message.content %}
+ {%- else %}
+ {%- set content = '' %}
+ {%- endif %}
+ {%- if (message.role == "user") or (message.role == "system" and not loop.first) %}
+ {{- '<|im_start|>' + message.role + '\n' + content + '<|im_end|>' + '\n' }}
+ {%- elif message.role == "assistant" %}
+ {{- '<|im_start|>' + message.role + '\n' + content }}
+ {%- if message.tool_calls %}
+ {%- for tool_call in message.tool_calls %}
+ {%- if (loop.first and content) or (not loop.first) %}
+ {{- '\n' }}
+ {%- endif %}
+ {%- if tool_call.function %}
+ {%- set tool_call = tool_call.function %}
+ {%- endif %}
+ {{- '<tool_call>\n{"name": "' }}
+ {{- tool_call.name }}
+ {{- '", "arguments": ' }}
+ {%- if tool_call.arguments is string %}
+ {{- tool_call.arguments }}
+ {%- else %}
+ {{- tool_call.arguments | tojson }}
+ {%- endif %}
+ {{- '}\n</tool_call>' }}
+ {%- endfor %}
+ {%- endif %}
+ {{- '<|im_end|>\n' }}
+ {%- elif message.role == "tool" %}
+ {%- if loop.first or (messages[loop.index0 - 1].role != "tool") %}
+ {{- '<|im_start|>user' }}
+ {%- endif %}
+ {{- '\n<tool_response>\n' }}
+ {{- content }}
+ {{- '\n</tool_response>' }}
+ {%- if loop.last or (messages[loop.index0 + 1].role != "tool") %}
+ {{- '<|im_end|>\n' }}
+ {%- endif %}
+ {%- endif %}
+ {%- endfor %}
+ {%- if add_generation_prompt %}
+ {{- '<|im_start|>assistant\n' }}
+ {%- endif %}
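
Aside (not part of the commit): a minimal sketch of how this ChatML-style chat template is typically applied with Transformers.js; the repository id below is a placeholder, and the snippet assumes the @huggingface/transformers package.

```ts
// Sketch only — "<this-repo-id>" is a placeholder for this repository's Hub id.
import { AutoTokenizer } from "@huggingface/transformers";

const tokenizer = await AutoTokenizer.from_pretrained("<this-repo-id>");

// Render the prompt defined by chat_template.jinja above.
const messages = [
  { role: "system", content: "You are a helpful assistant." },
  { role: "user", content: "What is the capital of France?" },
];
const prompt = tokenizer.apply_chat_template(messages, {
  tokenize: false,
  add_generation_prompt: true, // appends "<|im_start|>assistant\n"
});
console.log(prompt);
```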
config.json ADDED
@@ -0,0 +1,82 @@
+ {
+ "architectures": [
+ "Qwen3ForCausalLM"
+ ],
+ "attention_bias": false,
+ "attention_dropout": 0.0,
+ "bos_token_id": 151643,
+ "dtype": "bfloat16",
+ "eos_token_id": 151645,
+ "head_dim": 128,
+ "hidden_act": "silu",
+ "hidden_size": 2560,
+ "initializer_range": 0.02,
+ "intermediate_size": 9728,
+ "layer_types": [
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention"
+ ],
+ "max_position_embeddings": 262144,
+ "max_window_layers": 36,
+ "model_type": "qwen3",
+ "num_attention_heads": 32,
+ "num_hidden_layers": 36,
+ "num_key_value_heads": 8,
+ "rms_norm_eps": 1e-06,
+ "rope_parameters": {
+ "rope_theta": 5000000,
+ "rope_type": "default"
+ },
+ "sliding_window": null,
+ "tie_word_embeddings": true,
+ "transformers_version": "5.0.0.dev0",
+ "use_cache": true,
+ "use_sliding_window": false,
+ "vocab_size": 151936,
+ "transformers.js_config": {
+ "use_external_data_format": {
+ "model.onnx": 8,
+ "model_fp16.onnx": 4,
+ "model_q4.onnx": 2,
+ "model_q4f16.onnx": 2
+ },
+ "kv_cache_dtype": {
+ "q4f16": "float16",
+ "fp16": "float16"
+ }
+ }
+ }
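
Aside (not part of the commit): the "transformers.js_config" block above tells Transformers.js how many external-data shards (the *.onnx_data* files added in this commit) each ONNX variant is split into, and which KV-cache dtype the fp16/q4f16 variants use. A minimal loading sketch, assuming the Transformers.js v3 pipeline API and a placeholder repository id:

```ts
// Sketch only — "<this-repo-id>" is a placeholder for this repository's Hub id.
import { pipeline } from "@huggingface/transformers";

// "dtype" selects one of the ONNX variants uploaded in this commit
// ("fp32" -> model.onnx, "fp16", "q4", "q4f16"); the shard counts in
// "use_external_data_format" tell the library how many .onnx_data files to fetch.
const generator = await pipeline("text-generation", "<this-repo-id>", {
  dtype: "q4f16",
  device: "webgpu", // optional; omit to use the default backend
});
```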
generation_config.json ADDED
@@ -0,0 +1,14 @@
+ {
+ "bos_token_id": 151643,
+ "do_sample": true,
+ "eos_token_id": [
+ 151645,
+ 151643
+ ],
+ "pad_token_id": 151643,
+ "temperature": 0.7,
+ "top_k": 20,
+ "top_p": 0.8,
+ "transformers_version": "5.0.0.dev0",
+ "trust_remote_code": false
+ }
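
Aside (not part of the commit): generation_config.json supplies the sampling defaults (do_sample with temperature 0.7, top_p 0.8, top_k 20). Continuing the sketch above, the same values can be passed explicitly:

```ts
// Sketch only — continues the `generator` created in the previous snippet.
const chat = [
  { role: "user", content: "Give me a short introduction to large language models." },
];
const output: any = await generator(chat, {
  // Mirrors the defaults in generation_config.json.
  do_sample: true,
  temperature: 0.7,
  top_p: 0.8,
  top_k: 20,
  max_new_tokens: 256,
});
console.log(output[0].generated_text.at(-1).content);
```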
onnx/model.onnx ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:836ccbda036b372b7955770b73b4cbeab122fbb6c0d6497abdd1161755b93a75
+ size 360704
onnx/model.onnx_data ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a8df008f1b786688004ff846e6108edb81a0b928e220a63fd3130eb808b49b91
+ size 2093774848
onnx/model.onnx_data_1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b41e463e4cf65f17ae5afe46cce9ceb0d6c356c1e8740a2c634de6d7a4cdf1a1
+ size 2081525760
onnx/model.onnx_data_2 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e9cb66f584d47ee9cc3606151ca4ff0687bab70171093fe96e98d7d8044f59b2
+ size 2060564480
onnx/model.onnx_data_3 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d6dafc32d4f0bc66205a9f11f022a9bedac1d8944b44f93a7edaf74ade00f756
+ size 2018611200
onnx/model.onnx_data_4 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d4719ef7866fdea45844fe56779e021cc3cd074f5da00e55938ebc2c218bc9f7
+ size 2018611200
onnx/model.onnx_data_5 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d2c6c37e00816977aae545af8b5c545f33ff969979973f238101cc8c6a124a98
+ size 2018611200
onnx/model.onnx_data_6 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d6da134b1a7eb48e9f213d612f57c73eb7ea0ad98dace1d454c0c82160d995ed
+ size 2018611200
onnx/model.onnx_data_7 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a5976a27b154a370805ae1c8fceeec3938febb9210e861e7818a48578f383d80
+ size 1913743360
onnx/model_fp16.onnx ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6940ac6cbb335f1852fa8aa520c8f58d6e2f155663b0f112e23e60191c183c0e
+ size 344089
onnx/model_fp16.onnx_data ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:fc8a8340a220fdae788a199019cce44d4423eda937a98d63b16366f2aef83133
+ size 2087650304
onnx/model_fp16.onnx_data_1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1cc7069291ef115cb2855f8cef6fed7ee652964284186b77d2761fc3b3a671f4
+ size 2089395200
onnx/model_fp16.onnx_data_2 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e890d2fab5cd90514d36e63ea4a3f0c96395bd9bdd0288e5328bb6a8b28544d1
+ size 2068418560
onnx/model_fp16.onnx_data_3 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:70a621d5319094244f2919f3f67916afae77caa4937def33480064259f5cf54e
+ size 1866562560
onnx/model_q4.onnx ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:bd0b111dd18c881a220d146e5491a1111d46438a11b5087354667bf16a83207c
+ size 454851
onnx/model_q4.onnx_data ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:21e214a9c87271f4cfe5828794f9af288a3d856482ef99881bd54f6d4dfbb83a
+ size 2085644288
onnx/model_q4.onnx_data_1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3f9ffba3de283d1740cb4153ec6fd4104f2444a91036cbdca1dafa1ea4a3ba9f
+ size 1875968000
onnx/model_q4f16.onnx ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6e603b4d2324fa94a1c1fc927d1f985da78413afcffa27b02b008d909eccc4a8
+ size 437655
onnx/model_q4f16.onnx_data ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d2b17463255a120a6d4e88c60e24bb6be6607ab6e34dd89b62234879bfd6ce0d
+ size 2094347264
onnx/model_q4f16.onnx_data_1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6c0d3b7ee94fa08c2dc766dfa4c5b790e492077701847b40fa001f648f3ebded
+ size 794787840
tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json ADDED
@@ -0,0 +1,32 @@
+ {
+ "add_bos_token": false,
+ "add_prefix_space": false,
+ "additional_special_tokens": null,
+ "backend": "tokenizers",
+ "bos_token": null,
+ "clean_up_tokenization_spaces": false,
+ "eos_token": "<|im_end|>",
+ "errors": "replace",
+ "extra_special_tokens": [
+ "<|im_start|>",
+ "<|im_end|>",
+ "<|object_ref_start|>",
+ "<|object_ref_end|>",
+ "<|box_start|>",
+ "<|box_end|>",
+ "<|quad_start|>",
+ "<|quad_end|>",
+ "<|vision_start|>",
+ "<|vision_end|>",
+ "<|vision_pad|>",
+ "<|image_pad|>",
+ "<|video_pad|>"
+ ],
+ "is_local": false,
+ "model_max_length": 1010000,
+ "pad_token": "<|endoftext|>",
+ "split_special_tokens": false,
+ "tokenizer_class": "Qwen2Tokenizer",
+ "unk_token": null,
+ "chat_template": "{%- if tools %}\n {{- '<|im_start|>system\\n' }}\n {%- if messages[0].role == 'system' %}\n {{- messages[0].content + '\\n\\n' }}\n {%- endif %}\n {{- \"# Tools\\n\\nYou may call one or more functions to assist with the user query.\\n\\nYou are provided with function signatures within <tools></tools> XML tags:\\n<tools>\" }}\n {%- for tool in tools %}\n {{- \"\\n\" }}\n {{- tool | tojson }}\n {%- endfor %}\n {{- \"\\n</tools>\\n\\nFor each function call, return a json object with function name and arguments within <tool_call></tool_call> XML tags:\\n<tool_call>\\n{\\\"name\\\": <function-name>, \\\"arguments\\\": <args-json-object>}\\n</tool_call><|im_end|>\\n\" }}\n{%- else %}\n {%- if messages[0].role == 'system' %}\n {{- '<|im_start|>system\\n' + messages[0].content + '<|im_end|>\\n' }}\n {%- endif %}\n{%- endif %}\n{%- for message in messages %}\n {%- if message.content is string %}\n {%- set content = message.content %}\n {%- else %}\n {%- set content = '' %}\n {%- endif %}\n {%- if (message.role == \"user\") or (message.role == \"system\" and not loop.first) %}\n {{- '<|im_start|>' + message.role + '\\n' + content + '<|im_end|>' + '\\n' }}\n {%- elif message.role == \"assistant\" %}\n {{- '<|im_start|>' + message.role + '\\n' + content }}\n {%- if message.tool_calls %}\n {%- for tool_call in message.tool_calls %}\n {%- if (loop.first and content) or (not loop.first) %}\n {{- '\\n' }}\n {%- endif %}\n {%- if tool_call.function %}\n {%- set tool_call = tool_call.function %}\n {%- endif %}\n {{- '<tool_call>\\n{\"name\": \"' }}\n {{- tool_call.name }}\n {{- '\", \"arguments\": ' }}\n {%- if tool_call.arguments is string %}\n {{- tool_call.arguments }}\n {%- else %}\n {{- tool_call.arguments | tojson }}\n {%- endif %}\n {{- '}\\n</tool_call>' }}\n {%- endfor %}\n {%- endif %}\n {{- '<|im_end|>\\n' }}\n {%- elif message.role == \"tool\" %}\n {%- if loop.first or (messages[loop.index0 - 1].role != \"tool\") %}\n {{- '<|im_start|>user' }}\n {%- endif %}\n {{- '\\n<tool_response>\\n' }}\n {{- content }}\n {{- '\\n</tool_response>' }}\n {%- if loop.last or (messages[loop.index0 + 1].role != \"tool\") %}\n {{- '<|im_end|>\\n' }}\n {%- endif %}\n {%- endif %}\n{%- endfor %}\n{%- if add_generation_prompt %}\n {{- '<|im_start|>assistant\\n' }}\n{%- endif %}"
+ }