Upload optimized ONNX model
- .gitattributes +4 -0
- chat_template.jinja +37 -0
- config.json +69 -0
- onnx/model.onnx +3 -0
- onnx/model.onnx_data +3 -0
- onnx/model_fp16.onnx +3 -0
- onnx/model_fp16.onnx_data +3 -0
- onnx/model_q4.onnx +3 -0
- onnx/model_q4.onnx_data +3 -0
- onnx/model_q4f16.onnx +3 -0
- onnx/model_q4f16.onnx_data +3 -0
- special_tokens_map.json +23 -0
- tokenizer.json +0 -0
- tokenizer_config.json +0 -0
    	
        .gitattributes
    CHANGED
    
@@ -33,3 +33,7 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
 *.zip filter=lfs diff=lfs merge=lfs -text
 *.zst filter=lfs diff=lfs merge=lfs -text
 *tfevents* filter=lfs diff=lfs merge=lfs -text
+onnx/model.onnx_data filter=lfs diff=lfs merge=lfs -text
+onnx/model_fp16.onnx_data filter=lfs diff=lfs merge=lfs -text
+onnx/model_q4.onnx_data filter=lfs diff=lfs merge=lfs -text
+onnx/model_q4f16.onnx_data filter=lfs diff=lfs merge=lfs -text
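The four new entries route the large external-data weight files through Git LFS, so only small pointer files are stored in the repository itself. As a quick sanity check, a short script can list every pattern in .gitattributes that carries the lfs filter; a minimal sketch, assuming it is run from the repository root:

# List the patterns that .gitattributes routes through Git LFS.
from pathlib import Path

lfs_patterns = []
for line in Path(".gitattributes").read_text().splitlines():
    parts = line.split()
    # An LFS rule looks like: <pattern> filter=lfs diff=lfs merge=lfs -text
    if len(parts) > 1 and "filter=lfs" in parts[1:]:
        lfs_patterns.append(parts[0])

print("\n".join(lfs_patterns))  # should end with the four onnx/*.onnx_data entries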
    	
        chat_template.jinja
    ADDED
    
{{- bos_token -}}
{%- set system_prompt = "" -%}
{%- set ns = namespace(system_prompt="") -%}
{%- if messages[0]["role"] == "system" -%}
    {%- set ns.system_prompt = messages[0]["content"] -%}
    {%- set messages = messages[1:] -%}
{%- endif -%}
{%- if tools -%}
    {%- set ns.system_prompt = ns.system_prompt + ("\n" if ns.system_prompt else "") + "List of tools: <|tool_list_start|>[" -%}
    {%- for tool in tools -%}
        {%- if tool is not string -%}
            {%- set tool = tool | tojson -%}
        {%- endif -%}
        {%- set ns.system_prompt = ns.system_prompt + tool -%}
        {%- if not loop.last -%}
            {%- set ns.system_prompt = ns.system_prompt + ", " -%}
        {%- endif -%}
    {%- endfor -%}
    {%- set ns.system_prompt = ns.system_prompt + "]<|tool_list_end|>" -%}
{%- endif -%}
{%- if ns.system_prompt -%}
    {{- "<|im_start|>system\n" + ns.system_prompt + "<|im_end|>\n" -}}
{%- endif -%}
{%- for message in messages -%}
    {{- "<|im_start|>" + message["role"] + "\n" -}}
    {%- set content = message["content"] -%}
    {%- if content is not string -%}
        {%- set content = content | tojson -%}
    {%- endif -%}
    {%- if message["role"] == "tool" -%}
        {%- set content = "<|tool_response_start|>" + content + "<|tool_response_end|>" -%}
    {%- endif -%}
    {{- content + "<|im_end|>\n" -}}
{%- endfor -%}
{%- if add_generation_prompt -%}
    {{- "<|im_start|>assistant\n" -}}
{%- endif -%}
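The template wraps each message in <|im_start|>/<|im_end|> blocks, folds any tool definitions into the system prompt between <|tool_list_start|> and <|tool_list_end|>, and wraps tool outputs in <|tool_response_start|>/<|tool_response_end|>. To see what it produces, it can be rendered directly with Jinja2 (whose built-in tojson filter matches the one used above); a minimal sketch, assuming a local checkout and the <|startoftext|> BOS token from this repo's special_tokens_map.json:

# Render chat_template.jinja with a sample conversation (minimal sketch).
from pathlib import Path
from jinja2 import Environment

template = Environment().from_string(Path("chat_template.jinja").read_text())

messages = [
    {"role": "system", "content": "You are a helpful assistant."},
    {"role": "user", "content": "What is the capital of France?"},
]

prompt = template.render(
    messages=messages,
    tools=None,
    bos_token="<|startoftext|>",  # from special_tokens_map.json
    add_generation_prompt=True,
)
print(prompt)
# <|startoftext|><|im_start|>system
# You are a helpful assistant.<|im_end|>
# <|im_start|>user
# What is the capital of France?<|im_end|>
# <|im_start|>assistant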
    	
        config.json
    ADDED
    
{
  "architectures": [
    "Lfm2ForCausalLM"
  ],
  "block_auto_adjust_ff_dim": true,
  "block_dim": 1024,
  "block_ff_dim": 6656,
  "block_ffn_dim_multiplier": 1.0,
  "block_mlp_init_scale": 1.0,
  "block_multiple_of": 256,
  "block_norm_eps": 1e-05,
  "block_out_init_scale": 1.0,
  "block_use_swiglu": true,
  "block_use_xavier_init": true,
  "bos_token_id": 1,
  "conv_L_cache": 3,
  "conv_bias": false,
  "conv_dim": 1024,
  "conv_dim_out": 1024,
  "conv_use_xavier_init": true,
  "eos_token_id": 7,
  "hidden_size": 1024,
  "initializer_range": 0.02,
  "intermediate_size": 6656,
  "layer_types": [
    "conv",
    "conv",
    "full_attention",
    "conv",
    "conv",
    "full_attention",
    "conv",
    "conv",
    "full_attention",
    "conv",
    "full_attention",
    "conv",
    "full_attention",
    "conv",
    "full_attention",
    "conv"
  ],
  "max_position_embeddings": 128000,
  "model_type": "lfm2",
  "norm_eps": 1e-05,
  "num_attention_heads": 16,
  "num_heads": 16,
  "num_hidden_layers": 16,
  "num_key_value_heads": 8,
  "pad_token_id": 0,
  "rope_theta": 1000000.0,
  "torch_dtype": "float32",
  "transformers_version": "4.56.0.dev0",
  "use_cache": true,
  "use_pos_enc": true,
  "vocab_size": 93083,
  "transformers.js_config": {
    "use_external_data_format": {
      "model.onnx": 1,
      "model_fp16.onnx": 1,
      "model_q4.onnx": 1,
      "model_q4f16.onnx": 1
    },
    "kv_cache_dtype": {
      "q4f16": "float16",
      "fp16": "float16"
    }
  }
}
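The transformers.js_config block flags every ONNX variant as using the external data format (weights live in the companion *.onnx_data files) and asks the fp16/q4f16 variants to keep their KV cache in float16. When loading one of these graphs directly with onnxruntime, nothing special is required beyond keeping the .onnx_data file next to its .onnx graph; a minimal sketch, assuming onnxruntime is installed and the repository is checked out locally with the LFS objects pulled:

# Load the q4f16 variant with onnxruntime (minimal sketch).
# The external weights in onnx/model_q4f16.onnx_data are resolved
# automatically because they sit next to the graph file.
import onnxruntime as ort

session = ort.InferenceSession(
    "onnx/model_q4f16.onnx",
    providers=["CPUExecutionProvider"],
)

# Inspect the graph's expected inputs and outputs.
for inp in session.get_inputs():
    print("input:", inp.name, inp.shape, inp.type)
for out in session.get_outputs():
    print("output:", out.name, out.shape, out.type)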
    	
        onnx/model.onnx
    ADDED
    
version https://git-lfs.github.com/spec/v1
oid sha256:eacde2ff05dc93042680cd7601d43ed9a64327cbd21dbe5b07be94b4a1cb71df
size 149015
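Each model file is committed as a Git LFS pointer: a spec version, the SHA-256 of the real blob, and its size in bytes (here 149,015 bytes for the graph, with the bulk of the weights in the .onnx_data files below). After fetching the actual objects with git lfs pull, a downloaded file can be checked against these pointer fields; a minimal sketch using the values above:

# Verify a downloaded LFS object against its pointer fields (minimal sketch).
import hashlib
from pathlib import Path

# Pointer fields committed for onnx/model.onnx in this upload.
EXPECTED_OID = "eacde2ff05dc93042680cd7601d43ed9a64327cbd21dbe5b07be94b4a1cb71df"
EXPECTED_SIZE = 149015

blob = Path("onnx/model.onnx").read_bytes()  # the real file after `git lfs pull`
assert len(blob) == EXPECTED_SIZE, "size mismatch"
assert hashlib.sha256(blob).hexdigest() == EXPECTED_OID, "sha256 mismatch"
print("onnx/model.onnx matches its LFS pointer")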
    	
        onnx/model.onnx_data
    ADDED
    
version https://git-lfs.github.com/spec/v1
oid sha256:bb7f563b2c32ed3ee01b40773928f21666a12eb9691d9ce5f75b127e40e85b4a
size 1563533312
    	
        onnx/model_fp16.onnx
    ADDED
    
version https://git-lfs.github.com/spec/v1
oid sha256:22959d696352ed01e92904b4ca70b82fe5c4aa55139ba490cda951d59921453d
size 148241
    	
        onnx/model_fp16.onnx_data
    ADDED
    
version https://git-lfs.github.com/spec/v1
oid sha256:ea3e6a7f24ff378e44d04a17519d1c5a5e92165dbde4796e3cc643637eace3eb
size 781766656
    	
        onnx/model_q4.onnx
    ADDED
    
version https://git-lfs.github.com/spec/v1
oid sha256:dd9d7826cc183ae07daea2249381ecb1d5964473b6813b606829b3378cfcf3a0
size 182692
    	
        onnx/model_q4.onnx_data
    ADDED
    
version https://git-lfs.github.com/spec/v1
oid sha256:98d5ef7df537511e5035dd8dbd9d6a48d72a4e7b928206f6dddc484ab73a50b4
size 593862656
    	
        onnx/model_q4f16.onnx
    ADDED
    
version https://git-lfs.github.com/spec/v1
oid sha256:3c92a12d4ade4f07ea609bcab8a74e0055bef001c801d1a7b393b3a6e0b89fcb
size 181992
    	
        onnx/model_q4f16.onnx_data
    ADDED
    
version https://git-lfs.github.com/spec/v1
oid sha256:2736c623e6bae483644d686d5f6bb4ccd53851a98ce2ee563169319d0709e8fa
size 368758784
    	
        special_tokens_map.json
    ADDED
    
{
  "bos_token": {
    "content": "<|startoftext|>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "eos_token": {
    "content": "<|im_end|>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "pad_token": {
    "content": "<|pad|>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  }
}
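These entries name the BOS, EOS, and padding tokens defined by the tokenizer files in this repo. A quick way to confirm they line up with the ids in config.json (bos_token_id 1, eos_token_id 7, pad_token_id 0) is to load the tokenizer from the local checkout; a minimal sketch, assuming the transformers library is installed:

# Load the tokenizer from this repo's checkout and inspect its special tokens (minimal sketch).
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained(".")  # directory containing tokenizer.json etc.

print(tokenizer.bos_token, tokenizer.bos_token_id)  # <|startoftext|>, expected to match bos_token_id (1)
print(tokenizer.eos_token, tokenizer.eos_token_id)  # <|im_end|>, expected to match eos_token_id (7)
print(tokenizer.pad_token, tokenizer.pad_token_id)  # <|pad|>, expected to match pad_token_id (0)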
    	
        tokenizer.json
    ADDED
    
The diff for this file is too large to render.
    	
        tokenizer_config.json
    ADDED
    
The diff for this file is too large to render.

