Upload folder using huggingface_hub
- README.md +4 -0
- config.json +1 -4
- generation_config.json +13 -12
README.md
CHANGED
@@ -7,6 +7,10 @@ license: apache-2.0
 pipeline_tag: image-text-to-text
 library_name: transformers
 ---
+> [!NOTE]
+> Includes Unsloth **chat template fixes**! <br> For `llama.cpp`, use `--jinja`
+>
+
 <div>
 <p style="margin-top: 0;margin-bottom: 0;">
 <em><a href="https://docs.unsloth.ai/basics/unsloth-dynamic-v2.0-gguf">Unsloth Dynamic 2.0</a> achieves superior accuracy & outperforms other leading quants.</em>
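The added note concerns the bundled chat template. A quick way to sanity-check the fixed template from transformers is to render it directly; this is a minimal sketch, not part of the commit: the repo id is a placeholder and it assumes a recent transformers whose processor exposes `apply_chat_template`. (For `llama.cpp`, the `--jinja` flag mentioned in the note enables the embedded Jinja chat template.)

```python
# Sketch: render the repo's chat template to verify the fix applies.
# "unsloth/REPO-ID" is a placeholder, not the actual repository name.
from transformers import AutoProcessor

processor = AutoProcessor.from_pretrained("unsloth/REPO-ID")
messages = [
    {"role": "user", "content": [{"type": "text", "text": "Describe this image."}]},
]
# tokenize=False returns the rendered prompt string instead of token ids
prompt = processor.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
print(prompt)
```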
config.json
CHANGED
@@ -2,8 +2,6 @@
   "architectures": [
     "Qwen3VLForConditionalGeneration"
   ],
-  "torch_dtype": "bfloat16",
-  "eos_token_id": 151645,
   "image_token_id": 151655,
   "model_type": "qwen3_vl",
   "pad_token_id": 151654,
@@ -49,7 +47,6 @@
       17
     ],
     "depth": 24,
-    "torch_dtype": "bfloat16",
     "hidden_act": "gelu_pytorch_tanh",
     "hidden_size": 1024,
     "in_channels": 3,
@@ -65,4 +62,4 @@
   },
   "vision_end_token_id": 151653,
   "vision_start_token_id": 151652
-}
+}
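Since `torch_dtype` is no longer pinned in config.json (and the top-level `eos_token_id` moves into generation_config.json below), the dtype can be requested explicitly at load time. A minimal sketch under the assumption of a transformers release with `qwen3_vl` support; the repo id is a placeholder:

```python
# Sketch: load the checkpoint in bfloat16 explicitly, since config.json no
# longer carries a torch_dtype entry. "unsloth/REPO-ID" is a placeholder.
import torch
from transformers import AutoModelForImageTextToText

model = AutoModelForImageTextToText.from_pretrained(
    "unsloth/REPO-ID",
    torch_dtype=torch.bfloat16,  # request bf16 weights explicitly
    device_map="auto",           # requires accelerate; assumption for illustration
)
```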
generation_config.json
CHANGED
@@ -1,13 +1,14 @@
 {
-
-
-
-
-
-
-
-
-
-
-
-
+  "bos_token_id": 151643,
+  "pad_token_id": 151643,
+  "do_sample": true,
+  "eos_token_id": [
+    151645,
+    151643
+  ],
+  "top_p": 0.8,
+  "top_k": 20,
+  "temperature": 0.7,
+  "repetition_penalty": 1.0,
+  "transformers_version": "4.56.0"
+}
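The rewritten generation_config.json pins the sampling defaults (top_p 0.8, top_k 20, temperature 0.7) and declares both 151645 and 151643 as end-of-sequence ids. As a rough illustration, the same defaults expressed as a transformers `GenerationConfig`, e.g. to inspect or override them per call; the commented `generate` line is hypothetical usage, not from this commit:

```python
# Sketch: the new defaults mirrored as a GenerationConfig object.
from transformers import GenerationConfig

gen_config = GenerationConfig(
    bos_token_id=151643,
    pad_token_id=151643,
    do_sample=True,
    eos_token_id=[151645, 151643],
    top_p=0.8,
    top_k=20,
    temperature=0.7,
    repetition_penalty=1.0,
)
# model.generate(**inputs, generation_config=gen_config)
```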