agreeupon committed on
Commit
72a5beb
·
verified ·
1 Parent(s): 39049ac

Add files using upload-large-folder tool

Browse files
This view is limited to 50 files because it contains too many changes.   See raw diff
Files changed (50) hide show
  1. =0.14.0 +242 -0
  2. DeepSeek-V3-AWQ/.gitattributes +36 -0
  3. DeepSeek-V3-AWQ/README.md +37 -0
  4. DeepSeek-V3-AWQ/configuration_deepseek.py +210 -0
  5. DeepSeek-V3-AWQ/generation_config.json +7 -0
  6. DeepSeek-V3-AWQ/tokenizer.json +0 -0
  7. cuda-keyring_1.0-1_all.deb +0 -0
  8. download.py +150 -0
  9. eichi_utils/tensor_combiner.py +194 -0
  10. eichi_utils/ui_styles.py +210 -0
  11. eichi_utils/vae_cache.py +305 -0
  12. hf_download/hub/.locks/models--hunyuanvideo-community--HunyuanVideo/469be27c5c010538f845f518c4f5e8574c78f7c8.lock +0 -0
  13. hf_download/hub/.locks/models--hunyuanvideo-community--HunyuanVideo/54accb98811931fca7598da4f7239b03b912eaa2bd5fe639f2da00923374f4a0.lock +0 -0
  14. hf_download/hub/.locks/models--hunyuanvideo-community--HunyuanVideo/660c6f5b1abae9dc498ac2d21e1347d2abdb0cf6c0c0c8576cd796491d9a6cdd.lock +0 -0
  15. hf_download/hub/.locks/models--hunyuanvideo-community--HunyuanVideo/715167338723844a8b46281e6dedaf9e2000f771.lock +0 -0
  16. hf_download/hub/.locks/models--hunyuanvideo-community--HunyuanVideo/747d2159aaebc628e8105b91c2ab77d50a289f17.lock +0 -0
  17. hf_download/hub/.locks/models--hunyuanvideo-community--HunyuanVideo/76e821f1b6f0a9709293c3b6b51ed90980b3166b.lock +0 -0
  18. hf_download/hub/.locks/models--hunyuanvideo-community--HunyuanVideo/7c6fa7065265909bd500cafb38cc939b81b1b018.lock +0 -0
  19. hf_download/hub/.locks/models--hunyuanvideo-community--HunyuanVideo/b6e7a9e010002205834fd4f2808ca042bad4a246.lock +0 -0
  20. hf_download/hub/.locks/models--hunyuanvideo-community--HunyuanVideo/c93f133c65ab1aeaa9ed1e998901a306636375b2f57fa53cd279241147a9a0e9.lock +0 -0
  21. hf_download/hub/.locks/models--hunyuanvideo-community--HunyuanVideo/cf0682d6de72c1547f41b4f6d7c59f62deffef94.lock +0 -0
  22. hf_download/hub/.locks/models--hunyuanvideo-community--HunyuanVideo/d2c593db4aa75b17a42c1f74d7cc38e257eaeed222e6a52674c65544165dcbaa.lock +0 -0
  23. hf_download/hub/.locks/models--hunyuanvideo-community--HunyuanVideo/d67c77f57cab4c9bf7f4420c256aed684c8ac7b49c6ab72cff9924e9513db9f1.lock +0 -0
  24. hf_download/hub/.locks/models--hunyuanvideo-community--HunyuanVideo/f5ad57d3eda300a3195bc9c0bb36ab76ebe88831f128e9851e63440aff4a6741.lock +0 -0
  25. hf_download/hub/.locks/models--hunyuanvideo-community--HunyuanVideo/f5f2205251eb0b863c5b0f9a60cd9fad069c5872.lock +0 -0
  26. hf_download/hub/models--hunyuanvideo-community--HunyuanVideo/blobs/22f91ac3aeb401be0a10d294e00bb1d6293bc4c5 +31 -0
  27. hf_download/hub/models--hunyuanvideo-community--HunyuanVideo/blobs/469be27c5c010538f845f518c4f5e8574c78f7c8 +0 -0
  28. hf_download/hub/models--hunyuanvideo-community--HunyuanVideo/blobs/747d2159aaebc628e8105b91c2ab77d50a289f17 +2096 -0
  29. hf_download/hub/models--hunyuanvideo-community--HunyuanVideo/blobs/76e821f1b6f0a9709293c3b6b51ed90980b3166b +0 -0
  30. hf_download/hub/models--hunyuanvideo-community--HunyuanVideo/blobs/7c6fa7065265909bd500cafb38cc939b81b1b018 +30 -0
  31. hf_download/hub/models--hunyuanvideo-community--HunyuanVideo/blobs/b6e7a9e010002205834fd4f2808ca042bad4a246 +297 -0
  32. hf_download/hub/models--hunyuanvideo-community--HunyuanVideo/blobs/b70acd51d20aeee27af7a81cea7d68f5288b8f4b +32 -0
  33. hf_download/hub/models--hunyuanvideo-community--HunyuanVideo/blobs/cf0682d6de72c1547f41b4f6d7c59f62deffef94 +30 -0
  34. hf_download/hub/models--hunyuanvideo-community--HunyuanVideo/blobs/f5f2205251eb0b863c5b0f9a60cd9fad069c5872 +30 -0
  35. hf_download/hub/models--hunyuanvideo-community--HunyuanVideo/snapshots/e8c2aaa66fe3742a32c11a6766aecbf07c56e773/text_encoder/config.json +30 -0
  36. hf_download/hub/models--hunyuanvideo-community--HunyuanVideo/snapshots/e8c2aaa66fe3742a32c11a6766aecbf07c56e773/text_encoder_2/config.json +25 -0
  37. hf_download/hub/models--hunyuanvideo-community--HunyuanVideo/snapshots/e8c2aaa66fe3742a32c11a6766aecbf07c56e773/tokenizer/special_tokens_map.json +30 -0
  38. hf_download/hub/models--hunyuanvideo-community--HunyuanVideo/snapshots/e8c2aaa66fe3742a32c11a6766aecbf07c56e773/tokenizer/tokenizer_config.json +2096 -0
  39. hf_download/hub/models--hunyuanvideo-community--HunyuanVideo/snapshots/e8c2aaa66fe3742a32c11a6766aecbf07c56e773/tokenizer_2/merges.txt +0 -0
  40. hf_download/hub/models--hunyuanvideo-community--HunyuanVideo/snapshots/e8c2aaa66fe3742a32c11a6766aecbf07c56e773/tokenizer_2/special_tokens_map.json +30 -0
  41. hf_download/hub/models--hunyuanvideo-community--HunyuanVideo/snapshots/e8c2aaa66fe3742a32c11a6766aecbf07c56e773/tokenizer_2/tokenizer_config.json +31 -0
  42. hf_download/hub/models--hunyuanvideo-community--HunyuanVideo/snapshots/e8c2aaa66fe3742a32c11a6766aecbf07c56e773/vae/config.json +32 -0
  43. hf_download/hub/models--lllyasviel--flux_redux_bfl/refs/main +1 -0
  44. kaggle.json +1 -0
  45. locales/i18n.py +126 -0
  46. locales/ja.json +0 -0
  47. locales/ru.json +0 -0
  48. locales/zh-tw.json +0 -0
  49. lora_utils/__init__.py +46 -0
  50. lora_utils/dynamic_swap_lora.py +76 -0
=0.14.0 ADDED
@@ -0,0 +1,242 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ Collecting transformers>=4.43.0
2
+ Downloading transformers-4.53.1-py3-none-any.whl.metadata (40 kB)
3
+ ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 40.9/40.9 kB 1.1 MB/s eta 0:00:00
4
+ Collecting accelerate>=0.30.0
5
+ Downloading accelerate-1.8.1-py3-none-any.whl.metadata (19 kB)
6
+ Collecting bitsandbytes>=0.46.1
7
+ Downloading bitsandbytes-0.46.1-py3-none-manylinux_2_24_x86_64.whl.metadata (10 kB)
8
+ Collecting peft>=0.11.1
9
+ Downloading peft-0.16.0-py3-none-any.whl.metadata (14 kB)
10
+ Collecting datasets
11
+ Downloading datasets-3.6.0-py3-none-any.whl.metadata (19 kB)
12
+ Collecting wandb
13
+ Downloading wandb-0.21.0-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (10 kB)
14
+ Collecting deepspeed
15
+ Downloading deepspeed-0.17.1.tar.gz (1.5 MB)
16
+ ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 1.5/1.5 MB 51.0 MB/s eta 0:00:00
17
+ Preparing metadata (setup.py): started
18
+ Preparing metadata (setup.py): finished with status 'done'
19
+ Collecting huggingface_hub[cli]
20
+ Downloading huggingface_hub-0.33.2-py3-none-any.whl.metadata (14 kB)
21
+ Requirement already satisfied: filelock in /usr/local/lib/python3.10/dist-packages (from transformers>=4.43.0) (3.9.0)
22
+ Requirement already satisfied: numpy>=1.17 in /usr/local/lib/python3.10/dist-packages (from transformers>=4.43.0) (1.24.1)
23
+ Requirement already satisfied: packaging>=20.0 in /usr/local/lib/python3.10/dist-packages (from transformers>=4.43.0) (25.0)
24
+ Requirement already satisfied: pyyaml>=5.1 in /usr/local/lib/python3.10/dist-packages (from transformers>=4.43.0) (6.0.1)
25
+ Collecting regex!=2019.12.17 (from transformers>=4.43.0)
26
+ Downloading regex-2024.11.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (40 kB)
27
+ ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 40.5/40.5 kB 159.3 MB/s eta 0:00:00
28
+ Requirement already satisfied: requests in /usr/local/lib/python3.10/dist-packages (from transformers>=4.43.0) (2.31.0)
29
+ Collecting tokenizers<0.22,>=0.21 (from transformers>=4.43.0)
30
+ Downloading tokenizers-0.21.2-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (6.8 kB)
31
+ Collecting safetensors>=0.4.3 (from transformers>=4.43.0)
32
+ Downloading safetensors-0.5.3-cp38-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (3.8 kB)
33
+ Collecting tqdm>=4.27 (from transformers>=4.43.0)
34
+ Downloading tqdm-4.67.1-py3-none-any.whl.metadata (57 kB)
35
+ ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 57.7/57.7 kB 241.5 MB/s eta 0:00:00
36
+ Requirement already satisfied: psutil in /usr/local/lib/python3.10/dist-packages (from accelerate>=0.30.0) (5.9.6)
37
+ Requirement already satisfied: torch>=2.0.0 in /usr/local/lib/python3.10/dist-packages (from accelerate>=0.30.0) (2.3.0+cu121)
38
+ Collecting pyarrow>=15.0.0 (from datasets)
39
+ Downloading pyarrow-20.0.0-cp310-cp310-manylinux_2_28_x86_64.whl.metadata (3.3 kB)
40
+ Collecting dill<0.3.9,>=0.3.0 (from datasets)
41
+ Downloading dill-0.3.8-py3-none-any.whl.metadata (10 kB)
42
+ Collecting pandas (from datasets)
43
+ Downloading pandas-2.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (91 kB)
44
+ ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 91.2/91.2 kB 326.0 MB/s eta 0:00:00
45
+ Collecting requests (from transformers>=4.43.0)
46
+ Downloading requests-2.32.4-py3-none-any.whl.metadata (4.9 kB)
47
+ Collecting xxhash (from datasets)
48
+ Downloading xxhash-3.5.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (12 kB)
49
+ Collecting multiprocess<0.70.17 (from datasets)
50
+ Downloading multiprocess-0.70.16-py310-none-any.whl.metadata (7.2 kB)
51
+ Requirement already satisfied: fsspec<=2025.3.0,>=2023.1.0 in /usr/local/lib/python3.10/dist-packages (from fsspec[http]<=2025.3.0,>=2023.1.0->datasets) (2023.4.0)
52
+ Collecting click!=8.0.0,>=7.1 (from wandb)
53
+ Downloading click-8.2.1-py3-none-any.whl.metadata (2.5 kB)
54
+ Collecting gitpython!=3.1.29,>=1.0.0 (from wandb)
55
+ Downloading GitPython-3.1.44-py3-none-any.whl.metadata (13 kB)
56
+ Requirement already satisfied: platformdirs in /usr/local/lib/python3.10/dist-packages (from wandb) (3.11.0)
57
+ Collecting protobuf!=4.21.0,!=5.28.0,<7,>=3.19.0 (from wandb)
58
+ Downloading protobuf-6.31.1-cp39-abi3-manylinux2014_x86_64.whl.metadata (593 bytes)
59
+ Collecting pydantic<3 (from wandb)
60
+ Downloading pydantic-2.11.7-py3-none-any.whl.metadata (67 kB)
61
+ ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 68.0/68.0 kB 247.4 MB/s eta 0:00:00
62
+ Collecting sentry-sdk>=2.0.0 (from wandb)
63
+ Downloading sentry_sdk-2.32.0-py2.py3-none-any.whl.metadata (10 kB)
64
+ Requirement already satisfied: typing-extensions<5,>=4.8 in /usr/local/lib/python3.10/dist-packages (from wandb) (4.14.1)
65
+ Collecting einops (from deepspeed)
66
+ Downloading einops-0.8.1-py3-none-any.whl.metadata (13 kB)
67
+ Collecting hjson (from deepspeed)
68
+ Downloading hjson-3.1.0-py3-none-any.whl.metadata (2.6 kB)
69
+ Collecting msgpack (from deepspeed)
70
+ Downloading msgpack-1.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (8.4 kB)
71
+ Requirement already satisfied: ninja in /usr/local/lib/python3.10/dist-packages (from deepspeed) (1.11.1.4)
72
+ Collecting py-cpuinfo (from deepspeed)
73
+ Downloading py_cpuinfo-9.0.0-py3-none-any.whl.metadata (794 bytes)
74
+ Collecting nvidia-ml-py (from deepspeed)
75
+ Downloading nvidia_ml_py-12.575.51-py3-none-any.whl.metadata (9.3 kB)
76
+ Collecting fsspec<=2025.3.0,>=2023.1.0 (from fsspec[http]<=2025.3.0,>=2023.1.0->datasets)
77
+ Downloading fsspec-2025.3.0-py3-none-any.whl.metadata (11 kB)
78
+ Collecting hf-xet<2.0.0,>=1.1.2 (from huggingface_hub[cli])
79
+ Downloading hf_xet-1.1.5-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (879 bytes)
80
+ Collecting InquirerPy==0.3.4 (from huggingface_hub[cli])
81
+ Downloading InquirerPy-0.3.4-py3-none-any.whl.metadata (8.1 kB)
82
+ Collecting pfzy<0.4.0,>=0.3.1 (from InquirerPy==0.3.4->huggingface_hub[cli])
83
+ Downloading pfzy-0.3.4-py3-none-any.whl.metadata (4.9 kB)
84
+ Requirement already satisfied: prompt-toolkit<4.0.0,>=3.0.1 in /usr/local/lib/python3.10/dist-packages (from InquirerPy==0.3.4->huggingface_hub[cli]) (3.0.39)
85
+ INFO: pip is looking at multiple versions of fsspec[http] to determine which version is compatible with other requirements. This could take a while.
86
+ Collecting aiohttp!=4.0.0a0,!=4.0.0a1 (from fsspec[http]<=2025.3.0,>=2023.1.0->datasets)
87
+ Downloading aiohttp-3.12.13-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (7.6 kB)
88
+ Collecting gitdb<5,>=4.0.1 (from gitpython!=3.1.29,>=1.0.0->wandb)
89
+ Downloading gitdb-4.0.12-py3-none-any.whl.metadata (1.2 kB)
90
+ Collecting annotated-types>=0.6.0 (from pydantic<3->wandb)
91
+ Downloading annotated_types-0.7.0-py3-none-any.whl.metadata (15 kB)
92
+ Collecting pydantic-core==2.33.2 (from pydantic<3->wandb)
93
+ Downloading pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (6.8 kB)
94
+ Collecting typing-inspection>=0.4.0 (from pydantic<3->wandb)
95
+ Downloading typing_inspection-0.4.1-py3-none-any.whl.metadata (2.6 kB)
96
+ Requirement already satisfied: charset_normalizer<4,>=2 in /usr/local/lib/python3.10/dist-packages (from requests->transformers>=4.43.0) (2.1.1)
97
+ Requirement already satisfied: idna<4,>=2.5 in /usr/local/lib/python3.10/dist-packages (from requests->transformers>=4.43.0) (3.4)
98
+ Requirement already satisfied: urllib3<3,>=1.21.1 in /usr/local/lib/python3.10/dist-packages (from requests->transformers>=4.43.0) (1.26.13)
99
+ Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.10/dist-packages (from requests->transformers>=4.43.0) (2022.12.7)
100
+ Requirement already satisfied: sympy in /usr/local/lib/python3.10/dist-packages (from torch>=2.0.0->accelerate>=0.30.0) (1.12)
101
+ Requirement already satisfied: networkx in /usr/local/lib/python3.10/dist-packages (from torch>=2.0.0->accelerate>=0.30.0) (3.0)
102
+ Requirement already satisfied: jinja2 in /usr/local/lib/python3.10/dist-packages (from torch>=2.0.0->accelerate>=0.30.0) (3.1.2)
103
+ Requirement already satisfied: nvidia-cuda-nvrtc-cu12==12.1.105 in /usr/local/lib/python3.10/dist-packages (from torch>=2.0.0->accelerate>=0.30.0) (12.1.105)
104
+ Requirement already satisfied: nvidia-cuda-runtime-cu12==12.1.105 in /usr/local/lib/python3.10/dist-packages (from torch>=2.0.0->accelerate>=0.30.0) (12.1.105)
105
+ Requirement already satisfied: nvidia-cuda-cupti-cu12==12.1.105 in /usr/local/lib/python3.10/dist-packages (from torch>=2.0.0->accelerate>=0.30.0) (12.1.105)
106
+ Requirement already satisfied: nvidia-cudnn-cu12==8.9.2.26 in /usr/local/lib/python3.10/dist-packages (from torch>=2.0.0->accelerate>=0.30.0) (8.9.2.26)
107
+ Requirement already satisfied: nvidia-cublas-cu12==12.1.3.1 in /usr/local/lib/python3.10/dist-packages (from torch>=2.0.0->accelerate>=0.30.0) (12.1.3.1)
108
+ Requirement already satisfied: nvidia-cufft-cu12==11.0.2.54 in /usr/local/lib/python3.10/dist-packages (from torch>=2.0.0->accelerate>=0.30.0) (11.0.2.54)
109
+ Requirement already satisfied: nvidia-curand-cu12==10.3.2.106 in /usr/local/lib/python3.10/dist-packages (from torch>=2.0.0->accelerate>=0.30.0) (10.3.2.106)
110
+ Requirement already satisfied: nvidia-cusolver-cu12==11.4.5.107 in /usr/local/lib/python3.10/dist-packages (from torch>=2.0.0->accelerate>=0.30.0) (11.4.5.107)
111
+ Requirement already satisfied: nvidia-cusparse-cu12==12.1.0.106 in /usr/local/lib/python3.10/dist-packages (from torch>=2.0.0->accelerate>=0.30.0) (12.1.0.106)
112
+ Requirement already satisfied: nvidia-nccl-cu12==2.20.5 in /usr/local/lib/python3.10/dist-packages (from torch>=2.0.0->accelerate>=0.30.0) (2.20.5)
113
+ Requirement already satisfied: nvidia-nvtx-cu12==12.1.105 in /usr/local/lib/python3.10/dist-packages (from torch>=2.0.0->accelerate>=0.30.0) (12.1.105)
114
+ Requirement already satisfied: triton==2.3.0 in /usr/local/lib/python3.10/dist-packages (from torch>=2.0.0->accelerate>=0.30.0) (2.3.0)
115
+ Requirement already satisfied: nvidia-nvjitlink-cu12 in /usr/local/lib/python3.10/dist-packages (from nvidia-cusolver-cu12==11.4.5.107->torch>=2.0.0->accelerate>=0.30.0) (12.9.86)
116
+ Requirement already satisfied: python-dateutil>=2.8.2 in /usr/local/lib/python3.10/dist-packages (from pandas->datasets) (2.8.2)
117
+ Collecting pytz>=2020.1 (from pandas->datasets)
118
+ Downloading pytz-2025.2-py2.py3-none-any.whl.metadata (22 kB)
119
+ Collecting tzdata>=2022.7 (from pandas->datasets)
120
+ Downloading tzdata-2025.2-py2.py3-none-any.whl.metadata (1.4 kB)
121
+ Collecting aiohappyeyeballs>=2.5.0 (from aiohttp!=4.0.0a0,!=4.0.0a1->fsspec[http]<=2025.3.0,>=2023.1.0->datasets)
122
+ Downloading aiohappyeyeballs-2.6.1-py3-none-any.whl.metadata (5.9 kB)
123
+ Collecting aiosignal>=1.1.2 (from aiohttp!=4.0.0a0,!=4.0.0a1->fsspec[http]<=2025.3.0,>=2023.1.0->datasets)
124
+ Downloading aiosignal-1.4.0-py3-none-any.whl.metadata (3.7 kB)
125
+ Collecting async-timeout<6.0,>=4.0 (from aiohttp!=4.0.0a0,!=4.0.0a1->fsspec[http]<=2025.3.0,>=2023.1.0->datasets)
126
+ Downloading async_timeout-5.0.1-py3-none-any.whl.metadata (5.1 kB)
127
+ Requirement already satisfied: attrs>=17.3.0 in /usr/local/lib/python3.10/dist-packages (from aiohttp!=4.0.0a0,!=4.0.0a1->fsspec[http]<=2025.3.0,>=2023.1.0->datasets) (23.1.0)
128
+ Collecting frozenlist>=1.1.1 (from aiohttp!=4.0.0a0,!=4.0.0a1->fsspec[http]<=2025.3.0,>=2023.1.0->datasets)
129
+ Downloading frozenlist-1.7.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (18 kB)
130
+ Collecting multidict<7.0,>=4.5 (from aiohttp!=4.0.0a0,!=4.0.0a1->fsspec[http]<=2025.3.0,>=2023.1.0->datasets)
131
+ Downloading multidict-6.6.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl.metadata (5.3 kB)
132
+ Collecting propcache>=0.2.0 (from aiohttp!=4.0.0a0,!=4.0.0a1->fsspec[http]<=2025.3.0,>=2023.1.0->datasets)
133
+ Downloading propcache-0.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (12 kB)
134
+ Collecting yarl<2.0,>=1.17.0 (from aiohttp!=4.0.0a0,!=4.0.0a1->fsspec[http]<=2025.3.0,>=2023.1.0->datasets)
135
+ Downloading yarl-1.20.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (73 kB)
136
+ ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 73.9/73.9 kB 504.9 MB/s eta 0:00:00
137
+ Collecting smmap<6,>=3.0.1 (from gitdb<5,>=4.0.1->gitpython!=3.1.29,>=1.0.0->wandb)
138
+ Downloading smmap-5.0.2-py3-none-any.whl.metadata (4.3 kB)
139
+ Requirement already satisfied: wcwidth in /usr/local/lib/python3.10/dist-packages (from prompt-toolkit<4.0.0,>=3.0.1->InquirerPy==0.3.4->huggingface_hub[cli]) (0.2.9)
140
+ Requirement already satisfied: six>=1.5 in /usr/lib/python3/dist-packages (from python-dateutil>=2.8.2->pandas->datasets) (1.16.0)
141
+ Requirement already satisfied: MarkupSafe>=2.0 in /usr/local/lib/python3.10/dist-packages (from jinja2->torch>=2.0.0->accelerate>=0.30.0) (2.1.2)
142
+ Requirement already satisfied: mpmath>=0.19 in /usr/local/lib/python3.10/dist-packages (from sympy->torch>=2.0.0->accelerate>=0.30.0) (1.3.0)
143
+ Downloading transformers-4.53.1-py3-none-any.whl (10.8 MB)
144
+ ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 10.8/10.8 MB 266.3 MB/s eta 0:00:00
145
+ Downloading accelerate-1.8.1-py3-none-any.whl (365 kB)
146
+ ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 365.3/365.3 kB 415.9 MB/s eta 0:00:00
147
+ Downloading bitsandbytes-0.46.1-py3-none-manylinux_2_24_x86_64.whl (72.9 MB)
148
+ ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 72.9/72.9 MB 214.8 MB/s eta 0:00:00
149
+ Downloading peft-0.16.0-py3-none-any.whl (472 kB)
150
+ ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 472.3/472.3 kB 375.6 MB/s eta 0:00:00
151
+ Downloading datasets-3.6.0-py3-none-any.whl (491 kB)
152
+ ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 491.5/491.5 kB 359.3 MB/s eta 0:00:00
153
+ Downloading wandb-0.21.0-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (22.2 MB)
154
+ ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 22.2/22.2 MB 368.4 MB/s eta 0:00:00
155
+ Downloading InquirerPy-0.3.4-py3-none-any.whl (67 kB)
156
+ ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 67.7/67.7 kB 271.4 MB/s eta 0:00:00
157
+ Downloading click-8.2.1-py3-none-any.whl (102 kB)
158
+ ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 102.2/102.2 kB 318.2 MB/s eta 0:00:00
159
+ Downloading dill-0.3.8-py3-none-any.whl (116 kB)
160
+ ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 116.3/116.3 kB 311.4 MB/s eta 0:00:00
161
+ Downloading fsspec-2025.3.0-py3-none-any.whl (193 kB)
162
+ ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 193.6/193.6 kB 349.3 MB/s eta 0:00:00
163
+ Downloading GitPython-3.1.44-py3-none-any.whl (207 kB)
164
+ ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 207.6/207.6 kB 311.2 MB/s eta 0:00:00
165
+ Downloading hf_xet-1.1.5-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (3.1 MB)
166
+ ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 3.1/3.1 MB 314.8 MB/s eta 0:00:00
167
+ Downloading huggingface_hub-0.33.2-py3-none-any.whl (515 kB)
168
+ ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 515.4/515.4 kB 380.0 MB/s eta 0:00:00
169
+ Downloading multiprocess-0.70.16-py310-none-any.whl (134 kB)
170
+ ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 134.8/134.8 kB 308.2 MB/s eta 0:00:00
171
+ Downloading protobuf-6.31.1-cp39-abi3-manylinux2014_x86_64.whl (321 kB)
172
+ ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 321.1/321.1 kB 359.8 MB/s eta 0:00:00
173
+ Downloading pyarrow-20.0.0-cp310-cp310-manylinux_2_28_x86_64.whl (42.3 MB)
174
+ ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 42.3/42.3 MB 330.5 MB/s eta 0:00:00
175
+ Downloading pydantic-2.11.7-py3-none-any.whl (444 kB)
176
+ ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 444.8/444.8 kB 379.9 MB/s eta 0:00:00
177
+ Downloading pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (2.0 MB)
178
+ ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 2.0/2.0 MB 396.4 MB/s eta 0:00:00
179
+ Downloading regex-2024.11.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (781 kB)
180
+ ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 781.7/781.7 kB 378.0 MB/s eta 0:00:00
181
+ Downloading requests-2.32.4-py3-none-any.whl (64 kB)
182
+ ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 64.8/64.8 kB 254.9 MB/s eta 0:00:00
183
+ Downloading safetensors-0.5.3-cp38-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (471 kB)
184
+ ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 471.6/471.6 kB 349.4 MB/s eta 0:00:00
185
+ Downloading sentry_sdk-2.32.0-py2.py3-none-any.whl (356 kB)
186
+ ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 356.1/356.1 kB 347.2 MB/s eta 0:00:00
187
+ Downloading tokenizers-0.21.2-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (3.1 MB)
188
+ ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 3.1/3.1 MB 283.2 MB/s eta 0:00:00
189
+ Downloading tqdm-4.67.1-py3-none-any.whl (78 kB)
190
+ ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 78.5/78.5 kB 275.4 MB/s eta 0:00:00
191
+ Downloading einops-0.8.1-py3-none-any.whl (64 kB)
192
+ ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 64.4/64.4 kB 257.4 MB/s eta 0:00:00
193
+ Downloading hjson-3.1.0-py3-none-any.whl (54 kB)
194
+ ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 54.0/54.0 kB 238.2 MB/s eta 0:00:00
195
+ Downloading msgpack-1.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (408 kB)
196
+ ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 408.6/408.6 kB 341.6 MB/s eta 0:00:00
197
+ Downloading nvidia_ml_py-12.575.51-py3-none-any.whl (47 kB)
198
+ ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 47.5/47.5 kB 216.1 MB/s eta 0:00:00
199
+ Downloading pandas-2.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (12.3 MB)
200
+ ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 12.3/12.3 MB 355.1 MB/s eta 0:00:00
201
+ Downloading py_cpuinfo-9.0.0-py3-none-any.whl (22 kB)
202
+ Downloading xxhash-3.5.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (194 kB)
203
+ ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 194.1/194.1 kB 334.7 MB/s eta 0:00:00
204
+ Downloading aiohttp-3.12.13-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (1.6 MB)
205
+ ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 1.6/1.6 MB 291.8 MB/s eta 0:00:00
206
+ Downloading annotated_types-0.7.0-py3-none-any.whl (13 kB)
207
+ Downloading gitdb-4.0.12-py3-none-any.whl (62 kB)
208
+ ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 62.8/62.8 kB 215.9 MB/s eta 0:00:00
209
+ Downloading pfzy-0.3.4-py3-none-any.whl (8.5 kB)
210
+ Downloading pytz-2025.2-py2.py3-none-any.whl (509 kB)
211
+ ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 509.2/509.2 kB 291.5 MB/s eta 0:00:00
212
+ Downloading typing_inspection-0.4.1-py3-none-any.whl (14 kB)
213
+ Downloading tzdata-2025.2-py2.py3-none-any.whl (347 kB)
214
+ ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 347.8/347.8 kB 275.1 MB/s eta 0:00:00
215
+ Downloading aiohappyeyeballs-2.6.1-py3-none-any.whl (15 kB)
216
+ Downloading aiosignal-1.4.0-py3-none-any.whl (7.5 kB)
217
+ Downloading async_timeout-5.0.1-py3-none-any.whl (6.2 kB)
218
+ Downloading frozenlist-1.7.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl (222 kB)
219
+ ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 222.9/222.9 kB 286.8 MB/s eta 0:00:00
220
+ Downloading multidict-6.6.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl (241 kB)
221
+ ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 241.6/241.6 kB 289.6 MB/s eta 0:00:00
222
+ Downloading propcache-0.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (198 kB)
223
+ ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 198.3/198.3 kB 266.9 MB/s eta 0:00:00
224
+ Downloading smmap-5.0.2-py3-none-any.whl (24 kB)
225
+ Downloading yarl-1.20.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (326 kB)
226
+ ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 326.1/326.1 kB 298.7 MB/s eta 0:00:00
227
+ Building wheels for collected packages: deepspeed
228
+ Building wheel for deepspeed (setup.py): started
229
+ Building wheel for deepspeed (setup.py): finished with status 'done'
230
+ Created wheel for deepspeed: filename=deepspeed-0.17.1-py3-none-any.whl size=1690870 sha256=5e1018515c11b4c8a1f418015d09d5a2ba27b51dc6d03ffd3ba89ba7d5dd6ae4
231
+ Stored in directory: /tmp/pip-ephem-wheel-cache-gx5xjgn8/wheels/4e/95/62/b46aa29e5aad38d5188313030fa9e911ddc0ef8e89642d7eb0
232
+ Successfully built deepspeed
233
+ Installing collected packages: pytz, py-cpuinfo, nvidia-ml-py, hjson, xxhash, tzdata, typing-inspection, tqdm, smmap, sentry-sdk, safetensors, requests, regex, pydantic-core, pyarrow, protobuf, propcache, pfzy, multidict, msgpack, hf-xet, fsspec, frozenlist, einops, dill, click, async-timeout, annotated-types, aiohappyeyeballs, yarl, pydantic, pandas, multiprocess, InquirerPy, huggingface_hub, gitdb, aiosignal, tokenizers, gitpython, aiohttp, wandb, transformers, deepspeed, bitsandbytes, accelerate, peft, datasets
234
+ Attempting uninstall: requests
235
+ Found existing installation: requests 2.31.0
236
+ Uninstalling requests-2.31.0:
237
+ Successfully uninstalled requests-2.31.0
238
+ Attempting uninstall: fsspec
239
+ Found existing installation: fsspec 2023.4.0
240
+ Uninstalling fsspec-2023.4.0:
241
+ Successfully uninstalled fsspec-2023.4.0
242
+ Successfully installed InquirerPy-0.3.4 accelerate-1.8.1 aiohappyeyeballs-2.6.1 aiohttp-3.12.13 aiosignal-1.4.0 annotated-types-0.7.0 async-timeout-5.0.1 bitsandbytes-0.46.1 click-8.2.1 datasets-3.6.0 deepspeed-0.17.1 dill-0.3.8 einops-0.8.1 frozenlist-1.7.0 fsspec-2025.3.0 gitdb-4.0.12 gitpython-3.1.44 hf-xet-1.1.5 hjson-3.1.0 huggingface_hub-0.33.2 msgpack-1.1.1 multidict-6.6.3 multiprocess-0.70.16 nvidia-ml-py-12.575.51 pandas-2.3.0 peft-0.16.0 pfzy-0.3.4 propcache-0.3.2 protobuf-6.31.1 py-cpuinfo-9.0.0 pyarrow-20.0.0 pydantic-2.11.7 pydantic-core-2.33.2 pytz-2025.2 regex-2024.11.6 requests-2.32.4 safetensors-0.5.3 sentry-sdk-2.32.0 smmap-5.0.2 tokenizers-0.21.2 tqdm-4.67.1 transformers-4.53.1 typing-inspection-0.4.1 tzdata-2025.2 wandb-0.21.0 xxhash-3.5.0 yarl-1.20.1
DeepSeek-V3-AWQ/.gitattributes ADDED
@@ -0,0 +1,36 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ *.7z filter=lfs diff=lfs merge=lfs -text
2
+ *.arrow filter=lfs diff=lfs merge=lfs -text
3
+ *.bin filter=lfs diff=lfs merge=lfs -text
4
+ *.bz2 filter=lfs diff=lfs merge=lfs -text
5
+ *.ckpt filter=lfs diff=lfs merge=lfs -text
6
+ *.ftz filter=lfs diff=lfs merge=lfs -text
7
+ *.gz filter=lfs diff=lfs merge=lfs -text
8
+ *.h5 filter=lfs diff=lfs merge=lfs -text
9
+ *.joblib filter=lfs diff=lfs merge=lfs -text
10
+ *.lfs.* filter=lfs diff=lfs merge=lfs -text
11
+ *.mlmodel filter=lfs diff=lfs merge=lfs -text
12
+ *.model filter=lfs diff=lfs merge=lfs -text
13
+ *.msgpack filter=lfs diff=lfs merge=lfs -text
14
+ *.npy filter=lfs diff=lfs merge=lfs -text
15
+ *.npz filter=lfs diff=lfs merge=lfs -text
16
+ *.onnx filter=lfs diff=lfs merge=lfs -text
17
+ *.ot filter=lfs diff=lfs merge=lfs -text
18
+ *.parquet filter=lfs diff=lfs merge=lfs -text
19
+ *.pb filter=lfs diff=lfs merge=lfs -text
20
+ *.pickle filter=lfs diff=lfs merge=lfs -text
21
+ *.pkl filter=lfs diff=lfs merge=lfs -text
22
+ *.pt filter=lfs diff=lfs merge=lfs -text
23
+ *.pth filter=lfs diff=lfs merge=lfs -text
24
+ *.rar filter=lfs diff=lfs merge=lfs -text
25
+ *.safetensors filter=lfs diff=lfs merge=lfs -text
26
+ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
27
+ *.tar.* filter=lfs diff=lfs merge=lfs -text
28
+ *.tar filter=lfs diff=lfs merge=lfs -text
29
+ *.tflite filter=lfs diff=lfs merge=lfs -text
30
+ *.tgz filter=lfs diff=lfs merge=lfs -text
31
+ *.wasm filter=lfs diff=lfs merge=lfs -text
32
+ *.xz filter=lfs diff=lfs merge=lfs -text
33
+ *.zip filter=lfs diff=lfs merge=lfs -text
34
+ *.zst filter=lfs diff=lfs merge=lfs -text
35
+ *tfevents* filter=lfs diff=lfs merge=lfs -text
36
+ model.safetensors.index.json filter=lfs diff=lfs merge=lfs -text
DeepSeek-V3-AWQ/README.md ADDED
@@ -0,0 +1,37 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ ---
2
+ license: mit
3
+ language:
4
+ - en
5
+ - zh
6
+ base_model:
7
+ - deepseek-ai/DeepSeek-V3
8
+ pipeline_tag: text-generation
9
+ library_name: transformers
10
+ ---
11
+ # DeepSeek V3 AWQ
12
+ AWQ of DeepSeek V3.
13
+
14
+ Quantized by [Eric Hartford](https://huggingface.co/ehartford) and [v2ray](https://huggingface.co/v2ray).
15
+
16
+ This quant modified some of the model code to fix an overflow issue when using float16.
17
+
18
+ To serve using vLLM with 8x 80GB GPUs, use the following command:
19
+ ```sh
20
+ VLLM_USE_V1=0 VLLM_WORKER_MULTIPROC_METHOD=spawn VLLM_MARLIN_USE_ATOMIC_ADD=1 python -m vllm.entrypoints.openai.api_server --host 0.0.0.0 --port 12345 --max-model-len 65536 --max-seq-len-to-capture 65536 --enable-chunked-prefill --enable-prefix-caching --trust-remote-code --tensor-parallel-size 8 --gpu-memory-utilization 0.95 --served-model-name deepseek-chat --model cognitivecomputations/DeepSeek-V3-AWQ
21
+ ```
22
+ You can download the wheel I built for PyTorch 2.6, Python 3.12 by clicking [here](https://huggingface.co/x2ray/wheels/resolve/main/vllm-0.8.3.dev250%2Bg10afedcfd.cu128-cp312-cp312-linux_x86_64.whl), the benchmark below was done with this wheel, it contains [2 PR merges](https://github.com/vllm-project/vllm/issues?q=is%3Apr+is%3Aopen+author%3Ajinzhen-lin) and an unoptimized FlashMLA (still faster than Triton) for A100 which boosted performance a lot. The vLLM repo which contained A100 FlashMLA can be found at [LagPixelLOL/vllm@sm80_flashmla](https://github.com/LagPixelLOL/vllm/tree/sm80_flashmla), which is a fork of [vllm-project/vllm](https://github.com/vllm-project/vllm). The A100 FlashMLA it used is based on [LagPixelLOL/FlashMLA@vllm](https://github.com/LagPixelLOL/FlashMLA/tree/vllm), which is a fork of [pzhao-eng/FlashMLA](https://github.com/pzhao-eng/FlashMLA).
23
+
24
+ ## TPS Per Request
25
+ | GPU \ Batch Input Output | B: 1 I: 2 O: 2K | B: 32 I: 4K O: 256 | B: 1 I: 63K O: 2K | Prefill |
26
+ |:-:|:-:|:-:|:-:|:-:|
27
+ | **8x H100/H200** | 61.5 | 30.1 | 54.3 | 4732.2 |
28
+ | **4x H200** | 58.4 | 19.8 | 53.7 | 2653.1 |
29
+ | **8x A100 80GB** | 46.8 | 12.8 | 30.4 | 2442.4 |
30
+ | **8x L40S** | 46.3 | OOM | OOM | 688.5 |
31
+
32
+ Note:
33
+ - The A100 config uses an unoptimized FlashMLA implementation, which is only superior to Triton during high-context inference; it would be faster if it were optimized.
34
+ - The L40S config doesn't support FlashMLA, so the Triton implementation is used, which makes it extremely slow with high context. However, the L40S doesn't have much VRAM, so it can't hold much context anyway, and it also lacks fast GPU-to-GPU interconnect bandwidth, making it even slower. Serving with this config is not recommended; if you do, you must limit the context to <= 4096, set `--gpu-memory-utilization` to 0.98, and `--max-num-seqs` to 4.
35
+ - All types of GPU used during benchmark are SXM form factor except L40S.
36
+ - Inference speed will be better than FP8 at low batch size but worse than FP8 at high batch size, this is the nature of low bit quantization.
37
+ - vLLM supports MLA for AWQ now, you can run this model with full context length on just 8x 80GB GPUs.
DeepSeek-V3-AWQ/configuration_deepseek.py ADDED
@@ -0,0 +1,210 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
from transformers.configuration_utils import PretrainedConfig
from transformers.utils import logging

logger = logging.get_logger(__name__)

# Kept for compatibility with the transformers config-archive convention;
# no hub-hosted configuration URLs are registered for this model.
DEEPSEEK_PRETRAINED_CONFIG_ARCHIVE_MAP = {}
7
class DeepseekV3Config(PretrainedConfig):
    r"""
    This is the configuration class to store the configuration of a [`DeepseekV3Model`]. It is used to instantiate a DeepSeek
    model according to the specified arguments, defining the model architecture. Instantiating a configuration with the
    defaults will yield a similar configuration to that of the DeepSeek-V3.

    Configuration objects inherit from [`PretrainedConfig`] and can be used to control the model outputs. Read the
    documentation from [`PretrainedConfig`] for more information.


    Args:
        vocab_size (`int`, *optional*, defaults to 129280):
            Vocabulary size of the DeepSeek model. Defines the number of different tokens that can be represented by the
            `inputs_ids` passed when calling [`DeepseekV3Model`]
        hidden_size (`int`, *optional*, defaults to 7168):
            Dimension of the hidden representations.
        intermediate_size (`int`, *optional*, defaults to 18432):
            Dimension of the MLP representations.
        moe_intermediate_size (`int`, *optional*, defaults to 2048):
            Dimension of the MoE representations.
        num_hidden_layers (`int`, *optional*, defaults to 61):
            Number of hidden layers in the Transformer decoder.
        num_nextn_predict_layers (`int`, *optional*, defaults to 1):
            Number of next-n predict (MTP) layers in the DeepSeekV3 Model.
        num_attention_heads (`int`, *optional*, defaults to 128):
            Number of attention heads for each attention layer in the Transformer decoder.
        num_key_value_heads (`int`, *optional*, defaults to 128):
            This is the number of key_value heads that should be used to implement Grouped Query Attention. If
            `num_key_value_heads=num_attention_heads`, the model will use Multi Head Attention (MHA), if
            `num_key_value_heads=1` the model will use Multi Query Attention (MQA) otherwise GQA is used. When
            converting a multi-head checkpoint to a GQA checkpoint, each group key and value head should be constructed
            by meanpooling all the original heads within that group. For more details checkout [this
            paper](https://arxiv.org/pdf/2305.13245.pdf). If it is not specified, will default to
            `num_attention_heads`.
        n_shared_experts (`int`, *optional*, defaults to 1):
            Number of shared experts, None means dense model.
        n_routed_experts (`int`, *optional*, defaults to 256):
            Number of routed experts, None means dense model.
        ep_size (`int`, *optional*, defaults to 1):
            Expert-parallel world size used to shard the routed experts.
        routed_scaling_factor (`float`, *optional*, defaults to 2.5):
            Scaling factor for routed experts.
        kv_lora_rank (`int`, *optional*, defaults to 512):
            Rank of the low-rank key/value joint compression in MLA (multi-head latent attention).
        q_lora_rank (`int`, *optional*, defaults to 1536):
            Rank of the low-rank query compression in MLA.
        qk_rope_head_dim (`int`, *optional*, defaults to 64):
            Per-head dimension of the rotary (position-carrying) part of the query/key vectors.
        v_head_dim (`int`, *optional*, defaults to 128):
            Per-head dimension of the value vectors.
        qk_nope_head_dim (`int`, *optional*, defaults to 128):
            Per-head dimension of the non-rotary part of the query/key vectors.
        topk_method (`str`, *optional*, defaults to `'noaux_tc'`):
            Top-k method used in the routed gate (e.g. `'greedy'` or `'noaux_tc'`).
        n_group (`int`, *optional*, defaults to 8):
            Number of groups for routed experts.
        topk_group (`int`, *optional*, defaults to 4):
            Number of selected groups for each token(for each token, ensuring the selected experts is only within `topk_group` groups).
        num_experts_per_tok (`int`, *optional*, defaults to 8):
            Number of selected experts, None means dense model.
        moe_layer_freq (`int`, *optional*, defaults to 1):
            The frequency of the MoE layer: one expert layer for every `moe_layer_freq - 1` dense layers.
        first_k_dense_replace (`int`, *optional*, defaults to 3):
            Number of dense layers in shallow layers(embed->dense->dense->...->dense->moe->moe...->lm_head).
                                                            \--k dense layers--/
        norm_topk_prob (`bool`, *optional*, defaults to `True`):
            Whether to normalize the weights of the routed experts.
        scoring_func (`str`, *optional*, defaults to `'sigmoid'`):
            Method of computing expert weights.
        aux_loss_alpha (`float`, *optional*, defaults to 0.001):
            Auxiliary loss weight coefficient.
        seq_aux (`bool`, *optional*, defaults to `True`):
            Whether to compute the auxiliary loss for each individual sample.
        hidden_act (`str` or `function`, *optional*, defaults to `"silu"`):
            The non-linear activation function (function or string) in the decoder.
        max_position_embeddings (`int`, *optional*, defaults to 4096):
            The maximum sequence length that this model might ever be used with.
        initializer_range (`float`, *optional*, defaults to 0.02):
            The standard deviation of the truncated_normal_initializer for initializing all weight matrices.
        rms_norm_eps (`float`, *optional*, defaults to 1e-06):
            The epsilon used by the rms normalization layers.
        use_cache (`bool`, *optional*, defaults to `True`):
            Whether or not the model should return the last key/values attentions (not used by all models). Only
            relevant if `config.is_decoder=True`.
        pad_token_id (`int`, *optional*):
            Padding token id.
        bos_token_id (`int`, *optional*, defaults to 0):
            Beginning of stream token id.
        eos_token_id (`int`, *optional*, defaults to 1):
            End of stream token id.
        pretraining_tp (`int`, *optional*, defaults to 1):
            Experimental feature. Tensor parallelism rank used during pretraining. Please refer to [this
            document](https://huggingface.co/docs/transformers/parallelism) to understand more about it. This value is
            necessary to ensure exact reproducibility of the pretraining results. Please refer to [this
            issue](https://github.com/pytorch/pytorch/issues/76232).
        tie_word_embeddings (`bool`, *optional*, defaults to `False`):
            Whether to tie weight embeddings
        rope_theta (`float`, *optional*, defaults to 10000.0):
            The base period of the RoPE embeddings.
        rope_scaling (`Dict`, *optional*):
            Dictionary containing the scaling configuration for the RoPE embeddings. Currently supports two scaling
            strategies: linear and dynamic. Their scaling factor must be a float greater than 1. The expected format is
            `{"type": strategy name, "factor": scaling factor}`. When using this flag, don't update
            `max_position_embeddings` to the expected new maximum.
        attention_bias (`bool`, *optional*, defaults to `False`):
            Whether to use a bias in the query, key, value and output projection layers during self-attention.
        attention_dropout (`float`, *optional*, defaults to 0.0):
            The dropout ratio for the attention probabilities.

    ```python
    >>> from transformers import DeepseekV3Model, DeepseekV3Config

    >>> # Initializing a Deepseek-V3 style configuration
    >>> configuration = DeepseekV3Config()

    >>> # Accessing the model configuration
    >>> configuration = model.config
    ```"""

    model_type = "deepseek_v3"
    keys_to_ignore_at_inference = ["past_key_values"]

    def __init__(
        self,
        vocab_size=129280,
        hidden_size=7168,
        intermediate_size=18432,
        moe_intermediate_size = 2048,
        num_hidden_layers=61,
        num_nextn_predict_layers=1,
        num_attention_heads=128,
        num_key_value_heads=128,
        n_shared_experts = 1,
        n_routed_experts = 256,
        ep_size = 1,
        routed_scaling_factor = 2.5,
        kv_lora_rank = 512,
        q_lora_rank = 1536,
        qk_rope_head_dim = 64,
        v_head_dim = 128,
        qk_nope_head_dim = 128,
        topk_method = 'noaux_tc',
        n_group = 8,
        topk_group = 4,
        num_experts_per_tok = 8,
        moe_layer_freq = 1,
        first_k_dense_replace = 3,
        norm_topk_prob = True,
        scoring_func = 'sigmoid',
        aux_loss_alpha = 0.001,
        seq_aux = True,
        hidden_act="silu",
        max_position_embeddings=4096,
        initializer_range=0.02,
        rms_norm_eps=1e-6,
        use_cache=True,
        pad_token_id=None,
        bos_token_id=0,
        eos_token_id=1,
        pretraining_tp=1,
        tie_word_embeddings=False,
        rope_theta=10000.0,
        rope_scaling=None,
        attention_bias=False,
        attention_dropout=0.0,
        **kwargs,
    ):
        self.vocab_size = vocab_size
        self.max_position_embeddings = max_position_embeddings
        self.hidden_size = hidden_size
        self.intermediate_size = intermediate_size
        self.moe_intermediate_size = moe_intermediate_size
        self.num_hidden_layers = num_hidden_layers
        self.num_nextn_predict_layers = num_nextn_predict_layers
        self.num_attention_heads = num_attention_heads
        self.n_shared_experts = n_shared_experts
        self.n_routed_experts = n_routed_experts
        self.ep_size = ep_size
        self.routed_scaling_factor = routed_scaling_factor
        self.kv_lora_rank = kv_lora_rank
        self.q_lora_rank = q_lora_rank
        self.qk_rope_head_dim = qk_rope_head_dim
        self.v_head_dim = v_head_dim
        self.qk_nope_head_dim = qk_nope_head_dim
        self.topk_method = topk_method
        self.n_group = n_group
        self.topk_group = topk_group
        self.num_experts_per_tok = num_experts_per_tok
        self.moe_layer_freq = moe_layer_freq
        self.first_k_dense_replace = first_k_dense_replace
        self.norm_topk_prob = norm_topk_prob
        self.scoring_func = scoring_func
        self.aux_loss_alpha = aux_loss_alpha
        self.seq_aux = seq_aux
        # for backward compatibility
        if num_key_value_heads is None:
            num_key_value_heads = num_attention_heads

        self.num_key_value_heads = num_key_value_heads
        self.hidden_act = hidden_act
        self.initializer_range = initializer_range
        self.rms_norm_eps = rms_norm_eps
        self.pretraining_tp = pretraining_tp
        self.use_cache = use_cache
        self.rope_theta = rope_theta
        self.rope_scaling = rope_scaling
        self.attention_bias = attention_bias
        self.attention_dropout = attention_dropout

        super().__init__(
            pad_token_id=pad_token_id,
            bos_token_id=bos_token_id,
            eos_token_id=eos_token_id,
            tie_word_embeddings=tie_word_embeddings,
            **kwargs,
        )
DeepSeek-V3-AWQ/generation_config.json ADDED
@@ -0,0 +1,7 @@
 
 
 
 
 
 
 
 
1
+ {
2
+ "_from_model_config": true,
3
+ "bos_token_id": 0,
4
+ "do_sample": true,
5
+ "eos_token_id": 1,
6
+ "transformers_version": "4.48.0.dev0"
7
+ }
DeepSeek-V3-AWQ/tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
cuda-keyring_1.0-1_all.deb ADDED
Binary file (4.33 kB). View file
 
download.py ADDED
@@ -0,0 +1,150 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
#!/usr/bin/env python
# DeepSeek-V3 (MoE) + LoRA + ZeRO-3, 10 k samples, r=16, ep_size=8
# – weights created empty on CPU, then loaded & sharded with Accelerate
# – GPU memory capped at ~50 GB per device; everything else off-loaded to RAM
# NOTE(review): ZeRO-3 only applies if `accelerate` was configured with a
# DeepSpeed ZeRO-3 plugin externally; nothing in this script enables it.

import os, time, logging
from pathlib import Path

import torch
from accelerate import (
    Accelerator,
    init_empty_weights,
    infer_auto_device_map,
    load_checkpoint_and_dispatch,  # ← this is the right util
)
from datasets import load_dataset, load_from_disk, DatasetDict
from transformers import AutoConfig, AutoTokenizer, AutoModelForCausalLM
from peft import LoraConfig, get_peft_model
from tqdm import tqdm

# Tame allocator fragmentation: sharded big-model loading makes many large,
# short-lived CUDA allocations.
os.environ["PYTORCH_CUDA_ALLOC_CONF"] = (
    "garbage_collection_threshold:0.6,max_split_size_mb:128"
)

logging.basicConfig(
    level=logging.INFO,
    format="%(asctime)s — %(name)s — %(levelname)s — %(message)s",
)
logger = logging.getLogger("train_qlora")


def main() -> None:
    """Fine-tune DeepSeek-V3 with LoRA adapters on a 10k-sample subset.

    Pipeline: build an empty model skeleton → dispatch checkpoint shards
    across GPUs/CPU with Accelerate → attach LoRA (r=16) → tokenize (or load
    the cached) dataset → train for one epoch → save the adapter.
    """
    accel = Accelerator(mixed_precision="bf16")
    logger.info(f"Accelerate device: {accel.device}")

    # ─── hyper-params & paths ─────────────────────────────────────────────
    epochs, batch_size, grad_accum, lr = 1, 1, 16, 1e-4
    n_samples = 10_000
    MODEL_DIR = "/workspace/DeepSeek-V3-Base"  # local snapshot
    DATA_FILE = "/workspace/data/splits/train.jsonl"
    CACHE_DIR = "/workspace/data/processed_10k"

    # ─── build *empty* skeleton (no GPU mem yet) ──────────────────────────
    logger.info("Building empty model skeleton in CPU RAM…")
    cfg = AutoConfig.from_pretrained(
        MODEL_DIR, trust_remote_code=True, local_files_only=True
    )
    cfg.ep_size = 8  # shard routed experts across 8 ranks

    with init_empty_weights():
        model = AutoModelForCausalLM.from_config(cfg, trust_remote_code=True)

    # tokenizer (loaded once per rank, tiny)
    tok = AutoTokenizer.from_pretrained(
        MODEL_DIR, use_fast=False, trust_remote_code=True, local_files_only=True
    )

    # ─── device-map & checkpoint dispatch ─────────────────────────────────
    logger.info("Inferring device_map for dispatch + offload…")
    max_mem = {i: "50GB" for i in range(accel.num_processes)}  # GPU_i → 50 GB
    max_mem["cpu"] = "2000GB"
    dmap = infer_auto_device_map(model, max_memory=max_mem)

    logger.info("Loading checkpoint shards & dispatching…")
    t0 = time.time()
    # NOTE(review): combining a device_map dispatch with accel.prepare()
    # below is unusual — confirm Accelerate doesn't try to move the
    # already-dispatched modules again.
    model = load_checkpoint_and_dispatch(
        model,
        MODEL_DIR,  # root with *.bin / *.safetensors
        device_map=dmap,
        offload_folder="hf_offload",  # gets created automatically
        dtype=torch.bfloat16,
    )
    model.gradient_checkpointing_enable()
    logger.info(f"✅ Model ready in {time.time()-t0:.1f}s")

    # ─── LoRA adapters ────────────────────────────────────────────────────
    logger.info("Attaching LoRA adapters (r=16)…")
    lora_cfg = LoraConfig(
        r=16, lora_alpha=16, bias="none",
        target_modules=["q_proj","v_proj","o_proj","up_proj","down_proj"],
        task_type="CAUSAL_LM",
    )
    model = get_peft_model(model, lora_cfg)
    logger.info("✅ LoRA attached")

    # ─── dataset (tokenised or cached) ────────────────────────────────────
    logger.info("Preparing 10 k-sample dataset…")
    if os.path.isdir(CACHE_DIR) and os.listdir(CACHE_DIR):
        ds = load_from_disk(CACHE_DIR)
        logger.info("Loaded cached subset")
    else:
        raw = load_dataset("json", data_files={"data": DATA_FILE}, split="data")
        raw = raw.shuffle(seed=42).select(range(n_samples))
        split = int(n_samples * 0.95)
        tr, va = raw.select(range(split)), raw.select(range(split, n_samples))

        def tok_fn(batch):
            # Prompt goes into the encoder input, response into the labels.
            inp = [f"<|begin_of_sentence|>User: {p}\nAssistant:" for p in batch["prompt"]]
            out = [f"{r}<|end_of_sentence|>" for r in batch["response"]]
            enc = tok(inp, max_length=1024, truncation=True,
                      padding="max_length", return_tensors="pt")
            dec = tok(out, max_length=1024, truncation=True,
                      padding="max_length", return_tensors="pt")
            # Fix: mask padding positions with -100 so CrossEntropyLoss
            # ignores them; using raw pad ids as labels trains the model to
            # emit padding tokens.
            labels = dec.input_ids.clone()
            if tok.pad_token_id is not None:
                labels[labels == tok.pad_token_id] = -100
            enc["labels"] = labels
            return enc

        tr = tr.map(tok_fn, batched=True, remove_columns=["prompt","response"])
        va = va.map(tok_fn, batched=True, remove_columns=["prompt","response"])
        tr.set_format(type="torch", columns=["input_ids","attention_mask","labels"])
        va.set_format(type="torch", columns=["input_ids","attention_mask","labels"])
        ds = DatasetDict({"train": tr, "validation": va})
        ds.save_to_disk(CACHE_DIR)
        logger.info("✅ Tokenised subset cached")

    # ─── loaders & ZeRO prep ──────────────────────────────────────────────
    train_loader = torch.utils.data.DataLoader(ds["train"], batch_size=batch_size, shuffle=True)
    valid_loader = torch.utils.data.DataLoader(ds["validation"], batch_size=batch_size)

    logger.info("Preparing for ZeRO-3 distributed training…")
    model, train_loader, valid_loader = accel.prepare(model, train_loader, valid_loader)
    optim = torch.optim.AdamW(model.parameters(), lr=lr)

    # ─── training ─────────────────────────────────────────────────────────
    logger.info("🚀 Starting training…")
    model.train()
    for epoch in range(epochs):
        t0, tot = time.time(), 0.0
        loop = tqdm(enumerate(train_loader), total=len(train_loader),
                    desc=f"Epoch {epoch}", disable=not accel.is_local_main_process)
        for step, batch in loop:
            # Scale the loss so gradients accumulate to a full batch.
            loss = model(**batch).loss / grad_accum
            accel.backward(loss)
            if (step+1) % grad_accum == 0:
                optim.step(); optim.zero_grad()
            tot += loss.item() * grad_accum
            if accel.is_local_main_process and step % 50 == 0:
                loop.set_postfix(loss=loss.item()*grad_accum)

        if accel.is_local_main_process:
            logger.info(f"Epoch {epoch} finished in {time.time()-t0:.1f}s ─ avg loss {tot/len(train_loader):.4f}")

    # ─── save LoRA adapter ────────────────────────────────────────────────
    if accel.is_main_process:
        out = Path("./ckpt/final_adapter"); out.mkdir(parents=True, exist_ok=True)
        model.save_pretrained(out)
        logger.info(f"✅ LoRA adapter saved to {out}")


if __name__ == "__main__":
    main()
eichi_utils/tensor_combiner.py ADDED
@@ -0,0 +1,194 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import os
import sys
import argparse
import torch
import traceback
import safetensors.torch as sf
from datetime import datetime
import gradio as gr

# Register the repository root on sys.path so sibling packages resolve when
# this file is run directly as a script.
root_path = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
if root_path not in sys.path:
    sys.path.append(root_path)

# Imported after the root path is registered (the locales package lives at
# the repository root).
from locales.i18n_extended import translate
17
+
18
def combine_tensor_files(file1_path, file2_path, output_path=None):
    """Concatenate two safetensors latent files along the frame axis.

    Args:
        file1_path (str): path of the first safetensors file
        file2_path (str): path of the second safetensors file
        output_path (str, optional): destination path; auto-generated next to
            ``file1_path`` when omitted

    Returns:
        tuple: (success flag, output file path or None, result message)
    """
    try:
        # Load both inputs up front.
        print(translate("ファイル1を読み込み中: {0}").format(os.path.basename(file1_path)))
        first_dict = sf.load_file(file1_path)

        print(translate("ファイル2を読み込み中: {0}").format(os.path.basename(file2_path)))
        second_dict = sf.load_file(file2_path)

        # Both files must carry the latent tensor under this key.
        if "history_latents" not in first_dict or "history_latents" not in second_dict:
            error_msg = translate("エラー: テンソルファイルに必要なキー'history_latents'がありません")
            print(error_msg)
            return False, None, error_msg

        first = first_dict["history_latents"]
        second = second_dict["history_latents"]

        # Report what was loaded (dim 2 is the frame axis).
        print(translate("テンソル1: shape={0}, dtype={1}, フレーム数={2}").format(first.shape, first.dtype, first.shape[2]))
        print(translate("テンソル2: shape={0}, dtype={1}, フレーム数={2}").format(second.shape, second.dtype, second.shape[2]))

        # Spatial dimensions (dims 3 and 4) must agree to concatenate frames.
        if first.shape[3] != second.shape[3] or first.shape[4] != second.shape[4]:
            error_msg = translate("エラー: テンソルサイズが異なります: {0} vs {1}").format(first.shape, second.shape)
            print(error_msg)
            return False, None, error_msg

        # Align dtypes — the second tensor follows the first.
        if first.dtype != second.dtype:
            print(translate("データ型の変換: {0} → {1}").format(second.dtype, first.dtype))
            second = second.to(dtype=first.dtype)

        # Work on CPU so device placement never matters for the concat/save.
        first = first.cpu()
        second = second.cpu()

        # Concatenate along the frame axis (tensor 1 followed by tensor 2).
        merged = torch.cat([first, second], dim=2)

        frames_a = first.shape[2]
        frames_b = second.shape[2]
        total_frames = merged.shape[2]
        print(translate("結合成功: 結合後のフレーム数={0} ({1}+{2}フレーム)").format(total_frames, frames_a, frames_b))

        # Refresh the metadata tensor: (height, width, frame count).
        height, width = first.shape[3], first.shape[4]
        metadata = torch.tensor([height, width, total_frames], dtype=torch.int32)

        # Auto-generate the output path when none was supplied.
        if output_path is None:
            timestamp = datetime.now().strftime("%y%m%d_%H%M%S")
            output_path = os.path.join(os.path.dirname(file1_path), f"combined_{timestamp}.safetensors")

        # Persist the combined latents plus metadata.
        sf.save_file({"history_latents": merged, "metadata": metadata}, output_path)

        # Approximate in-memory size of the latent tensor, in MB.
        size_mb = (merged.element_size() * merged.nelement()) / (1024 * 1024)

        success_msg = translate("結合テンソルを保存しました: {0}\n").format(os.path.basename(output_path))
        success_msg += translate("フレーム数: {0}フレーム ({1}+{2}フレーム)\n").format(total_frames, frames_a, frames_b)
        success_msg += translate("サイズ: {0:.2f}MB, 形状: {1}").format(size_mb, merged.shape)
        print(success_msg)

        return True, output_path, success_msg

    except Exception as e:
        error_msg = translate("テンソル結合中にエラーが発生: {0}").format(e)
        print(error_msg)
        traceback.print_exc()
        return False, None, error_msg
109
+
110
def create_ui():
    """Build and return the Gradio UI for the tensor-combining tool."""

    def run_combination(first_file, second_file, requested_name):
        # Both file inputs are mandatory.
        if first_file is None or second_file is None:
            return translate("エラー: 2つのテンソルファイルを選択してください")

        path_a = first_file.name
        path_b = second_file.name

        # Resolve the output path: an explicit name is placed next to the
        # first input; an empty name lets combine_tensor_files() generate one.
        destination = None
        if requested_name and requested_name.strip():
            name = requested_name
            if not name.lower().endswith('.safetensors'):
                name += '.safetensors'
            destination = os.path.join(os.path.dirname(path_a), name)

        ok, _, message = combine_tensor_files(path_a, path_b, destination)
        if not ok:
            return translate("結合失敗: {0}").format(message)
        return message

    with gr.Blocks(title=translate("テンソル結合ツール")) as app:
        gr.Markdown(translate("## テンソルデータ結合ツール"))
        gr.Markdown(translate("safetensors形式のテンソルデータファイルを2つ選択して結合します。結合順序は「テンソル1 + テンソル2」です。"))

        with gr.Row():
            with gr.Column(scale=1):
                tensor_file1 = gr.File(label=translate("テンソルファイル1 (.safetensors)"), file_types=[".safetensors"])
            with gr.Column(scale=1):
                tensor_file2 = gr.File(label=translate("テンソルファイル2 (.safetensors)"), file_types=[".safetensors"])

        with gr.Row():
            output_file = gr.Textbox(label=translate("出力ファイル名 (空欄で自動生成)"), placeholder=translate("例: combined.safetensors"))

        with gr.Row():
            combine_btn = gr.Button(translate("テンソルファイルを結合"), variant="primary")

        with gr.Row():
            result_output = gr.Textbox(label=translate("結果"), lines=5)

        combine_btn.click(
            fn=run_combination,
            inputs=[tensor_file1, tensor_file2, output_file],
            outputs=[result_output],
        )

    return app
163
+
164
def main():
    """Parse command-line arguments and run the UI or a one-shot merge."""
    parser = argparse.ArgumentParser(description=translate("2つのsafetensorsファイルを結合するツール"))
    parser.add_argument('--file1', type=str, help=translate("1つ目のsafetensorsファイルパス"))
    parser.add_argument('--file2', type=str, help=translate("2つ目のsafetensorsファイルパス"))
    parser.add_argument('--output', type=str, default=None, help=translate("出力ファイルパス (省略可能)"))
    parser.add_argument('--ui', action='store_true', help=translate("GradioのUIモードで起動"))
    args = parser.parse_args()

    if args.ui:
        # Interactive mode: block inside the Gradio event loop.
        create_ui().launch()
    elif args.file1 and args.come if False else args.file2:
        # One-shot CLI mode: merge the two files and report the outcome.
        ok, _, message = combine_tensor_files(args.file1, args.file2, args.output)
        print(translate("結合成功:") if ok else translate("結合失敗:"))
        print(message)
        return 0 if ok else 1
    else:
        # Not enough arguments: show usage and signal failure.
        parser.print_help()
        return 1

if __name__ == "__main__":
    sys.exit(main())
eichi_utils/ui_styles.py ADDED
@@ -0,0 +1,210 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ UI関連のスタイルを定義するモジュール
3
+ """
4
+ from diffusers_helper.gradio.progress_bar import make_progress_bar_css
5
+
6
+ from locales.i18n import translate
7
+
8
def get_app_css():
    """
    Return the application's CSS style sheet.

    The progress-bar CSS from diffusers_helper is prepended, followed by the
    app-specific rules (keyframe highlighting, section separators, dark-mode
    overrides, markdown spacing). The two copy-source labels are injected at
    call time via ``translate`` so they follow the active locale.

    Returns:
        str: CSS style definitions
    """
    return make_progress_bar_css() + """
    .title-suffix {
        color: currentColor;
        opacity: 0.05;
    }

    /* 赤枠のキーフレーム - 偶数パターン用 */
    .highlighted-keyframe-red {
        border: 4px solid #ff3860 !important;
        box-shadow: 0 0 10px rgba(255, 56, 96, 0.5) !important;
        background-color: rgba(255, 56, 96, 0.05) !important;
        position: relative;
    }

    /* 赤枠キーフレームに「偶数番号」のラベルを追加 */
    .highlighted-keyframe-red::after {
    """ + 'content: "' + translate("偶数セクションのコピー元") + '"' + """;
        position: absolute;
        top: 5px;
        right: 5px;
        background: rgba(255, 56, 96, 0.8);
        color: white;
        padding: 2px 6px;
        font-size: 10px;
        border-radius: 4px;
        pointer-events: none;
    }

    /* 青枠のキーフレーム - 奇数パターン用 */
    .highlighted-keyframe-blue {
        border: 4px solid #3273dc !important;
        box-shadow: 0 0 10px rgba(50, 115, 220, 0.5) !important;
        background-color: rgba(50, 115, 220, 0.05) !important;
        position: relative;
    }

    /* 青枠キーフレームに「奇数番号」のラベルを追加 */
    .highlighted-keyframe-blue::after {
    """ + 'content: "' + translate("奇数セクションのコピー元") + '"' + """;
        position: absolute;
        top: 5px;
        right: 5px;
        background: rgba(50, 115, 220, 0.8);
        color: white;
        padding: 2px 6px;
        font-size: 10px;
        border-radius: 4px;
        pointer-events: none;
    }

    /* 引き続きサポート(古いクラス名)- 前方互換性用 */
    .highlighted-keyframe {
        border: 4px solid #ff3860 !important;
        box-shadow: 0 0 10px rgba(255, 56, 96, 0.5) !important;
        background-color: rgba(255, 56, 96, 0.05) !important;
    }

    /* 赤枠用セクション番号ラベル */
    .highlighted-label-red label {
        color: #ff3860 !important;
        font-weight: bold !important;
    }

    /* 青枠用セクション番号ラベル */
    .highlighted-label-blue label {
        color: #3273dc !important;
        font-weight: bold !important;
    }

    /* 引き続きサポート(古いクラス名)- 前方互換性用 */
    .highlighted-label label {
        color: #ff3860 !important;
        font-weight: bold !important;
    }

    /* オールパディングの高さ調整 */
    #all_padding_checkbox {
        padding-top: 1.5rem;
        min-height: 5.8rem;
    }

    #all_padding_checkbox .wrap {
        align-items: flex-start;
    }

    #all_padding_checkbox .label-wrap {
        margin-bottom: 0.8rem;
        font-weight: 500;
        font-size: 14px;
    }

    #all_padding_checkbox .info {
        margin-top: 0.2rem;
    }

    /* セクション間の区切り線を太くする */
    .section-row {
        border-bottom: 4px solid #3273dc;
        margin-bottom: 20px;
        padding-bottom: 15px;
        margin-top: 10px;
        position: relative;
    }

    /* セクション番号を目立たせる */
    .section-row .gr-form:first-child label {
        font-weight: bold;
        font-size: 1.1em;
        color: #3273dc;
        background-color: rgba(50, 115, 220, 0.1);
        padding: 5px 10px;
        border-radius: 4px;
        margin-bottom: 10px;
        display: inline-block;
    }

    /* セクションの背景を少し強調 */
    .section-row {
        background-color: rgba(50, 115, 220, 0.03);
        border-radius: 8px;
        box-shadow: 0 1px 3px rgba(0, 0, 0, 0.1);
    }

    /* セクション間の余白を増やす */
    .section-container > .gr-block:not(:first-child) {
        margin-top: 10px;
    }

    /* アコーディオンセクションのスタイル */
    .section-accordion {
        margin-top: 15px;
        margin-bottom: 15px;
        border-left: 4px solid #3273dc;
        padding-left: 10px;
    }

    .section-accordion h3 button {
        font-weight: bold;
        color: #3273dc;
    }

    .section-accordion .gr-block {
        border-radius: 8px;
    }

    /* 保存対象の設定項目を薄い青色でハイライト(ライト/ダークモード対応) */
    .saveable-setting {
        background-color: rgba(240, 248, 255, 0.5) !important; /* 薄い青色を透過指定(ライトモード) */
        border-left: 3px solid #90caf9 !important; /* 薄いボーダー色 */
    }

    /* システムのダークモード対応 */
    @media (prefers-color-scheme: dark) {
        .saveable-setting {
            background-color: rgba(25, 35, 60, 0.4) !important; /* ダークモードでの背景色 */
            border-left: 3px solid #64b5f6 !important; /* ダークモードでのボーダー色(少し明るめ) */
        }
    }

    /* Gradioのダークテーマ対応 */
    .dark .saveable-setting {
        background-color: rgba(25, 35, 60, 0.4) !important; /* ダークモードでの背景色 */
        border-left: 3px solid #64b5f6 !important; /* ダークモードでのボーダー色(少し明るめ) */
    }

    /* 保存対象項目のラベルにアイコンを追加 */
    .saveable-setting label::before {
        content: "💾 ";
        margin-right: 5px;
    }

    /* ダークモードでのラベル色調整 */
    .dark .saveable-setting label {
        color: #90caf9 !important; /* ダークモードで少し明るい青に */
    }

    /* markdownタイトル用 */
    .markdown-title {
        padding: 3px;
    }

    /* markdownサブタイトル用 */
    .markdown-subtitle {
        padding: 2px;
    }

    /* markdown領域用 */
    .markdown-desc {
        padding: 2px;
    }

    /* グルーピング用ボーダー */
    .group-border {
        border: solid 1px;
    }
    """
eichi_utils/vae_cache.py ADDED
@@ -0,0 +1,305 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ VAE Cache Utility for FramePack-eichi
3
+
4
+ 1フレームずつVAEデコードを行うためのキャッシュ機能を提供するモジュール。
5
+ Hunyuan VideoのVAEに対して、フレームごとに処理しながらキャッシュを活用することで
6
+ メモリ使用効率と処理速度を改善します。
7
+ """
8
+
9
+ import torch
10
+ import torch.nn.functional as F
11
+ from typing import Optional
12
+ import time
13
+
14
def hook_forward_conv3d(self):
    """Build a replacement forward for HunyuanVideoCausalConv3d.

    The returned closure pads the input causally and, when the temporal
    (front) padding is non-zero, splices in the frames cached from the
    previous call before storing the current tail frames for the next one.
    """
    def forward(hidden_states: torch.Tensor) -> torch.Tensor:
        padded = F.pad(hidden_states, self.time_causal_padding, mode=self.pad_mode)
        front_pad = self.time_causal_padding[4]
        if front_pad > 0:
            cached = getattr(self, "cache", None)
            if cached is not None:
                # Overwrite the padded leading frames with the cached tail of
                # the previous chunk so decoding stays temporally causal.
                padded[:, :, :front_pad] = cached.clone()
            # Remember the trailing frames for the next invocation.
            self.cache = padded[:, :, -front_pad:].clone()
        return self.conv(padded)
    return forward
24
+
25
def hook_forward_upsample(self):
    """Build a replacement forward for HunyuanVideoUpsampleCausal3D.

    When the wrapped conv holds a frame cache (i.e. we are mid-way through
    chunked decoding), every frame is upsampled along all three axes.
    Otherwise the leading frame is upsampled spatially only, matching the
    causal-upsample behaviour of the original module.
    """
    def forward(hidden_states: torch.Tensor) -> torch.Tensor:
        has_cache = getattr(self.conv, "cache", None) is not None
        if has_cache:
            # Mid-stream chunk: upsample time and space uniformly.
            out = F.interpolate(
                hidden_states.contiguous(), scale_factor=self.upsample_factor, mode="nearest"
            )
        else:
            frame_count = hidden_states.size(2)
            head, tail = hidden_states.split((1, frame_count - 1), dim=2)
            # The first frame gets no temporal factor, only the spatial ones.
            head = F.interpolate(
                head.squeeze(2), scale_factor=self.upsample_factor[1:], mode="nearest"
            ).unsqueeze(2)
            if frame_count > 1:
                tail = F.interpolate(
                    tail.contiguous(), scale_factor=self.upsample_factor, mode="nearest"
                )
                out = torch.cat((head, tail), dim=2)
            else:
                out = head
        return self.conv(out)
    return forward
49
+
50
+ # Attention用のKVキャッシュプロセッサ
51
class AttnProcessor2_0_KVCache:
    """Attention processor that keeps a key/value cache across calls.

    Each call concatenates the previously cached K/V in front of the newly
    computed K/V along the sequence dimension, so frame-by-frame decoding
    can still attend to every previously decoded frame. Requires PyTorch
    2.0's F.scaled_dot_product_attention.
    """

    def __init__(self):
        # Cached key/value tensors from earlier calls; None until first use.
        self.k_cache = None
        self.v_cache = None
        if not hasattr(F, "scaled_dot_product_attention"):
            raise ImportError("AttnProcessor2_0 requires PyTorch 2.0, to use it, please upgrade PyTorch to 2.0.")

    def __call__(
        self,
        attn,
        hidden_states: torch.Tensor,
        encoder_hidden_states: Optional[torch.Tensor] = None,
        attention_mask: Optional[torch.Tensor] = None,
        temb: Optional[torch.Tensor] = None,
        *args,
        **kwargs,
    ) -> torch.Tensor:

        # Kept for the residual connection applied near the end.
        residual = hidden_states
        if attn.spatial_norm is not None:
            hidden_states = attn.spatial_norm(hidden_states, temb)

        input_ndim = hidden_states.ndim

        # 4-D input (B, C, H, W) is flattened to (B, H*W, C) for attention
        # and reshaped back before returning.
        if input_ndim == 4:
            batch_size, channel, height, width = hidden_states.shape
            hidden_states = hidden_states.view(batch_size, channel, height * width).transpose(1, 2)

        batch_size, sequence_length, _ = (
            hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape
        )

        if attention_mask is not None:
            attention_mask = attn.prepare_attention_mask(attention_mask, sequence_length, batch_size)
            # scaled_dot_product_attention expects attention_mask shape to be
            # (batch, heads, source_length, target_length)
            attention_mask = attention_mask.view(batch_size, attn.heads, -1, attention_mask.shape[-1])

        if attn.group_norm is not None:
            hidden_states = attn.group_norm(hidden_states.transpose(1, 2)).transpose(1, 2)

        query = attn.to_q(hidden_states)

        # Self-attention when no encoder states are supplied.
        if encoder_hidden_states is None:
            encoder_hidden_states = hidden_states
        elif attn.norm_cross:
            encoder_hidden_states = attn.norm_encoder_hidden_states(encoder_hidden_states)

        key = attn.to_k(encoder_hidden_states)
        value = attn.to_v(encoder_hidden_states)

        inner_dim = key.shape[-1]
        head_dim = inner_dim // attn.heads

        # Reshape to (batch, heads, seq, head_dim).
        query = query.view(batch_size, -1, attn.heads, head_dim).transpose(1, 2)

        key = key.view(batch_size, -1, attn.heads, head_dim).transpose(1, 2)
        value = value.view(batch_size, -1, attn.heads, head_dim).transpose(1, 2)

        if attn.norm_q is not None:
            query = attn.norm_q(query)
        if attn.norm_k is not None:
            key = attn.norm_k(key)

        # Merge the cached K/V from previous calls with the current ones.
        # NOTE(review): if attention_mask is None here while a cache exists,
        # the torch.cat below would raise on attention_mask.shape —
        # presumably callers always pass a mask in that situation; confirm.
        if self.k_cache is not None:
            key = torch.cat([self.k_cache, key], dim=2)
            value = torch.cat([self.v_cache, value], dim=2)
            # Extend the mask with zeros (= attend freely) over cached positions.
            attention_mask = torch.cat(
                [torch.zeros(attention_mask.shape[0], attention_mask.shape[1], attention_mask.shape[2], self.k_cache.shape[2]).to(attention_mask), attention_mask], dim=3
            )

        # Save the merged K/V as the cache for the next call.
        self.k_cache = key.clone()
        self.v_cache = value.clone()

        # Scaled Dot-Product Attention
        hidden_states = F.scaled_dot_product_attention(
            query, key, value, attn_mask=attention_mask, dropout_p=0.0, is_causal=False
        )

        hidden_states = hidden_states.transpose(1, 2).reshape(batch_size, -1, attn.heads * head_dim)
        hidden_states = hidden_states.to(query.dtype)

        # Output projection.
        hidden_states = attn.to_out[0](hidden_states)
        # Dropout.
        hidden_states = attn.to_out[1](hidden_states)

        if input_ndim == 4:
            hidden_states = hidden_states.transpose(-1, -2).reshape(batch_size, channel, height, width)

        if attn.residual_connection:
            hidden_states = hidden_states + residual

        hidden_states = hidden_states / attn.rescale_output_factor

        return hidden_states
151
+
152
def hook_vae(vae):
    """Switch the VAE into frame-by-frame cache mode.

    Saves the current decoding flags on the VAE (so restore_vae can undo
    this), disables framewise/slicing/tiling decoding, and swaps the
    forward/processor of the relevant decoder submodules for cache-aware
    versions.
    """
    # Stash the flags we are about to override.
    vae._original_use_framewise_decoding = vae.use_framewise_decoding
    vae._original_use_slicing = vae.use_slicing
    vae._original_use_tiling = vae.use_tiling

    # Cache mode decodes one frame at a time, so the built-in chunking
    # strategies must be off.
    vae.use_framewise_decoding = False
    vae.use_slicing = False
    vae.use_tiling = False

    # Patch each decoder submodule by class name.
    # NOTE: the "_orginal_*" spelling is kept deliberately — restore_vae
    # reads back exactly these attribute names.
    for sub in vae.decoder.modules():
        kind = sub.__class__.__name__
        if kind == "HunyuanVideoCausalConv3d":
            sub._orginal_forward = sub.forward
            sub.forward = hook_forward_conv3d(sub)
        if kind == "HunyuanVideoUpsampleCausal3D":
            sub._orginal_forward = sub.forward
            sub.forward = hook_forward_upsample(sub)
        if kind == "Attention":
            sub._orginal_processor = sub.processor
            sub.processor = AttnProcessor2_0_KVCache()
175
+
176
def restore_vae(vae):
    """Undo hook_vae: restore decoding flags and original module behaviour."""
    # Put back the flags saved by hook_vae.
    vae.use_framewise_decoding = vae._original_use_framewise_decoding
    vae.use_slicing = vae._original_use_slicing
    vae.use_tiling = vae._original_use_tiling

    # Drop all caches and re-attach the original forwards/processors.
    # NOTE: the "_orginal_*" spelling matches what hook_vae wrote.
    for sub in vae.decoder.modules():
        kind = sub.__class__.__name__
        if kind == "HunyuanVideoCausalConv3d":
            sub.forward = sub._orginal_forward
            if hasattr(sub, "cache"):
                sub.cache = None
        if kind == "HunyuanVideoUpsampleCausal3D":
            sub.forward = sub._orginal_forward
            if hasattr(sub.conv, "cache"):
                sub.conv.cache = None
        if kind == "Attention":
            if hasattr(sub.processor, "k_cache"):
                sub.processor.k_cache = None
                sub.processor.v_cache = None
            sub.processor = sub._orginal_processor
198
+
199
@torch.no_grad()
def vae_decode_cache(latents, vae):
    """Decode latents one frame at a time using the cache-hooked VAE.

    Hooks the VAE (see hook_vae), decodes each temporal latent slice
    individually so the causal caches carry context between frames, and
    always restores the VAE to its original state afterwards — even if
    decoding fails or is interrupted.

    Args:
        latents: Latent tensor indexed (batch, channel, frame, ...) on dim 2,
            still scaled by vae.config.scaling_factor.
        vae: The Hunyuan Video VAE to decode with.

    Returns:
        The decoded image tensor, frames concatenated on dim 2.
    """
    print("=== VAEキャッシュデコード開始 ===")
    print(f"入力latents形状: {latents.shape}, デバイス: {latents.device}, 型: {latents.dtype}")

    # Undo the scaling applied when the latents were produced.
    latents = latents / vae.config.scaling_factor
    frames = latents.shape[2]
    print(f"処理フレーム数: {frames}")

    print("VAEにフックを適用...")
    hook_vae(vae)
    print("フック適用完了")

    # Decode frame by frame; the hooked modules reuse their caches so each
    # slice sees the temporal context of the previous ones.
    image = None
    try:
        for i in range(frames):
            print(f"フレーム {i+1}/{frames} 処理中...")
            latents_slice = latents[:, :, i:i+1, :, :]
            image_slice = vae.decode(latents_slice.to(device=vae.device, dtype=vae.dtype)).sample
            print(f"フレーム {i+1} デコード完了: 形状 {image_slice.shape}")

            if image is None:
                image = image_slice
            else:
                image = torch.cat((image, image_slice), dim=2)
            print(f"現在の結合結果形状: {image.shape}")
    except Exception as e:
        print(f"VAEキャッシュデコード中のエラー: {e}")
        print(f"エラー詳細: {type(e).__name__}")
        import traceback
        traceback.print_exc()
        # Re-raise with the original traceback; cleanup happens in finally.
        raise
    finally:
        # BUGFIX: restore unconditionally. The original restored in two
        # separate places (success path and `except Exception`), so a
        # KeyboardInterrupt or other BaseException left the VAE hooked.
        print("VAEを元の状態に戻しています...")
        restore_vae(vae)
        print("VAEを元の状態に戻しました")

    print(f"出力image形状: {image.shape}, デバイス: {image.device}, 型: {image.dtype}")
    print("=== VAEキャッシュデコード完了 ===")
    return image
249
+
250
+ # 元のデコード関数(比較用)
251
@torch.no_grad()
def vae_decode(latents, vae):
    """Plain VAE decode: all frames in a single pass (kept for comparison)."""
    # Undo the latent scaling, then decode everything at once.
    scaled = latents / vae.config.scaling_factor
    decoded = vae.decode(scaled.to(device=vae.device, dtype=vae.dtype))
    return decoded.sample
258
+
259
+ # メモリ・速度のベンチマーク関数
260
def benchmark_vae_decode(latents, vae, method="both"):
    """Benchmark peak memory and wall time of the two VAE decode paths.

    Args:
        latents: Latents to decode (passed unchanged to the decode functions).
        vae: The VAE under test.
        method: "original", "cache", or "both".

    Returns:
        Dict keyed by method name, each entry holding "images", "memory"
        (peak MB) and "time" (seconds). Requires CUDA for the memory stats.
    """
    results = {}

    def _measure(label, decode_fn):
        # Reset CUDA counters so peak memory reflects this run only.
        torch.cuda.reset_peak_memory_stats()
        torch.cuda.empty_cache()
        with torch.no_grad():
            start = time.time()
            images = decode_fn(latents, vae)
            torch.cuda.synchronize()
            end = time.time()
        peak = torch.cuda.max_memory_allocated()
        results[label] = {
            "images": images,
            "memory": peak / (1024**2),
            "time": end - start,
        }
        return peak, end - start

    if method in ["original", "both"]:
        mem_o, sec_o = _measure("original", vae_decode)
        print(f"vae_decode() メモリ使用量: {mem_o / (1024**2):.2f} MB 実行時間: {sec_o:.4f} 秒")

    if method in ["cache", "both"]:
        mem_c, sec_c = _measure("cache", vae_decode_cache)
        print(f"vae_decode_cache() メモリ使用量: {mem_c / (1024**2):.2f} MB 実行時間: {sec_c:.4f} 秒")

    # When both paths ran, report how far apart their outputs are.
    if method == "both":
        diff = (results["original"]["images"] - results["cache"]["images"]).abs().mean()
        print(f"出力画像の平均差異: {diff.item():.6f}")

    return results
hf_download/hub/.locks/models--hunyuanvideo-community--HunyuanVideo/469be27c5c010538f845f518c4f5e8574c78f7c8.lock ADDED
File without changes
hf_download/hub/.locks/models--hunyuanvideo-community--HunyuanVideo/54accb98811931fca7598da4f7239b03b912eaa2bd5fe639f2da00923374f4a0.lock ADDED
File without changes
hf_download/hub/.locks/models--hunyuanvideo-community--HunyuanVideo/660c6f5b1abae9dc498ac2d21e1347d2abdb0cf6c0c0c8576cd796491d9a6cdd.lock ADDED
File without changes
hf_download/hub/.locks/models--hunyuanvideo-community--HunyuanVideo/715167338723844a8b46281e6dedaf9e2000f771.lock ADDED
File without changes
hf_download/hub/.locks/models--hunyuanvideo-community--HunyuanVideo/747d2159aaebc628e8105b91c2ab77d50a289f17.lock ADDED
File without changes
hf_download/hub/.locks/models--hunyuanvideo-community--HunyuanVideo/76e821f1b6f0a9709293c3b6b51ed90980b3166b.lock ADDED
File without changes
hf_download/hub/.locks/models--hunyuanvideo-community--HunyuanVideo/7c6fa7065265909bd500cafb38cc939b81b1b018.lock ADDED
File without changes
hf_download/hub/.locks/models--hunyuanvideo-community--HunyuanVideo/b6e7a9e010002205834fd4f2808ca042bad4a246.lock ADDED
File without changes
hf_download/hub/.locks/models--hunyuanvideo-community--HunyuanVideo/c93f133c65ab1aeaa9ed1e998901a306636375b2f57fa53cd279241147a9a0e9.lock ADDED
File without changes
hf_download/hub/.locks/models--hunyuanvideo-community--HunyuanVideo/cf0682d6de72c1547f41b4f6d7c59f62deffef94.lock ADDED
File without changes
hf_download/hub/.locks/models--hunyuanvideo-community--HunyuanVideo/d2c593db4aa75b17a42c1f74d7cc38e257eaeed222e6a52674c65544165dcbaa.lock ADDED
File without changes
hf_download/hub/.locks/models--hunyuanvideo-community--HunyuanVideo/d67c77f57cab4c9bf7f4420c256aed684c8ac7b49c6ab72cff9924e9513db9f1.lock ADDED
File without changes
hf_download/hub/.locks/models--hunyuanvideo-community--HunyuanVideo/f5ad57d3eda300a3195bc9c0bb36ab76ebe88831f128e9851e63440aff4a6741.lock ADDED
File without changes
hf_download/hub/.locks/models--hunyuanvideo-community--HunyuanVideo/f5f2205251eb0b863c5b0f9a60cd9fad069c5872.lock ADDED
File without changes
hf_download/hub/models--hunyuanvideo-community--HunyuanVideo/blobs/22f91ac3aeb401be0a10d294e00bb1d6293bc4c5 ADDED
@@ -0,0 +1,31 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "add_prefix_space": false,
3
+ "added_tokens_decoder": {
4
+ "49406": {
5
+ "content": "<|startoftext|>",
6
+ "lstrip": false,
7
+ "normalized": true,
8
+ "rstrip": false,
9
+ "single_word": false,
10
+ "special": true
11
+ },
12
+ "49407": {
13
+ "content": "<|endoftext|>",
14
+ "lstrip": false,
15
+ "normalized": false,
16
+ "rstrip": false,
17
+ "single_word": false,
18
+ "special": true
19
+ }
20
+ },
21
+ "bos_token": "<|startoftext|>",
22
+ "clean_up_tokenization_spaces": false,
23
+ "do_lower_case": true,
24
+ "eos_token": "<|endoftext|>",
25
+ "errors": "replace",
26
+ "extra_special_tokens": {},
27
+ "model_max_length": 77,
28
+ "pad_token": "<|endoftext|>",
29
+ "tokenizer_class": "CLIPTokenizer",
30
+ "unk_token": "<|endoftext|>"
31
+ }
hf_download/hub/models--hunyuanvideo-community--HunyuanVideo/blobs/469be27c5c010538f845f518c4f5e8574c78f7c8 ADDED
The diff for this file is too large to render. See raw diff
 
hf_download/hub/models--hunyuanvideo-community--HunyuanVideo/blobs/747d2159aaebc628e8105b91c2ab77d50a289f17 ADDED
@@ -0,0 +1,2096 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "add_bos_token": true,
3
+ "add_eos_token": false,
4
+ "add_prefix_space": null,
5
+ "added_tokens_decoder": {
6
+ "128000": {
7
+ "content": "<|begin_of_text|>",
8
+ "lstrip": false,
9
+ "normalized": false,
10
+ "rstrip": false,
11
+ "single_word": false,
12
+ "special": true
13
+ },
14
+ "128001": {
15
+ "content": "<|end_of_text|>",
16
+ "lstrip": false,
17
+ "normalized": false,
18
+ "rstrip": false,
19
+ "single_word": false,
20
+ "special": true
21
+ },
22
+ "128002": {
23
+ "content": "<|reserved_special_token_0|>",
24
+ "lstrip": false,
25
+ "normalized": false,
26
+ "rstrip": false,
27
+ "single_word": false,
28
+ "special": true
29
+ },
30
+ "128003": {
31
+ "content": "<|reserved_special_token_1|>",
32
+ "lstrip": false,
33
+ "normalized": false,
34
+ "rstrip": false,
35
+ "single_word": false,
36
+ "special": true
37
+ },
38
+ "128004": {
39
+ "content": "<|reserved_special_token_2|>",
40
+ "lstrip": false,
41
+ "normalized": false,
42
+ "rstrip": false,
43
+ "single_word": false,
44
+ "special": true
45
+ },
46
+ "128005": {
47
+ "content": "<|reserved_special_token_3|>",
48
+ "lstrip": false,
49
+ "normalized": false,
50
+ "rstrip": false,
51
+ "single_word": false,
52
+ "special": true
53
+ },
54
+ "128006": {
55
+ "content": "<|start_header_id|>",
56
+ "lstrip": false,
57
+ "normalized": false,
58
+ "rstrip": false,
59
+ "single_word": false,
60
+ "special": true
61
+ },
62
+ "128007": {
63
+ "content": "<|end_header_id|>",
64
+ "lstrip": false,
65
+ "normalized": false,
66
+ "rstrip": false,
67
+ "single_word": false,
68
+ "special": true
69
+ },
70
+ "128008": {
71
+ "content": "<|reserved_special_token_4|>",
72
+ "lstrip": false,
73
+ "normalized": false,
74
+ "rstrip": false,
75
+ "single_word": false,
76
+ "special": true
77
+ },
78
+ "128009": {
79
+ "content": "<|eot_id|>",
80
+ "lstrip": false,
81
+ "normalized": false,
82
+ "rstrip": false,
83
+ "single_word": false,
84
+ "special": true
85
+ },
86
+ "128010": {
87
+ "content": "<|reserved_special_token_5|>",
88
+ "lstrip": false,
89
+ "normalized": false,
90
+ "rstrip": false,
91
+ "single_word": false,
92
+ "special": true
93
+ },
94
+ "128011": {
95
+ "content": "<|reserved_special_token_6|>",
96
+ "lstrip": false,
97
+ "normalized": false,
98
+ "rstrip": false,
99
+ "single_word": false,
100
+ "special": true
101
+ },
102
+ "128012": {
103
+ "content": "<|reserved_special_token_7|>",
104
+ "lstrip": false,
105
+ "normalized": false,
106
+ "rstrip": false,
107
+ "single_word": false,
108
+ "special": true
109
+ },
110
+ "128013": {
111
+ "content": "<|reserved_special_token_8|>",
112
+ "lstrip": false,
113
+ "normalized": false,
114
+ "rstrip": false,
115
+ "single_word": false,
116
+ "special": true
117
+ },
118
+ "128014": {
119
+ "content": "<|reserved_special_token_9|>",
120
+ "lstrip": false,
121
+ "normalized": false,
122
+ "rstrip": false,
123
+ "single_word": false,
124
+ "special": true
125
+ },
126
+ "128015": {
127
+ "content": "<|reserved_special_token_10|>",
128
+ "lstrip": false,
129
+ "normalized": false,
130
+ "rstrip": false,
131
+ "single_word": false,
132
+ "special": true
133
+ },
134
+ "128016": {
135
+ "content": "<|reserved_special_token_11|>",
136
+ "lstrip": false,
137
+ "normalized": false,
138
+ "rstrip": false,
139
+ "single_word": false,
140
+ "special": true
141
+ },
142
+ "128017": {
143
+ "content": "<|reserved_special_token_12|>",
144
+ "lstrip": false,
145
+ "normalized": false,
146
+ "rstrip": false,
147
+ "single_word": false,
148
+ "special": true
149
+ },
150
+ "128018": {
151
+ "content": "<|reserved_special_token_13|>",
152
+ "lstrip": false,
153
+ "normalized": false,
154
+ "rstrip": false,
155
+ "single_word": false,
156
+ "special": true
157
+ },
158
+ "128019": {
159
+ "content": "<|reserved_special_token_14|>",
160
+ "lstrip": false,
161
+ "normalized": false,
162
+ "rstrip": false,
163
+ "single_word": false,
164
+ "special": true
165
+ },
166
+ "128020": {
167
+ "content": "<|reserved_special_token_15|>",
168
+ "lstrip": false,
169
+ "normalized": false,
170
+ "rstrip": false,
171
+ "single_word": false,
172
+ "special": true
173
+ },
174
+ "128021": {
175
+ "content": "<|reserved_special_token_16|>",
176
+ "lstrip": false,
177
+ "normalized": false,
178
+ "rstrip": false,
179
+ "single_word": false,
180
+ "special": true
181
+ },
182
+ "128022": {
183
+ "content": "<|reserved_special_token_17|>",
184
+ "lstrip": false,
185
+ "normalized": false,
186
+ "rstrip": false,
187
+ "single_word": false,
188
+ "special": true
189
+ },
190
+ "128023": {
191
+ "content": "<|reserved_special_token_18|>",
192
+ "lstrip": false,
193
+ "normalized": false,
194
+ "rstrip": false,
195
+ "single_word": false,
196
+ "special": true
197
+ },
198
+ "128024": {
199
+ "content": "<|reserved_special_token_19|>",
200
+ "lstrip": false,
201
+ "normalized": false,
202
+ "rstrip": false,
203
+ "single_word": false,
204
+ "special": true
205
+ },
206
+ "128025": {
207
+ "content": "<|reserved_special_token_20|>",
208
+ "lstrip": false,
209
+ "normalized": false,
210
+ "rstrip": false,
211
+ "single_word": false,
212
+ "special": true
213
+ },
214
+ "128026": {
215
+ "content": "<|reserved_special_token_21|>",
216
+ "lstrip": false,
217
+ "normalized": false,
218
+ "rstrip": false,
219
+ "single_word": false,
220
+ "special": true
221
+ },
222
+ "128027": {
223
+ "content": "<|reserved_special_token_22|>",
224
+ "lstrip": false,
225
+ "normalized": false,
226
+ "rstrip": false,
227
+ "single_word": false,
228
+ "special": true
229
+ },
230
+ "128028": {
231
+ "content": "<|reserved_special_token_23|>",
232
+ "lstrip": false,
233
+ "normalized": false,
234
+ "rstrip": false,
235
+ "single_word": false,
236
+ "special": true
237
+ },
238
+ "128029": {
239
+ "content": "<|reserved_special_token_24|>",
240
+ "lstrip": false,
241
+ "normalized": false,
242
+ "rstrip": false,
243
+ "single_word": false,
244
+ "special": true
245
+ },
246
+ "128030": {
247
+ "content": "<|reserved_special_token_25|>",
248
+ "lstrip": false,
249
+ "normalized": false,
250
+ "rstrip": false,
251
+ "single_word": false,
252
+ "special": true
253
+ },
254
+ "128031": {
255
+ "content": "<|reserved_special_token_26|>",
256
+ "lstrip": false,
257
+ "normalized": false,
258
+ "rstrip": false,
259
+ "single_word": false,
260
+ "special": true
261
+ },
262
+ "128032": {
263
+ "content": "<|reserved_special_token_27|>",
264
+ "lstrip": false,
265
+ "normalized": false,
266
+ "rstrip": false,
267
+ "single_word": false,
268
+ "special": true
269
+ },
270
+ "128033": {
271
+ "content": "<|reserved_special_token_28|>",
272
+ "lstrip": false,
273
+ "normalized": false,
274
+ "rstrip": false,
275
+ "single_word": false,
276
+ "special": true
277
+ },
278
+ "128034": {
279
+ "content": "<|reserved_special_token_29|>",
280
+ "lstrip": false,
281
+ "normalized": false,
282
+ "rstrip": false,
283
+ "single_word": false,
284
+ "special": true
285
+ },
286
+ "128035": {
287
+ "content": "<|reserved_special_token_30|>",
288
+ "lstrip": false,
289
+ "normalized": false,
290
+ "rstrip": false,
291
+ "single_word": false,
292
+ "special": true
293
+ },
294
+ "128036": {
295
+ "content": "<|reserved_special_token_31|>",
296
+ "lstrip": false,
297
+ "normalized": false,
298
+ "rstrip": false,
299
+ "single_word": false,
300
+ "special": true
301
+ },
302
+ "128037": {
303
+ "content": "<|reserved_special_token_32|>",
304
+ "lstrip": false,
305
+ "normalized": false,
306
+ "rstrip": false,
307
+ "single_word": false,
308
+ "special": true
309
+ },
310
+ "128038": {
311
+ "content": "<|reserved_special_token_33|>",
312
+ "lstrip": false,
313
+ "normalized": false,
314
+ "rstrip": false,
315
+ "single_word": false,
316
+ "special": true
317
+ },
318
+ "128039": {
319
+ "content": "<|reserved_special_token_34|>",
320
+ "lstrip": false,
321
+ "normalized": false,
322
+ "rstrip": false,
323
+ "single_word": false,
324
+ "special": true
325
+ },
326
+ "128040": {
327
+ "content": "<|reserved_special_token_35|>",
328
+ "lstrip": false,
329
+ "normalized": false,
330
+ "rstrip": false,
331
+ "single_word": false,
332
+ "special": true
333
+ },
334
+ "128041": {
335
+ "content": "<|reserved_special_token_36|>",
336
+ "lstrip": false,
337
+ "normalized": false,
338
+ "rstrip": false,
339
+ "single_word": false,
340
+ "special": true
341
+ },
342
+ "128042": {
343
+ "content": "<|reserved_special_token_37|>",
344
+ "lstrip": false,
345
+ "normalized": false,
346
+ "rstrip": false,
347
+ "single_word": false,
348
+ "special": true
349
+ },
350
+ "128043": {
351
+ "content": "<|reserved_special_token_38|>",
352
+ "lstrip": false,
353
+ "normalized": false,
354
+ "rstrip": false,
355
+ "single_word": false,
356
+ "special": true
357
+ },
358
+ "128044": {
359
+ "content": "<|reserved_special_token_39|>",
360
+ "lstrip": false,
361
+ "normalized": false,
362
+ "rstrip": false,
363
+ "single_word": false,
364
+ "special": true
365
+ },
366
+ "128045": {
367
+ "content": "<|reserved_special_token_40|>",
368
+ "lstrip": false,
369
+ "normalized": false,
370
+ "rstrip": false,
371
+ "single_word": false,
372
+ "special": true
373
+ },
374
+ "128046": {
375
+ "content": "<|reserved_special_token_41|>",
376
+ "lstrip": false,
377
+ "normalized": false,
378
+ "rstrip": false,
379
+ "single_word": false,
380
+ "special": true
381
+ },
382
+ "128047": {
383
+ "content": "<|reserved_special_token_42|>",
384
+ "lstrip": false,
385
+ "normalized": false,
386
+ "rstrip": false,
387
+ "single_word": false,
388
+ "special": true
389
+ },
390
+ "128048": {
391
+ "content": "<|reserved_special_token_43|>",
392
+ "lstrip": false,
393
+ "normalized": false,
394
+ "rstrip": false,
395
+ "single_word": false,
396
+ "special": true
397
+ },
398
+ "128049": {
399
+ "content": "<|reserved_special_token_44|>",
400
+ "lstrip": false,
401
+ "normalized": false,
402
+ "rstrip": false,
403
+ "single_word": false,
404
+ "special": true
405
+ },
406
+ "128050": {
407
+ "content": "<|reserved_special_token_45|>",
408
+ "lstrip": false,
409
+ "normalized": false,
410
+ "rstrip": false,
411
+ "single_word": false,
412
+ "special": true
413
+ },
414
+ "128051": {
415
+ "content": "<|reserved_special_token_46|>",
416
+ "lstrip": false,
417
+ "normalized": false,
418
+ "rstrip": false,
419
+ "single_word": false,
420
+ "special": true
421
+ },
422
+ "128052": {
423
+ "content": "<|reserved_special_token_47|>",
424
+ "lstrip": false,
425
+ "normalized": false,
426
+ "rstrip": false,
427
+ "single_word": false,
428
+ "special": true
429
+ },
430
+ "128053": {
431
+ "content": "<|reserved_special_token_48|>",
432
+ "lstrip": false,
433
+ "normalized": false,
434
+ "rstrip": false,
435
+ "single_word": false,
436
+ "special": true
437
+ },
438
+ "128054": {
439
+ "content": "<|reserved_special_token_49|>",
440
+ "lstrip": false,
441
+ "normalized": false,
442
+ "rstrip": false,
443
+ "single_word": false,
444
+ "special": true
445
+ },
446
+ "128055": {
447
+ "content": "<|reserved_special_token_50|>",
448
+ "lstrip": false,
449
+ "normalized": false,
450
+ "rstrip": false,
451
+ "single_word": false,
452
+ "special": true
453
+ },
454
+ "128056": {
455
+ "content": "<|reserved_special_token_51|>",
456
+ "lstrip": false,
457
+ "normalized": false,
458
+ "rstrip": false,
459
+ "single_word": false,
460
+ "special": true
461
+ },
462
+ "128057": {
463
+ "content": "<|reserved_special_token_52|>",
464
+ "lstrip": false,
465
+ "normalized": false,
466
+ "rstrip": false,
467
+ "single_word": false,
468
+ "special": true
469
+ },
470
+ "128058": {
471
+ "content": "<|reserved_special_token_53|>",
472
+ "lstrip": false,
473
+ "normalized": false,
474
+ "rstrip": false,
475
+ "single_word": false,
476
+ "special": true
477
+ },
478
+ "128059": {
479
+ "content": "<|reserved_special_token_54|>",
480
+ "lstrip": false,
481
+ "normalized": false,
482
+ "rstrip": false,
483
+ "single_word": false,
484
+ "special": true
485
+ },
486
+ "128060": {
487
+ "content": "<|reserved_special_token_55|>",
488
+ "lstrip": false,
489
+ "normalized": false,
490
+ "rstrip": false,
491
+ "single_word": false,
492
+ "special": true
493
+ },
494
+ "128061": {
495
+ "content": "<|reserved_special_token_56|>",
496
+ "lstrip": false,
497
+ "normalized": false,
498
+ "rstrip": false,
499
+ "single_word": false,
500
+ "special": true
501
+ },
502
+ "128062": {
503
+ "content": "<|reserved_special_token_57|>",
504
+ "lstrip": false,
505
+ "normalized": false,
506
+ "rstrip": false,
507
+ "single_word": false,
508
+ "special": true
509
+ },
510
+ "128063": {
511
+ "content": "<|reserved_special_token_58|>",
512
+ "lstrip": false,
513
+ "normalized": false,
514
+ "rstrip": false,
515
+ "single_word": false,
516
+ "special": true
517
+ },
518
+ "128064": {
519
+ "content": "<|reserved_special_token_59|>",
520
+ "lstrip": false,
521
+ "normalized": false,
522
+ "rstrip": false,
523
+ "single_word": false,
524
+ "special": true
525
+ },
526
+ "128065": {
527
+ "content": "<|reserved_special_token_60|>",
528
+ "lstrip": false,
529
+ "normalized": false,
530
+ "rstrip": false,
531
+ "single_word": false,
532
+ "special": true
533
+ },
534
+ "128066": {
535
+ "content": "<|reserved_special_token_61|>",
536
+ "lstrip": false,
537
+ "normalized": false,
538
+ "rstrip": false,
539
+ "single_word": false,
540
+ "special": true
541
+ },
542
+ "128067": {
543
+ "content": "<|reserved_special_token_62|>",
544
+ "lstrip": false,
545
+ "normalized": false,
546
+ "rstrip": false,
547
+ "single_word": false,
548
+ "special": true
549
+ },
550
+ "128068": {
551
+ "content": "<|reserved_special_token_63|>",
552
+ "lstrip": false,
553
+ "normalized": false,
554
+ "rstrip": false,
555
+ "single_word": false,
556
+ "special": true
557
+ },
558
+ "128069": {
559
+ "content": "<|reserved_special_token_64|>",
560
+ "lstrip": false,
561
+ "normalized": false,
562
+ "rstrip": false,
563
+ "single_word": false,
564
+ "special": true
565
+ },
566
+ "128070": {
567
+ "content": "<|reserved_special_token_65|>",
568
+ "lstrip": false,
569
+ "normalized": false,
570
+ "rstrip": false,
571
+ "single_word": false,
572
+ "special": true
573
+ },
574
+ "128071": {
575
+ "content": "<|reserved_special_token_66|>",
576
+ "lstrip": false,
577
+ "normalized": false,
578
+ "rstrip": false,
579
+ "single_word": false,
580
+ "special": true
581
+ },
582
+ "128072": {
583
+ "content": "<|reserved_special_token_67|>",
584
+ "lstrip": false,
585
+ "normalized": false,
586
+ "rstrip": false,
587
+ "single_word": false,
588
+ "special": true
589
+ },
590
+ "128073": {
591
+ "content": "<|reserved_special_token_68|>",
592
+ "lstrip": false,
593
+ "normalized": false,
594
+ "rstrip": false,
595
+ "single_word": false,
596
+ "special": true
597
+ },
598
+ "128074": {
599
+ "content": "<|reserved_special_token_69|>",
600
+ "lstrip": false,
601
+ "normalized": false,
602
+ "rstrip": false,
603
+ "single_word": false,
604
+ "special": true
605
+ },
606
+ "128075": {
607
+ "content": "<|reserved_special_token_70|>",
608
+ "lstrip": false,
609
+ "normalized": false,
610
+ "rstrip": false,
611
+ "single_word": false,
612
+ "special": true
613
+ },
614
+ "128076": {
615
+ "content": "<|reserved_special_token_71|>",
616
+ "lstrip": false,
617
+ "normalized": false,
618
+ "rstrip": false,
619
+ "single_word": false,
620
+ "special": true
621
+ },
622
+ "128077": {
623
+ "content": "<|reserved_special_token_72|>",
624
+ "lstrip": false,
625
+ "normalized": false,
626
+ "rstrip": false,
627
+ "single_word": false,
628
+ "special": true
629
+ },
630
+ "128078": {
631
+ "content": "<|reserved_special_token_73|>",
632
+ "lstrip": false,
633
+ "normalized": false,
634
+ "rstrip": false,
635
+ "single_word": false,
636
+ "special": true
637
+ },
638
+ "128079": {
639
+ "content": "<|reserved_special_token_74|>",
640
+ "lstrip": false,
641
+ "normalized": false,
642
+ "rstrip": false,
643
+ "single_word": false,
644
+ "special": true
645
+ },
646
+ "128080": {
647
+ "content": "<|reserved_special_token_75|>",
648
+ "lstrip": false,
649
+ "normalized": false,
650
+ "rstrip": false,
651
+ "single_word": false,
652
+ "special": true
653
+ },
654
+ "128081": {
655
+ "content": "<|reserved_special_token_76|>",
656
+ "lstrip": false,
657
+ "normalized": false,
658
+ "rstrip": false,
659
+ "single_word": false,
660
+ "special": true
661
+ },
662
+ "128082": {
663
+ "content": "<|reserved_special_token_77|>",
664
+ "lstrip": false,
665
+ "normalized": false,
666
+ "rstrip": false,
667
+ "single_word": false,
668
+ "special": true
669
+ },
670
+ "128083": {
671
+ "content": "<|reserved_special_token_78|>",
672
+ "lstrip": false,
673
+ "normalized": false,
674
+ "rstrip": false,
675
+ "single_word": false,
676
+ "special": true
677
+ },
678
+ "128084": {
679
+ "content": "<|reserved_special_token_79|>",
680
+ "lstrip": false,
681
+ "normalized": false,
682
+ "rstrip": false,
683
+ "single_word": false,
684
+ "special": true
685
+ },
686
+ "128085": {
687
+ "content": "<|reserved_special_token_80|>",
688
+ "lstrip": false,
689
+ "normalized": false,
690
+ "rstrip": false,
691
+ "single_word": false,
692
+ "special": true
693
+ },
694
+ "128086": {
695
+ "content": "<|reserved_special_token_81|>",
696
+ "lstrip": false,
697
+ "normalized": false,
698
+ "rstrip": false,
699
+ "single_word": false,
700
+ "special": true
701
+ },
702
+ "128087": {
703
+ "content": "<|reserved_special_token_82|>",
704
+ "lstrip": false,
705
+ "normalized": false,
706
+ "rstrip": false,
707
+ "single_word": false,
708
+ "special": true
709
+ },
710
+ "128088": {
711
+ "content": "<|reserved_special_token_83|>",
712
+ "lstrip": false,
713
+ "normalized": false,
714
+ "rstrip": false,
715
+ "single_word": false,
716
+ "special": true
717
+ },
718
+ "128089": {
719
+ "content": "<|reserved_special_token_84|>",
720
+ "lstrip": false,
721
+ "normalized": false,
722
+ "rstrip": false,
723
+ "single_word": false,
724
+ "special": true
725
+ },
726
+ "128090": {
727
+ "content": "<|reserved_special_token_85|>",
728
+ "lstrip": false,
729
+ "normalized": false,
730
+ "rstrip": false,
731
+ "single_word": false,
732
+ "special": true
733
+ },
734
+ "128091": {
735
+ "content": "<|reserved_special_token_86|>",
736
+ "lstrip": false,
737
+ "normalized": false,
738
+ "rstrip": false,
739
+ "single_word": false,
740
+ "special": true
741
+ },
742
+ "128092": {
743
+ "content": "<|reserved_special_token_87|>",
744
+ "lstrip": false,
745
+ "normalized": false,
746
+ "rstrip": false,
747
+ "single_word": false,
748
+ "special": true
749
+ },
750
+ "128093": {
751
+ "content": "<|reserved_special_token_88|>",
752
+ "lstrip": false,
753
+ "normalized": false,
754
+ "rstrip": false,
755
+ "single_word": false,
756
+ "special": true
757
+ },
758
+ "128094": {
759
+ "content": "<|reserved_special_token_89|>",
760
+ "lstrip": false,
761
+ "normalized": false,
762
+ "rstrip": false,
763
+ "single_word": false,
764
+ "special": true
765
+ },
766
+ "128095": {
767
+ "content": "<|reserved_special_token_90|>",
768
+ "lstrip": false,
769
+ "normalized": false,
770
+ "rstrip": false,
771
+ "single_word": false,
772
+ "special": true
773
+ },
774
+ "128096": {
775
+ "content": "<|reserved_special_token_91|>",
776
+ "lstrip": false,
777
+ "normalized": false,
778
+ "rstrip": false,
779
+ "single_word": false,
780
+ "special": true
781
+ },
782
+ "128097": {
783
+ "content": "<|reserved_special_token_92|>",
784
+ "lstrip": false,
785
+ "normalized": false,
786
+ "rstrip": false,
787
+ "single_word": false,
788
+ "special": true
789
+ },
790
+ "128098": {
791
+ "content": "<|reserved_special_token_93|>",
792
+ "lstrip": false,
793
+ "normalized": false,
794
+ "rstrip": false,
795
+ "single_word": false,
796
+ "special": true
797
+ },
798
+ "128099": {
799
+ "content": "<|reserved_special_token_94|>",
800
+ "lstrip": false,
801
+ "normalized": false,
802
+ "rstrip": false,
803
+ "single_word": false,
804
+ "special": true
805
+ },
806
+ "128100": {
807
+ "content": "<|reserved_special_token_95|>",
808
+ "lstrip": false,
809
+ "normalized": false,
810
+ "rstrip": false,
811
+ "single_word": false,
812
+ "special": true
813
+ },
814
+ "128101": {
815
+ "content": "<|reserved_special_token_96|>",
816
+ "lstrip": false,
817
+ "normalized": false,
818
+ "rstrip": false,
819
+ "single_word": false,
820
+ "special": true
821
+ },
822
+ "128102": {
823
+ "content": "<|reserved_special_token_97|>",
824
+ "lstrip": false,
825
+ "normalized": false,
826
+ "rstrip": false,
827
+ "single_word": false,
828
+ "special": true
829
+ },
830
+ "128103": {
831
+ "content": "<|reserved_special_token_98|>",
832
+ "lstrip": false,
833
+ "normalized": false,
834
+ "rstrip": false,
835
+ "single_word": false,
836
+ "special": true
837
+ },
838
+ "128104": {
839
+ "content": "<|reserved_special_token_99|>",
840
+ "lstrip": false,
841
+ "normalized": false,
842
+ "rstrip": false,
843
+ "single_word": false,
844
+ "special": true
845
+ },
846
+ "128105": {
847
+ "content": "<|reserved_special_token_100|>",
848
+ "lstrip": false,
849
+ "normalized": false,
850
+ "rstrip": false,
851
+ "single_word": false,
852
+ "special": true
853
+ },
854
+ "128106": {
855
+ "content": "<|reserved_special_token_101|>",
856
+ "lstrip": false,
857
+ "normalized": false,
858
+ "rstrip": false,
859
+ "single_word": false,
860
+ "special": true
861
+ },
862
+ "128107": {
863
+ "content": "<|reserved_special_token_102|>",
864
+ "lstrip": false,
865
+ "normalized": false,
866
+ "rstrip": false,
867
+ "single_word": false,
868
+ "special": true
869
+ },
870
+ "128108": {
871
+ "content": "<|reserved_special_token_103|>",
872
+ "lstrip": false,
873
+ "normalized": false,
874
+ "rstrip": false,
875
+ "single_word": false,
876
+ "special": true
877
+ },
878
+ "128109": {
879
+ "content": "<|reserved_special_token_104|>",
880
+ "lstrip": false,
881
+ "normalized": false,
882
+ "rstrip": false,
883
+ "single_word": false,
884
+ "special": true
885
+ },
886
+ "128110": {
887
+ "content": "<|reserved_special_token_105|>",
888
+ "lstrip": false,
889
+ "normalized": false,
890
+ "rstrip": false,
891
+ "single_word": false,
892
+ "special": true
893
+ },
894
+ "128111": {
895
+ "content": "<|reserved_special_token_106|>",
896
+ "lstrip": false,
897
+ "normalized": false,
898
+ "rstrip": false,
899
+ "single_word": false,
900
+ "special": true
901
+ },
902
+ "128112": {
903
+ "content": "<|reserved_special_token_107|>",
904
+ "lstrip": false,
905
+ "normalized": false,
906
+ "rstrip": false,
907
+ "single_word": false,
908
+ "special": true
909
+ },
910
+ "128113": {
911
+ "content": "<|reserved_special_token_108|>",
912
+ "lstrip": false,
913
+ "normalized": false,
914
+ "rstrip": false,
915
+ "single_word": false,
916
+ "special": true
917
+ },
918
+ "128114": {
919
+ "content": "<|reserved_special_token_109|>",
920
+ "lstrip": false,
921
+ "normalized": false,
922
+ "rstrip": false,
923
+ "single_word": false,
924
+ "special": true
925
+ },
926
+ "128115": {
927
+ "content": "<|reserved_special_token_110|>",
928
+ "lstrip": false,
929
+ "normalized": false,
930
+ "rstrip": false,
931
+ "single_word": false,
932
+ "special": true
933
+ },
934
+ "128116": {
935
+ "content": "<|reserved_special_token_111|>",
936
+ "lstrip": false,
937
+ "normalized": false,
938
+ "rstrip": false,
939
+ "single_word": false,
940
+ "special": true
941
+ },
942
+ "128117": {
943
+ "content": "<|reserved_special_token_112|>",
944
+ "lstrip": false,
945
+ "normalized": false,
946
+ "rstrip": false,
947
+ "single_word": false,
948
+ "special": true
949
+ },
950
+ "128118": {
951
+ "content": "<|reserved_special_token_113|>",
952
+ "lstrip": false,
953
+ "normalized": false,
954
+ "rstrip": false,
955
+ "single_word": false,
956
+ "special": true
957
+ },
958
+ "128119": {
959
+ "content": "<|reserved_special_token_114|>",
960
+ "lstrip": false,
961
+ "normalized": false,
962
+ "rstrip": false,
963
+ "single_word": false,
964
+ "special": true
965
+ },
966
+ "128120": {
967
+ "content": "<|reserved_special_token_115|>",
968
+ "lstrip": false,
969
+ "normalized": false,
970
+ "rstrip": false,
971
+ "single_word": false,
972
+ "special": true
973
+ },
974
+ "128121": {
975
+ "content": "<|reserved_special_token_116|>",
976
+ "lstrip": false,
977
+ "normalized": false,
978
+ "rstrip": false,
979
+ "single_word": false,
980
+ "special": true
981
+ },
982
+ "128122": {
983
+ "content": "<|reserved_special_token_117|>",
984
+ "lstrip": false,
985
+ "normalized": false,
986
+ "rstrip": false,
987
+ "single_word": false,
988
+ "special": true
989
+ },
990
+ "128123": {
991
+ "content": "<|reserved_special_token_118|>",
992
+ "lstrip": false,
993
+ "normalized": false,
994
+ "rstrip": false,
995
+ "single_word": false,
996
+ "special": true
997
+ },
998
+ "128124": {
999
+ "content": "<|reserved_special_token_119|>",
1000
+ "lstrip": false,
1001
+ "normalized": false,
1002
+ "rstrip": false,
1003
+ "single_word": false,
1004
+ "special": true
1005
+ },
1006
+ "128125": {
1007
+ "content": "<|reserved_special_token_120|>",
1008
+ "lstrip": false,
1009
+ "normalized": false,
1010
+ "rstrip": false,
1011
+ "single_word": false,
1012
+ "special": true
1013
+ },
1014
+ "128126": {
1015
+ "content": "<|reserved_special_token_121|>",
1016
+ "lstrip": false,
1017
+ "normalized": false,
1018
+ "rstrip": false,
1019
+ "single_word": false,
1020
+ "special": true
1021
+ },
1022
+ "128127": {
1023
+ "content": "<|reserved_special_token_122|>",
1024
+ "lstrip": false,
1025
+ "normalized": false,
1026
+ "rstrip": false,
1027
+ "single_word": false,
1028
+ "special": true
1029
+ },
1030
+ "128128": {
1031
+ "content": "<|reserved_special_token_123|>",
1032
+ "lstrip": false,
1033
+ "normalized": false,
1034
+ "rstrip": false,
1035
+ "single_word": false,
1036
+ "special": true
1037
+ },
1038
+ "128129": {
1039
+ "content": "<|reserved_special_token_124|>",
1040
+ "lstrip": false,
1041
+ "normalized": false,
1042
+ "rstrip": false,
1043
+ "single_word": false,
1044
+ "special": true
1045
+ },
1046
+ "128130": {
1047
+ "content": "<|reserved_special_token_125|>",
1048
+ "lstrip": false,
1049
+ "normalized": false,
1050
+ "rstrip": false,
1051
+ "single_word": false,
1052
+ "special": true
1053
+ },
1054
+ "128131": {
1055
+ "content": "<|reserved_special_token_126|>",
1056
+ "lstrip": false,
1057
+ "normalized": false,
1058
+ "rstrip": false,
1059
+ "single_word": false,
1060
+ "special": true
1061
+ },
1062
+ "128132": {
1063
+ "content": "<|reserved_special_token_127|>",
1064
+ "lstrip": false,
1065
+ "normalized": false,
1066
+ "rstrip": false,
1067
+ "single_word": false,
1068
+ "special": true
1069
+ },
1070
+ "128133": {
1071
+ "content": "<|reserved_special_token_128|>",
1072
+ "lstrip": false,
1073
+ "normalized": false,
1074
+ "rstrip": false,
1075
+ "single_word": false,
1076
+ "special": true
1077
+ },
1078
+ "128134": {
1079
+ "content": "<|reserved_special_token_129|>",
1080
+ "lstrip": false,
1081
+ "normalized": false,
1082
+ "rstrip": false,
1083
+ "single_word": false,
1084
+ "special": true
1085
+ },
1086
+ "128135": {
1087
+ "content": "<|reserved_special_token_130|>",
1088
+ "lstrip": false,
1089
+ "normalized": false,
1090
+ "rstrip": false,
1091
+ "single_word": false,
1092
+ "special": true
1093
+ },
1094
+ "128136": {
1095
+ "content": "<|reserved_special_token_131|>",
1096
+ "lstrip": false,
1097
+ "normalized": false,
1098
+ "rstrip": false,
1099
+ "single_word": false,
1100
+ "special": true
1101
+ },
1102
+ "128137": {
1103
+ "content": "<|reserved_special_token_132|>",
1104
+ "lstrip": false,
1105
+ "normalized": false,
1106
+ "rstrip": false,
1107
+ "single_word": false,
1108
+ "special": true
1109
+ },
1110
+ "128138": {
1111
+ "content": "<|reserved_special_token_133|>",
1112
+ "lstrip": false,
1113
+ "normalized": false,
1114
+ "rstrip": false,
1115
+ "single_word": false,
1116
+ "special": true
1117
+ },
1118
+ "128139": {
1119
+ "content": "<|reserved_special_token_134|>",
1120
+ "lstrip": false,
1121
+ "normalized": false,
1122
+ "rstrip": false,
1123
+ "single_word": false,
1124
+ "special": true
1125
+ },
1126
+ "128140": {
1127
+ "content": "<|reserved_special_token_135|>",
1128
+ "lstrip": false,
1129
+ "normalized": false,
1130
+ "rstrip": false,
1131
+ "single_word": false,
1132
+ "special": true
1133
+ },
1134
+ "128141": {
1135
+ "content": "<|reserved_special_token_136|>",
1136
+ "lstrip": false,
1137
+ "normalized": false,
1138
+ "rstrip": false,
1139
+ "single_word": false,
1140
+ "special": true
1141
+ },
1142
+ "128142": {
1143
+ "content": "<|reserved_special_token_137|>",
1144
+ "lstrip": false,
1145
+ "normalized": false,
1146
+ "rstrip": false,
1147
+ "single_word": false,
1148
+ "special": true
1149
+ },
1150
+ "128143": {
1151
+ "content": "<|reserved_special_token_138|>",
1152
+ "lstrip": false,
1153
+ "normalized": false,
1154
+ "rstrip": false,
1155
+ "single_word": false,
1156
+ "special": true
1157
+ },
1158
+ "128144": {
1159
+ "content": "<|reserved_special_token_139|>",
1160
+ "lstrip": false,
1161
+ "normalized": false,
1162
+ "rstrip": false,
1163
+ "single_word": false,
1164
+ "special": true
1165
+ },
1166
+ "128145": {
1167
+ "content": "<|reserved_special_token_140|>",
1168
+ "lstrip": false,
1169
+ "normalized": false,
1170
+ "rstrip": false,
1171
+ "single_word": false,
1172
+ "special": true
1173
+ },
1174
+ "128146": {
1175
+ "content": "<|reserved_special_token_141|>",
1176
+ "lstrip": false,
1177
+ "normalized": false,
1178
+ "rstrip": false,
1179
+ "single_word": false,
1180
+ "special": true
1181
+ },
1182
+ "128147": {
1183
+ "content": "<|reserved_special_token_142|>",
1184
+ "lstrip": false,
1185
+ "normalized": false,
1186
+ "rstrip": false,
1187
+ "single_word": false,
1188
+ "special": true
1189
+ },
1190
+ "128148": {
1191
+ "content": "<|reserved_special_token_143|>",
1192
+ "lstrip": false,
1193
+ "normalized": false,
1194
+ "rstrip": false,
1195
+ "single_word": false,
1196
+ "special": true
1197
+ },
1198
+ "128149": {
1199
+ "content": "<|reserved_special_token_144|>",
1200
+ "lstrip": false,
1201
+ "normalized": false,
1202
+ "rstrip": false,
1203
+ "single_word": false,
1204
+ "special": true
1205
+ },
1206
+ "128150": {
1207
+ "content": "<|reserved_special_token_145|>",
1208
+ "lstrip": false,
1209
+ "normalized": false,
1210
+ "rstrip": false,
1211
+ "single_word": false,
1212
+ "special": true
1213
+ },
1214
+ "128151": {
1215
+ "content": "<|reserved_special_token_146|>",
1216
+ "lstrip": false,
1217
+ "normalized": false,
1218
+ "rstrip": false,
1219
+ "single_word": false,
1220
+ "special": true
1221
+ },
1222
+ "128152": {
1223
+ "content": "<|reserved_special_token_147|>",
1224
+ "lstrip": false,
1225
+ "normalized": false,
1226
+ "rstrip": false,
1227
+ "single_word": false,
1228
+ "special": true
1229
+ },
1230
+ "128153": {
1231
+ "content": "<|reserved_special_token_148|>",
1232
+ "lstrip": false,
1233
+ "normalized": false,
1234
+ "rstrip": false,
1235
+ "single_word": false,
1236
+ "special": true
1237
+ },
1238
+ "128154": {
1239
+ "content": "<|reserved_special_token_149|>",
1240
+ "lstrip": false,
1241
+ "normalized": false,
1242
+ "rstrip": false,
1243
+ "single_word": false,
1244
+ "special": true
1245
+ },
1246
+ "128155": {
1247
+ "content": "<|reserved_special_token_150|>",
1248
+ "lstrip": false,
1249
+ "normalized": false,
1250
+ "rstrip": false,
1251
+ "single_word": false,
1252
+ "special": true
1253
+ },
1254
+ "128156": {
1255
+ "content": "<|reserved_special_token_151|>",
1256
+ "lstrip": false,
1257
+ "normalized": false,
1258
+ "rstrip": false,
1259
+ "single_word": false,
1260
+ "special": true
1261
+ },
1262
+ "128157": {
1263
+ "content": "<|reserved_special_token_152|>",
1264
+ "lstrip": false,
1265
+ "normalized": false,
1266
+ "rstrip": false,
1267
+ "single_word": false,
1268
+ "special": true
1269
+ },
1270
+ "128158": {
1271
+ "content": "<|reserved_special_token_153|>",
1272
+ "lstrip": false,
1273
+ "normalized": false,
1274
+ "rstrip": false,
1275
+ "single_word": false,
1276
+ "special": true
1277
+ },
1278
+ "128159": {
1279
+ "content": "<|reserved_special_token_154|>",
1280
+ "lstrip": false,
1281
+ "normalized": false,
1282
+ "rstrip": false,
1283
+ "single_word": false,
1284
+ "special": true
1285
+ },
1286
+ "128160": {
1287
+ "content": "<|reserved_special_token_155|>",
1288
+ "lstrip": false,
1289
+ "normalized": false,
1290
+ "rstrip": false,
1291
+ "single_word": false,
1292
+ "special": true
1293
+ },
1294
+ "128161": {
1295
+ "content": "<|reserved_special_token_156|>",
1296
+ "lstrip": false,
1297
+ "normalized": false,
1298
+ "rstrip": false,
1299
+ "single_word": false,
1300
+ "special": true
1301
+ },
1302
+ "128162": {
1303
+ "content": "<|reserved_special_token_157|>",
1304
+ "lstrip": false,
1305
+ "normalized": false,
1306
+ "rstrip": false,
1307
+ "single_word": false,
1308
+ "special": true
1309
+ },
1310
+ "128163": {
1311
+ "content": "<|reserved_special_token_158|>",
1312
+ "lstrip": false,
1313
+ "normalized": false,
1314
+ "rstrip": false,
1315
+ "single_word": false,
1316
+ "special": true
1317
+ },
1318
+ "128164": {
1319
+ "content": "<|reserved_special_token_159|>",
1320
+ "lstrip": false,
1321
+ "normalized": false,
1322
+ "rstrip": false,
1323
+ "single_word": false,
1324
+ "special": true
1325
+ },
1326
+ "128165": {
1327
+ "content": "<|reserved_special_token_160|>",
1328
+ "lstrip": false,
1329
+ "normalized": false,
1330
+ "rstrip": false,
1331
+ "single_word": false,
1332
+ "special": true
1333
+ },
1334
+ "128166": {
1335
+ "content": "<|reserved_special_token_161|>",
1336
+ "lstrip": false,
1337
+ "normalized": false,
1338
+ "rstrip": false,
1339
+ "single_word": false,
1340
+ "special": true
1341
+ },
1342
+ "128167": {
1343
+ "content": "<|reserved_special_token_162|>",
1344
+ "lstrip": false,
1345
+ "normalized": false,
1346
+ "rstrip": false,
1347
+ "single_word": false,
1348
+ "special": true
1349
+ },
1350
+ "128168": {
1351
+ "content": "<|reserved_special_token_163|>",
1352
+ "lstrip": false,
1353
+ "normalized": false,
1354
+ "rstrip": false,
1355
+ "single_word": false,
1356
+ "special": true
1357
+ },
1358
+ "128169": {
1359
+ "content": "<|reserved_special_token_164|>",
1360
+ "lstrip": false,
1361
+ "normalized": false,
1362
+ "rstrip": false,
1363
+ "single_word": false,
1364
+ "special": true
1365
+ },
1366
+ "128170": {
1367
+ "content": "<|reserved_special_token_165|>",
1368
+ "lstrip": false,
1369
+ "normalized": false,
1370
+ "rstrip": false,
1371
+ "single_word": false,
1372
+ "special": true
1373
+ },
1374
+ "128171": {
1375
+ "content": "<|reserved_special_token_166|>",
1376
+ "lstrip": false,
1377
+ "normalized": false,
1378
+ "rstrip": false,
1379
+ "single_word": false,
1380
+ "special": true
1381
+ },
1382
+ "128172": {
1383
+ "content": "<|reserved_special_token_167|>",
1384
+ "lstrip": false,
1385
+ "normalized": false,
1386
+ "rstrip": false,
1387
+ "single_word": false,
1388
+ "special": true
1389
+ },
1390
+ "128173": {
1391
+ "content": "<|reserved_special_token_168|>",
1392
+ "lstrip": false,
1393
+ "normalized": false,
1394
+ "rstrip": false,
1395
+ "single_word": false,
1396
+ "special": true
1397
+ },
1398
+ "128174": {
1399
+ "content": "<|reserved_special_token_169|>",
1400
+ "lstrip": false,
1401
+ "normalized": false,
1402
+ "rstrip": false,
1403
+ "single_word": false,
1404
+ "special": true
1405
+ },
1406
+ "128175": {
1407
+ "content": "<|reserved_special_token_170|>",
1408
+ "lstrip": false,
1409
+ "normalized": false,
1410
+ "rstrip": false,
1411
+ "single_word": false,
1412
+ "special": true
1413
+ },
1414
+ "128176": {
1415
+ "content": "<|reserved_special_token_171|>",
1416
+ "lstrip": false,
1417
+ "normalized": false,
1418
+ "rstrip": false,
1419
+ "single_word": false,
1420
+ "special": true
1421
+ },
1422
+ "128177": {
1423
+ "content": "<|reserved_special_token_172|>",
1424
+ "lstrip": false,
1425
+ "normalized": false,
1426
+ "rstrip": false,
1427
+ "single_word": false,
1428
+ "special": true
1429
+ },
1430
+ "128178": {
1431
+ "content": "<|reserved_special_token_173|>",
1432
+ "lstrip": false,
1433
+ "normalized": false,
1434
+ "rstrip": false,
1435
+ "single_word": false,
1436
+ "special": true
1437
+ },
1438
+ "128179": {
1439
+ "content": "<|reserved_special_token_174|>",
1440
+ "lstrip": false,
1441
+ "normalized": false,
1442
+ "rstrip": false,
1443
+ "single_word": false,
1444
+ "special": true
1445
+ },
1446
+ "128180": {
1447
+ "content": "<|reserved_special_token_175|>",
1448
+ "lstrip": false,
1449
+ "normalized": false,
1450
+ "rstrip": false,
1451
+ "single_word": false,
1452
+ "special": true
1453
+ },
1454
+ "128181": {
1455
+ "content": "<|reserved_special_token_176|>",
1456
+ "lstrip": false,
1457
+ "normalized": false,
1458
+ "rstrip": false,
1459
+ "single_word": false,
1460
+ "special": true
1461
+ },
1462
+ "128182": {
1463
+ "content": "<|reserved_special_token_177|>",
1464
+ "lstrip": false,
1465
+ "normalized": false,
1466
+ "rstrip": false,
1467
+ "single_word": false,
1468
+ "special": true
1469
+ },
1470
+ "128183": {
1471
+ "content": "<|reserved_special_token_178|>",
1472
+ "lstrip": false,
1473
+ "normalized": false,
1474
+ "rstrip": false,
1475
+ "single_word": false,
1476
+ "special": true
1477
+ },
1478
+ "128184": {
1479
+ "content": "<|reserved_special_token_179|>",
1480
+ "lstrip": false,
1481
+ "normalized": false,
1482
+ "rstrip": false,
1483
+ "single_word": false,
1484
+ "special": true
1485
+ },
1486
+ "128185": {
1487
+ "content": "<|reserved_special_token_180|>",
1488
+ "lstrip": false,
1489
+ "normalized": false,
1490
+ "rstrip": false,
1491
+ "single_word": false,
1492
+ "special": true
1493
+ },
1494
+ "128186": {
1495
+ "content": "<|reserved_special_token_181|>",
1496
+ "lstrip": false,
1497
+ "normalized": false,
1498
+ "rstrip": false,
1499
+ "single_word": false,
1500
+ "special": true
1501
+ },
1502
+ "128187": {
1503
+ "content": "<|reserved_special_token_182|>",
1504
+ "lstrip": false,
1505
+ "normalized": false,
1506
+ "rstrip": false,
1507
+ "single_word": false,
1508
+ "special": true
1509
+ },
1510
+ "128188": {
1511
+ "content": "<|reserved_special_token_183|>",
1512
+ "lstrip": false,
1513
+ "normalized": false,
1514
+ "rstrip": false,
1515
+ "single_word": false,
1516
+ "special": true
1517
+ },
1518
+ "128189": {
1519
+ "content": "<|reserved_special_token_184|>",
1520
+ "lstrip": false,
1521
+ "normalized": false,
1522
+ "rstrip": false,
1523
+ "single_word": false,
1524
+ "special": true
1525
+ },
1526
+ "128190": {
1527
+ "content": "<|reserved_special_token_185|>",
1528
+ "lstrip": false,
1529
+ "normalized": false,
1530
+ "rstrip": false,
1531
+ "single_word": false,
1532
+ "special": true
1533
+ },
1534
+ "128191": {
1535
+ "content": "<|reserved_special_token_186|>",
1536
+ "lstrip": false,
1537
+ "normalized": false,
1538
+ "rstrip": false,
1539
+ "single_word": false,
1540
+ "special": true
1541
+ },
1542
+ "128192": {
1543
+ "content": "<|reserved_special_token_187|>",
1544
+ "lstrip": false,
1545
+ "normalized": false,
1546
+ "rstrip": false,
1547
+ "single_word": false,
1548
+ "special": true
1549
+ },
1550
+ "128193": {
1551
+ "content": "<|reserved_special_token_188|>",
1552
+ "lstrip": false,
1553
+ "normalized": false,
1554
+ "rstrip": false,
1555
+ "single_word": false,
1556
+ "special": true
1557
+ },
1558
+ "128194": {
1559
+ "content": "<|reserved_special_token_189|>",
1560
+ "lstrip": false,
1561
+ "normalized": false,
1562
+ "rstrip": false,
1563
+ "single_word": false,
1564
+ "special": true
1565
+ },
1566
+ "128195": {
1567
+ "content": "<|reserved_special_token_190|>",
1568
+ "lstrip": false,
1569
+ "normalized": false,
1570
+ "rstrip": false,
1571
+ "single_word": false,
1572
+ "special": true
1573
+ },
1574
+ "128196": {
1575
+ "content": "<|reserved_special_token_191|>",
1576
+ "lstrip": false,
1577
+ "normalized": false,
1578
+ "rstrip": false,
1579
+ "single_word": false,
1580
+ "special": true
1581
+ },
1582
+ "128197": {
1583
+ "content": "<|reserved_special_token_192|>",
1584
+ "lstrip": false,
1585
+ "normalized": false,
1586
+ "rstrip": false,
1587
+ "single_word": false,
1588
+ "special": true
1589
+ },
1590
+ "128198": {
1591
+ "content": "<|reserved_special_token_193|>",
1592
+ "lstrip": false,
1593
+ "normalized": false,
1594
+ "rstrip": false,
1595
+ "single_word": false,
1596
+ "special": true
1597
+ },
1598
+ "128199": {
1599
+ "content": "<|reserved_special_token_194|>",
1600
+ "lstrip": false,
1601
+ "normalized": false,
1602
+ "rstrip": false,
1603
+ "single_word": false,
1604
+ "special": true
1605
+ },
1606
+ "128200": {
1607
+ "content": "<|reserved_special_token_195|>",
1608
+ "lstrip": false,
1609
+ "normalized": false,
1610
+ "rstrip": false,
1611
+ "single_word": false,
1612
+ "special": true
1613
+ },
1614
+ "128201": {
1615
+ "content": "<|reserved_special_token_196|>",
1616
+ "lstrip": false,
1617
+ "normalized": false,
1618
+ "rstrip": false,
1619
+ "single_word": false,
1620
+ "special": true
1621
+ },
1622
+ "128202": {
1623
+ "content": "<|reserved_special_token_197|>",
1624
+ "lstrip": false,
1625
+ "normalized": false,
1626
+ "rstrip": false,
1627
+ "single_word": false,
1628
+ "special": true
1629
+ },
1630
+ "128203": {
1631
+ "content": "<|reserved_special_token_198|>",
1632
+ "lstrip": false,
1633
+ "normalized": false,
1634
+ "rstrip": false,
1635
+ "single_word": false,
1636
+ "special": true
1637
+ },
1638
+ "128204": {
1639
+ "content": "<|reserved_special_token_199|>",
1640
+ "lstrip": false,
1641
+ "normalized": false,
1642
+ "rstrip": false,
1643
+ "single_word": false,
1644
+ "special": true
1645
+ },
1646
+ "128205": {
1647
+ "content": "<|reserved_special_token_200|>",
1648
+ "lstrip": false,
1649
+ "normalized": false,
1650
+ "rstrip": false,
1651
+ "single_word": false,
1652
+ "special": true
1653
+ },
1654
+ "128206": {
1655
+ "content": "<|reserved_special_token_201|>",
1656
+ "lstrip": false,
1657
+ "normalized": false,
1658
+ "rstrip": false,
1659
+ "single_word": false,
1660
+ "special": true
1661
+ },
1662
+ "128207": {
1663
+ "content": "<|reserved_special_token_202|>",
1664
+ "lstrip": false,
1665
+ "normalized": false,
1666
+ "rstrip": false,
1667
+ "single_word": false,
1668
+ "special": true
1669
+ },
1670
+ "128208": {
1671
+ "content": "<|reserved_special_token_203|>",
1672
+ "lstrip": false,
1673
+ "normalized": false,
1674
+ "rstrip": false,
1675
+ "single_word": false,
1676
+ "special": true
1677
+ },
1678
+ "128209": {
1679
+ "content": "<|reserved_special_token_204|>",
1680
+ "lstrip": false,
1681
+ "normalized": false,
1682
+ "rstrip": false,
1683
+ "single_word": false,
1684
+ "special": true
1685
+ },
1686
+ "128210": {
1687
+ "content": "<|reserved_special_token_205|>",
1688
+ "lstrip": false,
1689
+ "normalized": false,
1690
+ "rstrip": false,
1691
+ "single_word": false,
1692
+ "special": true
1693
+ },
1694
+ "128211": {
1695
+ "content": "<|reserved_special_token_206|>",
1696
+ "lstrip": false,
1697
+ "normalized": false,
1698
+ "rstrip": false,
1699
+ "single_word": false,
1700
+ "special": true
1701
+ },
1702
+ "128212": {
1703
+ "content": "<|reserved_special_token_207|>",
1704
+ "lstrip": false,
1705
+ "normalized": false,
1706
+ "rstrip": false,
1707
+ "single_word": false,
1708
+ "special": true
1709
+ },
1710
+ "128213": {
1711
+ "content": "<|reserved_special_token_208|>",
1712
+ "lstrip": false,
1713
+ "normalized": false,
1714
+ "rstrip": false,
1715
+ "single_word": false,
1716
+ "special": true
1717
+ },
1718
+ "128214": {
1719
+ "content": "<|reserved_special_token_209|>",
1720
+ "lstrip": false,
1721
+ "normalized": false,
1722
+ "rstrip": false,
1723
+ "single_word": false,
1724
+ "special": true
1725
+ },
1726
+ "128215": {
1727
+ "content": "<|reserved_special_token_210|>",
1728
+ "lstrip": false,
1729
+ "normalized": false,
1730
+ "rstrip": false,
1731
+ "single_word": false,
1732
+ "special": true
1733
+ },
1734
+ "128216": {
1735
+ "content": "<|reserved_special_token_211|>",
1736
+ "lstrip": false,
1737
+ "normalized": false,
1738
+ "rstrip": false,
1739
+ "single_word": false,
1740
+ "special": true
1741
+ },
1742
+ "128217": {
1743
+ "content": "<|reserved_special_token_212|>",
1744
+ "lstrip": false,
1745
+ "normalized": false,
1746
+ "rstrip": false,
1747
+ "single_word": false,
1748
+ "special": true
1749
+ },
1750
+ "128218": {
1751
+ "content": "<|reserved_special_token_213|>",
1752
+ "lstrip": false,
1753
+ "normalized": false,
1754
+ "rstrip": false,
1755
+ "single_word": false,
1756
+ "special": true
1757
+ },
1758
+ "128219": {
1759
+ "content": "<|reserved_special_token_214|>",
1760
+ "lstrip": false,
1761
+ "normalized": false,
1762
+ "rstrip": false,
1763
+ "single_word": false,
1764
+ "special": true
1765
+ },
1766
+ "128220": {
1767
+ "content": "<|reserved_special_token_215|>",
1768
+ "lstrip": false,
1769
+ "normalized": false,
1770
+ "rstrip": false,
1771
+ "single_word": false,
1772
+ "special": true
1773
+ },
1774
+ "128221": {
1775
+ "content": "<|reserved_special_token_216|>",
1776
+ "lstrip": false,
1777
+ "normalized": false,
1778
+ "rstrip": false,
1779
+ "single_word": false,
1780
+ "special": true
1781
+ },
1782
+ "128222": {
1783
+ "content": "<|reserved_special_token_217|>",
1784
+ "lstrip": false,
1785
+ "normalized": false,
1786
+ "rstrip": false,
1787
+ "single_word": false,
1788
+ "special": true
1789
+ },
1790
+ "128223": {
1791
+ "content": "<|reserved_special_token_218|>",
1792
+ "lstrip": false,
1793
+ "normalized": false,
1794
+ "rstrip": false,
1795
+ "single_word": false,
1796
+ "special": true
1797
+ },
1798
+ "128224": {
1799
+ "content": "<|reserved_special_token_219|>",
1800
+ "lstrip": false,
1801
+ "normalized": false,
1802
+ "rstrip": false,
1803
+ "single_word": false,
1804
+ "special": true
1805
+ },
1806
+ "128225": {
1807
+ "content": "<|reserved_special_token_220|>",
1808
+ "lstrip": false,
1809
+ "normalized": false,
1810
+ "rstrip": false,
1811
+ "single_word": false,
1812
+ "special": true
1813
+ },
1814
+ "128226": {
1815
+ "content": "<|reserved_special_token_221|>",
1816
+ "lstrip": false,
1817
+ "normalized": false,
1818
+ "rstrip": false,
1819
+ "single_word": false,
1820
+ "special": true
1821
+ },
1822
+ "128227": {
1823
+ "content": "<|reserved_special_token_222|>",
1824
+ "lstrip": false,
1825
+ "normalized": false,
1826
+ "rstrip": false,
1827
+ "single_word": false,
1828
+ "special": true
1829
+ },
1830
+ "128228": {
1831
+ "content": "<|reserved_special_token_223|>",
1832
+ "lstrip": false,
1833
+ "normalized": false,
1834
+ "rstrip": false,
1835
+ "single_word": false,
1836
+ "special": true
1837
+ },
1838
+ "128229": {
1839
+ "content": "<|reserved_special_token_224|>",
1840
+ "lstrip": false,
1841
+ "normalized": false,
1842
+ "rstrip": false,
1843
+ "single_word": false,
1844
+ "special": true
1845
+ },
1846
+ "128230": {
1847
+ "content": "<|reserved_special_token_225|>",
1848
+ "lstrip": false,
1849
+ "normalized": false,
1850
+ "rstrip": false,
1851
+ "single_word": false,
1852
+ "special": true
1853
+ },
1854
+ "128231": {
1855
+ "content": "<|reserved_special_token_226|>",
1856
+ "lstrip": false,
1857
+ "normalized": false,
1858
+ "rstrip": false,
1859
+ "single_word": false,
1860
+ "special": true
1861
+ },
1862
+ "128232": {
1863
+ "content": "<|reserved_special_token_227|>",
1864
+ "lstrip": false,
1865
+ "normalized": false,
1866
+ "rstrip": false,
1867
+ "single_word": false,
1868
+ "special": true
1869
+ },
1870
+ "128233": {
1871
+ "content": "<|reserved_special_token_228|>",
1872
+ "lstrip": false,
1873
+ "normalized": false,
1874
+ "rstrip": false,
1875
+ "single_word": false,
1876
+ "special": true
1877
+ },
1878
+ "128234": {
1879
+ "content": "<|reserved_special_token_229|>",
1880
+ "lstrip": false,
1881
+ "normalized": false,
1882
+ "rstrip": false,
1883
+ "single_word": false,
1884
+ "special": true
1885
+ },
1886
+ "128235": {
1887
+ "content": "<|reserved_special_token_230|>",
1888
+ "lstrip": false,
1889
+ "normalized": false,
1890
+ "rstrip": false,
1891
+ "single_word": false,
1892
+ "special": true
1893
+ },
1894
+ "128236": {
1895
+ "content": "<|reserved_special_token_231|>",
1896
+ "lstrip": false,
1897
+ "normalized": false,
1898
+ "rstrip": false,
1899
+ "single_word": false,
1900
+ "special": true
1901
+ },
1902
+ "128237": {
1903
+ "content": "<|reserved_special_token_232|>",
1904
+ "lstrip": false,
1905
+ "normalized": false,
1906
+ "rstrip": false,
1907
+ "single_word": false,
1908
+ "special": true
1909
+ },
1910
+ "128238": {
1911
+ "content": "<|reserved_special_token_233|>",
1912
+ "lstrip": false,
1913
+ "normalized": false,
1914
+ "rstrip": false,
1915
+ "single_word": false,
1916
+ "special": true
1917
+ },
1918
+ "128239": {
1919
+ "content": "<|reserved_special_token_234|>",
1920
+ "lstrip": false,
1921
+ "normalized": false,
1922
+ "rstrip": false,
1923
+ "single_word": false,
1924
+ "special": true
1925
+ },
1926
+ "128240": {
1927
+ "content": "<|reserved_special_token_235|>",
1928
+ "lstrip": false,
1929
+ "normalized": false,
1930
+ "rstrip": false,
1931
+ "single_word": false,
1932
+ "special": true
1933
+ },
1934
+ "128241": {
1935
+ "content": "<|reserved_special_token_236|>",
1936
+ "lstrip": false,
1937
+ "normalized": false,
1938
+ "rstrip": false,
1939
+ "single_word": false,
1940
+ "special": true
1941
+ },
1942
+ "128242": {
1943
+ "content": "<|reserved_special_token_237|>",
1944
+ "lstrip": false,
1945
+ "normalized": false,
1946
+ "rstrip": false,
1947
+ "single_word": false,
1948
+ "special": true
1949
+ },
1950
+ "128243": {
1951
+ "content": "<|reserved_special_token_238|>",
1952
+ "lstrip": false,
1953
+ "normalized": false,
1954
+ "rstrip": false,
1955
+ "single_word": false,
1956
+ "special": true
1957
+ },
1958
+ "128244": {
1959
+ "content": "<|reserved_special_token_239|>",
1960
+ "lstrip": false,
1961
+ "normalized": false,
1962
+ "rstrip": false,
1963
+ "single_word": false,
1964
+ "special": true
1965
+ },
1966
+ "128245": {
1967
+ "content": "<|reserved_special_token_240|>",
1968
+ "lstrip": false,
1969
+ "normalized": false,
1970
+ "rstrip": false,
1971
+ "single_word": false,
1972
+ "special": true
1973
+ },
1974
+ "128246": {
1975
+ "content": "<|reserved_special_token_241|>",
1976
+ "lstrip": false,
1977
+ "normalized": false,
1978
+ "rstrip": false,
1979
+ "single_word": false,
1980
+ "special": true
1981
+ },
1982
+ "128247": {
1983
+ "content": "<|reserved_special_token_242|>",
1984
+ "lstrip": false,
1985
+ "normalized": false,
1986
+ "rstrip": false,
1987
+ "single_word": false,
1988
+ "special": true
1989
+ },
1990
+ "128248": {
1991
+ "content": "<|reserved_special_token_243|>",
1992
+ "lstrip": false,
1993
+ "normalized": false,
1994
+ "rstrip": false,
1995
+ "single_word": false,
1996
+ "special": true
1997
+ },
1998
+ "128249": {
1999
+ "content": "<|reserved_special_token_244|>",
2000
+ "lstrip": false,
2001
+ "normalized": false,
2002
+ "rstrip": false,
2003
+ "single_word": false,
2004
+ "special": true
2005
+ },
2006
+ "128250": {
2007
+ "content": "<|reserved_special_token_245|>",
2008
+ "lstrip": false,
2009
+ "normalized": false,
2010
+ "rstrip": false,
2011
+ "single_word": false,
2012
+ "special": true
2013
+ },
2014
+ "128251": {
2015
+ "content": "<|reserved_special_token_246|>",
2016
+ "lstrip": false,
2017
+ "normalized": false,
2018
+ "rstrip": false,
2019
+ "single_word": false,
2020
+ "special": true
2021
+ },
2022
+ "128252": {
2023
+ "content": "<|reserved_special_token_247|>",
2024
+ "lstrip": false,
2025
+ "normalized": false,
2026
+ "rstrip": false,
2027
+ "single_word": false,
2028
+ "special": true
2029
+ },
2030
+ "128253": {
2031
+ "content": "<|reserved_special_token_248|>",
2032
+ "lstrip": false,
2033
+ "normalized": false,
2034
+ "rstrip": false,
2035
+ "single_word": false,
2036
+ "special": true
2037
+ },
2038
+ "128254": {
2039
+ "content": "<|reserved_special_token_249|>",
2040
+ "lstrip": false,
2041
+ "normalized": false,
2042
+ "rstrip": false,
2043
+ "single_word": false,
2044
+ "special": true
2045
+ },
2046
+ "128255": {
2047
+ "content": "<|reserved_special_token_250|>",
2048
+ "lstrip": false,
2049
+ "normalized": false,
2050
+ "rstrip": false,
2051
+ "single_word": false,
2052
+ "special": true
2053
+ },
2054
+ "128256": {
2055
+ "content": "<unk>",
2056
+ "lstrip": false,
2057
+ "normalized": false,
2058
+ "rstrip": false,
2059
+ "single_word": false,
2060
+ "special": true
2061
+ },
2062
+ "128257": {
2063
+ "content": "<image>",
2064
+ "lstrip": false,
2065
+ "normalized": false,
2066
+ "rstrip": false,
2067
+ "single_word": false,
2068
+ "special": true
2069
+ },
2070
+ "128258": {
2071
+ "content": "<pad>",
2072
+ "lstrip": false,
2073
+ "normalized": false,
2074
+ "rstrip": false,
2075
+ "single_word": false,
2076
+ "special": true
2077
+ }
2078
+ },
2079
+ "bos_token": "<|begin_of_text|>",
2080
+ "chat_template": "{% set loop_messages = messages %}{% for message in loop_messages %}{% set content = '<|start_header_id|>' + message['role'] + '<|end_header_id|>\n\n'+ message['content'] | trim + '<|eot_id|>' %}{% if loop.index0 == 0 %}{% set content = bos_token + content %}{% endif %}{{ content }}{% endfor %}{{ '<|start_header_id|>assistant<|end_header_id|>\n\n' }}",
2081
+ "clean_up_tokenization_spaces": true,
2082
+ "eos_token": "<|end_of_text|>",
2083
+ "extra_special_tokens": {},
2084
+ "legacy": true,
2085
+ "model_input_names": [
2086
+ "input_ids",
2087
+ "attention_mask"
2088
+ ],
2089
+ "model_max_length": 1000000000000000019884624838656,
2090
+ "pad_token": "<pad>",
2091
+ "padding_side": "right",
2092
+ "processor_class": "LlavaProcessor",
2093
+ "tokenizer_class": "LlamaTokenizer",
2094
+ "unk_token": "<unk>",
2095
+ "use_default_system_prompt": false
2096
+ }
hf_download/hub/models--hunyuanvideo-community--HunyuanVideo/blobs/76e821f1b6f0a9709293c3b6b51ed90980b3166b ADDED
The diff for this file is too large to render. See raw diff
 
hf_download/hub/models--hunyuanvideo-community--HunyuanVideo/blobs/7c6fa7065265909bd500cafb38cc939b81b1b018 ADDED
@@ -0,0 +1,30 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "bos_token": {
3
+ "content": "<|begin_of_text|>",
4
+ "lstrip": false,
5
+ "normalized": false,
6
+ "rstrip": false,
7
+ "single_word": false
8
+ },
9
+ "eos_token": {
10
+ "content": "<|end_of_text|>",
11
+ "lstrip": false,
12
+ "normalized": false,
13
+ "rstrip": false,
14
+ "single_word": false
15
+ },
16
+ "pad_token": {
17
+ "content": "<pad>",
18
+ "lstrip": false,
19
+ "normalized": false,
20
+ "rstrip": false,
21
+ "single_word": false
22
+ },
23
+ "unk_token": {
24
+ "content": "<unk>",
25
+ "lstrip": false,
26
+ "normalized": false,
27
+ "rstrip": false,
28
+ "single_word": false
29
+ }
30
+ }
hf_download/hub/models--hunyuanvideo-community--HunyuanVideo/blobs/b6e7a9e010002205834fd4f2808ca042bad4a246 ADDED
@@ -0,0 +1,297 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "metadata": {
3
+ "total_size": 15010373632
4
+ },
5
+ "weight_map": {
6
+ "embed_tokens.weight": "model-00001-of-00004.safetensors",
7
+ "layers.0.input_layernorm.weight": "model-00001-of-00004.safetensors",
8
+ "layers.0.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
9
+ "layers.0.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
10
+ "layers.0.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
11
+ "layers.0.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
12
+ "layers.0.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
13
+ "layers.0.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
14
+ "layers.0.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
15
+ "layers.0.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
16
+ "layers.1.input_layernorm.weight": "model-00001-of-00004.safetensors",
17
+ "layers.1.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
18
+ "layers.1.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
19
+ "layers.1.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
20
+ "layers.1.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
21
+ "layers.1.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
22
+ "layers.1.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
23
+ "layers.1.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
24
+ "layers.1.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
25
+ "layers.10.input_layernorm.weight": "model-00002-of-00004.safetensors",
26
+ "layers.10.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
27
+ "layers.10.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
28
+ "layers.10.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
29
+ "layers.10.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
30
+ "layers.10.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
31
+ "layers.10.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
32
+ "layers.10.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
33
+ "layers.10.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
34
+ "layers.11.input_layernorm.weight": "model-00002-of-00004.safetensors",
35
+ "layers.11.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
36
+ "layers.11.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
37
+ "layers.11.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
38
+ "layers.11.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
39
+ "layers.11.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
40
+ "layers.11.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
41
+ "layers.11.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
42
+ "layers.11.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
43
+ "layers.12.input_layernorm.weight": "model-00002-of-00004.safetensors",
44
+ "layers.12.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
45
+ "layers.12.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
46
+ "layers.12.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
47
+ "layers.12.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
48
+ "layers.12.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
49
+ "layers.12.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
50
+ "layers.12.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
51
+ "layers.12.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
52
+ "layers.13.input_layernorm.weight": "model-00002-of-00004.safetensors",
53
+ "layers.13.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
54
+ "layers.13.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
55
+ "layers.13.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
56
+ "layers.13.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
57
+ "layers.13.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
58
+ "layers.13.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
59
+ "layers.13.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
60
+ "layers.13.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
61
+ "layers.14.input_layernorm.weight": "model-00002-of-00004.safetensors",
62
+ "layers.14.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
63
+ "layers.14.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
64
+ "layers.14.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
65
+ "layers.14.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
66
+ "layers.14.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
67
+ "layers.14.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
68
+ "layers.14.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
69
+ "layers.14.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
70
+ "layers.15.input_layernorm.weight": "model-00002-of-00004.safetensors",
71
+ "layers.15.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
72
+ "layers.15.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
73
+ "layers.15.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
74
+ "layers.15.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
75
+ "layers.15.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
76
+ "layers.15.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
77
+ "layers.15.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
78
+ "layers.15.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
79
+ "layers.16.input_layernorm.weight": "model-00002-of-00004.safetensors",
80
+ "layers.16.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
81
+ "layers.16.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
82
+ "layers.16.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
83
+ "layers.16.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
84
+ "layers.16.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
85
+ "layers.16.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
86
+ "layers.16.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
87
+ "layers.16.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
88
+ "layers.17.input_layernorm.weight": "model-00002-of-00004.safetensors",
89
+ "layers.17.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
90
+ "layers.17.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
91
+ "layers.17.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
92
+ "layers.17.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
93
+ "layers.17.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
94
+ "layers.17.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
95
+ "layers.17.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
96
+ "layers.17.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
97
+ "layers.18.input_layernorm.weight": "model-00002-of-00004.safetensors",
98
+ "layers.18.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
99
+ "layers.18.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
100
+ "layers.18.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
101
+ "layers.18.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
102
+ "layers.18.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
103
+ "layers.18.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
104
+ "layers.18.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
105
+ "layers.18.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
106
+ "layers.19.input_layernorm.weight": "model-00002-of-00004.safetensors",
107
+ "layers.19.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
108
+ "layers.19.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
109
+ "layers.19.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
110
+ "layers.19.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
111
+ "layers.19.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
112
+ "layers.19.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
113
+ "layers.19.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
114
+ "layers.19.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
115
+ "layers.2.input_layernorm.weight": "model-00001-of-00004.safetensors",
116
+ "layers.2.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
117
+ "layers.2.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
118
+ "layers.2.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
119
+ "layers.2.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
120
+ "layers.2.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
121
+ "layers.2.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
122
+ "layers.2.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
123
+ "layers.2.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
124
+ "layers.20.input_layernorm.weight": "model-00003-of-00004.safetensors",
125
+ "layers.20.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
126
+ "layers.20.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
127
+ "layers.20.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
128
+ "layers.20.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
129
+ "layers.20.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
130
+ "layers.20.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
131
+ "layers.20.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
132
+ "layers.20.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
133
+ "layers.21.input_layernorm.weight": "model-00003-of-00004.safetensors",
134
+ "layers.21.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
135
+ "layers.21.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
136
+ "layers.21.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
137
+ "layers.21.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
138
+ "layers.21.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
139
+ "layers.21.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
140
+ "layers.21.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
141
+ "layers.21.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
142
+ "layers.22.input_layernorm.weight": "model-00003-of-00004.safetensors",
143
+ "layers.22.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
144
+ "layers.22.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
145
+ "layers.22.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
146
+ "layers.22.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
147
+ "layers.22.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
148
+ "layers.22.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
149
+ "layers.22.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
150
+ "layers.22.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
151
+ "layers.23.input_layernorm.weight": "model-00003-of-00004.safetensors",
152
+ "layers.23.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
153
+ "layers.23.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
154
+ "layers.23.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
155
+ "layers.23.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
156
+ "layers.23.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
157
+ "layers.23.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
158
+ "layers.23.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
159
+ "layers.23.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
160
+ "layers.24.input_layernorm.weight": "model-00003-of-00004.safetensors",
161
+ "layers.24.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
162
+ "layers.24.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
163
+ "layers.24.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
164
+ "layers.24.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
165
+ "layers.24.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
166
+ "layers.24.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
167
+ "layers.24.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
168
+ "layers.24.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
169
+ "layers.25.input_layernorm.weight": "model-00003-of-00004.safetensors",
170
+ "layers.25.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
171
+ "layers.25.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
172
+ "layers.25.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
173
+ "layers.25.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
174
+ "layers.25.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
175
+ "layers.25.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
176
+ "layers.25.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
177
+ "layers.25.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
178
+ "layers.26.input_layernorm.weight": "model-00003-of-00004.safetensors",
179
+ "layers.26.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
180
+ "layers.26.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
181
+ "layers.26.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
182
+ "layers.26.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
183
+ "layers.26.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
184
+ "layers.26.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
185
+ "layers.26.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
186
+ "layers.26.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
187
+ "layers.27.input_layernorm.weight": "model-00003-of-00004.safetensors",
188
+ "layers.27.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
189
+ "layers.27.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
190
+ "layers.27.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
191
+ "layers.27.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
192
+ "layers.27.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
193
+ "layers.27.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
194
+ "layers.27.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
195
+ "layers.27.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
196
+ "layers.28.input_layernorm.weight": "model-00003-of-00004.safetensors",
197
+ "layers.28.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
198
+ "layers.28.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
199
+ "layers.28.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
200
+ "layers.28.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
201
+ "layers.28.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
202
+ "layers.28.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
203
+ "layers.28.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
204
+ "layers.28.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
205
+ "layers.29.input_layernorm.weight": "model-00003-of-00004.safetensors",
206
+ "layers.29.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
207
+ "layers.29.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
208
+ "layers.29.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
209
+ "layers.29.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
210
+ "layers.29.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
211
+ "layers.29.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
212
+ "layers.29.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
213
+ "layers.29.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
214
+ "layers.3.input_layernorm.weight": "model-00001-of-00004.safetensors",
215
+ "layers.3.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
216
+ "layers.3.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
217
+ "layers.3.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
218
+ "layers.3.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
219
+ "layers.3.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
220
+ "layers.3.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
221
+ "layers.3.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
222
+ "layers.3.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
223
+ "layers.30.input_layernorm.weight": "model-00003-of-00004.safetensors",
224
+ "layers.30.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
225
+ "layers.30.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
226
+ "layers.30.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
227
+ "layers.30.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
228
+ "layers.30.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
229
+ "layers.30.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
230
+ "layers.30.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
231
+ "layers.30.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
232
+ "layers.31.input_layernorm.weight": "model-00004-of-00004.safetensors",
233
+ "layers.31.mlp.down_proj.weight": "model-00004-of-00004.safetensors",
234
+ "layers.31.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
235
+ "layers.31.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
236
+ "layers.31.post_attention_layernorm.weight": "model-00004-of-00004.safetensors",
237
+ "layers.31.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
238
+ "layers.31.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
239
+ "layers.31.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
240
+ "layers.31.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
241
+ "layers.4.input_layernorm.weight": "model-00001-of-00004.safetensors",
242
+ "layers.4.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
243
+ "layers.4.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
244
+ "layers.4.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
245
+ "layers.4.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
246
+ "layers.4.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
247
+ "layers.4.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
248
+ "layers.4.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
249
+ "layers.4.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
250
+ "layers.5.input_layernorm.weight": "model-00001-of-00004.safetensors",
251
+ "layers.5.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
252
+ "layers.5.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
253
+ "layers.5.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
254
+ "layers.5.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
255
+ "layers.5.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
256
+ "layers.5.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
257
+ "layers.5.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
258
+ "layers.5.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
259
+ "layers.6.input_layernorm.weight": "model-00001-of-00004.safetensors",
260
+ "layers.6.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
261
+ "layers.6.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
262
+ "layers.6.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
263
+ "layers.6.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
264
+ "layers.6.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
265
+ "layers.6.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
266
+ "layers.6.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
267
+ "layers.6.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
268
+ "layers.7.input_layernorm.weight": "model-00001-of-00004.safetensors",
269
+ "layers.7.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
270
+ "layers.7.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
271
+ "layers.7.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
272
+ "layers.7.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
273
+ "layers.7.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
274
+ "layers.7.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
275
+ "layers.7.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
276
+ "layers.7.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
277
+ "layers.8.input_layernorm.weight": "model-00001-of-00004.safetensors",
278
+ "layers.8.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
279
+ "layers.8.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
280
+ "layers.8.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
281
+ "layers.8.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
282
+ "layers.8.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
283
+ "layers.8.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
284
+ "layers.8.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
285
+ "layers.8.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
286
+ "layers.9.input_layernorm.weight": "model-00002-of-00004.safetensors",
287
+ "layers.9.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
288
+ "layers.9.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
289
+ "layers.9.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
290
+ "layers.9.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
291
+ "layers.9.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
292
+ "layers.9.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
293
+ "layers.9.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
294
+ "layers.9.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
295
+ "norm.weight": "model-00004-of-00004.safetensors"
296
+ }
297
+ }
hf_download/hub/models--hunyuanvideo-community--HunyuanVideo/blobs/b70acd51d20aeee27af7a81cea7d68f5288b8f4b ADDED
@@ -0,0 +1,32 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "_class_name": "AutoencoderKLHunyuanVideo",
3
+ "_diffusers_version": "0.32.0.dev0",
4
+ "act_fn": "silu",
5
+ "block_out_channels": [
6
+ 128,
7
+ 256,
8
+ 512,
9
+ 512
10
+ ],
11
+ "down_block_types": [
12
+ "HunyuanVideoDownBlock3D",
13
+ "HunyuanVideoDownBlock3D",
14
+ "HunyuanVideoDownBlock3D",
15
+ "HunyuanVideoDownBlock3D"
16
+ ],
17
+ "in_channels": 3,
18
+ "latent_channels": 16,
19
+ "layers_per_block": 2,
20
+ "mid_block_add_attention": true,
21
+ "norm_num_groups": 32,
22
+ "out_channels": 3,
23
+ "scaling_factor": 0.476986,
24
+ "spatial_compression_ratio": 8,
25
+ "temporal_compression_ratio": 4,
26
+ "up_block_types": [
27
+ "HunyuanVideoUpBlock3D",
28
+ "HunyuanVideoUpBlock3D",
29
+ "HunyuanVideoUpBlock3D",
30
+ "HunyuanVideoUpBlock3D"
31
+ ]
32
+ }
hf_download/hub/models--hunyuanvideo-community--HunyuanVideo/blobs/cf0682d6de72c1547f41b4f6d7c59f62deffef94 ADDED
@@ -0,0 +1,30 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "bos_token": {
3
+ "content": "<|startoftext|>",
4
+ "lstrip": false,
5
+ "normalized": true,
6
+ "rstrip": false,
7
+ "single_word": false
8
+ },
9
+ "eos_token": {
10
+ "content": "<|endoftext|>",
11
+ "lstrip": false,
12
+ "normalized": false,
13
+ "rstrip": false,
14
+ "single_word": false
15
+ },
16
+ "pad_token": {
17
+ "content": "<|endoftext|>",
18
+ "lstrip": false,
19
+ "normalized": false,
20
+ "rstrip": false,
21
+ "single_word": false
22
+ },
23
+ "unk_token": {
24
+ "content": "<|endoftext|>",
25
+ "lstrip": false,
26
+ "normalized": false,
27
+ "rstrip": false,
28
+ "single_word": false
29
+ }
30
+ }
hf_download/hub/models--hunyuanvideo-community--HunyuanVideo/blobs/f5f2205251eb0b863c5b0f9a60cd9fad069c5872 ADDED
@@ -0,0 +1,30 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "_name_or_path": "/raid/aryan/llava-llama-3-8b-v1_1-extracted/text_encoder",
3
+ "architectures": [
4
+ "LlamaModel"
5
+ ],
6
+ "attention_bias": false,
7
+ "attention_dropout": 0.0,
8
+ "bos_token_id": 128000,
9
+ "eos_token_id": 128001,
10
+ "head_dim": 128,
11
+ "hidden_act": "silu",
12
+ "hidden_size": 4096,
13
+ "initializer_range": 0.02,
14
+ "intermediate_size": 14336,
15
+ "max_position_embeddings": 8192,
16
+ "mlp_bias": false,
17
+ "model_type": "llama",
18
+ "num_attention_heads": 32,
19
+ "num_hidden_layers": 32,
20
+ "num_key_value_heads": 8,
21
+ "pretraining_tp": 1,
22
+ "rms_norm_eps": 1e-05,
23
+ "rope_scaling": null,
24
+ "rope_theta": 500000.0,
25
+ "tie_word_embeddings": false,
26
+ "torch_dtype": "float16",
27
+ "transformers_version": "4.48.0.dev0",
28
+ "use_cache": true,
29
+ "vocab_size": 128320
30
+ }
hf_download/hub/models--hunyuanvideo-community--HunyuanVideo/snapshots/e8c2aaa66fe3742a32c11a6766aecbf07c56e773/text_encoder/config.json ADDED
@@ -0,0 +1,30 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "_name_or_path": "/raid/aryan/llava-llama-3-8b-v1_1-extracted/text_encoder",
3
+ "architectures": [
4
+ "LlamaModel"
5
+ ],
6
+ "attention_bias": false,
7
+ "attention_dropout": 0.0,
8
+ "bos_token_id": 128000,
9
+ "eos_token_id": 128001,
10
+ "head_dim": 128,
11
+ "hidden_act": "silu",
12
+ "hidden_size": 4096,
13
+ "initializer_range": 0.02,
14
+ "intermediate_size": 14336,
15
+ "max_position_embeddings": 8192,
16
+ "mlp_bias": false,
17
+ "model_type": "llama",
18
+ "num_attention_heads": 32,
19
+ "num_hidden_layers": 32,
20
+ "num_key_value_heads": 8,
21
+ "pretraining_tp": 1,
22
+ "rms_norm_eps": 1e-05,
23
+ "rope_scaling": null,
24
+ "rope_theta": 500000.0,
25
+ "tie_word_embeddings": false,
26
+ "torch_dtype": "float16",
27
+ "transformers_version": "4.48.0.dev0",
28
+ "use_cache": true,
29
+ "vocab_size": 128320
30
+ }
hf_download/hub/models--hunyuanvideo-community--HunyuanVideo/snapshots/e8c2aaa66fe3742a32c11a6766aecbf07c56e773/text_encoder_2/config.json ADDED
@@ -0,0 +1,25 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "_name_or_path": "/raid/aryan/llava-llama-3-8b-v1_1-extracted/text_encoder_2",
3
+ "architectures": [
4
+ "CLIPTextModel"
5
+ ],
6
+ "attention_dropout": 0.0,
7
+ "bos_token_id": 0,
8
+ "dropout": 0.0,
9
+ "eos_token_id": 2,
10
+ "hidden_act": "quick_gelu",
11
+ "hidden_size": 768,
12
+ "initializer_factor": 1.0,
13
+ "initializer_range": 0.02,
14
+ "intermediate_size": 3072,
15
+ "layer_norm_eps": 1e-05,
16
+ "max_position_embeddings": 77,
17
+ "model_type": "clip_text_model",
18
+ "num_attention_heads": 12,
19
+ "num_hidden_layers": 12,
20
+ "pad_token_id": 1,
21
+ "projection_dim": 768,
22
+ "torch_dtype": "float16",
23
+ "transformers_version": "4.48.0.dev0",
24
+ "vocab_size": 49408
25
+ }
hf_download/hub/models--hunyuanvideo-community--HunyuanVideo/snapshots/e8c2aaa66fe3742a32c11a6766aecbf07c56e773/tokenizer/special_tokens_map.json ADDED
@@ -0,0 +1,30 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "bos_token": {
3
+ "content": "<|begin_of_text|>",
4
+ "lstrip": false,
5
+ "normalized": false,
6
+ "rstrip": false,
7
+ "single_word": false
8
+ },
9
+ "eos_token": {
10
+ "content": "<|end_of_text|>",
11
+ "lstrip": false,
12
+ "normalized": false,
13
+ "rstrip": false,
14
+ "single_word": false
15
+ },
16
+ "pad_token": {
17
+ "content": "<pad>",
18
+ "lstrip": false,
19
+ "normalized": false,
20
+ "rstrip": false,
21
+ "single_word": false
22
+ },
23
+ "unk_token": {
24
+ "content": "<unk>",
25
+ "lstrip": false,
26
+ "normalized": false,
27
+ "rstrip": false,
28
+ "single_word": false
29
+ }
30
+ }
hf_download/hub/models--hunyuanvideo-community--HunyuanVideo/snapshots/e8c2aaa66fe3742a32c11a6766aecbf07c56e773/tokenizer/tokenizer_config.json ADDED
@@ -0,0 +1,2096 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "add_bos_token": true,
3
+ "add_eos_token": false,
4
+ "add_prefix_space": null,
5
+ "added_tokens_decoder": {
6
+ "128000": {
7
+ "content": "<|begin_of_text|>",
8
+ "lstrip": false,
9
+ "normalized": false,
10
+ "rstrip": false,
11
+ "single_word": false,
12
+ "special": true
13
+ },
14
+ "128001": {
15
+ "content": "<|end_of_text|>",
16
+ "lstrip": false,
17
+ "normalized": false,
18
+ "rstrip": false,
19
+ "single_word": false,
20
+ "special": true
21
+ },
22
+ "128002": {
23
+ "content": "<|reserved_special_token_0|>",
24
+ "lstrip": false,
25
+ "normalized": false,
26
+ "rstrip": false,
27
+ "single_word": false,
28
+ "special": true
29
+ },
30
+ "128003": {
31
+ "content": "<|reserved_special_token_1|>",
32
+ "lstrip": false,
33
+ "normalized": false,
34
+ "rstrip": false,
35
+ "single_word": false,
36
+ "special": true
37
+ },
38
+ "128004": {
39
+ "content": "<|reserved_special_token_2|>",
40
+ "lstrip": false,
41
+ "normalized": false,
42
+ "rstrip": false,
43
+ "single_word": false,
44
+ "special": true
45
+ },
46
+ "128005": {
47
+ "content": "<|reserved_special_token_3|>",
48
+ "lstrip": false,
49
+ "normalized": false,
50
+ "rstrip": false,
51
+ "single_word": false,
52
+ "special": true
53
+ },
54
+ "128006": {
55
+ "content": "<|start_header_id|>",
56
+ "lstrip": false,
57
+ "normalized": false,
58
+ "rstrip": false,
59
+ "single_word": false,
60
+ "special": true
61
+ },
62
+ "128007": {
63
+ "content": "<|end_header_id|>",
64
+ "lstrip": false,
65
+ "normalized": false,
66
+ "rstrip": false,
67
+ "single_word": false,
68
+ "special": true
69
+ },
70
+ "128008": {
71
+ "content": "<|reserved_special_token_4|>",
72
+ "lstrip": false,
73
+ "normalized": false,
74
+ "rstrip": false,
75
+ "single_word": false,
76
+ "special": true
77
+ },
78
+ "128009": {
79
+ "content": "<|eot_id|>",
80
+ "lstrip": false,
81
+ "normalized": false,
82
+ "rstrip": false,
83
+ "single_word": false,
84
+ "special": true
85
+ },
86
+ "128010": {
87
+ "content": "<|reserved_special_token_5|>",
88
+ "lstrip": false,
89
+ "normalized": false,
90
+ "rstrip": false,
91
+ "single_word": false,
92
+ "special": true
93
+ },
94
+ "128011": {
95
+ "content": "<|reserved_special_token_6|>",
96
+ "lstrip": false,
97
+ "normalized": false,
98
+ "rstrip": false,
99
+ "single_word": false,
100
+ "special": true
101
+ },
102
+ "128012": {
103
+ "content": "<|reserved_special_token_7|>",
104
+ "lstrip": false,
105
+ "normalized": false,
106
+ "rstrip": false,
107
+ "single_word": false,
108
+ "special": true
109
+ },
110
+ "128013": {
111
+ "content": "<|reserved_special_token_8|>",
112
+ "lstrip": false,
113
+ "normalized": false,
114
+ "rstrip": false,
115
+ "single_word": false,
116
+ "special": true
117
+ },
118
+ "128014": {
119
+ "content": "<|reserved_special_token_9|>",
120
+ "lstrip": false,
121
+ "normalized": false,
122
+ "rstrip": false,
123
+ "single_word": false,
124
+ "special": true
125
+ },
126
+ "128015": {
127
+ "content": "<|reserved_special_token_10|>",
128
+ "lstrip": false,
129
+ "normalized": false,
130
+ "rstrip": false,
131
+ "single_word": false,
132
+ "special": true
133
+ },
134
+ "128016": {
135
+ "content": "<|reserved_special_token_11|>",
136
+ "lstrip": false,
137
+ "normalized": false,
138
+ "rstrip": false,
139
+ "single_word": false,
140
+ "special": true
141
+ },
142
+ "128017": {
143
+ "content": "<|reserved_special_token_12|>",
144
+ "lstrip": false,
145
+ "normalized": false,
146
+ "rstrip": false,
147
+ "single_word": false,
148
+ "special": true
149
+ },
150
+ "128018": {
151
+ "content": "<|reserved_special_token_13|>",
152
+ "lstrip": false,
153
+ "normalized": false,
154
+ "rstrip": false,
155
+ "single_word": false,
156
+ "special": true
157
+ },
158
+ "128019": {
159
+ "content": "<|reserved_special_token_14|>",
160
+ "lstrip": false,
161
+ "normalized": false,
162
+ "rstrip": false,
163
+ "single_word": false,
164
+ "special": true
165
+ },
166
+ "128020": {
167
+ "content": "<|reserved_special_token_15|>",
168
+ "lstrip": false,
169
+ "normalized": false,
170
+ "rstrip": false,
171
+ "single_word": false,
172
+ "special": true
173
+ },
174
+ "128021": {
175
+ "content": "<|reserved_special_token_16|>",
176
+ "lstrip": false,
177
+ "normalized": false,
178
+ "rstrip": false,
179
+ "single_word": false,
180
+ "special": true
181
+ },
182
+ "128022": {
183
+ "content": "<|reserved_special_token_17|>",
184
+ "lstrip": false,
185
+ "normalized": false,
186
+ "rstrip": false,
187
+ "single_word": false,
188
+ "special": true
189
+ },
190
+ "128023": {
191
+ "content": "<|reserved_special_token_18|>",
192
+ "lstrip": false,
193
+ "normalized": false,
194
+ "rstrip": false,
195
+ "single_word": false,
196
+ "special": true
197
+ },
198
+ "128024": {
199
+ "content": "<|reserved_special_token_19|>",
200
+ "lstrip": false,
201
+ "normalized": false,
202
+ "rstrip": false,
203
+ "single_word": false,
204
+ "special": true
205
+ },
206
+ "128025": {
207
+ "content": "<|reserved_special_token_20|>",
208
+ "lstrip": false,
209
+ "normalized": false,
210
+ "rstrip": false,
211
+ "single_word": false,
212
+ "special": true
213
+ },
214
+ "128026": {
215
+ "content": "<|reserved_special_token_21|>",
216
+ "lstrip": false,
217
+ "normalized": false,
218
+ "rstrip": false,
219
+ "single_word": false,
220
+ "special": true
221
+ },
222
+ "128027": {
223
+ "content": "<|reserved_special_token_22|>",
224
+ "lstrip": false,
225
+ "normalized": false,
226
+ "rstrip": false,
227
+ "single_word": false,
228
+ "special": true
229
+ },
230
+ "128028": {
231
+ "content": "<|reserved_special_token_23|>",
232
+ "lstrip": false,
233
+ "normalized": false,
234
+ "rstrip": false,
235
+ "single_word": false,
236
+ "special": true
237
+ },
238
+ "128029": {
239
+ "content": "<|reserved_special_token_24|>",
240
+ "lstrip": false,
241
+ "normalized": false,
242
+ "rstrip": false,
243
+ "single_word": false,
244
+ "special": true
245
+ },
246
+ "128030": {
247
+ "content": "<|reserved_special_token_25|>",
248
+ "lstrip": false,
249
+ "normalized": false,
250
+ "rstrip": false,
251
+ "single_word": false,
252
+ "special": true
253
+ },
254
+ "128031": {
255
+ "content": "<|reserved_special_token_26|>",
256
+ "lstrip": false,
257
+ "normalized": false,
258
+ "rstrip": false,
259
+ "single_word": false,
260
+ "special": true
261
+ },
262
+ "128032": {
263
+ "content": "<|reserved_special_token_27|>",
264
+ "lstrip": false,
265
+ "normalized": false,
266
+ "rstrip": false,
267
+ "single_word": false,
268
+ "special": true
269
+ },
270
+ "128033": {
271
+ "content": "<|reserved_special_token_28|>",
272
+ "lstrip": false,
273
+ "normalized": false,
274
+ "rstrip": false,
275
+ "single_word": false,
276
+ "special": true
277
+ },
278
+ "128034": {
279
+ "content": "<|reserved_special_token_29|>",
280
+ "lstrip": false,
281
+ "normalized": false,
282
+ "rstrip": false,
283
+ "single_word": false,
284
+ "special": true
285
+ },
286
+ "128035": {
287
+ "content": "<|reserved_special_token_30|>",
288
+ "lstrip": false,
289
+ "normalized": false,
290
+ "rstrip": false,
291
+ "single_word": false,
292
+ "special": true
293
+ },
294
+ "128036": {
295
+ "content": "<|reserved_special_token_31|>",
296
+ "lstrip": false,
297
+ "normalized": false,
298
+ "rstrip": false,
299
+ "single_word": false,
300
+ "special": true
301
+ },
302
+ "128037": {
303
+ "content": "<|reserved_special_token_32|>",
304
+ "lstrip": false,
305
+ "normalized": false,
306
+ "rstrip": false,
307
+ "single_word": false,
308
+ "special": true
309
+ },
310
+ "128038": {
311
+ "content": "<|reserved_special_token_33|>",
312
+ "lstrip": false,
313
+ "normalized": false,
314
+ "rstrip": false,
315
+ "single_word": false,
316
+ "special": true
317
+ },
318
+ "128039": {
319
+ "content": "<|reserved_special_token_34|>",
320
+ "lstrip": false,
321
+ "normalized": false,
322
+ "rstrip": false,
323
+ "single_word": false,
324
+ "special": true
325
+ },
326
+ "128040": {
327
+ "content": "<|reserved_special_token_35|>",
328
+ "lstrip": false,
329
+ "normalized": false,
330
+ "rstrip": false,
331
+ "single_word": false,
332
+ "special": true
333
+ },
334
+ "128041": {
335
+ "content": "<|reserved_special_token_36|>",
336
+ "lstrip": false,
337
+ "normalized": false,
338
+ "rstrip": false,
339
+ "single_word": false,
340
+ "special": true
341
+ },
342
+ "128042": {
343
+ "content": "<|reserved_special_token_37|>",
344
+ "lstrip": false,
345
+ "normalized": false,
346
+ "rstrip": false,
347
+ "single_word": false,
348
+ "special": true
349
+ },
350
+ "128043": {
351
+ "content": "<|reserved_special_token_38|>",
352
+ "lstrip": false,
353
+ "normalized": false,
354
+ "rstrip": false,
355
+ "single_word": false,
356
+ "special": true
357
+ },
358
+ "128044": {
359
+ "content": "<|reserved_special_token_39|>",
360
+ "lstrip": false,
361
+ "normalized": false,
362
+ "rstrip": false,
363
+ "single_word": false,
364
+ "special": true
365
+ },
366
+ "128045": {
367
+ "content": "<|reserved_special_token_40|>",
368
+ "lstrip": false,
369
+ "normalized": false,
370
+ "rstrip": false,
371
+ "single_word": false,
372
+ "special": true
373
+ },
374
+ "128046": {
375
+ "content": "<|reserved_special_token_41|>",
376
+ "lstrip": false,
377
+ "normalized": false,
378
+ "rstrip": false,
379
+ "single_word": false,
380
+ "special": true
381
+ },
382
+ "128047": {
383
+ "content": "<|reserved_special_token_42|>",
384
+ "lstrip": false,
385
+ "normalized": false,
386
+ "rstrip": false,
387
+ "single_word": false,
388
+ "special": true
389
+ },
390
+ "128048": {
391
+ "content": "<|reserved_special_token_43|>",
392
+ "lstrip": false,
393
+ "normalized": false,
394
+ "rstrip": false,
395
+ "single_word": false,
396
+ "special": true
397
+ },
398
+ "128049": {
399
+ "content": "<|reserved_special_token_44|>",
400
+ "lstrip": false,
401
+ "normalized": false,
402
+ "rstrip": false,
403
+ "single_word": false,
404
+ "special": true
405
+ },
406
+ "128050": {
407
+ "content": "<|reserved_special_token_45|>",
408
+ "lstrip": false,
409
+ "normalized": false,
410
+ "rstrip": false,
411
+ "single_word": false,
412
+ "special": true
413
+ },
414
+ "128051": {
415
+ "content": "<|reserved_special_token_46|>",
416
+ "lstrip": false,
417
+ "normalized": false,
418
+ "rstrip": false,
419
+ "single_word": false,
420
+ "special": true
421
+ },
422
+ "128052": {
423
+ "content": "<|reserved_special_token_47|>",
424
+ "lstrip": false,
425
+ "normalized": false,
426
+ "rstrip": false,
427
+ "single_word": false,
428
+ "special": true
429
+ },
430
+ "128053": {
431
+ "content": "<|reserved_special_token_48|>",
432
+ "lstrip": false,
433
+ "normalized": false,
434
+ "rstrip": false,
435
+ "single_word": false,
436
+ "special": true
437
+ },
438
+ "128054": {
439
+ "content": "<|reserved_special_token_49|>",
440
+ "lstrip": false,
441
+ "normalized": false,
442
+ "rstrip": false,
443
+ "single_word": false,
444
+ "special": true
445
+ },
446
+ "128055": {
447
+ "content": "<|reserved_special_token_50|>",
448
+ "lstrip": false,
449
+ "normalized": false,
450
+ "rstrip": false,
451
+ "single_word": false,
452
+ "special": true
453
+ },
454
+ "128056": {
455
+ "content": "<|reserved_special_token_51|>",
456
+ "lstrip": false,
457
+ "normalized": false,
458
+ "rstrip": false,
459
+ "single_word": false,
460
+ "special": true
461
+ },
462
+ "128057": {
463
+ "content": "<|reserved_special_token_52|>",
464
+ "lstrip": false,
465
+ "normalized": false,
466
+ "rstrip": false,
467
+ "single_word": false,
468
+ "special": true
469
+ },
470
+ "128058": {
471
+ "content": "<|reserved_special_token_53|>",
472
+ "lstrip": false,
473
+ "normalized": false,
474
+ "rstrip": false,
475
+ "single_word": false,
476
+ "special": true
477
+ },
478
+ "128059": {
479
+ "content": "<|reserved_special_token_54|>",
480
+ "lstrip": false,
481
+ "normalized": false,
482
+ "rstrip": false,
483
+ "single_word": false,
484
+ "special": true
485
+ },
486
+ "128060": {
487
+ "content": "<|reserved_special_token_55|>",
488
+ "lstrip": false,
489
+ "normalized": false,
490
+ "rstrip": false,
491
+ "single_word": false,
492
+ "special": true
493
+ },
494
+ "128061": {
495
+ "content": "<|reserved_special_token_56|>",
496
+ "lstrip": false,
497
+ "normalized": false,
498
+ "rstrip": false,
499
+ "single_word": false,
500
+ "special": true
501
+ },
502
+ "128062": {
503
+ "content": "<|reserved_special_token_57|>",
504
+ "lstrip": false,
505
+ "normalized": false,
506
+ "rstrip": false,
507
+ "single_word": false,
508
+ "special": true
509
+ },
510
+ "128063": {
511
+ "content": "<|reserved_special_token_58|>",
512
+ "lstrip": false,
513
+ "normalized": false,
514
+ "rstrip": false,
515
+ "single_word": false,
516
+ "special": true
517
+ },
518
+ "128064": {
519
+ "content": "<|reserved_special_token_59|>",
520
+ "lstrip": false,
521
+ "normalized": false,
522
+ "rstrip": false,
523
+ "single_word": false,
524
+ "special": true
525
+ },
526
+ "128065": {
527
+ "content": "<|reserved_special_token_60|>",
528
+ "lstrip": false,
529
+ "normalized": false,
530
+ "rstrip": false,
531
+ "single_word": false,
532
+ "special": true
533
+ },
534
+ "128066": {
535
+ "content": "<|reserved_special_token_61|>",
536
+ "lstrip": false,
537
+ "normalized": false,
538
+ "rstrip": false,
539
+ "single_word": false,
540
+ "special": true
541
+ },
542
+ "128067": {
543
+ "content": "<|reserved_special_token_62|>",
544
+ "lstrip": false,
545
+ "normalized": false,
546
+ "rstrip": false,
547
+ "single_word": false,
548
+ "special": true
549
+ },
550
+ "128068": {
551
+ "content": "<|reserved_special_token_63|>",
552
+ "lstrip": false,
553
+ "normalized": false,
554
+ "rstrip": false,
555
+ "single_word": false,
556
+ "special": true
557
+ },
558
+ "128069": {
559
+ "content": "<|reserved_special_token_64|>",
560
+ "lstrip": false,
561
+ "normalized": false,
562
+ "rstrip": false,
563
+ "single_word": false,
564
+ "special": true
565
+ },
566
+ "128070": {
567
+ "content": "<|reserved_special_token_65|>",
568
+ "lstrip": false,
569
+ "normalized": false,
570
+ "rstrip": false,
571
+ "single_word": false,
572
+ "special": true
573
+ },
574
+ "128071": {
575
+ "content": "<|reserved_special_token_66|>",
576
+ "lstrip": false,
577
+ "normalized": false,
578
+ "rstrip": false,
579
+ "single_word": false,
580
+ "special": true
581
+ },
582
+ "128072": {
583
+ "content": "<|reserved_special_token_67|>",
584
+ "lstrip": false,
585
+ "normalized": false,
586
+ "rstrip": false,
587
+ "single_word": false,
588
+ "special": true
589
+ },
590
+ "128073": {
591
+ "content": "<|reserved_special_token_68|>",
592
+ "lstrip": false,
593
+ "normalized": false,
594
+ "rstrip": false,
595
+ "single_word": false,
596
+ "special": true
597
+ },
598
+ "128074": {
599
+ "content": "<|reserved_special_token_69|>",
600
+ "lstrip": false,
601
+ "normalized": false,
602
+ "rstrip": false,
603
+ "single_word": false,
604
+ "special": true
605
+ },
606
+ "128075": {
607
+ "content": "<|reserved_special_token_70|>",
608
+ "lstrip": false,
609
+ "normalized": false,
610
+ "rstrip": false,
611
+ "single_word": false,
612
+ "special": true
613
+ },
614
+ "128076": {
615
+ "content": "<|reserved_special_token_71|>",
616
+ "lstrip": false,
617
+ "normalized": false,
618
+ "rstrip": false,
619
+ "single_word": false,
620
+ "special": true
621
+ },
622
+ "128077": {
623
+ "content": "<|reserved_special_token_72|>",
624
+ "lstrip": false,
625
+ "normalized": false,
626
+ "rstrip": false,
627
+ "single_word": false,
628
+ "special": true
629
+ },
630
+ "128078": {
631
+ "content": "<|reserved_special_token_73|>",
632
+ "lstrip": false,
633
+ "normalized": false,
634
+ "rstrip": false,
635
+ "single_word": false,
636
+ "special": true
637
+ },
638
+ "128079": {
639
+ "content": "<|reserved_special_token_74|>",
640
+ "lstrip": false,
641
+ "normalized": false,
642
+ "rstrip": false,
643
+ "single_word": false,
644
+ "special": true
645
+ },
646
+ "128080": {
647
+ "content": "<|reserved_special_token_75|>",
648
+ "lstrip": false,
649
+ "normalized": false,
650
+ "rstrip": false,
651
+ "single_word": false,
652
+ "special": true
653
+ },
654
+ "128081": {
655
+ "content": "<|reserved_special_token_76|>",
656
+ "lstrip": false,
657
+ "normalized": false,
658
+ "rstrip": false,
659
+ "single_word": false,
660
+ "special": true
661
+ },
662
+ "128082": {
663
+ "content": "<|reserved_special_token_77|>",
664
+ "lstrip": false,
665
+ "normalized": false,
666
+ "rstrip": false,
667
+ "single_word": false,
668
+ "special": true
669
+ },
670
+ "128083": {
671
+ "content": "<|reserved_special_token_78|>",
672
+ "lstrip": false,
673
+ "normalized": false,
674
+ "rstrip": false,
675
+ "single_word": false,
676
+ "special": true
677
+ },
678
+ "128084": {
679
+ "content": "<|reserved_special_token_79|>",
680
+ "lstrip": false,
681
+ "normalized": false,
682
+ "rstrip": false,
683
+ "single_word": false,
684
+ "special": true
685
+ },
686
+ "128085": {
687
+ "content": "<|reserved_special_token_80|>",
688
+ "lstrip": false,
689
+ "normalized": false,
690
+ "rstrip": false,
691
+ "single_word": false,
692
+ "special": true
693
+ },
694
+ "128086": {
695
+ "content": "<|reserved_special_token_81|>",
696
+ "lstrip": false,
697
+ "normalized": false,
698
+ "rstrip": false,
699
+ "single_word": false,
700
+ "special": true
701
+ },
702
+ "128087": {
703
+ "content": "<|reserved_special_token_82|>",
704
+ "lstrip": false,
705
+ "normalized": false,
706
+ "rstrip": false,
707
+ "single_word": false,
708
+ "special": true
709
+ },
710
+ "128088": {
711
+ "content": "<|reserved_special_token_83|>",
712
+ "lstrip": false,
713
+ "normalized": false,
714
+ "rstrip": false,
715
+ "single_word": false,
716
+ "special": true
717
+ },
718
+ "128089": {
719
+ "content": "<|reserved_special_token_84|>",
720
+ "lstrip": false,
721
+ "normalized": false,
722
+ "rstrip": false,
723
+ "single_word": false,
724
+ "special": true
725
+ },
726
+ "128090": {
727
+ "content": "<|reserved_special_token_85|>",
728
+ "lstrip": false,
729
+ "normalized": false,
730
+ "rstrip": false,
731
+ "single_word": false,
732
+ "special": true
733
+ },
734
+ "128091": {
735
+ "content": "<|reserved_special_token_86|>",
736
+ "lstrip": false,
737
+ "normalized": false,
738
+ "rstrip": false,
739
+ "single_word": false,
740
+ "special": true
741
+ },
742
+ "128092": {
743
+ "content": "<|reserved_special_token_87|>",
744
+ "lstrip": false,
745
+ "normalized": false,
746
+ "rstrip": false,
747
+ "single_word": false,
748
+ "special": true
749
+ },
750
+ "128093": {
751
+ "content": "<|reserved_special_token_88|>",
752
+ "lstrip": false,
753
+ "normalized": false,
754
+ "rstrip": false,
755
+ "single_word": false,
756
+ "special": true
757
+ },
758
+ "128094": {
759
+ "content": "<|reserved_special_token_89|>",
760
+ "lstrip": false,
761
+ "normalized": false,
762
+ "rstrip": false,
763
+ "single_word": false,
764
+ "special": true
765
+ },
766
+ "128095": {
767
+ "content": "<|reserved_special_token_90|>",
768
+ "lstrip": false,
769
+ "normalized": false,
770
+ "rstrip": false,
771
+ "single_word": false,
772
+ "special": true
773
+ },
774
+ "128096": {
775
+ "content": "<|reserved_special_token_91|>",
776
+ "lstrip": false,
777
+ "normalized": false,
778
+ "rstrip": false,
779
+ "single_word": false,
780
+ "special": true
781
+ },
782
+ "128097": {
783
+ "content": "<|reserved_special_token_92|>",
784
+ "lstrip": false,
785
+ "normalized": false,
786
+ "rstrip": false,
787
+ "single_word": false,
788
+ "special": true
789
+ },
790
+ "128098": {
791
+ "content": "<|reserved_special_token_93|>",
792
+ "lstrip": false,
793
+ "normalized": false,
794
+ "rstrip": false,
795
+ "single_word": false,
796
+ "special": true
797
+ },
798
+ "128099": {
799
+ "content": "<|reserved_special_token_94|>",
800
+ "lstrip": false,
801
+ "normalized": false,
802
+ "rstrip": false,
803
+ "single_word": false,
804
+ "special": true
805
+ },
806
+ "128100": {
807
+ "content": "<|reserved_special_token_95|>",
808
+ "lstrip": false,
809
+ "normalized": false,
810
+ "rstrip": false,
811
+ "single_word": false,
812
+ "special": true
813
+ },
814
+ "128101": {
815
+ "content": "<|reserved_special_token_96|>",
816
+ "lstrip": false,
817
+ "normalized": false,
818
+ "rstrip": false,
819
+ "single_word": false,
820
+ "special": true
821
+ },
822
+ "128102": {
823
+ "content": "<|reserved_special_token_97|>",
824
+ "lstrip": false,
825
+ "normalized": false,
826
+ "rstrip": false,
827
+ "single_word": false,
828
+ "special": true
829
+ },
830
+ "128103": {
831
+ "content": "<|reserved_special_token_98|>",
832
+ "lstrip": false,
833
+ "normalized": false,
834
+ "rstrip": false,
835
+ "single_word": false,
836
+ "special": true
837
+ },
838
+ "128104": {
839
+ "content": "<|reserved_special_token_99|>",
840
+ "lstrip": false,
841
+ "normalized": false,
842
+ "rstrip": false,
843
+ "single_word": false,
844
+ "special": true
845
+ },
846
+ "128105": {
847
+ "content": "<|reserved_special_token_100|>",
848
+ "lstrip": false,
849
+ "normalized": false,
850
+ "rstrip": false,
851
+ "single_word": false,
852
+ "special": true
853
+ },
854
+ "128106": {
855
+ "content": "<|reserved_special_token_101|>",
856
+ "lstrip": false,
857
+ "normalized": false,
858
+ "rstrip": false,
859
+ "single_word": false,
860
+ "special": true
861
+ },
862
+ "128107": {
863
+ "content": "<|reserved_special_token_102|>",
864
+ "lstrip": false,
865
+ "normalized": false,
866
+ "rstrip": false,
867
+ "single_word": false,
868
+ "special": true
869
+ },
870
+ "128108": {
871
+ "content": "<|reserved_special_token_103|>",
872
+ "lstrip": false,
873
+ "normalized": false,
874
+ "rstrip": false,
875
+ "single_word": false,
876
+ "special": true
877
+ },
878
+ "128109": {
879
+ "content": "<|reserved_special_token_104|>",
880
+ "lstrip": false,
881
+ "normalized": false,
882
+ "rstrip": false,
883
+ "single_word": false,
884
+ "special": true
885
+ },
886
+ "128110": {
887
+ "content": "<|reserved_special_token_105|>",
888
+ "lstrip": false,
889
+ "normalized": false,
890
+ "rstrip": false,
891
+ "single_word": false,
892
+ "special": true
893
+ },
894
+ "128111": {
895
+ "content": "<|reserved_special_token_106|>",
896
+ "lstrip": false,
897
+ "normalized": false,
898
+ "rstrip": false,
899
+ "single_word": false,
900
+ "special": true
901
+ },
902
+ "128112": {
903
+ "content": "<|reserved_special_token_107|>",
904
+ "lstrip": false,
905
+ "normalized": false,
906
+ "rstrip": false,
907
+ "single_word": false,
908
+ "special": true
909
+ },
910
+ "128113": {
911
+ "content": "<|reserved_special_token_108|>",
912
+ "lstrip": false,
913
+ "normalized": false,
914
+ "rstrip": false,
915
+ "single_word": false,
916
+ "special": true
917
+ },
918
+ "128114": {
919
+ "content": "<|reserved_special_token_109|>",
920
+ "lstrip": false,
921
+ "normalized": false,
922
+ "rstrip": false,
923
+ "single_word": false,
924
+ "special": true
925
+ },
926
+ "128115": {
927
+ "content": "<|reserved_special_token_110|>",
928
+ "lstrip": false,
929
+ "normalized": false,
930
+ "rstrip": false,
931
+ "single_word": false,
932
+ "special": true
933
+ },
934
+ "128116": {
935
+ "content": "<|reserved_special_token_111|>",
936
+ "lstrip": false,
937
+ "normalized": false,
938
+ "rstrip": false,
939
+ "single_word": false,
940
+ "special": true
941
+ },
942
+ "128117": {
943
+ "content": "<|reserved_special_token_112|>",
944
+ "lstrip": false,
945
+ "normalized": false,
946
+ "rstrip": false,
947
+ "single_word": false,
948
+ "special": true
949
+ },
950
+ "128118": {
951
+ "content": "<|reserved_special_token_113|>",
952
+ "lstrip": false,
953
+ "normalized": false,
954
+ "rstrip": false,
955
+ "single_word": false,
956
+ "special": true
957
+ },
958
+ "128119": {
959
+ "content": "<|reserved_special_token_114|>",
960
+ "lstrip": false,
961
+ "normalized": false,
962
+ "rstrip": false,
963
+ "single_word": false,
964
+ "special": true
965
+ },
966
+ "128120": {
967
+ "content": "<|reserved_special_token_115|>",
968
+ "lstrip": false,
969
+ "normalized": false,
970
+ "rstrip": false,
971
+ "single_word": false,
972
+ "special": true
973
+ },
974
+ "128121": {
975
+ "content": "<|reserved_special_token_116|>",
976
+ "lstrip": false,
977
+ "normalized": false,
978
+ "rstrip": false,
979
+ "single_word": false,
980
+ "special": true
981
+ },
982
+ "128122": {
983
+ "content": "<|reserved_special_token_117|>",
984
+ "lstrip": false,
985
+ "normalized": false,
986
+ "rstrip": false,
987
+ "single_word": false,
988
+ "special": true
989
+ },
990
+ "128123": {
991
+ "content": "<|reserved_special_token_118|>",
992
+ "lstrip": false,
993
+ "normalized": false,
994
+ "rstrip": false,
995
+ "single_word": false,
996
+ "special": true
997
+ },
998
+ "128124": {
999
+ "content": "<|reserved_special_token_119|>",
1000
+ "lstrip": false,
1001
+ "normalized": false,
1002
+ "rstrip": false,
1003
+ "single_word": false,
1004
+ "special": true
1005
+ },
1006
+ "128125": {
1007
+ "content": "<|reserved_special_token_120|>",
1008
+ "lstrip": false,
1009
+ "normalized": false,
1010
+ "rstrip": false,
1011
+ "single_word": false,
1012
+ "special": true
1013
+ },
1014
+ "128126": {
1015
+ "content": "<|reserved_special_token_121|>",
1016
+ "lstrip": false,
1017
+ "normalized": false,
1018
+ "rstrip": false,
1019
+ "single_word": false,
1020
+ "special": true
1021
+ },
1022
+ "128127": {
1023
+ "content": "<|reserved_special_token_122|>",
1024
+ "lstrip": false,
1025
+ "normalized": false,
1026
+ "rstrip": false,
1027
+ "single_word": false,
1028
+ "special": true
1029
+ },
1030
+ "128128": {
1031
+ "content": "<|reserved_special_token_123|>",
1032
+ "lstrip": false,
1033
+ "normalized": false,
1034
+ "rstrip": false,
1035
+ "single_word": false,
1036
+ "special": true
1037
+ },
1038
+ "128129": {
1039
+ "content": "<|reserved_special_token_124|>",
1040
+ "lstrip": false,
1041
+ "normalized": false,
1042
+ "rstrip": false,
1043
+ "single_word": false,
1044
+ "special": true
1045
+ },
1046
+ "128130": {
1047
+ "content": "<|reserved_special_token_125|>",
1048
+ "lstrip": false,
1049
+ "normalized": false,
1050
+ "rstrip": false,
1051
+ "single_word": false,
1052
+ "special": true
1053
+ },
1054
+ "128131": {
1055
+ "content": "<|reserved_special_token_126|>",
1056
+ "lstrip": false,
1057
+ "normalized": false,
1058
+ "rstrip": false,
1059
+ "single_word": false,
1060
+ "special": true
1061
+ },
1062
+ "128132": {
1063
+ "content": "<|reserved_special_token_127|>",
1064
+ "lstrip": false,
1065
+ "normalized": false,
1066
+ "rstrip": false,
1067
+ "single_word": false,
1068
+ "special": true
1069
+ },
1070
+ "128133": {
1071
+ "content": "<|reserved_special_token_128|>",
1072
+ "lstrip": false,
1073
+ "normalized": false,
1074
+ "rstrip": false,
1075
+ "single_word": false,
1076
+ "special": true
1077
+ },
1078
+ "128134": {
1079
+ "content": "<|reserved_special_token_129|>",
1080
+ "lstrip": false,
1081
+ "normalized": false,
1082
+ "rstrip": false,
1083
+ "single_word": false,
1084
+ "special": true
1085
+ },
1086
+ "128135": {
1087
+ "content": "<|reserved_special_token_130|>",
1088
+ "lstrip": false,
1089
+ "normalized": false,
1090
+ "rstrip": false,
1091
+ "single_word": false,
1092
+ "special": true
1093
+ },
1094
+ "128136": {
1095
+ "content": "<|reserved_special_token_131|>",
1096
+ "lstrip": false,
1097
+ "normalized": false,
1098
+ "rstrip": false,
1099
+ "single_word": false,
1100
+ "special": true
1101
+ },
1102
+ "128137": {
1103
+ "content": "<|reserved_special_token_132|>",
1104
+ "lstrip": false,
1105
+ "normalized": false,
1106
+ "rstrip": false,
1107
+ "single_word": false,
1108
+ "special": true
1109
+ },
1110
+ "128138": {
1111
+ "content": "<|reserved_special_token_133|>",
1112
+ "lstrip": false,
1113
+ "normalized": false,
1114
+ "rstrip": false,
1115
+ "single_word": false,
1116
+ "special": true
1117
+ },
1118
+ "128139": {
1119
+ "content": "<|reserved_special_token_134|>",
1120
+ "lstrip": false,
1121
+ "normalized": false,
1122
+ "rstrip": false,
1123
+ "single_word": false,
1124
+ "special": true
1125
+ },
1126
+ "128140": {
1127
+ "content": "<|reserved_special_token_135|>",
1128
+ "lstrip": false,
1129
+ "normalized": false,
1130
+ "rstrip": false,
1131
+ "single_word": false,
1132
+ "special": true
1133
+ },
1134
+ "128141": {
1135
+ "content": "<|reserved_special_token_136|>",
1136
+ "lstrip": false,
1137
+ "normalized": false,
1138
+ "rstrip": false,
1139
+ "single_word": false,
1140
+ "special": true
1141
+ },
1142
+ "128142": {
1143
+ "content": "<|reserved_special_token_137|>",
1144
+ "lstrip": false,
1145
+ "normalized": false,
1146
+ "rstrip": false,
1147
+ "single_word": false,
1148
+ "special": true
1149
+ },
1150
+ "128143": {
1151
+ "content": "<|reserved_special_token_138|>",
1152
+ "lstrip": false,
1153
+ "normalized": false,
1154
+ "rstrip": false,
1155
+ "single_word": false,
1156
+ "special": true
1157
+ },
1158
+ "128144": {
1159
+ "content": "<|reserved_special_token_139|>",
1160
+ "lstrip": false,
1161
+ "normalized": false,
1162
+ "rstrip": false,
1163
+ "single_word": false,
1164
+ "special": true
1165
+ },
1166
+ "128145": {
1167
+ "content": "<|reserved_special_token_140|>",
1168
+ "lstrip": false,
1169
+ "normalized": false,
1170
+ "rstrip": false,
1171
+ "single_word": false,
1172
+ "special": true
1173
+ },
1174
+ "128146": {
1175
+ "content": "<|reserved_special_token_141|>",
1176
+ "lstrip": false,
1177
+ "normalized": false,
1178
+ "rstrip": false,
1179
+ "single_word": false,
1180
+ "special": true
1181
+ },
1182
+ "128147": {
1183
+ "content": "<|reserved_special_token_142|>",
1184
+ "lstrip": false,
1185
+ "normalized": false,
1186
+ "rstrip": false,
1187
+ "single_word": false,
1188
+ "special": true
1189
+ },
1190
+ "128148": {
1191
+ "content": "<|reserved_special_token_143|>",
1192
+ "lstrip": false,
1193
+ "normalized": false,
1194
+ "rstrip": false,
1195
+ "single_word": false,
1196
+ "special": true
1197
+ },
1198
+ "128149": {
1199
+ "content": "<|reserved_special_token_144|>",
1200
+ "lstrip": false,
1201
+ "normalized": false,
1202
+ "rstrip": false,
1203
+ "single_word": false,
1204
+ "special": true
1205
+ },
1206
+ "128150": {
1207
+ "content": "<|reserved_special_token_145|>",
1208
+ "lstrip": false,
1209
+ "normalized": false,
1210
+ "rstrip": false,
1211
+ "single_word": false,
1212
+ "special": true
1213
+ },
1214
+ "128151": {
1215
+ "content": "<|reserved_special_token_146|>",
1216
+ "lstrip": false,
1217
+ "normalized": false,
1218
+ "rstrip": false,
1219
+ "single_word": false,
1220
+ "special": true
1221
+ },
1222
+ "128152": {
1223
+ "content": "<|reserved_special_token_147|>",
1224
+ "lstrip": false,
1225
+ "normalized": false,
1226
+ "rstrip": false,
1227
+ "single_word": false,
1228
+ "special": true
1229
+ },
1230
+ "128153": {
1231
+ "content": "<|reserved_special_token_148|>",
1232
+ "lstrip": false,
1233
+ "normalized": false,
1234
+ "rstrip": false,
1235
+ "single_word": false,
1236
+ "special": true
1237
+ },
1238
+ "128154": {
1239
+ "content": "<|reserved_special_token_149|>",
1240
+ "lstrip": false,
1241
+ "normalized": false,
1242
+ "rstrip": false,
1243
+ "single_word": false,
1244
+ "special": true
1245
+ },
1246
+ "128155": {
1247
+ "content": "<|reserved_special_token_150|>",
1248
+ "lstrip": false,
1249
+ "normalized": false,
1250
+ "rstrip": false,
1251
+ "single_word": false,
1252
+ "special": true
1253
+ },
1254
+ "128156": {
1255
+ "content": "<|reserved_special_token_151|>",
1256
+ "lstrip": false,
1257
+ "normalized": false,
1258
+ "rstrip": false,
1259
+ "single_word": false,
1260
+ "special": true
1261
+ },
1262
+ "128157": {
1263
+ "content": "<|reserved_special_token_152|>",
1264
+ "lstrip": false,
1265
+ "normalized": false,
1266
+ "rstrip": false,
1267
+ "single_word": false,
1268
+ "special": true
1269
+ },
1270
+ "128158": {
1271
+ "content": "<|reserved_special_token_153|>",
1272
+ "lstrip": false,
1273
+ "normalized": false,
1274
+ "rstrip": false,
1275
+ "single_word": false,
1276
+ "special": true
1277
+ },
1278
+ "128159": {
1279
+ "content": "<|reserved_special_token_154|>",
1280
+ "lstrip": false,
1281
+ "normalized": false,
1282
+ "rstrip": false,
1283
+ "single_word": false,
1284
+ "special": true
1285
+ },
1286
+ "128160": {
1287
+ "content": "<|reserved_special_token_155|>",
1288
+ "lstrip": false,
1289
+ "normalized": false,
1290
+ "rstrip": false,
1291
+ "single_word": false,
1292
+ "special": true
1293
+ },
1294
+ "128161": {
1295
+ "content": "<|reserved_special_token_156|>",
1296
+ "lstrip": false,
1297
+ "normalized": false,
1298
+ "rstrip": false,
1299
+ "single_word": false,
1300
+ "special": true
1301
+ },
1302
+ "128162": {
1303
+ "content": "<|reserved_special_token_157|>",
1304
+ "lstrip": false,
1305
+ "normalized": false,
1306
+ "rstrip": false,
1307
+ "single_word": false,
1308
+ "special": true
1309
+ },
1310
+ "128163": {
1311
+ "content": "<|reserved_special_token_158|>",
1312
+ "lstrip": false,
1313
+ "normalized": false,
1314
+ "rstrip": false,
1315
+ "single_word": false,
1316
+ "special": true
1317
+ },
1318
+ "128164": {
1319
+ "content": "<|reserved_special_token_159|>",
1320
+ "lstrip": false,
1321
+ "normalized": false,
1322
+ "rstrip": false,
1323
+ "single_word": false,
1324
+ "special": true
1325
+ },
1326
+ "128165": {
1327
+ "content": "<|reserved_special_token_160|>",
1328
+ "lstrip": false,
1329
+ "normalized": false,
1330
+ "rstrip": false,
1331
+ "single_word": false,
1332
+ "special": true
1333
+ },
1334
+ "128166": {
1335
+ "content": "<|reserved_special_token_161|>",
1336
+ "lstrip": false,
1337
+ "normalized": false,
1338
+ "rstrip": false,
1339
+ "single_word": false,
1340
+ "special": true
1341
+ },
1342
+ "128167": {
1343
+ "content": "<|reserved_special_token_162|>",
1344
+ "lstrip": false,
1345
+ "normalized": false,
1346
+ "rstrip": false,
1347
+ "single_word": false,
1348
+ "special": true
1349
+ },
1350
+ "128168": {
1351
+ "content": "<|reserved_special_token_163|>",
1352
+ "lstrip": false,
1353
+ "normalized": false,
1354
+ "rstrip": false,
1355
+ "single_word": false,
1356
+ "special": true
1357
+ },
1358
+ "128169": {
1359
+ "content": "<|reserved_special_token_164|>",
1360
+ "lstrip": false,
1361
+ "normalized": false,
1362
+ "rstrip": false,
1363
+ "single_word": false,
1364
+ "special": true
1365
+ },
1366
+ "128170": {
1367
+ "content": "<|reserved_special_token_165|>",
1368
+ "lstrip": false,
1369
+ "normalized": false,
1370
+ "rstrip": false,
1371
+ "single_word": false,
1372
+ "special": true
1373
+ },
1374
+ "128171": {
1375
+ "content": "<|reserved_special_token_166|>",
1376
+ "lstrip": false,
1377
+ "normalized": false,
1378
+ "rstrip": false,
1379
+ "single_word": false,
1380
+ "special": true
1381
+ },
1382
+ "128172": {
1383
+ "content": "<|reserved_special_token_167|>",
1384
+ "lstrip": false,
1385
+ "normalized": false,
1386
+ "rstrip": false,
1387
+ "single_word": false,
1388
+ "special": true
1389
+ },
1390
+ "128173": {
1391
+ "content": "<|reserved_special_token_168|>",
1392
+ "lstrip": false,
1393
+ "normalized": false,
1394
+ "rstrip": false,
1395
+ "single_word": false,
1396
+ "special": true
1397
+ },
1398
+ "128174": {
1399
+ "content": "<|reserved_special_token_169|>",
1400
+ "lstrip": false,
1401
+ "normalized": false,
1402
+ "rstrip": false,
1403
+ "single_word": false,
1404
+ "special": true
1405
+ },
1406
+ "128175": {
1407
+ "content": "<|reserved_special_token_170|>",
1408
+ "lstrip": false,
1409
+ "normalized": false,
1410
+ "rstrip": false,
1411
+ "single_word": false,
1412
+ "special": true
1413
+ },
1414
+ "128176": {
1415
+ "content": "<|reserved_special_token_171|>",
1416
+ "lstrip": false,
1417
+ "normalized": false,
1418
+ "rstrip": false,
1419
+ "single_word": false,
1420
+ "special": true
1421
+ },
1422
+ "128177": {
1423
+ "content": "<|reserved_special_token_172|>",
1424
+ "lstrip": false,
1425
+ "normalized": false,
1426
+ "rstrip": false,
1427
+ "single_word": false,
1428
+ "special": true
1429
+ },
1430
+ "128178": {
1431
+ "content": "<|reserved_special_token_173|>",
1432
+ "lstrip": false,
1433
+ "normalized": false,
1434
+ "rstrip": false,
1435
+ "single_word": false,
1436
+ "special": true
1437
+ },
1438
+ "128179": {
1439
+ "content": "<|reserved_special_token_174|>",
1440
+ "lstrip": false,
1441
+ "normalized": false,
1442
+ "rstrip": false,
1443
+ "single_word": false,
1444
+ "special": true
1445
+ },
1446
+ "128180": {
1447
+ "content": "<|reserved_special_token_175|>",
1448
+ "lstrip": false,
1449
+ "normalized": false,
1450
+ "rstrip": false,
1451
+ "single_word": false,
1452
+ "special": true
1453
+ },
1454
+ "128181": {
1455
+ "content": "<|reserved_special_token_176|>",
1456
+ "lstrip": false,
1457
+ "normalized": false,
1458
+ "rstrip": false,
1459
+ "single_word": false,
1460
+ "special": true
1461
+ },
1462
+ "128182": {
1463
+ "content": "<|reserved_special_token_177|>",
1464
+ "lstrip": false,
1465
+ "normalized": false,
1466
+ "rstrip": false,
1467
+ "single_word": false,
1468
+ "special": true
1469
+ },
1470
+ "128183": {
1471
+ "content": "<|reserved_special_token_178|>",
1472
+ "lstrip": false,
1473
+ "normalized": false,
1474
+ "rstrip": false,
1475
+ "single_word": false,
1476
+ "special": true
1477
+ },
1478
+ "128184": {
1479
+ "content": "<|reserved_special_token_179|>",
1480
+ "lstrip": false,
1481
+ "normalized": false,
1482
+ "rstrip": false,
1483
+ "single_word": false,
1484
+ "special": true
1485
+ },
1486
+ "128185": {
1487
+ "content": "<|reserved_special_token_180|>",
1488
+ "lstrip": false,
1489
+ "normalized": false,
1490
+ "rstrip": false,
1491
+ "single_word": false,
1492
+ "special": true
1493
+ },
1494
+ "128186": {
1495
+ "content": "<|reserved_special_token_181|>",
1496
+ "lstrip": false,
1497
+ "normalized": false,
1498
+ "rstrip": false,
1499
+ "single_word": false,
1500
+ "special": true
1501
+ },
1502
+ "128187": {
1503
+ "content": "<|reserved_special_token_182|>",
1504
+ "lstrip": false,
1505
+ "normalized": false,
1506
+ "rstrip": false,
1507
+ "single_word": false,
1508
+ "special": true
1509
+ },
1510
+ "128188": {
1511
+ "content": "<|reserved_special_token_183|>",
1512
+ "lstrip": false,
1513
+ "normalized": false,
1514
+ "rstrip": false,
1515
+ "single_word": false,
1516
+ "special": true
1517
+ },
1518
+ "128189": {
1519
+ "content": "<|reserved_special_token_184|>",
1520
+ "lstrip": false,
1521
+ "normalized": false,
1522
+ "rstrip": false,
1523
+ "single_word": false,
1524
+ "special": true
1525
+ },
1526
+ "128190": {
1527
+ "content": "<|reserved_special_token_185|>",
1528
+ "lstrip": false,
1529
+ "normalized": false,
1530
+ "rstrip": false,
1531
+ "single_word": false,
1532
+ "special": true
1533
+ },
1534
+ "128191": {
1535
+ "content": "<|reserved_special_token_186|>",
1536
+ "lstrip": false,
1537
+ "normalized": false,
1538
+ "rstrip": false,
1539
+ "single_word": false,
1540
+ "special": true
1541
+ },
1542
+ "128192": {
1543
+ "content": "<|reserved_special_token_187|>",
1544
+ "lstrip": false,
1545
+ "normalized": false,
1546
+ "rstrip": false,
1547
+ "single_word": false,
1548
+ "special": true
1549
+ },
1550
+ "128193": {
1551
+ "content": "<|reserved_special_token_188|>",
1552
+ "lstrip": false,
1553
+ "normalized": false,
1554
+ "rstrip": false,
1555
+ "single_word": false,
1556
+ "special": true
1557
+ },
1558
+ "128194": {
1559
+ "content": "<|reserved_special_token_189|>",
1560
+ "lstrip": false,
1561
+ "normalized": false,
1562
+ "rstrip": false,
1563
+ "single_word": false,
1564
+ "special": true
1565
+ },
1566
+ "128195": {
1567
+ "content": "<|reserved_special_token_190|>",
1568
+ "lstrip": false,
1569
+ "normalized": false,
1570
+ "rstrip": false,
1571
+ "single_word": false,
1572
+ "special": true
1573
+ },
1574
+ "128196": {
1575
+ "content": "<|reserved_special_token_191|>",
1576
+ "lstrip": false,
1577
+ "normalized": false,
1578
+ "rstrip": false,
1579
+ "single_word": false,
1580
+ "special": true
1581
+ },
1582
+ "128197": {
1583
+ "content": "<|reserved_special_token_192|>",
1584
+ "lstrip": false,
1585
+ "normalized": false,
1586
+ "rstrip": false,
1587
+ "single_word": false,
1588
+ "special": true
1589
+ },
1590
+ "128198": {
1591
+ "content": "<|reserved_special_token_193|>",
1592
+ "lstrip": false,
1593
+ "normalized": false,
1594
+ "rstrip": false,
1595
+ "single_word": false,
1596
+ "special": true
1597
+ },
1598
+ "128199": {
1599
+ "content": "<|reserved_special_token_194|>",
1600
+ "lstrip": false,
1601
+ "normalized": false,
1602
+ "rstrip": false,
1603
+ "single_word": false,
1604
+ "special": true
1605
+ },
1606
+ "128200": {
1607
+ "content": "<|reserved_special_token_195|>",
1608
+ "lstrip": false,
1609
+ "normalized": false,
1610
+ "rstrip": false,
1611
+ "single_word": false,
1612
+ "special": true
1613
+ },
1614
+ "128201": {
1615
+ "content": "<|reserved_special_token_196|>",
1616
+ "lstrip": false,
1617
+ "normalized": false,
1618
+ "rstrip": false,
1619
+ "single_word": false,
1620
+ "special": true
1621
+ },
1622
+ "128202": {
1623
+ "content": "<|reserved_special_token_197|>",
1624
+ "lstrip": false,
1625
+ "normalized": false,
1626
+ "rstrip": false,
1627
+ "single_word": false,
1628
+ "special": true
1629
+ },
1630
+ "128203": {
1631
+ "content": "<|reserved_special_token_198|>",
1632
+ "lstrip": false,
1633
+ "normalized": false,
1634
+ "rstrip": false,
1635
+ "single_word": false,
1636
+ "special": true
1637
+ },
1638
+ "128204": {
1639
+ "content": "<|reserved_special_token_199|>",
1640
+ "lstrip": false,
1641
+ "normalized": false,
1642
+ "rstrip": false,
1643
+ "single_word": false,
1644
+ "special": true
1645
+ },
1646
+ "128205": {
1647
+ "content": "<|reserved_special_token_200|>",
1648
+ "lstrip": false,
1649
+ "normalized": false,
1650
+ "rstrip": false,
1651
+ "single_word": false,
1652
+ "special": true
1653
+ },
1654
+ "128206": {
1655
+ "content": "<|reserved_special_token_201|>",
1656
+ "lstrip": false,
1657
+ "normalized": false,
1658
+ "rstrip": false,
1659
+ "single_word": false,
1660
+ "special": true
1661
+ },
1662
+ "128207": {
1663
+ "content": "<|reserved_special_token_202|>",
1664
+ "lstrip": false,
1665
+ "normalized": false,
1666
+ "rstrip": false,
1667
+ "single_word": false,
1668
+ "special": true
1669
+ },
1670
+ "128208": {
1671
+ "content": "<|reserved_special_token_203|>",
1672
+ "lstrip": false,
1673
+ "normalized": false,
1674
+ "rstrip": false,
1675
+ "single_word": false,
1676
+ "special": true
1677
+ },
1678
+ "128209": {
1679
+ "content": "<|reserved_special_token_204|>",
1680
+ "lstrip": false,
1681
+ "normalized": false,
1682
+ "rstrip": false,
1683
+ "single_word": false,
1684
+ "special": true
1685
+ },
1686
+ "128210": {
1687
+ "content": "<|reserved_special_token_205|>",
1688
+ "lstrip": false,
1689
+ "normalized": false,
1690
+ "rstrip": false,
1691
+ "single_word": false,
1692
+ "special": true
1693
+ },
1694
+ "128211": {
1695
+ "content": "<|reserved_special_token_206|>",
1696
+ "lstrip": false,
1697
+ "normalized": false,
1698
+ "rstrip": false,
1699
+ "single_word": false,
1700
+ "special": true
1701
+ },
1702
+ "128212": {
1703
+ "content": "<|reserved_special_token_207|>",
1704
+ "lstrip": false,
1705
+ "normalized": false,
1706
+ "rstrip": false,
1707
+ "single_word": false,
1708
+ "special": true
1709
+ },
1710
+ "128213": {
1711
+ "content": "<|reserved_special_token_208|>",
1712
+ "lstrip": false,
1713
+ "normalized": false,
1714
+ "rstrip": false,
1715
+ "single_word": false,
1716
+ "special": true
1717
+ },
1718
+ "128214": {
1719
+ "content": "<|reserved_special_token_209|>",
1720
+ "lstrip": false,
1721
+ "normalized": false,
1722
+ "rstrip": false,
1723
+ "single_word": false,
1724
+ "special": true
1725
+ },
1726
+ "128215": {
1727
+ "content": "<|reserved_special_token_210|>",
1728
+ "lstrip": false,
1729
+ "normalized": false,
1730
+ "rstrip": false,
1731
+ "single_word": false,
1732
+ "special": true
1733
+ },
1734
+ "128216": {
1735
+ "content": "<|reserved_special_token_211|>",
1736
+ "lstrip": false,
1737
+ "normalized": false,
1738
+ "rstrip": false,
1739
+ "single_word": false,
1740
+ "special": true
1741
+ },
1742
+ "128217": {
1743
+ "content": "<|reserved_special_token_212|>",
1744
+ "lstrip": false,
1745
+ "normalized": false,
1746
+ "rstrip": false,
1747
+ "single_word": false,
1748
+ "special": true
1749
+ },
1750
+ "128218": {
1751
+ "content": "<|reserved_special_token_213|>",
1752
+ "lstrip": false,
1753
+ "normalized": false,
1754
+ "rstrip": false,
1755
+ "single_word": false,
1756
+ "special": true
1757
+ },
1758
+ "128219": {
1759
+ "content": "<|reserved_special_token_214|>",
1760
+ "lstrip": false,
1761
+ "normalized": false,
1762
+ "rstrip": false,
1763
+ "single_word": false,
1764
+ "special": true
1765
+ },
1766
+ "128220": {
1767
+ "content": "<|reserved_special_token_215|>",
1768
+ "lstrip": false,
1769
+ "normalized": false,
1770
+ "rstrip": false,
1771
+ "single_word": false,
1772
+ "special": true
1773
+ },
1774
+ "128221": {
1775
+ "content": "<|reserved_special_token_216|>",
1776
+ "lstrip": false,
1777
+ "normalized": false,
1778
+ "rstrip": false,
1779
+ "single_word": false,
1780
+ "special": true
1781
+ },
1782
+ "128222": {
1783
+ "content": "<|reserved_special_token_217|>",
1784
+ "lstrip": false,
1785
+ "normalized": false,
1786
+ "rstrip": false,
1787
+ "single_word": false,
1788
+ "special": true
1789
+ },
1790
+ "128223": {
1791
+ "content": "<|reserved_special_token_218|>",
1792
+ "lstrip": false,
1793
+ "normalized": false,
1794
+ "rstrip": false,
1795
+ "single_word": false,
1796
+ "special": true
1797
+ },
1798
+ "128224": {
1799
+ "content": "<|reserved_special_token_219|>",
1800
+ "lstrip": false,
1801
+ "normalized": false,
1802
+ "rstrip": false,
1803
+ "single_word": false,
1804
+ "special": true
1805
+ },
1806
+ "128225": {
1807
+ "content": "<|reserved_special_token_220|>",
1808
+ "lstrip": false,
1809
+ "normalized": false,
1810
+ "rstrip": false,
1811
+ "single_word": false,
1812
+ "special": true
1813
+ },
1814
+ "128226": {
1815
+ "content": "<|reserved_special_token_221|>",
1816
+ "lstrip": false,
1817
+ "normalized": false,
1818
+ "rstrip": false,
1819
+ "single_word": false,
1820
+ "special": true
1821
+ },
1822
+ "128227": {
1823
+ "content": "<|reserved_special_token_222|>",
1824
+ "lstrip": false,
1825
+ "normalized": false,
1826
+ "rstrip": false,
1827
+ "single_word": false,
1828
+ "special": true
1829
+ },
1830
+ "128228": {
1831
+ "content": "<|reserved_special_token_223|>",
1832
+ "lstrip": false,
1833
+ "normalized": false,
1834
+ "rstrip": false,
1835
+ "single_word": false,
1836
+ "special": true
1837
+ },
1838
+ "128229": {
1839
+ "content": "<|reserved_special_token_224|>",
1840
+ "lstrip": false,
1841
+ "normalized": false,
1842
+ "rstrip": false,
1843
+ "single_word": false,
1844
+ "special": true
1845
+ },
1846
+ "128230": {
1847
+ "content": "<|reserved_special_token_225|>",
1848
+ "lstrip": false,
1849
+ "normalized": false,
1850
+ "rstrip": false,
1851
+ "single_word": false,
1852
+ "special": true
1853
+ },
1854
+ "128231": {
1855
+ "content": "<|reserved_special_token_226|>",
1856
+ "lstrip": false,
1857
+ "normalized": false,
1858
+ "rstrip": false,
1859
+ "single_word": false,
1860
+ "special": true
1861
+ },
1862
+ "128232": {
1863
+ "content": "<|reserved_special_token_227|>",
1864
+ "lstrip": false,
1865
+ "normalized": false,
1866
+ "rstrip": false,
1867
+ "single_word": false,
1868
+ "special": true
1869
+ },
1870
+ "128233": {
1871
+ "content": "<|reserved_special_token_228|>",
1872
+ "lstrip": false,
1873
+ "normalized": false,
1874
+ "rstrip": false,
1875
+ "single_word": false,
1876
+ "special": true
1877
+ },
1878
+ "128234": {
1879
+ "content": "<|reserved_special_token_229|>",
1880
+ "lstrip": false,
1881
+ "normalized": false,
1882
+ "rstrip": false,
1883
+ "single_word": false,
1884
+ "special": true
1885
+ },
1886
+ "128235": {
1887
+ "content": "<|reserved_special_token_230|>",
1888
+ "lstrip": false,
1889
+ "normalized": false,
1890
+ "rstrip": false,
1891
+ "single_word": false,
1892
+ "special": true
1893
+ },
1894
+ "128236": {
1895
+ "content": "<|reserved_special_token_231|>",
1896
+ "lstrip": false,
1897
+ "normalized": false,
1898
+ "rstrip": false,
1899
+ "single_word": false,
1900
+ "special": true
1901
+ },
1902
+ "128237": {
1903
+ "content": "<|reserved_special_token_232|>",
1904
+ "lstrip": false,
1905
+ "normalized": false,
1906
+ "rstrip": false,
1907
+ "single_word": false,
1908
+ "special": true
1909
+ },
1910
+ "128238": {
1911
+ "content": "<|reserved_special_token_233|>",
1912
+ "lstrip": false,
1913
+ "normalized": false,
1914
+ "rstrip": false,
1915
+ "single_word": false,
1916
+ "special": true
1917
+ },
1918
+ "128239": {
1919
+ "content": "<|reserved_special_token_234|>",
1920
+ "lstrip": false,
1921
+ "normalized": false,
1922
+ "rstrip": false,
1923
+ "single_word": false,
1924
+ "special": true
1925
+ },
1926
+ "128240": {
1927
+ "content": "<|reserved_special_token_235|>",
1928
+ "lstrip": false,
1929
+ "normalized": false,
1930
+ "rstrip": false,
1931
+ "single_word": false,
1932
+ "special": true
1933
+ },
1934
+ "128241": {
1935
+ "content": "<|reserved_special_token_236|>",
1936
+ "lstrip": false,
1937
+ "normalized": false,
1938
+ "rstrip": false,
1939
+ "single_word": false,
1940
+ "special": true
1941
+ },
1942
+ "128242": {
1943
+ "content": "<|reserved_special_token_237|>",
1944
+ "lstrip": false,
1945
+ "normalized": false,
1946
+ "rstrip": false,
1947
+ "single_word": false,
1948
+ "special": true
1949
+ },
1950
+ "128243": {
1951
+ "content": "<|reserved_special_token_238|>",
1952
+ "lstrip": false,
1953
+ "normalized": false,
1954
+ "rstrip": false,
1955
+ "single_word": false,
1956
+ "special": true
1957
+ },
1958
+ "128244": {
1959
+ "content": "<|reserved_special_token_239|>",
1960
+ "lstrip": false,
1961
+ "normalized": false,
1962
+ "rstrip": false,
1963
+ "single_word": false,
1964
+ "special": true
1965
+ },
1966
+ "128245": {
1967
+ "content": "<|reserved_special_token_240|>",
1968
+ "lstrip": false,
1969
+ "normalized": false,
1970
+ "rstrip": false,
1971
+ "single_word": false,
1972
+ "special": true
1973
+ },
1974
+ "128246": {
1975
+ "content": "<|reserved_special_token_241|>",
1976
+ "lstrip": false,
1977
+ "normalized": false,
1978
+ "rstrip": false,
1979
+ "single_word": false,
1980
+ "special": true
1981
+ },
1982
+ "128247": {
1983
+ "content": "<|reserved_special_token_242|>",
1984
+ "lstrip": false,
1985
+ "normalized": false,
1986
+ "rstrip": false,
1987
+ "single_word": false,
1988
+ "special": true
1989
+ },
1990
+ "128248": {
1991
+ "content": "<|reserved_special_token_243|>",
1992
+ "lstrip": false,
1993
+ "normalized": false,
1994
+ "rstrip": false,
1995
+ "single_word": false,
1996
+ "special": true
1997
+ },
1998
+ "128249": {
1999
+ "content": "<|reserved_special_token_244|>",
2000
+ "lstrip": false,
2001
+ "normalized": false,
2002
+ "rstrip": false,
2003
+ "single_word": false,
2004
+ "special": true
2005
+ },
2006
+ "128250": {
2007
+ "content": "<|reserved_special_token_245|>",
2008
+ "lstrip": false,
2009
+ "normalized": false,
2010
+ "rstrip": false,
2011
+ "single_word": false,
2012
+ "special": true
2013
+ },
2014
+ "128251": {
2015
+ "content": "<|reserved_special_token_246|>",
2016
+ "lstrip": false,
2017
+ "normalized": false,
2018
+ "rstrip": false,
2019
+ "single_word": false,
2020
+ "special": true
2021
+ },
2022
+ "128252": {
2023
+ "content": "<|reserved_special_token_247|>",
2024
+ "lstrip": false,
2025
+ "normalized": false,
2026
+ "rstrip": false,
2027
+ "single_word": false,
2028
+ "special": true
2029
+ },
2030
+ "128253": {
2031
+ "content": "<|reserved_special_token_248|>",
2032
+ "lstrip": false,
2033
+ "normalized": false,
2034
+ "rstrip": false,
2035
+ "single_word": false,
2036
+ "special": true
2037
+ },
2038
+ "128254": {
2039
+ "content": "<|reserved_special_token_249|>",
2040
+ "lstrip": false,
2041
+ "normalized": false,
2042
+ "rstrip": false,
2043
+ "single_word": false,
2044
+ "special": true
2045
+ },
2046
+ "128255": {
2047
+ "content": "<|reserved_special_token_250|>",
2048
+ "lstrip": false,
2049
+ "normalized": false,
2050
+ "rstrip": false,
2051
+ "single_word": false,
2052
+ "special": true
2053
+ },
2054
+ "128256": {
2055
+ "content": "<unk>",
2056
+ "lstrip": false,
2057
+ "normalized": false,
2058
+ "rstrip": false,
2059
+ "single_word": false,
2060
+ "special": true
2061
+ },
2062
+ "128257": {
2063
+ "content": "<image>",
2064
+ "lstrip": false,
2065
+ "normalized": false,
2066
+ "rstrip": false,
2067
+ "single_word": false,
2068
+ "special": true
2069
+ },
2070
+ "128258": {
2071
+ "content": "<pad>",
2072
+ "lstrip": false,
2073
+ "normalized": false,
2074
+ "rstrip": false,
2075
+ "single_word": false,
2076
+ "special": true
2077
+ }
2078
+ },
2079
+ "bos_token": "<|begin_of_text|>",
2080
+ "chat_template": "{% set loop_messages = messages %}{% for message in loop_messages %}{% set content = '<|start_header_id|>' + message['role'] + '<|end_header_id|>\n\n'+ message['content'] | trim + '<|eot_id|>' %}{% if loop.index0 == 0 %}{% set content = bos_token + content %}{% endif %}{{ content }}{% endfor %}{{ '<|start_header_id|>assistant<|end_header_id|>\n\n' }}",
2081
+ "clean_up_tokenization_spaces": true,
2082
+ "eos_token": "<|end_of_text|>",
2083
+ "extra_special_tokens": {},
2084
+ "legacy": true,
2085
+ "model_input_names": [
2086
+ "input_ids",
2087
+ "attention_mask"
2088
+ ],
2089
+ "model_max_length": 1000000000000000019884624838656,
2090
+ "pad_token": "<pad>",
2091
+ "padding_side": "right",
2092
+ "processor_class": "LlavaProcessor",
2093
+ "tokenizer_class": "LlamaTokenizer",
2094
+ "unk_token": "<unk>",
2095
+ "use_default_system_prompt": false
2096
+ }
hf_download/hub/models--hunyuanvideo-community--HunyuanVideo/snapshots/e8c2aaa66fe3742a32c11a6766aecbf07c56e773/tokenizer_2/merges.txt ADDED
The diff for this file is too large to render. See raw diff
 
hf_download/hub/models--hunyuanvideo-community--HunyuanVideo/snapshots/e8c2aaa66fe3742a32c11a6766aecbf07c56e773/tokenizer_2/special_tokens_map.json ADDED
@@ -0,0 +1,30 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "bos_token": {
3
+ "content": "<|startoftext|>",
4
+ "lstrip": false,
5
+ "normalized": true,
6
+ "rstrip": false,
7
+ "single_word": false
8
+ },
9
+ "eos_token": {
10
+ "content": "<|endoftext|>",
11
+ "lstrip": false,
12
+ "normalized": false,
13
+ "rstrip": false,
14
+ "single_word": false
15
+ },
16
+ "pad_token": {
17
+ "content": "<|endoftext|>",
18
+ "lstrip": false,
19
+ "normalized": false,
20
+ "rstrip": false,
21
+ "single_word": false
22
+ },
23
+ "unk_token": {
24
+ "content": "<|endoftext|>",
25
+ "lstrip": false,
26
+ "normalized": false,
27
+ "rstrip": false,
28
+ "single_word": false
29
+ }
30
+ }
hf_download/hub/models--hunyuanvideo-community--HunyuanVideo/snapshots/e8c2aaa66fe3742a32c11a6766aecbf07c56e773/tokenizer_2/tokenizer_config.json ADDED
@@ -0,0 +1,31 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "add_prefix_space": false,
3
+ "added_tokens_decoder": {
4
+ "49406": {
5
+ "content": "<|startoftext|>",
6
+ "lstrip": false,
7
+ "normalized": true,
8
+ "rstrip": false,
9
+ "single_word": false,
10
+ "special": true
11
+ },
12
+ "49407": {
13
+ "content": "<|endoftext|>",
14
+ "lstrip": false,
15
+ "normalized": false,
16
+ "rstrip": false,
17
+ "single_word": false,
18
+ "special": true
19
+ }
20
+ },
21
+ "bos_token": "<|startoftext|>",
22
+ "clean_up_tokenization_spaces": false,
23
+ "do_lower_case": true,
24
+ "eos_token": "<|endoftext|>",
25
+ "errors": "replace",
26
+ "extra_special_tokens": {},
27
+ "model_max_length": 77,
28
+ "pad_token": "<|endoftext|>",
29
+ "tokenizer_class": "CLIPTokenizer",
30
+ "unk_token": "<|endoftext|>"
31
+ }
hf_download/hub/models--hunyuanvideo-community--HunyuanVideo/snapshots/e8c2aaa66fe3742a32c11a6766aecbf07c56e773/vae/config.json ADDED
@@ -0,0 +1,32 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "_class_name": "AutoencoderKLHunyuanVideo",
3
+ "_diffusers_version": "0.32.0.dev0",
4
+ "act_fn": "silu",
5
+ "block_out_channels": [
6
+ 128,
7
+ 256,
8
+ 512,
9
+ 512
10
+ ],
11
+ "down_block_types": [
12
+ "HunyuanVideoDownBlock3D",
13
+ "HunyuanVideoDownBlock3D",
14
+ "HunyuanVideoDownBlock3D",
15
+ "HunyuanVideoDownBlock3D"
16
+ ],
17
+ "in_channels": 3,
18
+ "latent_channels": 16,
19
+ "layers_per_block": 2,
20
+ "mid_block_add_attention": true,
21
+ "norm_num_groups": 32,
22
+ "out_channels": 3,
23
+ "scaling_factor": 0.476986,
24
+ "spatial_compression_ratio": 8,
25
+ "temporal_compression_ratio": 4,
26
+ "up_block_types": [
27
+ "HunyuanVideoUpBlock3D",
28
+ "HunyuanVideoUpBlock3D",
29
+ "HunyuanVideoUpBlock3D",
30
+ "HunyuanVideoUpBlock3D"
31
+ ]
32
+ }
hf_download/hub/models--lllyasviel--flux_redux_bfl/refs/main ADDED
@@ -0,0 +1 @@
 
 
1
+ 45b801affc54ff2af4e5daf1b282e0921901db87
kaggle.json ADDED
@@ -0,0 +1 @@
 
 
1
+ {"username":"kiranparthiban123","key":"51e8f0ad0a0c4ea1b633c66adb55b17e"}
locales/i18n.py ADDED
@@ -0,0 +1,126 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import json
import os.path

# Default language setting.
# NOTE: translation lookups are not usable until init() has been called —
# translateContext stays None until then.
lang = "ja"  # Explicitly default the language to Japanese ("ja")
translateContext = None  # set by init(): {locale: {key: translation}}
8
+
9
class I18nString(str):
    """A str subclass that resolves to its translation in the active language.

    Construction looks the key up in the module-level ``translateContext``
    (populated by ``init()``); ``__str__`` re-resolves on every call, so the
    rendered text follows the current ``lang`` setting.
    """

    def __new__(cls, value):
        # Look the key up for the current language, falling back to the key
        # itself when no translation exists.
        # NOTE(review): when the lookup misses, this returns ``value``/a plain
        # str rather than an I18nString instance, in which case __init__ is
        # skipped. Also raises AttributeError if called before init() —
        # confirm callers always initialize first.
        result = translateContext.get(lang, {}).get(value, value)
        return result

    def __init__(self, value):
        # Carry over any deferred concatenation operands when re-wrapping an
        # existing I18nString; otherwise start with empty operand lists.
        if isinstance(value, I18nString):
            self.add_values = value.add_values
            self.radd_values = value.radd_values
        else:
            self.add_values = []
            self.radd_values = []

    def __str__(self):
        # Resolve the translation at render time (keyed on this string's raw
        # value), then splice in any deferred left/right concatenations.
        result = translateContext.get(lang, {}).get(self, super().__str__())

        for v in self.radd_values:
            result = str(v) + result

        for v in self.add_values:
            result = result + str(v)

        # hotfix, remove unexpected single quotes
        while len(result) >= 2 and result.startswith("'") and result.endswith("'"):
            result = result[1:-1]

        return result

    def __add__(self, other):
        # NOTE(review): str(self) produces a plain str, so the isinstance
        # branch below appears unreachable and concatenation resolves
        # immediately — verify before relying on the add_values mechanism.
        v = str(self)
        if isinstance(v, I18nString):
            self.add_values.append(other)
            return self
        return v.__add__(other)

    def __radd__(self, other):
        # Mirror of __add__ for the ``other + self`` direction.
        v = str(self)
        if isinstance(v, I18nString):
            self.radd_values.append(other)
            return self
        return other.__add__(v)

    def __hash__(self) -> int:
        # Hash by the raw key, so dict lookups stay stable across languages.
        return super().__hash__()

    def format(self, *args, **kwargs) -> str:
        # Format the resolved translation rather than the raw key.
        v = str(self)
        if isinstance(v, I18nString):
            return super().format(*args, **kwargs)
        return v.format(*args, **kwargs)

    def unwrap(self):
        # Return the raw (untranslated) key text.
        return super().__str__()

    @staticmethod
    def unwrap_strings(obj):
        """Yield the raw key of *obj* plus those of its deferred operands."""
        if isinstance(obj, I18nString):
            yield obj.unwrap()
            for v in obj.add_values:
                yield from I18nString.unwrap_strings(v)
            for v in obj.radd_values:
                yield from I18nString.unwrap_strings(v)
            return
        yield obj
74
+
75
def translate(key: str):
    """Return an I18nString for *key*, lazily initializing translations.

    Args:
        key: lookup key for the translation tables.

    Returns:
        I18nString: translation resolved against the current language.
    """
    global translateContext

    # First use before explicit initialization: load tables for the
    # module default language automatically.
    if translateContext is None:
        init(lang)

    return I18nString(key)
+
92
+ def load_translations():
93
+ translations = {}
94
+ locales_dir = os.path.join(os.path.dirname(__file__), './')
95
+
96
+ for locale in ["en", "ja", "zh-tw", "ru"]:
97
+ json_file = os.path.join(locales_dir, f"{locale}.json")
98
+ if os.path.exists(json_file):
99
+ with open(json_file, 'r', encoding='utf-8') as f:
100
+ translations[locale] = json.load(f)
101
+ else:
102
+ print("Warning: Translation file {0} not found".format(json_file))
103
+ translations[locale] = {}
104
+
105
+ return translations
106
+
107
+ def init(locale="ja"):
108
+ """言語を初期化します。
109
+
110
+ Args:
111
+ locale: 使用する言語コード(例: 'ja', 'en', 'zh-tw')。
112
+ 未対応の言語の場合は自動的に'ja'が使用されます。
113
+ """
114
+ global lang
115
+ global translateContext
116
+
117
+ # 対応言語のリスト
118
+ supported_locales = ["ja", "en", "zh-tw", "ru"]
119
+
120
+ # 対応していない言語の場合はデフォルト言語(ja)を使用
121
+ if locale not in supported_locales:
122
+ print("Unsupported language: {0}. Falling back to 'ja'".format(locale))
123
+ locale = "ja"
124
+
125
+ lang = locale
126
+ translateContext = load_translations()
locales/ja.json ADDED
The diff for this file is too large to render. See raw diff
 
locales/ru.json ADDED
The diff for this file is too large to render. See raw diff
 
locales/zh-tw.json ADDED
The diff for this file is too large to render. See raw diff
 
lora_utils/__init__.py ADDED
@@ -0,0 +1,46 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
# FramePack-eichi LoRA Utilities
#
# Provides LoRA application, FP8 optimization, and LoRA format
# detection/conversion helpers.

from .lora_utils import (
    merge_lora_to_state_dict,
    load_safetensors_with_lora_and_fp8,
    load_safetensors_with_fp8_optimization,
    convert_hunyuan_to_framepack,
    convert_from_diffusion_pipe_or_something
)

from .fp8_optimization_utils import (
    calculate_fp8_maxval,
    quantize_tensor_to_fp8,
    optimize_state_dict_with_fp8_on_the_fly,
    fp8_linear_forward_patch,
    apply_fp8_monkey_patch,
    check_fp8_support
)

from .lora_loader import (
    load_and_apply_lora
)

from .safetensors_utils import (
    MemoryEfficientSafeOpen
)

# Internationalization helper — optional, so the package still imports when
# the locales module is absent.
try:
    from locales import i18n
    HAS_I18N = True
except ImportError:
    HAS_I18N = False
    print("Warning: i18n module not found, using fallback translations")
37
+
38
# Translation helper
def _(text):
    """Return the localized form of *text*, or *text* itself when i18n is unavailable."""
    return i18n.translate(text) if HAS_I18N else text
44
+
45
+ # Version information
46
+ __version__ = "1.0.0"
lora_utils/dynamic_swap_lora.py ADDED
@@ -0,0 +1,76 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
# FramePack-eichi Dynamic Swap LoRA
#
# This module is kept only for backward compatibility; the actual LoRA
# application is performed by the direct-application path (lora_loader).

import os
import torch
import warnings

# Internationalization support
from locales.i18n_extended import translate as _
12
+
13
+
14
class DynamicSwapLoRAManager:
    """
    Legacy LoRA manager retained for backward compatibility only.

    All methods redirect to (or merely precede) the direct-application path;
    no hook-based swapping is actually performed anymore.
    """

    def __init__(self):
        """Initialize state and emit a DeprecationWarning."""
        self.is_active = False   # whether load_lora() has recorded a path
        self.lora_path = None    # path recorded by load_lora()
        self.lora_scale = 0.8    # default application strength
        warnings.warn(
            _("DynamicSwapLoRAManagerは非推奨です。代わりにlora_loader.load_and_apply_lora()を使用してください。"),
            DeprecationWarning,
            stacklevel=2
        )

    def load_lora(self, lora_path, is_diffusers=False):
        """
        Record the LoRA file path (no actual loading happens here).

        Args:
            lora_path: path to the LoRA file
            is_diffusers: kept for interface compatibility (unused)

        Raises:
            FileNotFoundError: if lora_path does not exist
        """
        if not os.path.exists(lora_path):
            raise FileNotFoundError(_("LoRAファイルが見つかりません: {0}").format(lora_path))

        self.lora_path = lora_path
        self.is_active = True

        print(_("LoRAファイルがロードされました (非推奨インターフェース): {0}").format(lora_path))
        print(_("注意: ") + _("DynamicSwapLoRAManagerは非推奨です。代わりにlora_loader.load_and_apply_lora()を使用してください。"))

    def set_scale(self, scale):
        """
        Set the LoRA application scale.

        Args:
            scale: LoRA application strength
        """
        self.lora_scale = scale

    def install_hooks(self, model):
        """
        Historically installed LoRA hooks; now redirects to direct application.

        Args:
            model: the model to apply the LoRA to
        """
        # Apply the LoRA via the direct-application path instead of hooks.
        from .lora_loader import load_and_apply_lora

        print(_("警告: DynamicSwapLoRAManagerは非推奨です。直接適用モードにリダイレクトします。"))

        load_and_apply_lora(
            model,
            self.lora_path,
            self.lora_scale,
            device=torch.device("cuda" if torch.cuda.is_available() else "cpu")
        )

        print(_("LoRAは直接適用モードで適用されました。"))