diff --git a/.gitattributes b/.gitattributes
index 53d7257..21b3632 100644
--- a/.gitattributes
+++ b/.gitattributes
@@ -44,4 +44,6 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
*.tar filter=lfs diff=lfs merge=lfs -text
*.wasm filter=lfs diff=lfs merge=lfs -text
*.zst filter=lfs diff=lfs merge=lfs -text
-*tfevents* filter=lfs diff=lfs merge=lfs -text
\ No newline at end of file
+*tfevents* filter=lfs diff=lfs merge=lfs -text
+
+tokenizer.json filter=lfs diff=lfs merge=lfs -text
\ No newline at end of file
diff --git a/LICENSE b/LICENSE
new file mode 100644
index 0000000..d42fae9
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2023 DeepSeek
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
\ No newline at end of file
diff --git a/README.md b/README.md
index f556eca..d714f25 100644
--- a/README.md
+++ b/README.md
@@ -1,47 +1,10 @@
---
-license: Apache License 2.0
-
-#model-type:
-## e.g. gpt, phi, llama, chatglm, baichuan, etc.
-#- gpt
-
-#domain:
-## e.g. nlp, cv, audio, multi-modal
-#- nlp
-
-#language:
-## list of language codes: https://help.aliyun.com/document_detail/215387.html?spm=a2c4g.11186623.0.0.9f8d7467kni6Aa
-#- cn
-
-#metrics:
-## e.g. CIDEr, BLEU, ROUGE, etc.
-#- CIDEr
-
-#tags:
-## all kinds of custom tags, including training methods such as pretrained, fine-tuned, instruction-tuned, RL-tuned, and others
-#- pretrained
-
-#tools:
-## e.g. vllm, fastchat, llamacpp, AdaSeq, etc.
-#- vllm
----
-### The contributor of this model has not provided a more detailed model description. The model files and weights can be obtained from the "Model Files" page.
-#### You can download the model via the git clone command below, or via the ModelScope SDK
-
-SDK download
-```bash
-# Install ModelScope
-pip install modelscope
-```
-```python
-# Download the model via the SDK
-from modelscope import snapshot_download
-model_dir = snapshot_download('unsloth/DeepSeek-V3.1-Base')
-```
-Git download
-```
-# Download the model via git
-git clone https://www.modelscope.cn/unsloth/DeepSeek-V3.1-Base.git
-```
-
-If you are a contributor to this model, we invite you to promptly complete the model card according to the model contribution documentation.
\ No newline at end of file
+---
+base_model:
+- deepseek-ai/DeepSeek-V3.1-Base
+license: mit
+tags:
+- deepseek
+- unsloth
+pipeline_tag: text-generation
+library_name: transformers
+---
\ No newline at end of file
diff --git a/chat_template.jinja b/chat_template.jinja
new file mode 100644
index 0000000..e565619
--- /dev/null
+++ b/chat_template.jinja
@@ -0,0 +1,3 @@
+{% if not add_generation_prompt is defined %}{% set add_generation_prompt = false %}{% endif %}{% if not thinking is defined %}{% set thinking = false %}{% endif %}{% set ns = namespace(is_first=false, is_tool=false, system_prompt='', is_first_sp=true, is_last_user=false) %}{%- for message in messages %}{%- if message['role'] == 'system' %}{%- if ns.is_first_sp %}{% set ns.system_prompt = ns.system_prompt + message['content'] %}{% set ns.is_first_sp = false %}{%- else %}{% set ns.system_prompt = ns.system_prompt + '
+
+' + message['content'] %}{%- endif %}{%- endif %}{%- endfor %}{{ bos_token }}{{ ns.system_prompt }}{%- for message in messages %}{%- if message['role'] == 'user' %}{%- set ns.is_tool = false -%}{%- set ns.is_first = false -%}{%- set ns.is_last_user = true -%}{{'<|User|>' + message['content']}}{%- endif %}{%- if message['role'] == 'assistant' and message['tool_calls'] is defined and message['tool_calls'] is not none %}{%- if ns.is_last_user %}{{'<|Assistant|>'}}{%- endif %}{%- set ns.is_last_user = false -%}{%- set ns.is_first = false %}{%- set ns.is_tool = false -%}{%- for tool in message['tool_calls'] %}{%- if not ns.is_first %}{%- if message['content'] is none %}{{'<|tool▁calls▁begin|><|tool▁call▁begin|>'+ tool['function']['name'] + '<|tool▁sep|>' + tool['function']['arguments'] + '<|tool▁call▁end|>'}}{%- else %}{{message['content'] + '<|tool▁calls▁begin|><|tool▁call▁begin|>' + tool['function']['name'] + '<|tool▁sep|>' + tool['function']['arguments'] + '<|tool▁call▁end|>'}}{%- endif %}{%- set ns.is_first = true -%}{%- else %}{{'<|tool▁call▁begin|>'+ tool['function']['name'] + '<|tool▁sep|>' + tool['function']['arguments'] + '<|tool▁call▁end|>'}}{%- endif %}{%- endfor %}{{'<|tool▁calls▁end|><|end▁of▁sentence|>'}}{%- endif %}{%- if message['role'] == 'assistant' and (message['tool_calls'] is not defined or message['tool_calls'] is none) %}{%- if ns.is_last_user %}{{'<|Assistant|>'}}{%- if message['prefix'] is defined and message['prefix'] and thinking %}{{'<think>'}}{%- else %}{{'</think>'}}{%- endif %}{%- endif %}{%- set ns.is_last_user = false -%}{%- if ns.is_tool %}{{message['content'] + '<|end▁of▁sentence|>'}}{%- set ns.is_tool = false -%}{%- else %}{%- set content = message['content'] -%}{%- if '</think>' in content %}{%- set content = content.split('</think>', 1)[1] -%}{%- endif %}{{content + '<|end▁of▁sentence|>'}}{%- endif %}{%- endif %}{%- if message['role'] == 'tool' %}{%- set ns.is_last_user = false -%}{%- set ns.is_tool = true -%}{{'<|tool▁output▁begin|>' + message['content'] + '<|tool▁output▁end|>'}}{%- endif %}{%- endfor -%}{%- if add_generation_prompt and ns.is_last_user and not ns.is_tool %}{{'<|Assistant|>'}}{%- if not thinking %}{{'</think>'}}{%- else %}{{'<think>'}}{%- endif %}{% endif %}
\ No newline at end of file
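For context, a minimal sketch (not part of this commit) of how the template above can be driven from `transformers`, assuming the files are published under the repo id `unsloth/DeepSeek-V3.1-Base`; recent `transformers` versions forward extra keyword arguments such as `thinking` into the Jinja context, which this template reads to choose between `<think>` and `</think>`:

```python
# Sketch only: render the chat template above for a simple conversation.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained(
    "unsloth/DeepSeek-V3.1-Base", trust_remote_code=True
)

messages = [
    {"role": "system", "content": "You are a helpful assistant."},
    {"role": "user", "content": "What is 2 + 2?"},
]

# thinking=False ends the prompt with '<|Assistant|></think>' (skip reasoning);
# thinking=True ends it with '<|Assistant|><think>' instead.
prompt = tokenizer.apply_chat_template(
    messages, tokenize=False, add_generation_prompt=True, thinking=False
)
print(prompt)
```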
diff --git a/config.json b/config.json
new file mode 100644
index 0000000..e7ca49f
--- /dev/null
+++ b/config.json
@@ -0,0 +1,69 @@
+{
+ "architectures": [
+ "DeepseekV3ForCausalLM"
+ ],
+ "attention_bias": false,
+ "attention_dropout": 0.0,
+ "auto_map": {
+ "AutoConfig": "configuration_deepseek.DeepseekV3Config",
+ "AutoModel": "modeling_deepseek.DeepseekV3Model",
+ "AutoModelForCausalLM": "modeling_deepseek.DeepseekV3ForCausalLM"
+ },
+ "bos_token_id": 0,
+ "eos_token_id": 1,
+ "ep_size": 1,
+ "first_k_dense_replace": 3,
+ "hidden_act": "silu",
+ "hidden_size": 7168,
+ "initializer_range": 0.02,
+ "intermediate_size": 18432,
+ "kv_lora_rank": 512,
+ "max_position_embeddings": 163840,
+ "model_type": "deepseek_v3",
+ "moe_intermediate_size": 2048,
+ "moe_layer_freq": 1,
+ "n_group": 8,
+ "n_routed_experts": 256,
+ "n_shared_experts": 1,
+ "norm_topk_prob": true,
+ "num_attention_heads": 128,
+ "num_experts_per_tok": 8,
+ "num_hidden_layers": 61,
+ "num_key_value_heads": 128,
+ "num_nextn_predict_layers": 1,
+ "pad_token_id": 2,
+ "q_lora_rank": 1536,
+ "qk_nope_head_dim": 128,
+ "qk_rope_head_dim": 64,
+ "quantization_config": {
+ "activation_scheme": "dynamic",
+ "fmt": "e4m3",
+ "quant_method": "fp8",
+ "weight_block_size": [
+ 128,
+ 128
+ ]
+ },
+ "rms_norm_eps": 1e-06,
+ "rope_scaling": {
+ "beta_fast": 32,
+ "beta_slow": 1,
+ "factor": 40,
+ "mscale": 1.0,
+ "mscale_all_dim": 1.0,
+ "original_max_position_embeddings": 4096,
+ "type": "yarn"
+ },
+ "rope_theta": 10000,
+ "routed_scaling_factor": 2.5,
+ "scoring_func": "sigmoid",
+ "tie_word_embeddings": false,
+ "topk_group": 4,
+ "topk_method": "noaux_tc",
+ "torch_dtype": "bfloat16",
+ "transformers_version": "4.55.2",
+ "unsloth_fixed": true,
+ "use_cache": true,
+ "v_head_dim": 128,
+ "vocab_size": 129280
+}
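Because `config.json` routes `AutoConfig` and `AutoModelForCausalLM` to the repo's own `configuration_deepseek.py` and `modeling_deepseek.py` via `auto_map`, loading goes through remote code. A hedged sketch (repo id assumed as above):

```python
# Sketch only: inspect the MoE and fp8 quantization settings declared above.
from transformers import AutoConfig

config = AutoConfig.from_pretrained(
    "unsloth/DeepSeek-V3.1-Base", trust_remote_code=True
)

print(config.num_hidden_layers)    # 61
print(config.n_routed_experts)     # 256 routed experts
print(config.num_experts_per_tok)  # 8 experts activated per token
print(config.quantization_config["weight_block_size"])  # [128, 128] fp8 blocks
```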
diff --git a/configuration.json b/configuration.json
new file mode 100644
index 0000000..bbeeda1
--- /dev/null
+++ b/configuration.json
@@ -0,0 +1 @@
+{"framework": "pytorch", "task": "text-generation", "allow_remote": true}
\ No newline at end of file
diff --git a/configuration_deepseek.py b/configuration_deepseek.py
new file mode 100644
index 0000000..f549f2b
--- /dev/null
+++ b/configuration_deepseek.py
@@ -0,0 +1,201 @@
+from transformers.configuration_utils import PretrainedConfig
+from transformers.utils import logging
+
+logger = logging.get_logger(__name__)
+
+DEEPSEEK_PRETRAINED_CONFIG_ARCHIVE_MAP = {}
+class DeepseekV3Config(PretrainedConfig):
+ r"""
+ This is the configuration class to store the configuration of a [`DeepseekV3Model`]. It is used to instantiate a DeepSeek
+ model according to the specified arguments, defining the model architecture. Instantiating a configuration with the
+ defaults will yield a similar configuration to that of the DeepSeek-V3.
+
+ Configuration objects inherit from [`PretrainedConfig`] and can be used to control the model outputs. Read the
+ documentation from [`PretrainedConfig`] for more information.
+
+
+ Args:
+ vocab_size (`int`, *optional*, defaults to 129280):
+ Vocabulary size of the DeepSeek model. Defines the number of different tokens that can be represented by the
+ `input_ids` passed when calling [`DeepseekV3Model`].
+ hidden_size (`int`, *optional*, defaults to 7168):
+ Dimension of the hidden representations.
+ intermediate_size (`int`, *optional*, defaults to 18432):
+ Dimension of the MLP representations.
+ moe_intermediate_size (`int`, *optional*, defaults to 2048):
+ Dimension of the MoE representations.
+ num_hidden_layers (`int`, *optional*, defaults to 61):
+ Number of hidden layers in the Transformer decoder.
+ num_nextn_predict_layers (`int`, *optional*, defaults to 1):
+ Number of nextn predict layers in the DeepSeekV3 Model.
+ num_attention_heads (`int`, *optional*, defaults to 128):
+ Number of attention heads for each attention layer in the Transformer decoder.
+ n_shared_experts (`int`, *optional*, defaults to 1):
+ Number of shared experts, None means dense model.
+ n_routed_experts (`int`, *optional*, defaults to 256):
+ Number of routed experts, None means dense model.
+ routed_scaling_factor (`float`, *optional*, defaults to 2.5):
+ Scaling factor for routed experts.
+ topk_method (`str`, *optional*, defaults to `noaux_tc`):
+ Topk method used in the routed gate.
+ n_group (`int`, *optional*, defaults to 8):
+ Number of groups for routed experts.
+ topk_group (`int`, *optional*, defaults to 4):
+ Number of selected groups for each token (for each token, ensuring the selected experts are only within `topk_group` groups).
+ num_experts_per_tok (`int`, *optional*, defaults to 8):
+ Number of selected experts, None means dense model.
+ moe_layer_freq (`int`, *optional*, defaults to 1):
+ The frequency of the MoE layer: one expert layer for every `moe_layer_freq - 1` dense layers.
+ first_k_dense_replace (`int`, *optional*, defaults to 3):
+ Number of dense layers in shallow layers (embed->dense->dense->...->dense->moe->moe...->lm_head).
+ \--k dense layers--/
+ norm_topk_prob (`bool`, *optional*, defaults to True):
+ Whether to normalize the weights of the routed experts.
+ scoring_func (`str`, *optional*, defaults to 'sigmoid'):
+ Method of computing expert weights.
+ aux_loss_alpha (`float`, *optional*, defaults to 0.001):
+ Auxiliary loss weight coefficient.
+ seq_aux (`bool`, *optional*, defaults to True):
+ Whether to compute the auxiliary loss for each individual sample.
+ num_key_value_heads (`int`, *optional*):
+ This is the number of key_value heads that should be used to implement Grouped Query Attention. If
+ `num_key_value_heads=num_attention_heads`, the model will use Multi Head Attention (MHA), if
+ `num_key_value_heads=1` the model will use Multi Query Attention (MQA); otherwise GQA is used. When
+ converting a multi-head checkpoint to a GQA checkpoint, each group key and value head should be constructed
+ by meanpooling all the original heads within that group. For more details, check out [this
+ paper](https://arxiv.org/pdf/2305.13245.pdf). If it is not specified, will default to
+ `num_attention_heads`.
+ hidden_act (`str` or `function`, *optional*, defaults to `"silu"`):
+ The non-linear activation function (function or string) in the decoder.
+ max_position_embeddings (`int`, *optional*, defaults to 4096):
+ The maximum sequence length that this model might ever be used with.
+ initializer_range (`float`, *optional*, defaults to 0.02):
+ The standard deviation of the truncated_normal_initializer for initializing all weight matrices.
+ rms_norm_eps (`float`, *optional*, defaults to 1e-06):
+ The epsilon used by the rms normalization layers.
+ use_cache (`bool`, *optional*, defaults to `True`):
+ Whether or not the model should return the last key/values attentions (not used by all models). Only
+ relevant if `config.is_decoder=True`.
+ pad_token_id (`int`, *optional*):
+ Padding token id.
+ bos_token_id (`int`, *optional*, defaults to 0):
+ Beginning of stream token id.
+ eos_token_id (`int`, *optional*, defaults to 1):
+ End of stream token id.
+ tie_word_embeddings (`bool`, *optional*, defaults to `False`):
+ Whether to tie weight embeddings
+ rope_theta (`float`, *optional*, defaults to 10000.0):
+ The base period of the RoPE embeddings.
+ rope_scaling (`Dict`, *optional*):
+ Dictionary containing the scaling configuration for the RoPE embeddings. Currently supports two scaling
+ strategies: linear and dynamic. Their scaling factor must be a float greater than 1. The expected format is
+ `{"type": strategy name, "factor": scaling factor}`. When using this flag, don't update
+ `max_position_embeddings` to the expected new maximum.
+ attention_bias (`bool`, *optional*, defaults to `False`):
+ Whether to use a bias in the query, key, value and output projection layers during self-attention.
+ attention_dropout (`float`, *optional*, defaults to 0.0):
+ The dropout ratio for the attention probabilities.
+
+ ```python
+ >>> from transformers import DeepseekV3Model, DeepseekV3Config
+
+ >>> # Initializing a Deepseek-V3 style configuration
+ >>> configuration = DeepseekV3Config()
+
+ >>> # Initializing a model from the Deepseek-V3 style configuration
+ >>> model = DeepseekV3Model(configuration)
+
+ >>> # Accessing the model configuration
+ >>> configuration = model.config
+ ```"""
+
+ model_type = "deepseek_v3"
+ keys_to_ignore_at_inference = ["past_key_values"]
+
+ def __init__(
+ self,
+ vocab_size=129280,
+ hidden_size=7168,
+ intermediate_size=18432,
+ moe_intermediate_size = 2048,
+ num_hidden_layers=61,
+ num_nextn_predict_layers=1,
+ num_attention_heads=128,
+ num_key_value_heads=128,
+ n_shared_experts = 1,
+ n_routed_experts = 256,
+ ep_size = 1,
+ routed_scaling_factor = 2.5,
+ kv_lora_rank = 512,
+ q_lora_rank = 1536,
+ qk_rope_head_dim = 64,
+ v_head_dim = 128,
+ qk_nope_head_dim = 128,
+ topk_method = 'noaux_tc',
+ n_group = 8,
+ topk_group = 4,
+ num_experts_per_tok = 8,
+ moe_layer_freq = 1,
+ first_k_dense_replace = 3,
+ norm_topk_prob = True,
+ scoring_func = 'sigmoid',
+ hidden_act="silu",
+ max_position_embeddings=4096,
+ initializer_range=0.02,
+ rms_norm_eps=1e-6,
+ use_cache=True,
+ pad_token_id=None,
+ bos_token_id=0,
+ eos_token_id=1,
+ tie_word_embeddings=False,
+ rope_theta=10000.0,
+ rope_scaling=None,
+ attention_bias=False,
+ attention_dropout=0.0,
+ **kwargs,
+ ):
+ self.vocab_size = vocab_size
+ self.max_position_embeddings = max_position_embeddings
+ self.hidden_size = hidden_size
+ self.intermediate_size = intermediate_size
+ self.moe_intermediate_size = moe_intermediate_size
+ self.num_hidden_layers = num_hidden_layers
+ self.num_nextn_predict_layers = num_nextn_predict_layers
+ self.num_attention_heads = num_attention_heads
+ self.n_shared_experts = n_shared_experts
+ self.n_routed_experts = n_routed_experts
+ self.ep_size = ep_size
+ self.routed_scaling_factor = routed_scaling_factor
+ self.kv_lora_rank = kv_lora_rank
+ self.q_lora_rank = q_lora_rank
+ self.qk_rope_head_dim = qk_rope_head_dim
+ self.v_head_dim = v_head_dim
+ self.qk_nope_head_dim = qk_nope_head_dim
+ self.topk_method = topk_method
+ self.n_group = n_group
+ self.topk_group = topk_group
+ self.num_experts_per_tok = num_experts_per_tok
+ self.moe_layer_freq = moe_layer_freq
+ self.first_k_dense_replace = first_k_dense_replace
+ self.norm_topk_prob = norm_topk_prob
+ self.scoring_func = scoring_func
+ # for backward compatibility
+ if num_key_value_heads is None:
+ num_key_value_heads = num_attention_heads
+
+ self.num_key_value_heads = num_key_value_heads
+ self.hidden_act = hidden_act
+ self.initializer_range = initializer_range
+ self.rms_norm_eps = rms_norm_eps
+ self.use_cache = use_cache
+ self.rope_theta = rope_theta
+ self.rope_scaling = rope_scaling
+ self.attention_bias = attention_bias
+ self.attention_dropout = attention_dropout
+
+ super().__init__(
+ pad_token_id=pad_token_id,
+ bos_token_id=bos_token_id,
+ eos_token_id=eos_token_id,
+ tie_word_embeddings=tie_word_embeddings,
+ **kwargs,
+ )
\ No newline at end of file
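The `rope_scaling` argument is documented above with the generic `{"type": ..., "factor": ...}` shape, while `config.json` in this commit passes the richer YaRN dictionary. A small sketch of constructing the config directly with those values, assuming `configuration_deepseek.py` is importable from the working directory:

```python
# Sketch only: build the config with the YaRN rope_scaling used by config.json.
from configuration_deepseek import DeepseekV3Config

config = DeepseekV3Config(
    rope_theta=10000.0,
    rope_scaling={
        "type": "yarn",
        "factor": 40,
        "beta_fast": 32,
        "beta_slow": 1,
        "mscale": 1.0,
        "mscale_all_dim": 1.0,
        "original_max_position_embeddings": 4096,
    },
)
print(config.rope_scaling["factor"])  # 40 -> 4096 * 40 = 163840 positions
```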
diff --git a/generation_config.json b/generation_config.json
new file mode 100644
index 0000000..15c27a7
--- /dev/null
+++ b/generation_config.json
@@ -0,0 +1,9 @@
+{
+ "_from_model_config": true,
+ "bos_token_id": 0,
+ "eos_token_id": 1,
+ "do_sample": true,
+ "temperature": 0.6,
+ "top_p": 0.95,
+ "transformers_version": "4.46.3"
+}
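These sampling defaults (`do_sample=True`, `temperature=0.6`, `top_p=0.95`) are picked up by `generate()` automatically when the model is loaded from this repo; a minimal sketch of the equivalent explicit object, for illustration only:

```python
# Sketch only: the generation defaults above, constructed explicitly.
from transformers import GenerationConfig

gen_config = GenerationConfig(
    bos_token_id=0,
    eos_token_id=1,
    do_sample=True,
    temperature=0.6,
    top_p=0.95,
)
# Could be passed as model.generate(..., generation_config=gen_config).
print(gen_config.temperature, gen_config.top_p)  # 0.6 0.95
```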
diff --git a/model-00001-of-000163.safetensors b/model-00001-of-000163.safetensors
new file mode 100644
index 0000000..32b2efc
--- /dev/null
+++ b/model-00001-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:2668b4d448e73199bff9a5b3a1a5f788bcbfa49b7f4b93e96e67bec00c545a57
+size 135
diff --git a/model-00002-of-000163.safetensors b/model-00002-of-000163.safetensors
new file mode 100644
index 0000000..9a959a9
--- /dev/null
+++ b/model-00002-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f58ffd56aa8cba99c0e95875c98ef0483c712865182f62846d3dff7fc83a7d41
+size 135
diff --git a/model-00003-of-000163.safetensors b/model-00003-of-000163.safetensors
new file mode 100644
index 0000000..f4da3ba
--- /dev/null
+++ b/model-00003-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:fc791f38a19044e0736bb4d9054a54d7e9cef2300c5e016d0c68bebb1d348ff1
+size 135
diff --git a/model-00004-of-000163.safetensors b/model-00004-of-000163.safetensors
new file mode 100644
index 0000000..660e8d2
--- /dev/null
+++ b/model-00004-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9ed432b78307315333608bf742abd27140a1aa81acd82a4e3571e4dcf21e8abc
+size 135
diff --git a/model-00005-of-000163.safetensors b/model-00005-of-000163.safetensors
new file mode 100644
index 0000000..4895402
--- /dev/null
+++ b/model-00005-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:2bf7355f7079a253371a4ed668bc81c280ad14844d3e432753bd7bcbcc96a7ea
+size 135
diff --git a/model-00006-of-000163.safetensors b/model-00006-of-000163.safetensors
new file mode 100644
index 0000000..5cec82d
--- /dev/null
+++ b/model-00006-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:bb2ecb04bacc8b5c02d9c2cde350f7a7504cd985cfd7811de54f60ae5c67ffea
+size 135
diff --git a/model-00007-of-000163.safetensors b/model-00007-of-000163.safetensors
new file mode 100644
index 0000000..6dd0b6d
--- /dev/null
+++ b/model-00007-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:39a054e8c7dd397ea7b6e949e7dd910b3181d1e343c0b32bb1f5fd866decef55
+size 135
diff --git a/model-00008-of-000163.safetensors b/model-00008-of-000163.safetensors
new file mode 100644
index 0000000..d5aae15
--- /dev/null
+++ b/model-00008-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b2a52475e56372ad21b584395f438574d4071960d6f36f44a6145d225e569dcb
+size 135
diff --git a/model-00009-of-000163.safetensors b/model-00009-of-000163.safetensors
new file mode 100644
index 0000000..3fdc3fc
--- /dev/null
+++ b/model-00009-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:7d7f7c6444a9799e7f4c9e55259abc7149dc9535b1cfa91b48694cf53f24d0ce
+size 135
diff --git a/model-00010-of-000163.safetensors b/model-00010-of-000163.safetensors
new file mode 100644
index 0000000..7752b49
--- /dev/null
+++ b/model-00010-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:2dbcf9e10ecedcf764ebf18944ded22ac4da332de8f4cb48ee2d1c08ce9e092b
+size 135
diff --git a/model-00011-of-000163.safetensors b/model-00011-of-000163.safetensors
new file mode 100644
index 0000000..612ca97
--- /dev/null
+++ b/model-00011-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f58da1bacdd06ee92ec23431b5a328b4c96c6ad8e236e91140573675823acff9
+size 135
diff --git a/model-00012-of-000163.safetensors b/model-00012-of-000163.safetensors
new file mode 100644
index 0000000..61a0237
--- /dev/null
+++ b/model-00012-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:bbd32497450fe9a60f20606c661ac05276d9084f45c36895763fe462b4db51d4
+size 135
diff --git a/model-00013-of-000163.safetensors b/model-00013-of-000163.safetensors
new file mode 100644
index 0000000..0c50e52
--- /dev/null
+++ b/model-00013-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:8a25990066ee05072c079624b22db37477e7fbeb54d48e5cb49d0b5433e48af8
+size 135
diff --git a/model-00014-of-000163.safetensors b/model-00014-of-000163.safetensors
new file mode 100644
index 0000000..d029de8
--- /dev/null
+++ b/model-00014-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ad4596ca52064e34a94a48066192b888ef8fb15236f32a1837b2eb5ac3c9f858
+size 135
diff --git a/model-00015-of-000163.safetensors b/model-00015-of-000163.safetensors
new file mode 100644
index 0000000..49d2842
--- /dev/null
+++ b/model-00015-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:06fd2aed7564c2038e3da1aab80e9d05a7823586f98a817faa33bce24a07a429
+size 135
diff --git a/model-00016-of-000163.safetensors b/model-00016-of-000163.safetensors
new file mode 100644
index 0000000..4ea5009
--- /dev/null
+++ b/model-00016-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:7f619b812dbf922cbc87a89f0d4e4cf87bc11fb108e4f7e45b926ae0d803ceb8
+size 135
diff --git a/model-00017-of-000163.safetensors b/model-00017-of-000163.safetensors
new file mode 100644
index 0000000..2a99b65
--- /dev/null
+++ b/model-00017-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:599c722fcd03215bcb9b67aad7c79710a485507b305b5d2e57ab545b8515d9a8
+size 135
diff --git a/model-00018-of-000163.safetensors b/model-00018-of-000163.safetensors
new file mode 100644
index 0000000..aa8c6eb
--- /dev/null
+++ b/model-00018-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f689f307ddfd68bb79ee13b6b355898bf55c9ea26212303c16eeedeeac666dad
+size 135
diff --git a/model-00019-of-000163.safetensors b/model-00019-of-000163.safetensors
new file mode 100644
index 0000000..bdfac77
--- /dev/null
+++ b/model-00019-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:7bab82e7159bc2817a42e84e3db916fb39398bfa807edab4678abb7e3e41aae5
+size 135
diff --git a/model-00020-of-000163.safetensors b/model-00020-of-000163.safetensors
new file mode 100644
index 0000000..e74c99a
--- /dev/null
+++ b/model-00020-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:55119f2bb00b5a29522f85d726bd8be77cebe02f7af39fbd407c11b26feeb3f9
+size 135
diff --git a/model-00021-of-000163.safetensors b/model-00021-of-000163.safetensors
new file mode 100644
index 0000000..88a1749
--- /dev/null
+++ b/model-00021-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:8ac2cb0f4ae8754554217b8f16c714ab1b265f710ad87d95db6ba853fba8663a
+size 135
diff --git a/model-00022-of-000163.safetensors b/model-00022-of-000163.safetensors
new file mode 100644
index 0000000..d3e3672
--- /dev/null
+++ b/model-00022-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:bd0fc0a440175e2ae630c3a182ff64fa6706ee992c6c30febedcea7c64703b52
+size 135
diff --git a/model-00023-of-000163.safetensors b/model-00023-of-000163.safetensors
new file mode 100644
index 0000000..61dfeeb
--- /dev/null
+++ b/model-00023-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:95d262bac52c827bb4cb60b5e757f04749ef66105d4cdc933112541583ff7d11
+size 135
diff --git a/model-00024-of-000163.safetensors b/model-00024-of-000163.safetensors
new file mode 100644
index 0000000..26bd5f6
--- /dev/null
+++ b/model-00024-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a55227356ca83ac3451f1525835cba139a030342dad26b7fb78aca561d88f575
+size 135
diff --git a/model-00025-of-000163.safetensors b/model-00025-of-000163.safetensors
new file mode 100644
index 0000000..9575ca6
--- /dev/null
+++ b/model-00025-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:bda6c025980d3ed183524ef05a37f61b05e55cb0307fe798a9fc73b0cedef0a4
+size 135
diff --git a/model-00026-of-000163.safetensors b/model-00026-of-000163.safetensors
new file mode 100644
index 0000000..fb517ee
--- /dev/null
+++ b/model-00026-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:41918ba65ed14914ec1f8b2f69c60b0617fd560037332b1f86cd288e748b4bf1
+size 135
diff --git a/model-00027-of-000163.safetensors b/model-00027-of-000163.safetensors
new file mode 100644
index 0000000..013822a
--- /dev/null
+++ b/model-00027-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:96b5e5b908e2cb2cfa45a531bdfbf1ce2cb0babf059dfb022e82df150b4c22fa
+size 135
diff --git a/model-00028-of-000163.safetensors b/model-00028-of-000163.safetensors
new file mode 100644
index 0000000..0da0d2f
--- /dev/null
+++ b/model-00028-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4849cfe03f358aef1842e68c8a53f8ea46eeff1e00323adfcae538d0ca3a85f2
+size 135
diff --git a/model-00029-of-000163.safetensors b/model-00029-of-000163.safetensors
new file mode 100644
index 0000000..6953eda
--- /dev/null
+++ b/model-00029-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:cacaca536cfc99ba59a59999cb28763bf68c88225b16a0e22b45a756cb9ec34c
+size 135
diff --git a/model-00030-of-000163.safetensors b/model-00030-of-000163.safetensors
new file mode 100644
index 0000000..d1d9b07
--- /dev/null
+++ b/model-00030-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:7fc025adc39a7c0c8d4acd40a83679d9ada2b1fab7c37c5bbecdf0af25be659d
+size 135
diff --git a/model-00031-of-000163.safetensors b/model-00031-of-000163.safetensors
new file mode 100644
index 0000000..0905067
--- /dev/null
+++ b/model-00031-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:660a9ad518189088c3ee81d67a0c2cedfc4a2cc9e74a1efdac649325dddc6a13
+size 135
diff --git a/model-00032-of-000163.safetensors b/model-00032-of-000163.safetensors
new file mode 100644
index 0000000..27c5040
--- /dev/null
+++ b/model-00032-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:47479268742fe4b4ae439692bcf312b57037841990608c1b96561a3b39c76d42
+size 135
diff --git a/model-00033-of-000163.safetensors b/model-00033-of-000163.safetensors
new file mode 100644
index 0000000..2d6eb00
--- /dev/null
+++ b/model-00033-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:46c66c6a31f17aab307b86bacf26e2e34dd01d60f8a8a78ce096230163b3b1b0
+size 135
diff --git a/model-00034-of-000163.safetensors b/model-00034-of-000163.safetensors
new file mode 100644
index 0000000..b1195b0
--- /dev/null
+++ b/model-00034-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d59ac0d81e9d33674cd793311dc5a5fee4596fbcd390e279e7132a58957abf05
+size 135
diff --git a/model-00035-of-000163.safetensors b/model-00035-of-000163.safetensors
new file mode 100644
index 0000000..a5f60ba
--- /dev/null
+++ b/model-00035-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:15cff7b774eac897954f30b41f69d3a4b91c66c0ec6b69e74685990fa1d76908
+size 135
diff --git a/model-00036-of-000163.safetensors b/model-00036-of-000163.safetensors
new file mode 100644
index 0000000..d8c7779
--- /dev/null
+++ b/model-00036-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e0678e3ae8b2da82da27ffb26a474e25ae751ac3e87c86345dc6b4df62bb69bc
+size 135
diff --git a/model-00037-of-000163.safetensors b/model-00037-of-000163.safetensors
new file mode 100644
index 0000000..c344c5a
--- /dev/null
+++ b/model-00037-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:93292c682d7ca30d0b06dbb826aa883058b3349487074892a7f1dabf7c412edd
+size 135
diff --git a/model-00038-of-000163.safetensors b/model-00038-of-000163.safetensors
new file mode 100644
index 0000000..2fc1c45
--- /dev/null
+++ b/model-00038-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5568f217c228ade51b58bc4cdc831f402cc7e89c37f0d492aadfa7f13e1c0662
+size 135
diff --git a/model-00039-of-000163.safetensors b/model-00039-of-000163.safetensors
new file mode 100644
index 0000000..e9e4820
--- /dev/null
+++ b/model-00039-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f2565ee39413812ce9c9267c67c34905e888b3cdfe77ff247981e181ed9f45ee
+size 135
diff --git a/model-00040-of-000163.safetensors b/model-00040-of-000163.safetensors
new file mode 100644
index 0000000..ff85533
--- /dev/null
+++ b/model-00040-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:74bd9a99782d32b8adced910da9ca443cade4880e7ec6ecf47e815fdd49c329c
+size 135
diff --git a/model-00041-of-000163.safetensors b/model-00041-of-000163.safetensors
new file mode 100644
index 0000000..e4a6392
--- /dev/null
+++ b/model-00041-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:18fa5459a000467b1a141a73cb6739dd6809130e58dbff786aadef35f8642815
+size 135
diff --git a/model-00042-of-000163.safetensors b/model-00042-of-000163.safetensors
new file mode 100644
index 0000000..0917109
--- /dev/null
+++ b/model-00042-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f4031e1cbb13261dba2d703e7eeafa06319306800a780da63ae38c89ed8d2dbe
+size 135
diff --git a/model-00043-of-000163.safetensors b/model-00043-of-000163.safetensors
new file mode 100644
index 0000000..55898d3
--- /dev/null
+++ b/model-00043-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4567ced090e57572b5868215a2b4e915e49a63e04ab2e97087ae3d1f547d2f0f
+size 135
diff --git a/model-00044-of-000163.safetensors b/model-00044-of-000163.safetensors
new file mode 100644
index 0000000..6c17aa7
--- /dev/null
+++ b/model-00044-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:cc53f8e0e707ec741725f884d465b84a8b240d9d1348932d41cc584e9a73695c
+size 135
diff --git a/model-00045-of-000163.safetensors b/model-00045-of-000163.safetensors
new file mode 100644
index 0000000..e412bd4
--- /dev/null
+++ b/model-00045-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e50942ca5fe2f7e2d284f7a910a89eef31483f603df440d06ed3174fc4626215
+size 135
diff --git a/model-00046-of-000163.safetensors b/model-00046-of-000163.safetensors
new file mode 100644
index 0000000..a5d2a37
--- /dev/null
+++ b/model-00046-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:759286becb920e482eb718db39cf05efdb0e1652004121a34ee2d76812081539
+size 135
diff --git a/model-00047-of-000163.safetensors b/model-00047-of-000163.safetensors
new file mode 100644
index 0000000..8e6b29b
--- /dev/null
+++ b/model-00047-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:154788662d4184fcaa43e1e29db9d1aa7c104040d262d24d13889a12625fd58f
+size 135
diff --git a/model-00048-of-000163.safetensors b/model-00048-of-000163.safetensors
new file mode 100644
index 0000000..7c0ac01
--- /dev/null
+++ b/model-00048-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:af11893c18bf9033e8b4ac14abe128f6308a657b576a33dcc7a97f75a92cd2e8
+size 135
diff --git a/model-00049-of-000163.safetensors b/model-00049-of-000163.safetensors
new file mode 100644
index 0000000..43d4fba
--- /dev/null
+++ b/model-00049-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a2e589ea6b86bb52ca7982177372fd20873c0e2c1a0014550a33b3e1eb53e4f2
+size 135
diff --git a/model-00050-of-000163.safetensors b/model-00050-of-000163.safetensors
new file mode 100644
index 0000000..be74533
--- /dev/null
+++ b/model-00050-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d887a00bf3f64acd54fdd0106ad8a3bf849b7b0426969e23ba9f7a285f977023
+size 135
diff --git a/model-00051-of-000163.safetensors b/model-00051-of-000163.safetensors
new file mode 100644
index 0000000..fc27a43
--- /dev/null
+++ b/model-00051-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:75ab8179dac0dbc3f41c33d4007b66fd6f7580ab141d63eb79d364434b4fe727
+size 135
diff --git a/model-00052-of-000163.safetensors b/model-00052-of-000163.safetensors
new file mode 100644
index 0000000..568661c
--- /dev/null
+++ b/model-00052-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d4000fa109c66730a2866c6488a62afa11575394d037f05dc63a141ad83191a9
+size 135
diff --git a/model-00053-of-000163.safetensors b/model-00053-of-000163.safetensors
new file mode 100644
index 0000000..4cf39f9
--- /dev/null
+++ b/model-00053-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:597be10069c2e1faf37c69334c227d8c75b968de2e264c6504fdb8bbef352e79
+size 135
diff --git a/model-00054-of-000163.safetensors b/model-00054-of-000163.safetensors
new file mode 100644
index 0000000..5b54698
--- /dev/null
+++ b/model-00054-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:cde2aa0bbe6ed85a80fe489ee239672ed2759f95a3f954113ab1cf3d2d5a3d47
+size 135
diff --git a/model-00055-of-000163.safetensors b/model-00055-of-000163.safetensors
new file mode 100644
index 0000000..1e75871
--- /dev/null
+++ b/model-00055-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f777f6c549b8dba5eda656dbd1a94d0d490e5df0153bac7763856f3026ac9d21
+size 135
diff --git a/model-00056-of-000163.safetensors b/model-00056-of-000163.safetensors
new file mode 100644
index 0000000..5886404
--- /dev/null
+++ b/model-00056-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:8cb45e8e361e4caa3ed9e75adb7552bd658c066e615fd44c5162ba9abd2b8933
+size 135
diff --git a/model-00057-of-000163.safetensors b/model-00057-of-000163.safetensors
new file mode 100644
index 0000000..295b9da
--- /dev/null
+++ b/model-00057-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a3112231e41489ad3f9f61c09b3c738ffae1bd4240a683d6f2b26bd5aefa9393
+size 135
diff --git a/model-00058-of-000163.safetensors b/model-00058-of-000163.safetensors
new file mode 100644
index 0000000..f1bf366
--- /dev/null
+++ b/model-00058-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:1fb8d09457639ad64ddcd09652d4987ef6bdbd158531462d4ea9ff55f31ff240
+size 135
diff --git a/model-00059-of-000163.safetensors b/model-00059-of-000163.safetensors
new file mode 100644
index 0000000..0b2a583
--- /dev/null
+++ b/model-00059-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e48d4c5a8d0b3556d3ba7b4a4e7d92262538316417c49e910344d5f06e55ebd1
+size 135
diff --git a/model-00060-of-000163.safetensors b/model-00060-of-000163.safetensors
new file mode 100644
index 0000000..5aeb83e
--- /dev/null
+++ b/model-00060-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6123ebe7ef6ed37b6deddfca5a3255091f00b5fa8d928ae083f49ffdc7d716f5
+size 135
diff --git a/model-00061-of-000163.safetensors b/model-00061-of-000163.safetensors
new file mode 100644
index 0000000..097b876
--- /dev/null
+++ b/model-00061-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9ed57a6051cd14a61b729892db7f90c9329d0134a734502ab95099f547b2d379
+size 135
diff --git a/model-00062-of-000163.safetensors b/model-00062-of-000163.safetensors
new file mode 100644
index 0000000..61ade93
--- /dev/null
+++ b/model-00062-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:bda18502e0bb6b02bc9d6bf8c369605ff72b6c7cbc26eb6009c5ec494689f136
+size 135
diff --git a/model-00063-of-000163.safetensors b/model-00063-of-000163.safetensors
new file mode 100644
index 0000000..cb780cc
--- /dev/null
+++ b/model-00063-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4a81815a45c2d1fe06041e525d15990b476cdd06601d6a4a16c898b9059c0128
+size 135
diff --git a/model-00064-of-000163.safetensors b/model-00064-of-000163.safetensors
new file mode 100644
index 0000000..4eab1b7
--- /dev/null
+++ b/model-00064-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0cee68e66c81f53bf51a3f6a0507b60f888774e892b1e1390e714cb36cbd40b1
+size 135
diff --git a/model-00065-of-000163.safetensors b/model-00065-of-000163.safetensors
new file mode 100644
index 0000000..eb5b440
--- /dev/null
+++ b/model-00065-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:67bf1bca527d8b9d446c0bd7f8514410d401941f152d9542e4cf6bb1e8ba6c81
+size 135
diff --git a/model-00066-of-000163.safetensors b/model-00066-of-000163.safetensors
new file mode 100644
index 0000000..d57e338
--- /dev/null
+++ b/model-00066-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:016255960efa5cef56cb4984db1cb3385e6f22c2137f03fa820f785563c88d80
+size 135
diff --git a/model-00067-of-000163.safetensors b/model-00067-of-000163.safetensors
new file mode 100644
index 0000000..cd0c129
--- /dev/null
+++ b/model-00067-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ff7c6538240079d73160b7a2354c954848a865f20124911dc60d742ba31303e8
+size 135
diff --git a/model-00068-of-000163.safetensors b/model-00068-of-000163.safetensors
new file mode 100644
index 0000000..83213c3
--- /dev/null
+++ b/model-00068-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:1a79cd431b9b348afc8fb1050b4789bfb43f2d37102572f0f1195af8e5d0296e
+size 135
diff --git a/model-00069-of-000163.safetensors b/model-00069-of-000163.safetensors
new file mode 100644
index 0000000..7b78a39
--- /dev/null
+++ b/model-00069-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:83772ac6a6e3aa956401a0b55205771f5895b8a3c0b1157ab06463a594ff779b
+size 135
diff --git a/model-00070-of-000163.safetensors b/model-00070-of-000163.safetensors
new file mode 100644
index 0000000..80d5879
--- /dev/null
+++ b/model-00070-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:cceab811df5939cfb8c17eb21e123841bc206e1a6a2175ea21c48be2fe958c89
+size 135
diff --git a/model-00071-of-000163.safetensors b/model-00071-of-000163.safetensors
new file mode 100644
index 0000000..3f103d7
--- /dev/null
+++ b/model-00071-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:8b171e636c403b83fa9fee4de8319dbae5c8bf5d8149ae2df43e8f3d1b1907a9
+size 135
diff --git a/model-00072-of-000163.safetensors b/model-00072-of-000163.safetensors
new file mode 100644
index 0000000..474e9f3
--- /dev/null
+++ b/model-00072-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4347dc7212b1ede05013ef3b95674b789a775ad6148ad8714e8fb17a4ecf5bc4
+size 135
diff --git a/model-00073-of-000163.safetensors b/model-00073-of-000163.safetensors
new file mode 100644
index 0000000..38b1b67
--- /dev/null
+++ b/model-00073-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:42f9aeaf1d67a8a6a4ba960b99eca12fab37f9311a652af21633079c9aa29e66
+size 135
diff --git a/model-00074-of-000163.safetensors b/model-00074-of-000163.safetensors
new file mode 100644
index 0000000..148e9d8
--- /dev/null
+++ b/model-00074-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ab6c4a7f9d173d0af5ee7849a77ad3b963ec1f9722abed8a66493effcbebd3d1
+size 135
diff --git a/model-00075-of-000163.safetensors b/model-00075-of-000163.safetensors
new file mode 100644
index 0000000..31e5ea5
--- /dev/null
+++ b/model-00075-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d037aa4af7d8483f1262f9936f1701094b52eafa3b7dc45f27caa531bc28f874
+size 135
diff --git a/model-00076-of-000163.safetensors b/model-00076-of-000163.safetensors
new file mode 100644
index 0000000..3ca9d9e
--- /dev/null
+++ b/model-00076-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d8bb3d31a2818027888563738466603aa6a3bdb3711268e40293022aa03e51e0
+size 135
diff --git a/model-00077-of-000163.safetensors b/model-00077-of-000163.safetensors
new file mode 100644
index 0000000..3bee643
--- /dev/null
+++ b/model-00077-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:168c91490a144c2d6ff976b230ff9deede7a268a1b6d065519bbfee25d627a49
+size 135
diff --git a/model-00078-of-000163.safetensors b/model-00078-of-000163.safetensors
new file mode 100644
index 0000000..d81dfb6
--- /dev/null
+++ b/model-00078-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:03346d85a9ca16bf2502fdc40ae6546c3cab523813adfa26ac0653cb7f1c26f2
+size 135
diff --git a/model-00079-of-000163.safetensors b/model-00079-of-000163.safetensors
new file mode 100644
index 0000000..7a0aa3f
--- /dev/null
+++ b/model-00079-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0657ea778424c318216a4ca9d3202bbe566485aa40a4d9401ad377185e044827
+size 135
diff --git a/model-00080-of-000163.safetensors b/model-00080-of-000163.safetensors
new file mode 100644
index 0000000..7b9d1c7
--- /dev/null
+++ b/model-00080-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:76528eef82b1067f649da3ffebc81644396107f243dbdddd75fbccb10556236d
+size 135
diff --git a/model-00081-of-000163.safetensors b/model-00081-of-000163.safetensors
new file mode 100644
index 0000000..390da8f
--- /dev/null
+++ b/model-00081-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d77bbcd7b9ddcd794a71a629c019aa4aa879de09b1fe52a56d4af3ce80e3029f
+size 135
diff --git a/model-00082-of-000163.safetensors b/model-00082-of-000163.safetensors
new file mode 100644
index 0000000..96be9e7
--- /dev/null
+++ b/model-00082-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a9bd5e3c9aef2ac6728e1b75e77d2954a67b4dc43e4dce9238c845455fd0eb9a
+size 135
diff --git a/model-00083-of-000163.safetensors b/model-00083-of-000163.safetensors
new file mode 100644
index 0000000..3ca4745
--- /dev/null
+++ b/model-00083-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:3b532c1d14e497d808510ed59a0d677d20588ac07c847cfe3f947b61f8a26f14
+size 135
diff --git a/model-00084-of-000163.safetensors b/model-00084-of-000163.safetensors
new file mode 100644
index 0000000..774a01a
--- /dev/null
+++ b/model-00084-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:021a7a8ae956fca942e6f3b6fa9f30020d9f76ffe612475957660e3c1cd580b6
+size 135
diff --git a/model-00085-of-000163.safetensors b/model-00085-of-000163.safetensors
new file mode 100644
index 0000000..c09063d
--- /dev/null
+++ b/model-00085-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c595cb9376ffa7d5cbf95aba48f44469788e12a06e13e2d32e964b516c7ebda1
+size 135
diff --git a/model-00086-of-000163.safetensors b/model-00086-of-000163.safetensors
new file mode 100644
index 0000000..4793040
--- /dev/null
+++ b/model-00086-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9339f8eb574ceac9b94e3cfd59b5be3693a6dc6ea57d11ce8ea15c350560013c
+size 135
diff --git a/model-00087-of-000163.safetensors b/model-00087-of-000163.safetensors
new file mode 100644
index 0000000..bcbf67a
--- /dev/null
+++ b/model-00087-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6a46b398cd293c0b3c17151120ce844aee08acac21630e70a6af71bb68111450
+size 135
diff --git a/model-00088-of-000163.safetensors b/model-00088-of-000163.safetensors
new file mode 100644
index 0000000..3c01dd5
--- /dev/null
+++ b/model-00088-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e63eba99a3b6be34b539780dacbd9a647a0fcd1daf66b71a64606fcc2abb8d8e
+size 135
diff --git a/model-00089-of-000163.safetensors b/model-00089-of-000163.safetensors
new file mode 100644
index 0000000..7236775
--- /dev/null
+++ b/model-00089-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:bb4776ddd0bdfa7fd60e164f33caee99337c78469c1961d8f5951f1445317bb0
+size 135
diff --git a/model-00090-of-000163.safetensors b/model-00090-of-000163.safetensors
new file mode 100644
index 0000000..0b7e528
--- /dev/null
+++ b/model-00090-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:2f5d535c03b9c95101717a84f0bacba4767ed3248f13a78adb38a5730224bd34
+size 135
diff --git a/model-00091-of-000163.safetensors b/model-00091-of-000163.safetensors
new file mode 100644
index 0000000..b75eaf6
--- /dev/null
+++ b/model-00091-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a35a838d23d8d38c8e51322c1d4d7b8355ea48531cd6636d6ce1d53e1c618522
+size 135
diff --git a/model-00092-of-000163.safetensors b/model-00092-of-000163.safetensors
new file mode 100644
index 0000000..195bd28
--- /dev/null
+++ b/model-00092-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:bcc3e39c50a11eba5460691335030b9fba540bab5ebbb94c131946c4ff2777c4
+size 135
diff --git a/model-00093-of-000163.safetensors b/model-00093-of-000163.safetensors
new file mode 100644
index 0000000..aae929d
--- /dev/null
+++ b/model-00093-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:29bf9c7862704626abacb3b479102ec3e1f94be888f475e7e9b8b59547edb04f
+size 135
diff --git a/model-00094-of-000163.safetensors b/model-00094-of-000163.safetensors
new file mode 100644
index 0000000..9699db7
--- /dev/null
+++ b/model-00094-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0d3dd91dbe2652de8f6a50140b7e42ad9cecf9dcaceb0f47e99f1fd01d22430d
+size 135
diff --git a/model-00095-of-000163.safetensors b/model-00095-of-000163.safetensors
new file mode 100644
index 0000000..cd35d22
--- /dev/null
+++ b/model-00095-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:778d15e70e1d0a7128a5d80772535fe7e9505f59db011864a635d0b0dc27517c
+size 135
diff --git a/model-00096-of-000163.safetensors b/model-00096-of-000163.safetensors
new file mode 100644
index 0000000..b776c87
--- /dev/null
+++ b/model-00096-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0bc90971e6c42ea418e5675113e1c56ed77541169446cf194578cc42be863bcf
+size 135
diff --git a/model-00097-of-000163.safetensors b/model-00097-of-000163.safetensors
new file mode 100644
index 0000000..527b5a8
--- /dev/null
+++ b/model-00097-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:cce8c44f682639f5ee47b9bc9b1c23fc7897d9b1c971ec9ec969e9dccbc74c85
+size 135
diff --git a/model-00098-of-000163.safetensors b/model-00098-of-000163.safetensors
new file mode 100644
index 0000000..7a349cc
--- /dev/null
+++ b/model-00098-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:3d4270199832016e6061f05989c248f4062590f6f7867c92b0debb27015d207e
+size 135
diff --git a/model-00099-of-000163.safetensors b/model-00099-of-000163.safetensors
new file mode 100644
index 0000000..febf3bf
--- /dev/null
+++ b/model-00099-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:cfcb4d1fb79d67842d56d0ce93d56e19bcbd914af53db97ff8d3c2beda7fd155
+size 135
diff --git a/model-00100-of-000163.safetensors b/model-00100-of-000163.safetensors
new file mode 100644
index 0000000..156eb0d
--- /dev/null
+++ b/model-00100-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:be9a4f7c6c308a68fa0aabbaa331a91642496520dd247ac1060ff69f37349047
+size 135
diff --git a/model-00101-of-000163.safetensors b/model-00101-of-000163.safetensors
new file mode 100644
index 0000000..b7c6f52
--- /dev/null
+++ b/model-00101-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:dc9484beea1ea4b840549a8882a3eabe60b252e063775d911221764d240d0911
+size 135
diff --git a/model-00102-of-000163.safetensors b/model-00102-of-000163.safetensors
new file mode 100644
index 0000000..b2fcd4b
--- /dev/null
+++ b/model-00102-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:dcbf0f26986e5b2389c9ca69f0e8511fa45c1e7546e10d857a44fe23fbf01b83
+size 135
diff --git a/model-00103-of-000163.safetensors b/model-00103-of-000163.safetensors
new file mode 100644
index 0000000..8b1048f
--- /dev/null
+++ b/model-00103-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:582b6ed6c14b21f4caf93189ebd84d935461f324098a31881dd2ed12852612c1
+size 135
diff --git a/model-00104-of-000163.safetensors b/model-00104-of-000163.safetensors
new file mode 100644
index 0000000..cbacdd7
--- /dev/null
+++ b/model-00104-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9de59d46fcf64acf667c841e16cf5f19e0d54a2e75d6e6c2ffdda88d7f4e9fb5
+size 135
diff --git a/model-00105-of-000163.safetensors b/model-00105-of-000163.safetensors
new file mode 100644
index 0000000..53c7771
--- /dev/null
+++ b/model-00105-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c83a4316e7cd7604219c7501b57f651d50760a449333dc300d0414f7e215c5ac
+size 135
diff --git a/model-00106-of-000163.safetensors b/model-00106-of-000163.safetensors
new file mode 100644
index 0000000..03b1a47
--- /dev/null
+++ b/model-00106-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:3870f8f7f320b5c1c8f456153064447e58d94b6148b98e13ef6880089e36ee6a
+size 135
diff --git a/model-00107-of-000163.safetensors b/model-00107-of-000163.safetensors
new file mode 100644
index 0000000..3726310
--- /dev/null
+++ b/model-00107-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:bd5e5dd7a9700ec2f24c41a41b808a5075a0a3124288b94c046defb5839fab83
+size 135
diff --git a/model-00108-of-000163.safetensors b/model-00108-of-000163.safetensors
new file mode 100644
index 0000000..02f3e64
--- /dev/null
+++ b/model-00108-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:30a6180cbe3b1cc1bf3a81e164596f4e8323bf5bfb0e3554a8283c8fd27d62d3
+size 135
diff --git a/model-00109-of-000163.safetensors b/model-00109-of-000163.safetensors
new file mode 100644
index 0000000..ba14e0b
--- /dev/null
+++ b/model-00109-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9f47a8090bc695f349892ad256fc8739f1b3b008b3a83143e6f71cc36cff13a1
+size 135
diff --git a/model-00110-of-000163.safetensors b/model-00110-of-000163.safetensors
new file mode 100644
index 0000000..5fea35b
--- /dev/null
+++ b/model-00110-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c025bf128931624ee0efd6adf2e21386b12eb677bc9f22abab704de2886a020e
+size 135
diff --git a/model-00111-of-000163.safetensors b/model-00111-of-000163.safetensors
new file mode 100644
index 0000000..a6bda40
--- /dev/null
+++ b/model-00111-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b4b3c0fffab6adcceddcdabe5a44ce6331e8d2062da53c06482850cce45ed434
+size 135
diff --git a/model-00112-of-000163.safetensors b/model-00112-of-000163.safetensors
new file mode 100644
index 0000000..5cba3ec
--- /dev/null
+++ b/model-00112-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e5879c73803bc43423fb8532d8a2656d05b00e9333c3d3bdb3c5ae14cdaa6265
+size 135
diff --git a/model-00113-of-000163.safetensors b/model-00113-of-000163.safetensors
new file mode 100644
index 0000000..8eeb60b
--- /dev/null
+++ b/model-00113-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d623b32978aa6d97e833d4772ecd71e3e46ce4c5361896063a296d81f479b788
+size 135
diff --git a/model-00114-of-000163.safetensors b/model-00114-of-000163.safetensors
new file mode 100644
index 0000000..ba4662e
--- /dev/null
+++ b/model-00114-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f25f49811085918741222bd5a2e391898606fd10f22ceac5ec7de1425def662a
+size 135
diff --git a/model-00115-of-000163.safetensors b/model-00115-of-000163.safetensors
new file mode 100644
index 0000000..6b795ae
--- /dev/null
+++ b/model-00115-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:8ae8299e4157a1bdc8876529935b273aa04d2aee4b8450ce9d588899db42949d
+size 135
diff --git a/model-00116-of-000163.safetensors b/model-00116-of-000163.safetensors
new file mode 100644
index 0000000..9fbc7b0
--- /dev/null
+++ b/model-00116-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4b3f3b6fc1cc2a762c4fb1631cbb75fa2f3098cb7c54dbed969c6310f3eb8fe2
+size 135
diff --git a/model-00117-of-000163.safetensors b/model-00117-of-000163.safetensors
new file mode 100644
index 0000000..97cf48b
--- /dev/null
+++ b/model-00117-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5027bcf801bf466dfcdcea8bfbb47b4f174c3e4e8442759aebdcadbea851d1d0
+size 135
diff --git a/model-00118-of-000163.safetensors b/model-00118-of-000163.safetensors
new file mode 100644
index 0000000..8ced1d5
--- /dev/null
+++ b/model-00118-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d67c0568408894a32edbb90a44f96f729034d5f6ab5a8330d80850109367fd43
+size 135
diff --git a/model-00119-of-000163.safetensors b/model-00119-of-000163.safetensors
new file mode 100644
index 0000000..043fc6f
--- /dev/null
+++ b/model-00119-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0f4813dd2d9f5450b11e8d126eda0db85a3775a3c889baf92aedf628f1d51df3
+size 135
diff --git a/model-00120-of-000163.safetensors b/model-00120-of-000163.safetensors
new file mode 100644
index 0000000..5a1b611
--- /dev/null
+++ b/model-00120-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e700f710e6234217bb93d2417a4b7aad59e0cc92f62e68bbccf8c375552e1c7a
+size 135
diff --git a/model-00121-of-000163.safetensors b/model-00121-of-000163.safetensors
new file mode 100644
index 0000000..b4fdfbc
--- /dev/null
+++ b/model-00121-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:73949d873bfd164a7793433aa4f9709653b513cd8d2f8c951cd5926a99ea483f
+size 135
diff --git a/model-00122-of-000163.safetensors b/model-00122-of-000163.safetensors
new file mode 100644
index 0000000..201c201
--- /dev/null
+++ b/model-00122-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:8d8fd90f1357d9c4b7dc276b6f9e26b80e2748157beb901eef7c6ecfa79665ad
+size 135
diff --git a/model-00123-of-000163.safetensors b/model-00123-of-000163.safetensors
new file mode 100644
index 0000000..31b35fc
--- /dev/null
+++ b/model-00123-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b96dedc2b0ec1fe268fed529bdb3245e42fb8af8a175018280cfce04ee85e8d0
+size 135
diff --git a/model-00124-of-000163.safetensors b/model-00124-of-000163.safetensors
new file mode 100644
index 0000000..98168ad
--- /dev/null
+++ b/model-00124-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:3ee7d2293b4a7b6a398038acc384eac50d8d2bf23d4b9b1b0a4076e58dcb6ff1
+size 135
diff --git a/model-00125-of-000163.safetensors b/model-00125-of-000163.safetensors
new file mode 100644
index 0000000..b1128f2
--- /dev/null
+++ b/model-00125-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6a138e44ace0e749aca7630374d9b404cefe1af8c189fdd675e9863a4afd2c04
+size 135
diff --git a/model-00126-of-000163.safetensors b/model-00126-of-000163.safetensors
new file mode 100644
index 0000000..51c5ac5
--- /dev/null
+++ b/model-00126-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:026f19d646650be4c82309dd3d79b514ad1da4de737cfa7e22516ab50a7e466c
+size 135
diff --git a/model-00127-of-000163.safetensors b/model-00127-of-000163.safetensors
new file mode 100644
index 0000000..21e8aa5
--- /dev/null
+++ b/model-00127-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c3ee01f08cf4c5fd6b63b991b0577e0dfb44c77d034a14ec75a2c062fb546004
+size 135
diff --git a/model-00128-of-000163.safetensors b/model-00128-of-000163.safetensors
new file mode 100644
index 0000000..fd39cbb
--- /dev/null
+++ b/model-00128-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:577e6ef202b758d59321710a9a7572e162a8ba1e23fc29a4c6a4c12074be1ee8
+size 135
diff --git a/model-00129-of-000163.safetensors b/model-00129-of-000163.safetensors
new file mode 100644
index 0000000..50a4001
--- /dev/null
+++ b/model-00129-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b7152ee7f4873f47b09ffac88de45fbb8641960b955f184f7342b5d1688abb26
+size 135
diff --git a/model-00130-of-000163.safetensors b/model-00130-of-000163.safetensors
new file mode 100644
index 0000000..e3bbbbf
--- /dev/null
+++ b/model-00130-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:2282613ec4cfbc14e2826e7b93d90ab8a230d0cdb3ff0d2368127b4e98a86070
+size 135
diff --git a/model-00131-of-000163.safetensors b/model-00131-of-000163.safetensors
new file mode 100644
index 0000000..0e46528
--- /dev/null
+++ b/model-00131-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:23b3b2dd8a7468fc5a203f0d0355cb7042d50b6952bcb3145975e0812ac52946
+size 135
diff --git a/model-00132-of-000163.safetensors b/model-00132-of-000163.safetensors
new file mode 100644
index 0000000..1e98ecd
--- /dev/null
+++ b/model-00132-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0329110bf8db1e8e29b3bc77365802013fcbee264d1b3e9bb8e1279687ba7f4b
+size 135
diff --git a/model-00133-of-000163.safetensors b/model-00133-of-000163.safetensors
new file mode 100644
index 0000000..d57c04a
--- /dev/null
+++ b/model-00133-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5230bde23ea1954ccb13171459c7a845cbb593ba448b802552ca3d9e33dfba30
+size 135
diff --git a/model-00134-of-000163.safetensors b/model-00134-of-000163.safetensors
new file mode 100644
index 0000000..786b376
--- /dev/null
+++ b/model-00134-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4fdd1272d7265e327a8d87f70c9aea031213575758dd091486cfaff25d6c2ae5
+size 135
diff --git a/model-00135-of-000163.safetensors b/model-00135-of-000163.safetensors
new file mode 100644
index 0000000..9ccb873
--- /dev/null
+++ b/model-00135-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:423381c8400e8859160f1f66c3c73027aa5230b3353fd5574b663f1ccc71661d
+size 135
diff --git a/model-00136-of-000163.safetensors b/model-00136-of-000163.safetensors
new file mode 100644
index 0000000..73dd301
--- /dev/null
+++ b/model-00136-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:1c1f1b6bb44c805b86d40865cfc9a3e47907bfab98ec03d8c9b5c440e4cb7981
+size 135
diff --git a/model-00137-of-000163.safetensors b/model-00137-of-000163.safetensors
new file mode 100644
index 0000000..002de0e
--- /dev/null
+++ b/model-00137-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:531f22a71f6b6f2fde817d43d23f480e712935bbcd3186648a3e8fc34e25bf54
+size 135
diff --git a/model-00138-of-000163.safetensors b/model-00138-of-000163.safetensors
new file mode 100644
index 0000000..4c56d30
--- /dev/null
+++ b/model-00138-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:31d5f4a83ed0799a34a36db31cdd62056e53acaddfebc5e3dd0a155af5a3db74
+size 135
diff --git a/model-00139-of-000163.safetensors b/model-00139-of-000163.safetensors
new file mode 100644
index 0000000..80a5d94
--- /dev/null
+++ b/model-00139-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:72ad6bbbb1f5af495092e1f7c901e06e60962c3e914203c50e71862d498f41c7
+size 135
diff --git a/model-00140-of-000163.safetensors b/model-00140-of-000163.safetensors
new file mode 100644
index 0000000..478cbf4
--- /dev/null
+++ b/model-00140-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f0262fd0c36cacf8a8e669c54bfd25f2a48404b4a157305e291f832bab704df7
+size 135
diff --git a/model-00141-of-000163.safetensors b/model-00141-of-000163.safetensors
new file mode 100644
index 0000000..3dd3d8d
--- /dev/null
+++ b/model-00141-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:737a174ebabc61b8004f66477e73c5d1b502efebdb07fd0b28be248fdbc2eede
+size 135
diff --git a/model-00142-of-000163.safetensors b/model-00142-of-000163.safetensors
new file mode 100644
index 0000000..443b342
--- /dev/null
+++ b/model-00142-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d279ece0551035743b0bd9e706077b0d864ca5d280eac2f443c870499325e067
+size 135
diff --git a/model-00143-of-000163.safetensors b/model-00143-of-000163.safetensors
new file mode 100644
index 0000000..482445c
--- /dev/null
+++ b/model-00143-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c7f5652dd6b7692b6a2016f606d9b95b6f2cb9fa83482f430cce4b63b7f0e964
+size 135
diff --git a/model-00144-of-000163.safetensors b/model-00144-of-000163.safetensors
new file mode 100644
index 0000000..b865cba
--- /dev/null
+++ b/model-00144-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:8bd4cc15e80dea5f7f5a9b65e8c6e169a410a08a72a32bf488a78376a51b7a58
+size 135
diff --git a/model-00145-of-000163.safetensors b/model-00145-of-000163.safetensors
new file mode 100644
index 0000000..97c0ad0
--- /dev/null
+++ b/model-00145-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:aaca846586d2f91d5ad9e17e514c4518fc4dba76629b30a734c5f9e7d3f699d2
+size 135
diff --git a/model-00146-of-000163.safetensors b/model-00146-of-000163.safetensors
new file mode 100644
index 0000000..623be38
--- /dev/null
+++ b/model-00146-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ece137c43cc7669696c907807ffa558bd2aae5f9ce06c6961202c873f5a6c837
+size 135
diff --git a/model-00147-of-000163.safetensors b/model-00147-of-000163.safetensors
new file mode 100644
index 0000000..55d841e
--- /dev/null
+++ b/model-00147-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:01722c1b2918cfd98619834d4970c636e3ad2d3af403d24e533ecdc1d0c6c5f4
+size 135
diff --git a/model-00148-of-000163.safetensors b/model-00148-of-000163.safetensors
new file mode 100644
index 0000000..0835299
--- /dev/null
+++ b/model-00148-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4659a19a96beae5d786c295561f84af63f70dc240a3221531b1043d51d2f20a7
+size 135
diff --git a/model-00149-of-000163.safetensors b/model-00149-of-000163.safetensors
new file mode 100644
index 0000000..e13d6e3
--- /dev/null
+++ b/model-00149-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c3fc8cdaf8d1c06a68b6fa3178633661eb405b2a39a8ab55696ad3a8702925d4
+size 135
diff --git a/model-00150-of-000163.safetensors b/model-00150-of-000163.safetensors
new file mode 100644
index 0000000..ea0932a
--- /dev/null
+++ b/model-00150-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:7cf4fcfe53b6efb538903eb74a368b3e1777c1f8aafabf833ced31a146c7c6fb
+size 135
diff --git a/model-00151-of-000163.safetensors b/model-00151-of-000163.safetensors
new file mode 100644
index 0000000..e38307c
--- /dev/null
+++ b/model-00151-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9726c992c6a1ed4c07246d0582699d6147aa2e449ed8cc990577685945ab9a14
+size 135
diff --git a/model-00152-of-000163.safetensors b/model-00152-of-000163.safetensors
new file mode 100644
index 0000000..240586c
--- /dev/null
+++ b/model-00152-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b5ea660e6f031c835f9944583a0006a7b2c785117927f611086486774c4fee72
+size 135
diff --git a/model-00153-of-000163.safetensors b/model-00153-of-000163.safetensors
new file mode 100644
index 0000000..1cfa6d7
--- /dev/null
+++ b/model-00153-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:3911c96913a6d21577434c83c6d54b06e22964633ef43a5ecf95cf2252e177a5
+size 135
diff --git a/model-00154-of-000163.safetensors b/model-00154-of-000163.safetensors
new file mode 100644
index 0000000..436833f
--- /dev/null
+++ b/model-00154-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:151ba56481a9be02bde56a445d1176910b4c7060ee3738c1041131b7d9aadea4
+size 135
diff --git a/model-00155-of-000163.safetensors b/model-00155-of-000163.safetensors
new file mode 100644
index 0000000..77d1abe
--- /dev/null
+++ b/model-00155-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9a72296681bb7c5147ed68fc9f2448d0392563180aa883273f3183e99d2d8608
+size 135
diff --git a/model-00156-of-000163.safetensors b/model-00156-of-000163.safetensors
new file mode 100644
index 0000000..976103e
--- /dev/null
+++ b/model-00156-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:094e2b52e39889c85b2540a38bd7602cead89025063bf1f6c153915f8df52382
+size 135
diff --git a/model-00157-of-000163.safetensors b/model-00157-of-000163.safetensors
new file mode 100644
index 0000000..89b2eb1
--- /dev/null
+++ b/model-00157-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4b27b82b6ea363bb6c4ccfbf6faa4ca4a23ed2c11e4c3b43c46a3238b9f368c4
+size 135
diff --git a/model-00158-of-000163.safetensors b/model-00158-of-000163.safetensors
new file mode 100644
index 0000000..a922e5e
--- /dev/null
+++ b/model-00158-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:943c0471e9d0018fbc6845a23b3236002973946910d6d696e57bb79b4b833c44
+size 135
diff --git a/model-00159-of-000163.safetensors b/model-00159-of-000163.safetensors
new file mode 100644
index 0000000..a651e14
--- /dev/null
+++ b/model-00159-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:81e539ea218dae4335e47c509479b509c3353535dcdfa64167793bb9fd0df49e
+size 135
diff --git a/model-00160-of-000163.safetensors b/model-00160-of-000163.safetensors
new file mode 100644
index 0000000..0b7eb1a
--- /dev/null
+++ b/model-00160-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:8991cf245a4a8aa3cffe48cdbf874aadd58f1fc5159563387c1a550b49302103
+size 135
diff --git a/model-00161-of-000163.safetensors b/model-00161-of-000163.safetensors
new file mode 100644
index 0000000..7fdb341
--- /dev/null
+++ b/model-00161-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4ed4d3834f1942db90080f83f147f25a78c6eee1daec5a26c3a5ce2630c2cea5
+size 135
diff --git a/model-00162-of-000163.safetensors b/model-00162-of-000163.safetensors
new file mode 100644
index 0000000..ba94d3f
--- /dev/null
+++ b/model-00162-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:48aa1d052852d9c50c9768912180b02825dda020b8ce7ddb5d2fe44217c84a26
+size 135
diff --git a/model-00163-of-000163.safetensors b/model-00163-of-000163.safetensors
new file mode 100644
index 0000000..04a1e2e
--- /dev/null
+++ b/model-00163-of-000163.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e7497f74b4db3080c75255f721baa22d1442124f3a1987b330178db4263f46e1
+size 135
diff --git a/model.safetensors.index.json b/model.safetensors.index.json
new file mode 100644
index 0000000..8fb5064
--- /dev/null
+++ b/model.safetensors.index.json
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a2c5d238ab1e8301a117336e9797e0d3551cb8f6dc9dc787318b23d8f54c9248
+size 8898324
diff --git a/modeling_deepseek.py b/modeling_deepseek.py
new file mode 100644
index 0000000..28d9ea2
--- /dev/null
+++ b/modeling_deepseek.py
@@ -0,0 +1,1848 @@
+# coding=utf-8
+# Copyright 2023 DeepSeek-AI and The HuggingFace Inc. team. All rights reserved.
+#
+# This code is based on EleutherAI's GPT-NeoX library and the GPT-NeoX
+# and OPT implementations in this library. It has been modified from its
+# original forms to accommodate minor architectural differences compared
+# to GPT-NeoX and OPT used by the Meta AI team that trained the model.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+""" PyTorch DeepSeek model."""
+import math
+import warnings
+from typing import List, Optional, Tuple, Union
+
+import torch
+import torch.nn.functional as F
+import torch.utils.checkpoint
+from torch import nn
+from torch.nn import BCEWithLogitsLoss, CrossEntropyLoss, MSELoss
+
+from transformers.activations import ACT2FN
+from transformers.cache_utils import Cache, DynamicCache
+from transformers.modeling_attn_mask_utils import (
+ AttentionMaskConverter,
+ _prepare_4d_attention_mask,
+ _prepare_4d_causal_attention_mask,
+)
+from transformers.modeling_outputs import (
+ BaseModelOutputWithPast,
+ CausalLMOutputWithPast,
+ SequenceClassifierOutputWithPast,
+)
+from transformers.modeling_utils import PreTrainedModel
+from transformers.pytorch_utils import (
+ ALL_LAYERNORM_LAYERS,
+ is_torch_greater_or_equal_than_1_13,
+)
+from transformers.utils import (
+ add_start_docstrings,
+ add_start_docstrings_to_model_forward,
+ is_flash_attn_2_available,
+ is_flash_attn_greater_or_equal_2_10,
+ logging,
+ replace_return_docstrings,
+)
+from transformers.utils.import_utils import is_torch_fx_available
+from .configuration_deepseek import DeepseekV3Config
+import torch.distributed as dist
+import numpy as np
+
+if is_flash_attn_2_available():
+ from flash_attn import flash_attn_func, flash_attn_varlen_func
+ from flash_attn.bert_padding import index_first_axis, pad_input, unpad_input # noqa
+
+
+# This makes `_prepare_4d_causal_attention_mask` a leaf function in the FX graph.
+# It means that the function will not be traced through and simply appear as a node in the graph.
+if is_torch_fx_available():
+ if not is_torch_greater_or_equal_than_1_13:
+ import torch.fx
+
+ _prepare_4d_causal_attention_mask = torch.fx.wrap(_prepare_4d_causal_attention_mask)
+
+
+logger = logging.get_logger(__name__)
+
+_CONFIG_FOR_DOC = "DeepseekV3Config"
+
+
+def _get_unpad_data(attention_mask):
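+    # Collapse a (batch, seq_len) padding mask into the flat-token metadata that
+    # flash-attn's varlen kernels expect: the indices of non-padding tokens,
+    # cumulative sequence lengths (cu_seqlens), and the longest sequence length.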
+ seqlens_in_batch = attention_mask.sum(dim=-1, dtype=torch.int32)
+ indices = torch.nonzero(attention_mask.flatten(), as_tuple=False).flatten()
+ max_seqlen_in_batch = seqlens_in_batch.max().item()
+ cu_seqlens = F.pad(
+        torch.cumsum(seqlens_in_batch, dim=0, dtype=torch.int32), (1, 0)
+ )
+ return (
+ indices,
+ cu_seqlens,
+ max_seqlen_in_batch,
+ )
+
+
+class DeepseekV3RMSNorm(nn.Module):
+ def __init__(self, hidden_size, eps=1e-6):
+ """
+ DeepseekV3RMSNorm is equivalent to T5LayerNorm
+ """
+ super().__init__()
+ self.weight = nn.Parameter(torch.ones(hidden_size))
+ self.variance_epsilon = eps
+
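+    # RMSNorm: y = weight * x / sqrt(mean(x^2) + eps). Unlike LayerNorm there is
+    # no mean subtraction; the reduction runs in float32 and the result is cast
+    # back to the input dtype.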
+ def forward(self, hidden_states):
+ input_dtype = hidden_states.dtype
+ hidden_states = hidden_states.to(torch.float32)
+ variance = hidden_states.pow(2).mean(-1, keepdim=True)
+ hidden_states = hidden_states * torch.rsqrt(variance + self.variance_epsilon)
+ return self.weight * hidden_states.to(input_dtype)
+
+
+ALL_LAYERNORM_LAYERS.append(DeepseekV3RMSNorm)
+
+
+class DeepseekV3RotaryEmbedding(nn.Module):
+ def __init__(self, dim, max_position_embeddings=2048, base=10000, device=None):
+ super().__init__()
+
+ self.dim = dim
+ self.max_position_embeddings = max_position_embeddings
+ self.base = base
+ inv_freq = 1.0 / (
+ self.base ** (torch.arange(0, self.dim, 2).float().to(device) / self.dim)
+ )
+ self.register_buffer("inv_freq", inv_freq, persistent=False)
+
+ # Build here to make `torch.jit.trace` work.
+ self._set_cos_sin_cache(
+ seq_len=max_position_embeddings,
+ device=self.inv_freq.device,
+ dtype=torch.get_default_dtype(),
+ )
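+        # Resetting the cached length forces `_set_cos_sin_cache` to run again on
+        # the first forward call, rebuilding the tables with the runtime device
+        # and dtype of the actual inputs.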
+ self.max_seq_len_cached = None
+
+ def _set_cos_sin_cache(self, seq_len, device, dtype):
+ self.max_seq_len_cached = seq_len
+ t = torch.arange(
+ self.max_seq_len_cached, device=device, dtype=self.inv_freq.dtype
+ )
+
+ freqs = torch.outer(t, self.inv_freq.to(t.device))
+        # Different from the paper: another permutation is used, but it yields the same calculation.
+ emb = torch.cat((freqs, freqs), dim=-1)
+ self.register_buffer("cos_cached", emb.cos().to(dtype), persistent=False)
+ self.register_buffer("sin_cached", emb.sin().to(dtype), persistent=False)
+
+ def forward(self, x, seq_len=None):
+ # x: [bs, num_attention_heads, seq_len, head_size]
+ if self.max_seq_len_cached is None or seq_len > self.max_seq_len_cached:
+ self._set_cos_sin_cache(seq_len=seq_len, device=x.device, dtype=x.dtype)
+
+ return (
+ self.cos_cached[:seq_len].to(dtype=x.dtype),
+ self.sin_cached[:seq_len].to(dtype=x.dtype),
+ )
+
+
+# Copied from transformers.models.llama.modeling_llama.LlamaLinearScalingRotaryEmbedding with Llama->DeepseekV3
+class DeepseekV3LinearScalingRotaryEmbedding(DeepseekV3RotaryEmbedding):
+ """DeepseekV3RotaryEmbedding extended with linear scaling. Credits to the Reddit user /u/kaiokendev"""
+
+ def __init__(
+ self,
+ dim,
+ max_position_embeddings=2048,
+ base=10000,
+ device=None,
+ scaling_factor=1.0,
+ ):
+ self.scaling_factor = scaling_factor
+ super().__init__(dim, max_position_embeddings, base, device)
+
+ def _set_cos_sin_cache(self, seq_len, device, dtype):
+ self.max_seq_len_cached = seq_len
+ t = torch.arange(
+ self.max_seq_len_cached, device=device, dtype=self.inv_freq.dtype
+ )
+ t = t / self.scaling_factor
+
+ freqs = torch.outer(t, self.inv_freq)
+        # Different from the paper: another permutation is used, but it yields the same calculation.
+ emb = torch.cat((freqs, freqs), dim=-1)
+ self.register_buffer("cos_cached", emb.cos().to(dtype), persistent=False)
+ self.register_buffer("sin_cached", emb.sin().to(dtype), persistent=False)
+
+
+# Copied from transformers.models.llama.modeling_llama.LlamaDynamicNTKScalingRotaryEmbedding with Llama->DeepseekV3
+class DeepseekV3DynamicNTKScalingRotaryEmbedding(DeepseekV3RotaryEmbedding):
+ """DeepseekV3RotaryEmbedding extended with Dynamic NTK scaling. Credits to the Reddit users /u/bloc97 and /u/emozilla"""
+
+ def __init__(
+ self,
+ dim,
+ max_position_embeddings=2048,
+ base=10000,
+ device=None,
+ scaling_factor=1.0,
+ ):
+ self.scaling_factor = scaling_factor
+ super().__init__(dim, max_position_embeddings, base, device)
+
+ def _set_cos_sin_cache(self, seq_len, device, dtype):
+ self.max_seq_len_cached = seq_len
+
+ if seq_len > self.max_position_embeddings:
+ base = self.base * (
+ (self.scaling_factor * seq_len / self.max_position_embeddings)
+ - (self.scaling_factor - 1)
+ ) ** (self.dim / (self.dim - 2))
+ inv_freq = 1.0 / (
+ base ** (torch.arange(0, self.dim, 2).float().to(device) / self.dim)
+ )
+ self.register_buffer("inv_freq", inv_freq, persistent=False)
+
+ t = torch.arange(
+ self.max_seq_len_cached, device=device, dtype=self.inv_freq.dtype
+ )
+
+ freqs = torch.outer(t, self.inv_freq)
+        # Different from the paper: another permutation is used, but it yields the same calculation.
+ emb = torch.cat((freqs, freqs), dim=-1)
+ self.register_buffer("cos_cached", emb.cos().to(dtype), persistent=False)
+ self.register_buffer("sin_cached", emb.sin().to(dtype), persistent=False)
+
+
+# Inverse dim formula to find dim based on number of rotations
+def yarn_find_correction_dim(
+ num_rotations, dim, base=10000, max_position_embeddings=2048
+):
+ return (dim * math.log(max_position_embeddings / (num_rotations * 2 * math.pi))) / (
+ 2 * math.log(base)
+ )
+
+
+# Find dim range bounds based on rotations
+def yarn_find_correction_range(
+ low_rot, high_rot, dim, base=10000, max_position_embeddings=2048
+):
+ low = math.floor(
+ yarn_find_correction_dim(low_rot, dim, base, max_position_embeddings)
+ )
+ high = math.ceil(
+ yarn_find_correction_dim(high_rot, dim, base, max_position_embeddings)
+ )
+ return max(low, 0), min(high, dim - 1) # Clamp values just in case
+
+
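+# Attention-magnitude rescaling used by YaRN: for scale > 1 the softmax scale
+# grows as 0.1 * mscale * ln(scale) + 1. For example (illustrative numbers),
+# scale=40 with mscale=1 gives 0.1 * ln(40) + 1 ~= 1.369.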
+def yarn_get_mscale(scale=1, mscale=1):
+ if scale <= 1:
+ return 1.0
+ return 0.1 * mscale * math.log(scale) + 1.0
+
+
+def yarn_linear_ramp_mask(min_val, max_val, dim):
+    if min_val == max_val:
+        max_val += 0.001  # Prevent singularity
+
+    linear_func = (torch.arange(dim, dtype=torch.float32) - min_val) / (max_val - min_val)
+    ramp_func = torch.clamp(linear_func, 0, 1)
+    return ramp_func
+
+
+class DeepseekV3YarnRotaryEmbedding(DeepseekV3RotaryEmbedding):
+
+ def __init__(
+ self,
+ dim,
+ max_position_embeddings=2048,
+ base=10000,
+ device=None,
+ scaling_factor=1.0,
+ original_max_position_embeddings=4096,
+ beta_fast=32,
+ beta_slow=1,
+ mscale=1,
+ mscale_all_dim=0,
+ ):
+ self.scaling_factor = scaling_factor
+ self.original_max_position_embeddings = original_max_position_embeddings
+ self.beta_fast = beta_fast
+ self.beta_slow = beta_slow
+ self.mscale = mscale
+ self.mscale_all_dim = mscale_all_dim
+ super().__init__(dim, max_position_embeddings, base, device)
+
+ def _set_cos_sin_cache(self, seq_len, device, dtype):
+ self.max_seq_len_cached = seq_len
+ dim = self.dim
+
+ freq_extra = 1.0 / (
+ self.base
+ ** (torch.arange(0, dim, 2, dtype=torch.float32, device=device) / dim)
+ )
+ freq_inter = 1.0 / (
+ self.scaling_factor
+ * self.base
+ ** (torch.arange(0, dim, 2, dtype=torch.float32, device=device) / dim)
+ )
+
+ low, high = yarn_find_correction_range(
+ self.beta_fast,
+ self.beta_slow,
+ dim,
+ self.base,
+ self.original_max_position_embeddings,
+ )
+ inv_freq_mask = 1.0 - yarn_linear_ramp_mask(low, high, dim // 2).to(
+ device=device, dtype=torch.float32
+ )
+ inv_freq = freq_inter * (1 - inv_freq_mask) + freq_extra * inv_freq_mask
+ self.register_buffer("inv_freq", inv_freq, persistent=False)
+
+ t = torch.arange(seq_len, device=device, dtype=torch.float32)
+
+ freqs = torch.outer(t, inv_freq)
+
+ _mscale = float(
+ yarn_get_mscale(self.scaling_factor, self.mscale)
+ / yarn_get_mscale(self.scaling_factor, self.mscale_all_dim)
+ )
+
+ emb = torch.cat((freqs, freqs), dim=-1)
+ self.register_buffer(
+ "cos_cached", (emb.cos() * _mscale).to(dtype), persistent=False
+ )
+ self.register_buffer(
+ "sin_cached", (emb.sin() * _mscale).to(dtype), persistent=False
+ )
+
+
+# Copied from transformers.models.llama.modeling_llama.rotate_half
+def rotate_half(x):
+ """Rotates half the hidden dims of the input."""
+ x1 = x[..., : x.shape[-1] // 2]
+ x2 = x[..., x.shape[-1] // 2 :]
+ return torch.cat((-x2, x1), dim=-1)
+
+
+# Copied from transformers.models.llama.modeling_llama.apply_rotary_pos_emb
+def apply_rotary_pos_emb(q, k, cos, sin, position_ids, unsqueeze_dim=1):
+ """Applies Rotary Position Embedding to the query and key tensors.
+
+ Args:
+ q (`torch.Tensor`): The query tensor.
+ k (`torch.Tensor`): The key tensor.
+ cos (`torch.Tensor`): The cosine part of the rotary embedding.
+ sin (`torch.Tensor`): The sine part of the rotary embedding.
+ position_ids (`torch.Tensor`):
+ The position indices of the tokens corresponding to the query and key tensors. For example, this can be
+ used to pass offsetted position ids when working with a KV-cache.
+ unsqueeze_dim (`int`, *optional*, defaults to 1):
+ The 'unsqueeze_dim' argument specifies the dimension along which to unsqueeze cos[position_ids] and
+ sin[position_ids] so that they can be properly broadcasted to the dimensions of q and k. For example, note
+ that cos[position_ids] and sin[position_ids] have the shape [batch_size, seq_len, head_dim]. Then, if q and
+ k have the shape [batch_size, heads, seq_len, head_dim], then setting unsqueeze_dim=1 makes
+ cos[position_ids] and sin[position_ids] broadcastable to the shapes of q and k. Similarly, if q and k have
+ the shape [batch_size, seq_len, heads, head_dim], then set unsqueeze_dim=2.
+ Returns:
+ `tuple(torch.Tensor)` comprising of the query and key tensors rotated using the Rotary Position Embedding.
+ """
+ cos = cos[position_ids].unsqueeze(unsqueeze_dim)
+ sin = sin[position_ids].unsqueeze(unsqueeze_dim)
+
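+    # DeepSeek stores the rotary dims of q and k interleaved as (x0, y0, x1, y1, ...);
+    # the view/transpose/reshape below regroups them into (x0, x1, ..., y0, y1, ...)
+    # so that rotate_half's half-split convention applies the intended rotation.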
+ b, h, s, d = q.shape
+ q = q.view(b, h, s, d // 2, 2).transpose(4, 3).reshape(b, h, s, d)
+
+ b, h, s, d = k.shape
+ k = k.view(b, h, s, d // 2, 2).transpose(4, 3).reshape(b, h, s, d)
+
+ q_embed = (q * cos) + (rotate_half(q) * sin)
+ k_embed = (k * cos) + (rotate_half(k) * sin)
+ return q_embed, k_embed
+
+
+class DeepseekV3MLP(nn.Module):
+ def __init__(self, config, hidden_size=None, intermediate_size=None):
+ super().__init__()
+ self.config = config
+ self.hidden_size = config.hidden_size if hidden_size is None else hidden_size
+ self.intermediate_size = (
+ config.intermediate_size if intermediate_size is None else intermediate_size
+ )
+
+ self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False)
+ self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False)
+ self.down_proj = nn.Linear(self.intermediate_size, self.hidden_size, bias=False)
+ self.act_fn = ACT2FN[config.hidden_act]
+
+ def forward(self, x):
+ down_proj = self.down_proj(self.act_fn(self.gate_proj(x)) * self.up_proj(x))
+ return down_proj
+
+
+class MoEGate(nn.Module):
+ def __init__(self, config):
+ super().__init__()
+ self.config = config
+ self.top_k = config.num_experts_per_tok
+ self.n_routed_experts = config.n_routed_experts
+ self.routed_scaling_factor = config.routed_scaling_factor
+ self.scoring_func = config.scoring_func
+ self.topk_method = config.topk_method
+ self.n_group = config.n_group
+ self.topk_group = config.topk_group
+
+ # topk selection algorithm
+ self.norm_topk_prob = config.norm_topk_prob
+ self.gating_dim = config.hidden_size
+ self.weight = nn.Parameter(
+ torch.empty((self.n_routed_experts, self.gating_dim))
+ )
+ if self.topk_method == "noaux_tc":
+ self.e_score_correction_bias = nn.Parameter(
+ torch.empty((self.n_routed_experts))
+ )
+ self.reset_parameters()
+
+ def reset_parameters(self) -> None:
+ import torch.nn.init as init
+
+ init.kaiming_uniform_(self.weight, a=math.sqrt(5))
+
+ def forward(self, hidden_states):
+ bsz, seq_len, h = hidden_states.shape
+ ### compute gating score
+ hidden_states = hidden_states.view(-1, h)
+ logits = F.linear(
+ hidden_states.type(torch.float32), self.weight.type(torch.float32), None
+ )
+ if self.scoring_func == "sigmoid":
+ scores = logits.sigmoid()
+ else:
+            raise NotImplementedError(
+                f"Unsupported scoring function for MoE gating: {self.scoring_func}"
+            )
+
+ ### select top-k experts
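+        # "noaux_tc" (aux-loss-free) routing: experts are partitioned into n_group
+        # groups; each token keeps the topk_group groups whose top-2 bias-corrected
+        # scores sum highest, then picks its top_k experts within them. The
+        # correction bias steers selection only - the gate weights use raw scores.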
+ if self.topk_method == "noaux_tc":
+ assert not self.training
+ scores_for_choice = scores.view(bsz * seq_len, -1) + self.e_score_correction_bias.unsqueeze(0)
+            group_scores = (
+                scores_for_choice.view(bsz * seq_len, self.n_group, -1).topk(2, dim=-1)[0].sum(dim=-1)
+            )  # [n, n_group]
+            group_idx = torch.topk(
+                group_scores, k=self.topk_group, dim=-1, sorted=False
+            )[1]  # [n, top_k_group]
+ group_mask = torch.zeros_like(group_scores) # [n, n_group]
+ group_mask.scatter_(1, group_idx, 1) # [n, n_group]
+ score_mask = (
+ group_mask.unsqueeze(-1)
+ .expand(
+ bsz * seq_len, self.n_group, self.n_routed_experts // self.n_group
+ )
+ .reshape(bsz * seq_len, -1)
+ ) # [n, e]
+ tmp_scores = scores_for_choice.masked_fill(~score_mask.bool(), float("-inf")) # [n, e]
+ _, topk_idx = torch.topk(
+ tmp_scores, k=self.top_k, dim=-1, sorted=False
+ )
+ topk_weight = scores.gather(1, topk_idx)
+        else:
+            raise NotImplementedError(
+                f"Unsupported top-k method for MoE gating: {self.topk_method}"
+            )
+
+        ### normalize gate weights to sum to 1
+ if self.top_k > 1 and self.norm_topk_prob:
+ denominator = topk_weight.sum(dim=-1, keepdim=True) + 1e-20
+ topk_weight = topk_weight / denominator
+ topk_weight = topk_weight * self.routed_scaling_factor # must multiply the scaling factor
+
+ return topk_idx, topk_weight
+
+class DeepseekV3MoE(nn.Module):
+ """
+ A mixed expert module containing shared experts.
+ """
+
+ def __init__(self, config):
+ super().__init__()
+ self.config = config
+ self.num_experts_per_tok = config.num_experts_per_tok
+
+ if hasattr(config, "ep_size") and config.ep_size > 1:
+ assert config.ep_size == dist.get_world_size()
+ self.ep_size = config.ep_size
+ self.experts_per_rank = config.n_routed_experts // config.ep_size
+ self.ep_rank = dist.get_rank()
+ self.experts = nn.ModuleList(
+ [
+ (
+ DeepseekV3MLP(
+ config, intermediate_size=config.moe_intermediate_size
+ )
+ if i >= self.ep_rank * self.experts_per_rank
+ and i < (self.ep_rank + 1) * self.experts_per_rank
+ else None
+ )
+ for i in range(config.n_routed_experts)
+ ]
+ )
+ else:
+ self.ep_size = 1
+ self.experts_per_rank = config.n_routed_experts
+ self.ep_rank = 0
+ self.experts = nn.ModuleList(
+ [
+ DeepseekV3MLP(
+ config, intermediate_size=config.moe_intermediate_size
+ )
+ for i in range(config.n_routed_experts)
+ ]
+ )
+ self.gate = MoEGate(config)
+ if config.n_shared_experts is not None:
+ intermediate_size = config.moe_intermediate_size * config.n_shared_experts
+ self.shared_experts = DeepseekV3MLP(
+ config=config, intermediate_size=intermediate_size
+ )
+
+ def forward(self, hidden_states):
+ identity = hidden_states
+ orig_shape = hidden_states.shape
+ topk_idx, topk_weight = self.gate(hidden_states)
+ hidden_states = hidden_states.view(-1, hidden_states.shape[-1])
+ flat_topk_idx = topk_idx.view(-1)
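+        # Only the inference path is implemented here; there is no training-time
+        # dispatch, and `flat_topk_idx` is unused in the code below.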
+ if not self.training:
+ y = self.moe_infer(hidden_states, topk_idx, topk_weight).view(*orig_shape)
+ if self.config.n_shared_experts is not None:
+ y = y + self.shared_experts(identity)
+ return y
+
+ @torch.no_grad()
+ def moe_infer(self, x, topk_ids, topk_weight):
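+        # Inference-time dispatch: sort tokens by their assigned expert id so each
+        # expert runs once over a contiguous slice, then scatter the outputs back
+        # to the original token order and combine them with topk_weight.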
+ cnts = topk_ids.new_zeros((topk_ids.shape[0], len(self.experts)))
+ cnts.scatter_(1, topk_ids, 1)
+ tokens_per_expert = cnts.sum(dim=0)
+ idxs = topk_ids.view(-1).argsort()
+ sorted_tokens = x[idxs // topk_ids.shape[1]]
+ sorted_tokens_shape = sorted_tokens.shape
+ if self.ep_size > 1:
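+            # Expert parallelism: first exchange per-expert token counts across
+            # ranks (all_to_all_single), then exchange the tokens themselves so
+            # each rank receives exactly the tokens routed to its local experts.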
+ tokens_per_ep_rank = tokens_per_expert.view(self.ep_size, -1).sum(dim=1)
+ tokens_per_expert_group = tokens_per_expert.new_empty(
+ tokens_per_expert.shape[0]
+ )
+ dist.all_to_all_single(tokens_per_expert_group, tokens_per_expert)
+ output_splits = (
+ tokens_per_expert_group.view(self.ep_size, -1)
+ .sum(1)
+ .cpu()
+ .numpy()
+ .tolist()
+ )
+ gathered_tokens = sorted_tokens.new_empty(
+ tokens_per_expert_group.sum(dim=0).cpu().item(), sorted_tokens.shape[1]
+ )
+ input_split_sizes = tokens_per_ep_rank.cpu().numpy().tolist()
+ dist.all_to_all(
+ list(gathered_tokens.split(output_splits)),
+ list(sorted_tokens.split(input_split_sizes)),
+ )
+ tokens_per_expert_post_gather = tokens_per_expert_group.view(
+ self.ep_size, self.experts_per_rank
+ ).sum(dim=0)
+            gathered_idxs = np.zeros(shape=(gathered_tokens.shape[0],), dtype=np.int32)
+            s = 0
+            for i, k in enumerate(tokens_per_expert_group.cpu().numpy()):
+                gathered_idxs[s : s + k] = i % self.experts_per_rank
+                s += k
+            gathered_idxs = gathered_idxs.argsort()
+            sorted_tokens = gathered_tokens[gathered_idxs]
+ tokens_per_expert = tokens_per_expert_post_gather
+ tokens_per_expert = tokens_per_expert.cpu().numpy()
+
+ outputs = []
+ start_idx = 0
+ for i, num_tokens in enumerate(tokens_per_expert):
+ end_idx = start_idx + num_tokens
+ if num_tokens == 0:
+ continue
+ expert = self.experts[i + self.ep_rank * self.experts_per_rank]
+ tokens_for_this_expert = sorted_tokens[start_idx:end_idx]
+ expert_out = expert(tokens_for_this_expert)
+ outputs.append(expert_out)
+ start_idx = end_idx
+
+ outs = torch.cat(outputs, dim=0) if len(outputs) else sorted_tokens.new_empty(0)
+ if self.ep_size > 1:
+ new_x = torch.empty_like(outs)
+            new_x[gathered_idxs] = outs
+ gathered_tokens = new_x.new_empty(*sorted_tokens_shape)
+ dist.all_to_all(
+ list(gathered_tokens.split(input_split_sizes)),
+ list(new_x.split(output_splits)),
+ )
+ outs = gathered_tokens
+
+ new_x = torch.empty_like(outs)
+ new_x[idxs] = outs
+ final_out = (
+ new_x.view(*topk_ids.shape, -1)
+ .type(topk_weight.dtype)
+ .mul_(topk_weight.unsqueeze(dim=-1))
+ .sum(dim=1)
+ .type(new_x.dtype)
+ )
+ return final_out
+
+
+# Copied from transformers.models.llama.modeling_llama.repeat_kv
+def repeat_kv(hidden_states: torch.Tensor, n_rep: int) -> torch.Tensor:
+ """
+ This is the equivalent of torch.repeat_interleave(x, dim=1, repeats=n_rep). The hidden states go from (batch,
+ num_key_value_heads, seqlen, head_dim) to (batch, num_attention_heads, seqlen, head_dim)
+ """
+ batch, num_key_value_heads, slen, head_dim = hidden_states.shape
+ if n_rep == 1:
+ return hidden_states
+ hidden_states = hidden_states[:, :, None, :, :].expand(
+ batch, num_key_value_heads, n_rep, slen, head_dim
+ )
+ return hidden_states.reshape(batch, num_key_value_heads * n_rep, slen, head_dim)
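+# For example (illustrative shapes): with n_rep=2, a KV tensor of shape
+# (1, 2, 16, 64) becomes (1, 4, 16, 64), duplicating each KV head in place.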
+
+
+# Copied from transformers.models.llama.modeling_llama.LlamaAttention with Llama->DeepseekV3
+class DeepseekV3Attention(nn.Module):
+ """Multi-headed attention from 'Attention Is All You Need' paper"""
+
+ def __init__(self, config: DeepseekV3Config, layer_idx: Optional[int] = None):
+ super().__init__()
+ self.config = config
+ self.layer_idx = layer_idx
+ if layer_idx is None:
+ logger.warning_once(
+ f"Instantiating {self.__class__.__name__} without passing `layer_idx` is not recommended and will "
+ "to errors during the forward call, if caching is used. Please make sure to provide a `layer_idx` "
+ "when creating this class."
+ )
+
+ self.attention_dropout = config.attention_dropout
+ self.hidden_size = config.hidden_size
+ self.num_heads = config.num_attention_heads
+
+ self.max_position_embeddings = config.max_position_embeddings
+ self.rope_theta = config.rope_theta
+ self.q_lora_rank = config.q_lora_rank
+ self.qk_rope_head_dim = config.qk_rope_head_dim
+ self.kv_lora_rank = config.kv_lora_rank
+ self.v_head_dim = config.v_head_dim
+ self.qk_nope_head_dim = config.qk_nope_head_dim
+ self.q_head_dim = config.qk_nope_head_dim + config.qk_rope_head_dim
+
+ self.is_causal = True
+
+ if self.q_lora_rank is None:
+ self.q_proj = nn.Linear(
+ self.hidden_size, self.num_heads * self.q_head_dim, bias=False
+ )
+ else:
+ self.q_a_proj = nn.Linear(
+ self.hidden_size, config.q_lora_rank, bias=config.attention_bias
+ )
+ self.q_a_layernorm = DeepseekV3RMSNorm(config.q_lora_rank)
+ self.q_b_proj = nn.Linear(
+ config.q_lora_rank, self.num_heads * self.q_head_dim, bias=False
+ )
+
+ self.kv_a_proj_with_mqa = nn.Linear(
+ self.hidden_size,
+ config.kv_lora_rank + config.qk_rope_head_dim,
+ bias=config.attention_bias,
+ )
+ self.kv_a_layernorm = DeepseekV3RMSNorm(config.kv_lora_rank)
+ self.kv_b_proj = nn.Linear(
+ config.kv_lora_rank,
+ self.num_heads
+ * (self.q_head_dim - self.qk_rope_head_dim + self.v_head_dim),
+ bias=False,
+ )
+
+ self.o_proj = nn.Linear(
+ self.num_heads * self.v_head_dim,
+ self.hidden_size,
+ bias=config.attention_bias,
+ )
+ self._init_rope()
+
+ self.softmax_scale = self.q_head_dim ** (-0.5)
+ if self.config.rope_scaling is not None:
+ mscale_all_dim = self.config.rope_scaling.get("mscale_all_dim", 0)
+ scaling_factor = self.config.rope_scaling["factor"]
+ if mscale_all_dim:
+ mscale = yarn_get_mscale(scaling_factor, mscale_all_dim)
+ self.softmax_scale = self.softmax_scale * mscale * mscale
+
+ def _init_rope(self):
+ if self.config.rope_scaling is None:
+ self.rotary_emb = DeepseekV3RotaryEmbedding(
+ self.qk_rope_head_dim,
+ max_position_embeddings=self.max_position_embeddings,
+ base=self.rope_theta,
+ )
+ else:
+ scaling_type = self.config.rope_scaling["type"]
+ scaling_factor = self.config.rope_scaling["factor"]
+ if scaling_type == "linear":
+ self.rotary_emb = DeepseekV3LinearScalingRotaryEmbedding(
+ self.qk_rope_head_dim,
+ max_position_embeddings=self.max_position_embeddings,
+ scaling_factor=scaling_factor,
+ base=self.rope_theta,
+ )
+ elif scaling_type == "dynamic":
+ self.rotary_emb = DeepseekV3DynamicNTKScalingRotaryEmbedding(
+ self.qk_rope_head_dim,
+ max_position_embeddings=self.max_position_embeddings,
+ scaling_factor=scaling_factor,
+ base=self.rope_theta,
+ )
+ elif scaling_type == "yarn":
+ kwargs = {
+ key: self.config.rope_scaling[key]
+ for key in [
+ "original_max_position_embeddings",
+ "beta_fast",
+ "beta_slow",
+ "mscale",
+ "mscale_all_dim",
+ ]
+ if key in self.config.rope_scaling
+ }
+ self.rotary_emb = DeepseekV3YarnRotaryEmbedding(
+ self.qk_rope_head_dim,
+ max_position_embeddings=self.max_position_embeddings,
+ scaling_factor=scaling_factor,
+ base=self.rope_theta,
+ **kwargs,
+ )
+ else:
+ raise ValueError(f"Unknown RoPE scaling type {scaling_type}")
+
+ def _shape(self, tensor: torch.Tensor, seq_len: int, bsz: int):
+ return (
+ tensor.view(bsz, seq_len, self.num_heads, self.v_head_dim)
+ .transpose(1, 2)
+ .contiguous()
+ )
+
+ def forward(
+ self,
+ hidden_states: torch.Tensor,
+ attention_mask: Optional[torch.Tensor] = None,
+ position_ids: Optional[torch.LongTensor] = None,
+ past_key_value: Optional[Cache] = None,
+ output_attentions: bool = False,
+ use_cache: bool = False,
+ **kwargs,
+ ) -> Tuple[torch.Tensor, Optional[torch.Tensor], Optional[Tuple[torch.Tensor]]]:
+ if "padding_mask" in kwargs:
+ warnings.warn(
+ "Passing `padding_mask` is deprecated and will be removed in v4.37. Please make sure use `attention_mask` instead.`"
+ )
+ bsz, q_len, _ = hidden_states.size()
+
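+        # Multi-head Latent Attention: each query/key head concatenates a
+        # non-rotary part (qk_nope_head_dim) with a rotary part (qk_rope_head_dim),
+        # while keys/values are reconstructed from a compressed kv_lora_rank latent.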
+ if self.q_lora_rank is None:
+ q = self.q_proj(hidden_states)
+ else:
+ q = self.q_b_proj(self.q_a_layernorm(self.q_a_proj(hidden_states)))
+ q = q.view(bsz, q_len, self.num_heads, self.q_head_dim).transpose(1, 2)
+ q_nope, q_pe = torch.split(
+ q, [self.qk_nope_head_dim, self.qk_rope_head_dim], dim=-1
+ )
+
+ compressed_kv = self.kv_a_proj_with_mqa(hidden_states)
+ compressed_kv, k_pe = torch.split(
+ compressed_kv, [self.kv_lora_rank, self.qk_rope_head_dim], dim=-1
+ )
+ k_pe = k_pe.view(bsz, q_len, 1, self.qk_rope_head_dim).transpose(1, 2)
+ kv = (
+ self.kv_b_proj(self.kv_a_layernorm(compressed_kv))
+ .view(bsz, q_len, self.num_heads, self.qk_nope_head_dim + self.v_head_dim)
+ .transpose(1, 2)
+ )
+
+ k_nope, value_states = torch.split(
+ kv, [self.qk_nope_head_dim, self.v_head_dim], dim=-1
+ )
+ kv_seq_len = value_states.shape[-2]
+ if past_key_value is not None:
+ if self.layer_idx is None:
+ raise ValueError(
+ f"The cache structure has changed since version v4.36. If you are using {self.__class__.__name__} "
+ "for auto-regressive decoding with k/v caching, please make sure to initialize the attention class "
+ "with a layer index."
+ )
+ kv_seq_len += past_key_value.get_usable_length(kv_seq_len, self.layer_idx)
+ cos, sin = self.rotary_emb(value_states, seq_len=kv_seq_len)
+
+ q_pe, k_pe = apply_rotary_pos_emb(q_pe, k_pe, cos, sin, position_ids)
+
+ query_states = k_pe.new_empty(bsz, self.num_heads, q_len, self.q_head_dim)
+ query_states[:, :, :, : self.qk_nope_head_dim] = q_nope
+ query_states[:, :, :, self.qk_nope_head_dim :] = q_pe
+
+ key_states = k_pe.new_empty(bsz, self.num_heads, q_len, self.q_head_dim)
+ key_states[:, :, :, : self.qk_nope_head_dim] = k_nope
+ key_states[:, :, :, self.qk_nope_head_dim :] = k_pe
+ if past_key_value is not None:
+ cache_kwargs = {"sin": sin, "cos": cos} # Specific to RoPE models
+ key_states, value_states = past_key_value.update(
+ key_states, value_states, self.layer_idx, cache_kwargs
+ )
+
+ attn_weights = (
+ torch.matmul(query_states, key_states.transpose(2, 3)) * self.softmax_scale
+ )
+
+ if attn_weights.size() != (bsz, self.num_heads, q_len, kv_seq_len):
+ raise ValueError(
+ f"Attention weights should be of size {(bsz, self.num_heads, q_len, kv_seq_len)}, but is"
+ f" {attn_weights.size()}"
+ )
+        if attention_mask is not None:
+ if attention_mask.size() != (bsz, 1, q_len, kv_seq_len):
+ raise ValueError(
+ f"Attention mask should be of size {(bsz, 1, q_len, kv_seq_len)}, but is {attention_mask.size()}"
+ )
+ attn_weights = attn_weights + attention_mask
+
+ # upcast attention to fp32
+ attn_weights = nn.functional.softmax(
+ attn_weights, dim=-1, dtype=torch.float32
+ ).to(query_states.dtype)
+ attn_weights = nn.functional.dropout(
+ attn_weights, p=self.attention_dropout, training=self.training
+ )
+ attn_output = torch.matmul(attn_weights, value_states)
+
+ if attn_output.size() != (bsz, self.num_heads, q_len, self.v_head_dim):
+ raise ValueError(
+ f"`attn_output` should be of size {(bsz, self.num_heads, q_len, self.v_head_dim)}, but is"
+ f" {attn_output.size()}"
+ )
+
+ attn_output = attn_output.transpose(1, 2).contiguous()
+
+ attn_output = attn_output.reshape(bsz, q_len, self.num_heads * self.v_head_dim)
+
+ attn_output = self.o_proj(attn_output)
+
+ if not output_attentions:
+ attn_weights = None
+
+ return attn_output, attn_weights, past_key_value
+
+
+# Copied from transformers.models.llama.modeling_llama.LlamaFlashAttention2 with Llama->DeepseekV3
+class DeepseekV3FlashAttention2(DeepseekV3Attention):
+ """
+    DeepseekV3 flash attention module. This module inherits from `DeepseekV3Attention`, as the weights of the module stay
+    untouched. The only required change is in the forward pass, where it needs to correctly call the public API of
+    flash attention and deal with padding tokens if the input contains any.
+ """
+
+ def __init__(self, *args, **kwargs):
+ super().__init__(*args, **kwargs)
+
+ # TODO: Should be removed once Flash Attention for RoCm is bumped to 2.1.
+ # flash_attn<2.1 generates top-left aligned causal mask, while what is needed here is bottom-right alignement, that was made default for flash_attn>=2.1. This attribute is used to handle this difference. Reference: https://github.com/Dao-AILab/flash-attention/releases/tag/v2.1.0.
+ # Beware that with flash_attn<2.1, using q_seqlen != k_seqlen (except for the case q_seqlen == 1) produces a wrong mask (top-left).
+ self._flash_attn_uses_top_left_mask = not is_flash_attn_greater_or_equal_2_10()
+
+ def forward(
+ self,
+ hidden_states: torch.Tensor,
+ attention_mask: Optional[torch.LongTensor] = None,
+ position_ids: Optional[torch.LongTensor] = None,
+ past_key_value: Optional[Cache] = None,
+ output_attentions: bool = False,
+ use_cache: bool = False,
+ **kwargs,
+ ) -> Tuple[torch.Tensor, Optional[torch.Tensor], Optional[Tuple[torch.Tensor]]]:
+ # DeepseekV3FlashAttention2 attention does not support output_attentions
+ if "padding_mask" in kwargs:
+ warnings.warn(
+ "Passing `padding_mask` is deprecated and will be removed in v4.37. Please make sure use `attention_mask` instead.`"
+ )
+
+ # overwrite attention_mask with padding_mask
+ attention_mask = kwargs.pop("padding_mask")
+
+ output_attentions = False
+
+ bsz, q_len, _ = hidden_states.size()
+
+ if self.q_lora_rank is None:
+ q = self.q_proj(hidden_states)
+ else:
+ q = self.q_b_proj(self.q_a_layernorm(self.q_a_proj(hidden_states)))
+ q = q.view(bsz, q_len, self.num_heads, self.q_head_dim).transpose(1, 2)
+ q_nope, q_pe = torch.split(
+ q, [self.qk_nope_head_dim, self.qk_rope_head_dim], dim=-1
+ )
+
+ # Flash attention requires the input to have the shape
+        # batch_size x seq_length x num_heads x head_dim,
+ # therefore we just need to keep the original shape
+ compressed_kv = self.kv_a_proj_with_mqa(hidden_states)
+ compressed_kv, k_pe = torch.split(
+ compressed_kv, [self.kv_lora_rank, self.qk_rope_head_dim], dim=-1
+ )
+ k_pe = k_pe.view(bsz, q_len, 1, self.qk_rope_head_dim).transpose(1, 2)
+ kv = (
+ self.kv_b_proj(self.kv_a_layernorm(compressed_kv))
+ .view(bsz, q_len, self.num_heads, self.qk_nope_head_dim + self.v_head_dim)
+ .transpose(1, 2)
+ )
+
+ k_nope, value_states = torch.split(
+ kv, [self.qk_nope_head_dim, self.v_head_dim], dim=-1
+ )
+        kv_seq_len = value_states.shape[-2]
+ if past_key_value is not None:
+ kv_seq_len += past_key_value.get_usable_length(kv_seq_len, self.layer_idx)
+
+ cos, sin = self.rotary_emb(value_states, seq_len=kv_seq_len)
+ q_pe, k_pe = apply_rotary_pos_emb(q_pe, k_pe, cos, sin, position_ids)
+
+ query_states = k_pe.new_empty(bsz, self.num_heads, q_len, self.q_head_dim)
+ query_states[:, :, :, : self.qk_nope_head_dim] = q_nope
+ query_states[:, :, :, self.qk_nope_head_dim :] = q_pe
+
+ key_states = k_pe.new_empty(bsz, self.num_heads, q_len, self.q_head_dim)
+ key_states[:, :, :, : self.qk_nope_head_dim] = k_nope
+ key_states[:, :, :, self.qk_nope_head_dim :] = k_pe
+
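+        # flash-attn kernels require q, k and v to share one head_dim, so the value
+        # heads are zero-padded up to q_head_dim here; the padding is sliced off the
+        # attention output further below.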
+ if self.q_head_dim != self.v_head_dim:
+ value_states = F.pad(value_states, [0, self.q_head_dim - self.v_head_dim])
+
+ if past_key_value is not None:
+ cache_kwargs = {"sin": sin, "cos": cos} # Specific to RoPE models
+ key_states, value_states = past_key_value.update(
+ key_states, value_states, self.layer_idx, cache_kwargs
+ )
+
+        # TODO: These transposes are quite inefficient but Flash Attention requires the layout [batch_size, sequence_length, num_heads, head_dim]. We would need to refactor the KV cache
+ # to be able to avoid many of these transpose/reshape/view.
+ query_states = query_states.transpose(1, 2)
+ key_states = key_states.transpose(1, 2)
+ value_states = value_states.transpose(1, 2)
+
+ dropout_rate = self.attention_dropout if self.training else 0.0
+
+        # In PEFT, we usually cast the layer norms to float32 for training stability,
+        # so the input hidden states get silently cast to float32. Hence, we need to
+        # cast them back to the correct dtype just to be sure everything works as expected.
+        # This might slow down training & inference, so it is recommended not to cast the
+        # LayerNorms to fp32. (DeepseekV3RMSNorm handles it correctly.)
+
+ input_dtype = query_states.dtype
+ if input_dtype == torch.float32:
+ # Handle the case where the model is quantized
+ if hasattr(self.config, "_pre_quantization_dtype"):
+ target_dtype = self.config._pre_quantization_dtype
+ elif torch.is_autocast_enabled():
+ target_dtype = torch.get_autocast_gpu_dtype()
+ else:
+ target_dtype = (
+ self.q_proj.weight.dtype
+ if self.q_lora_rank is None
+ else self.q_a_proj.weight.dtype
+ )
+
+ logger.warning_once(
+ f"The input hidden states seems to be silently casted in float32, this might be related to"
+ f" the fact you have upcasted embedding or layer norm layers in float32. We will cast back the input in"
+ f" {target_dtype}."
+ )
+
+ query_states = query_states.to(target_dtype)
+ key_states = key_states.to(target_dtype)
+ value_states = value_states.to(target_dtype)
+
+ attn_output = self._flash_attention_forward(
+ query_states,
+ key_states,
+ value_states,
+ attention_mask,
+ q_len,
+ dropout=dropout_rate,
+ softmax_scale=self.softmax_scale,
+ )
+ if self.q_head_dim != self.v_head_dim:
+ attn_output = attn_output[:, :, :, : self.v_head_dim]
+
+ attn_output = attn_output.reshape(
+ bsz, q_len, self.num_heads * self.v_head_dim
+ ).contiguous()
+ attn_output = self.o_proj(attn_output)
+
+ if not output_attentions:
+ attn_weights = None
+
+ return attn_output, attn_weights, past_key_value
+
+ def _flash_attention_forward(
+ self,
+ query_states,
+ key_states,
+ value_states,
+ attention_mask,
+ query_length,
+ dropout=0.0,
+ softmax_scale=None,
+ ):
+ """
+        Calls the forward method of Flash Attention - if the input hidden states contain at least one padding token,
+        it first unpads the input, then computes the attention scores, and finally pads the output back.
+
+ Args:
+ query_states (`torch.Tensor`):
+ Input query states to be passed to Flash Attention API
+ key_states (`torch.Tensor`):
+ Input key states to be passed to Flash Attention API
+ value_states (`torch.Tensor`):
+ Input value states to be passed to Flash Attention API
+ attention_mask (`torch.Tensor`):
+ The padding mask - corresponds to a tensor of size `(batch_size, seq_len)` where 0 stands for the
+ position of padding tokens and 1 for the position of non-padding tokens.
+            dropout (`float`, *optional*):
+                Attention dropout
+            softmax_scale (`float`, *optional*):
+                The scaling of QK^T before applying softmax. Defaults to 1 / sqrt(head_dim).
+ """
+ if not self._flash_attn_uses_top_left_mask:
+ causal = self.is_causal
+ else:
+ # TODO: Remove the `query_length != 1` check once Flash Attention for RoCm is bumped to 2.1. For details, please see the comment in DeepseekV3FlashAttention2 __init__.
+ causal = self.is_causal and query_length != 1
+
+ # Contains at least one padding token in the sequence
+ if attention_mask is not None:
+ batch_size = query_states.shape[0]
+ (
+ query_states,
+ key_states,
+ value_states,
+ indices_q,
+ cu_seq_lens,
+ max_seq_lens,
+ ) = self._upad_input(
+ query_states, key_states, value_states, attention_mask, query_length
+ )
+
+ cu_seqlens_q, cu_seqlens_k = cu_seq_lens
+ max_seqlen_in_batch_q, max_seqlen_in_batch_k = max_seq_lens
+
+ attn_output_unpad = flash_attn_varlen_func(
+ query_states,
+ key_states,
+ value_states,
+ cu_seqlens_q=cu_seqlens_q,
+ cu_seqlens_k=cu_seqlens_k,
+ max_seqlen_q=max_seqlen_in_batch_q,
+ max_seqlen_k=max_seqlen_in_batch_k,
+ dropout_p=dropout,
+ softmax_scale=softmax_scale,
+ causal=causal,
+ )
+
+ attn_output = pad_input(
+ attn_output_unpad, indices_q, batch_size, query_length
+ )
+ else:
+ attn_output = flash_attn_func(
+ query_states,
+ key_states,
+ value_states,
+ dropout,
+ softmax_scale=softmax_scale,
+ causal=causal,
+ )
+
+ return attn_output
+
+ def _upad_input(
+ self, query_layer, key_layer, value_layer, attention_mask, query_length
+ ):
+ indices_k, cu_seqlens_k, max_seqlen_in_batch_k = _get_unpad_data(attention_mask)
+ batch_size, kv_seq_len, num_key_value_heads, head_dim = key_layer.shape
+
+ key_layer = index_first_axis(
+ key_layer.reshape(batch_size * kv_seq_len, num_key_value_heads, head_dim),
+ indices_k,
+ )
+ value_layer = index_first_axis(
+ value_layer.reshape(batch_size * kv_seq_len, num_key_value_heads, head_dim),
+ indices_k,
+ )
+ if query_length == kv_seq_len:
+ query_layer = index_first_axis(
+ query_layer.reshape(batch_size * kv_seq_len, self.num_heads, head_dim),
+ indices_k,
+ )
+ cu_seqlens_q = cu_seqlens_k
+ max_seqlen_in_batch_q = max_seqlen_in_batch_k
+ indices_q = indices_k
+ elif query_length == 1:
+ max_seqlen_in_batch_q = 1
+ cu_seqlens_q = torch.arange(
+ batch_size + 1, dtype=torch.int32, device=query_layer.device
+ ) # There is a memcpy here, that is very bad.
+ indices_q = cu_seqlens_q[:-1]
+ query_layer = query_layer.squeeze(1)
+ else:
+ # The -q_len: slice assumes left padding.
+ attention_mask = attention_mask[:, -query_length:]
+ query_layer, indices_q, cu_seqlens_q, max_seqlen_in_batch_q = unpad_input(
+ query_layer, attention_mask
+ )
+
+ return (
+ query_layer,
+ key_layer,
+ value_layer,
+ indices_q,
+ (cu_seqlens_q, cu_seqlens_k),
+ (max_seqlen_in_batch_q, max_seqlen_in_batch_k),
+ )
+
+
+ATTENTION_CLASSES = {
+ "eager": DeepseekV3Attention,
+ "flash_attention_2": DeepseekV3FlashAttention2,
+}
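+# The decoder layer below instantiates one of these based on
+# config._attn_implementation (e.g. "eager" or "flash_attention_2").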
+
+
+class DeepseekV3DecoderLayer(nn.Module):
+ def __init__(self, config: DeepseekV3Config, layer_idx: int):
+ super().__init__()
+ self.hidden_size = config.hidden_size
+
+ self.self_attn = ATTENTION_CLASSES[config._attn_implementation](
+ config=config, layer_idx=layer_idx
+ )
+
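+        # The first `first_k_dense_replace` layers keep a dense MLP; from then on,
+        # every `moe_layer_freq`-th layer uses the MoE block instead.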
+ self.mlp = (
+ DeepseekV3MoE(config)
+ if (
+ config.n_routed_experts is not None
+ and layer_idx >= config.first_k_dense_replace
+ and layer_idx % config.moe_layer_freq == 0
+ )
+ else DeepseekV3MLP(config)
+ )
+ self.input_layernorm = DeepseekV3RMSNorm(
+ config.hidden_size, eps=config.rms_norm_eps
+ )
+ self.post_attention_layernorm = DeepseekV3RMSNorm(
+ config.hidden_size, eps=config.rms_norm_eps
+ )
+
+ def forward(
+ self,
+ hidden_states: torch.Tensor,
+ attention_mask: Optional[torch.Tensor] = None,
+ position_ids: Optional[torch.LongTensor] = None,
+ past_key_value: Optional[Tuple[torch.Tensor]] = None,
+ output_attentions: Optional[bool] = False,
+ use_cache: Optional[bool] = False,
+ **kwargs,
+ ) -> Tuple[
+ torch.FloatTensor, Optional[Tuple[torch.FloatTensor, torch.FloatTensor]]
+ ]:
+ """
+ Args:
+ hidden_states (`torch.FloatTensor`): input to the layer of shape `(batch, seq_len, embed_dim)`
+ attention_mask (`torch.FloatTensor`, *optional*):
+ attention mask of size `(batch_size, sequence_length)` if flash attention is used or `(batch_size, 1,
+ query_sequence_length, key_sequence_length)` if default attention is used.
+ output_attentions (`bool`, *optional*):
+ Whether or not to return the attentions tensors of all attention layers. See `attentions` under
+ returned tensors for more detail.
+ use_cache (`bool`, *optional*):
+ If set to `True`, `past_key_values` key value states are returned and can be used to speed up decoding
+ (see `past_key_values`).
+ past_key_value (`Tuple(torch.FloatTensor)`, *optional*): cached past key and value projection states
+ """
+ if "padding_mask" in kwargs:
+ warnings.warn(
+ "Passing `padding_mask` is deprecated and will be removed in v4.37. Please make sure use `attention_mask` instead.`"
+ )
+ residual = hidden_states
+
+ hidden_states = self.input_layernorm(hidden_states)
+
+ # Self Attention
+ hidden_states, self_attn_weights, present_key_value = self.self_attn(
+ hidden_states=hidden_states,
+ attention_mask=attention_mask,
+ position_ids=position_ids,
+ past_key_value=past_key_value,
+ output_attentions=output_attentions,
+ use_cache=use_cache,
+ **kwargs,
+ )
+ hidden_states = residual + hidden_states
+
+ # Fully Connected
+ residual = hidden_states
+ hidden_states = self.post_attention_layernorm(hidden_states)
+ hidden_states = self.mlp(hidden_states)
+ hidden_states = residual + hidden_states
+
+ outputs = (hidden_states,)
+
+ if output_attentions:
+ outputs += (self_attn_weights,)
+
+ if use_cache:
+ outputs += (present_key_value,)
+
+ return outputs
+
+
+DeepseekV3_START_DOCSTRING = r"""
+    This model inherits from [`PreTrainedModel`]. Check the superclass documentation for the generic methods the
+    library implements for all its models (such as downloading or saving, resizing the input embeddings, pruning heads,
+    etc.)
+
+ This model is also a PyTorch [torch.nn.Module](https://pytorch.org/docs/stable/nn.html#torch.nn.Module) subclass.
+    Use it as a regular PyTorch Module and refer to the PyTorch documentation for all matters related to general usage
+ and behavior.
+
+ Parameters:
+ config ([`DeepseekV3Config`]):
+ Model configuration class with all the parameters of the model. Initializing with a config file does not
+ load the weights associated with the model, only the configuration. Check out the
+ [`~PreTrainedModel.from_pretrained`] method to load the model weights.
+"""
+
+
+@add_start_docstrings(
+ "The bare DeepseekV3 Model outputting raw hidden-states without any specific head on top.",
+ DeepseekV3_START_DOCSTRING,
+)
+class DeepseekV3PreTrainedModel(PreTrainedModel):
+ config_class = DeepseekV3Config
+ base_model_prefix = "model"
+ supports_gradient_checkpointing = True
+ _no_split_modules = ["DeepseekV3DecoderLayer"]
+ _skip_keys_device_placement = "past_key_values"
+ _supports_flash_attn_2 = True
+ _supports_cache_class = True
+
+ def _init_weights(self, module):
+ std = self.config.initializer_range
+ if isinstance(module, nn.Linear):
+ module.weight.data.normal_(mean=0.0, std=std)
+ if module.bias is not None:
+ module.bias.data.zero_()
+ elif isinstance(module, nn.Embedding):
+ module.weight.data.normal_(mean=0.0, std=std)
+ if module.padding_idx is not None:
+ module.weight.data[module.padding_idx].zero_()
+
+
+DeepseekV3_INPUTS_DOCSTRING = r"""
+ Args:
+ input_ids (`torch.LongTensor` of shape `(batch_size, sequence_length)`):
+ Indices of input sequence tokens in the vocabulary. Padding will be ignored by default should you provide
+ it.
+
+ Indices can be obtained using [`AutoTokenizer`]. See [`PreTrainedTokenizer.encode`] and
+ [`PreTrainedTokenizer.__call__`] for details.
+
+ [What are input IDs?](../glossary#input-ids)
+ attention_mask (`torch.Tensor` of shape `(batch_size, sequence_length)`, *optional*):
+ Mask to avoid performing attention on padding token indices. Mask values selected in `[0, 1]`:
+
+ - 1 for tokens that are **not masked**,
+ - 0 for tokens that are **masked**.
+
+ [What are attention masks?](../glossary#attention-mask)
+
+ Indices can be obtained using [`AutoTokenizer`]. See [`PreTrainedTokenizer.encode`] and
+ [`PreTrainedTokenizer.__call__`] for details.
+
+ If `past_key_values` is used, optionally only the last `input_ids` have to be input (see
+ `past_key_values`).
+
+ If you want to change padding behavior, you should read [`modeling_opt._prepare_decoder_attention_mask`]
+ and modify to your needs. See diagram 1 in [the paper](https://arxiv.org/abs/1910.13461) for more
+ information on the default strategy.
+ position_ids (`torch.LongTensor` of shape `(batch_size, sequence_length)`, *optional*):
+ Indices of positions of each input sequence tokens in the position embeddings. Selected in the range `[0,
+ config.n_positions - 1]`.
+
+ [What are position IDs?](../glossary#position-ids)
+ past_key_values (`Cache` or `tuple(tuple(torch.FloatTensor))`, *optional*):
+            Pre-computed hidden-states (keys and values in the self-attention blocks) that can be used to speed up
+            sequential decoding. This typically consists of the `past_key_values` returned by the model at a previous
+            stage of decoding, when `use_cache=True` or `config.use_cache=True`.
+
+ Two formats are allowed:
+ - a [`~cache_utils.Cache`] instance;
+            - Tuple of `tuple(torch.FloatTensor)` of length `config.n_layers`, with each tuple having 2 tensors of
+              shape `(batch_size, num_heads, sequence_length, embed_size_per_head)`. This is also known as the legacy
+              cache format.
+
+ The model will output the same cache format that is fed as input. If no `past_key_values` are passed, the
+ legacy cache format will be returned.
+
+ If `past_key_values` are used, the user can optionally input only the last `input_ids` (those that don't
+ have their past key value states given to this model) of shape `(batch_size, 1)` instead of all `input_ids`
+ of shape `(batch_size, sequence_length)`.
+ inputs_embeds (`torch.FloatTensor` of shape `(batch_size, sequence_length, hidden_size)`, *optional*):
+ Optionally, instead of passing `input_ids` you can choose to directly pass an embedded representation. This
+ is useful if you want more control over how to convert `input_ids` indices into associated vectors than the
+ model's internal embedding lookup matrix.
+ use_cache (`bool`, *optional*):
+ If set to `True`, `past_key_values` key value states are returned and can be used to speed up decoding (see
+ `past_key_values`).
+ output_attentions (`bool`, *optional*):
+ Whether or not to return the attentions tensors of all attention layers. See `attentions` under returned
+ tensors for more detail.
+ output_hidden_states (`bool`, *optional*):
+ Whether or not to return the hidden states of all layers. See `hidden_states` under returned tensors for
+ more detail.
+ return_dict (`bool`, *optional*):
+ Whether or not to return a [`~utils.ModelOutput`] instead of a plain tuple.
+"""
+
+
+@add_start_docstrings(
+ "The bare DeepseekV3 Model outputting raw hidden-states without any specific head on top.",
+ DeepseekV3_START_DOCSTRING,
+)
+class DeepseekV3Model(DeepseekV3PreTrainedModel):
+ """
+    Transformer decoder consisting of *config.num_hidden_layers* layers. Each layer is a [`DeepseekV3DecoderLayer`].
+
+ Args:
+ config: DeepseekV3Config
+ """
+
+ def __init__(self, config: DeepseekV3Config):
+ super().__init__(config)
+ self.padding_idx = config.pad_token_id
+ self.vocab_size = config.vocab_size
+
+ self.embed_tokens = nn.Embedding(
+ config.vocab_size, config.hidden_size, self.padding_idx
+ )
+ self.layers = nn.ModuleList(
+ [
+ DeepseekV3DecoderLayer(config, layer_idx)
+ for layer_idx in range(config.num_hidden_layers)
+ ]
+ )
+ self._use_flash_attention_2 = config._attn_implementation == "flash_attention_2"
+ self.norm = DeepseekV3RMSNorm(config.hidden_size, eps=config.rms_norm_eps)
+
+ self.gradient_checkpointing = False
+ # Initialize weights and apply final processing
+ self.post_init()
+
+ def get_input_embeddings(self):
+ return self.embed_tokens
+
+ def set_input_embeddings(self, value):
+ self.embed_tokens = value
+
+ @add_start_docstrings_to_model_forward(DeepseekV3_INPUTS_DOCSTRING)
+ def forward(
+ self,
+ input_ids: torch.LongTensor = None,
+ attention_mask: Optional[torch.Tensor] = None,
+ position_ids: Optional[torch.LongTensor] = None,
+ past_key_values: Optional[List[torch.FloatTensor]] = None,
+ inputs_embeds: Optional[torch.FloatTensor] = None,
+ use_cache: Optional[bool] = None,
+ output_attentions: Optional[bool] = None,
+ output_hidden_states: Optional[bool] = None,
+ return_dict: Optional[bool] = None,
+ ) -> Union[Tuple, BaseModelOutputWithPast]:
+ output_attentions = (
+ output_attentions
+ if output_attentions is not None
+ else self.config.output_attentions
+ )
+ output_hidden_states = (
+ output_hidden_states
+ if output_hidden_states is not None
+ else self.config.output_hidden_states
+ )
+ use_cache = use_cache if use_cache is not None else self.config.use_cache
+
+ return_dict = (
+ return_dict if return_dict is not None else self.config.use_return_dict
+ )
+
+ # retrieve input_ids and inputs_embeds
+ if input_ids is not None and inputs_embeds is not None:
+ raise ValueError(
+ "You cannot specify both input_ids and inputs_embeds at the same time"
+ )
+ elif input_ids is not None:
+ batch_size, seq_length = input_ids.shape[:2]
+ elif inputs_embeds is not None:
+ batch_size, seq_length = inputs_embeds.shape[:2]
+ else:
+ raise ValueError("You have to specify either input_ids or inputs_embeds")
+
+ past_key_values_length = 0
+ if use_cache:
+ use_legacy_cache = not isinstance(past_key_values, Cache)
+ if use_legacy_cache:
+ past_key_values = DynamicCache.from_legacy_cache(past_key_values)
+ past_key_values_length = past_key_values.get_usable_length(seq_length)
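+            # Legacy tuple caches are wrapped into a `DynamicCache` here and
+            # converted back via `to_legacy_cache()` before returning, so callers
+            # get back the same cache format they passed in.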
+
+ if position_ids is None:
+ device = input_ids.device if input_ids is not None else inputs_embeds.device
+ position_ids = torch.arange(
+ past_key_values_length,
+ seq_length + past_key_values_length,
+ dtype=torch.long,
+ device=device,
+ )
+ position_ids = position_ids.unsqueeze(0)
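+            # During incremental decoding, positions continue from the cache, so
+            # a single new token receives position `past_key_values_length`.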
+
+ if inputs_embeds is None:
+ inputs_embeds = self.embed_tokens(input_ids)
+
+ if self._use_flash_attention_2:
+ # 2d mask is passed through the layers
+ attention_mask = (
+ attention_mask
+ if (attention_mask is not None and 0 in attention_mask)
+ else None
+ )
+ else:
+ # 4d mask is passed through the layers
+ attention_mask = _prepare_4d_causal_attention_mask(
+ attention_mask,
+ (batch_size, seq_length),
+ inputs_embeds,
+ past_key_values_length,
+ )
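+            # The prepared mask is an additive 4D tensor of shape (batch_size, 1,
+            # seq_length, seq_length + past_key_values_length): 0 where attention
+            # is allowed and a large negative value where it is masked.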
+
+ # embed positions
+ hidden_states = inputs_embeds
+
+ # decoder layers
+ all_hidden_states = () if output_hidden_states else None
+ all_self_attns = () if output_attentions else None
+ next_decoder_cache = None
+
+ for decoder_layer in self.layers:
+ if output_hidden_states:
+ all_hidden_states += (hidden_states,)
+
+ layer_outputs = decoder_layer(
+ hidden_states,
+ attention_mask=attention_mask,
+ position_ids=position_ids,
+ past_key_value=past_key_values,
+ output_attentions=output_attentions,
+ use_cache=use_cache,
+ )
+
+ hidden_states = layer_outputs[0]
+
+ if use_cache:
+ next_decoder_cache = layer_outputs[2 if output_attentions else 1]
+
+ if output_attentions:
+ all_self_attns += (layer_outputs[1],)
+
+ hidden_states = self.norm(hidden_states)
+
+ # add hidden states from the last decoder layer
+ if output_hidden_states:
+ all_hidden_states += (hidden_states,)
+
+ next_cache = None
+ if use_cache:
+ next_cache = (
+ next_decoder_cache.to_legacy_cache()
+ if use_legacy_cache
+ else next_decoder_cache
+ )
+ if not return_dict:
+ return tuple(
+ v
+ for v in [hidden_states, next_cache, all_hidden_states, all_self_attns]
+ if v is not None
+ )
+ return BaseModelOutputWithPast(
+ last_hidden_state=hidden_states,
+ past_key_values=next_cache,
+ hidden_states=all_hidden_states,
+ attentions=all_self_attns,
+ )
+
+
+class DeepseekV3ForCausalLM(DeepseekV3PreTrainedModel):
+ _tied_weights_keys = ["lm_head.weight"]
+
+ def __init__(self, config):
+ super().__init__(config)
+ self.model = DeepseekV3Model(config)
+ self.vocab_size = config.vocab_size
+ self.lm_head = nn.Linear(config.hidden_size, config.vocab_size, bias=False)
+
+ # Initialize weights and apply final processing
+ self.post_init()
+
+ def get_input_embeddings(self):
+ return self.model.embed_tokens
+
+ def set_input_embeddings(self, value):
+ self.model.embed_tokens = value
+
+ def get_output_embeddings(self):
+ return self.lm_head
+
+ def set_output_embeddings(self, new_embeddings):
+ self.lm_head = new_embeddings
+
+ def set_decoder(self, decoder):
+ self.model = decoder
+
+ def get_decoder(self):
+ return self.model
+
+ @add_start_docstrings_to_model_forward(DeepseekV3_INPUTS_DOCSTRING)
+ @replace_return_docstrings(
+ output_type=CausalLMOutputWithPast, config_class=_CONFIG_FOR_DOC
+ )
+ def forward(
+ self,
+ input_ids: torch.LongTensor = None,
+ attention_mask: Optional[torch.Tensor] = None,
+ position_ids: Optional[torch.LongTensor] = None,
+ past_key_values: Optional[List[torch.FloatTensor]] = None,
+ inputs_embeds: Optional[torch.FloatTensor] = None,
+ labels: Optional[torch.LongTensor] = None,
+ use_cache: Optional[bool] = None,
+ output_attentions: Optional[bool] = None,
+ output_hidden_states: Optional[bool] = None,
+ return_dict: Optional[bool] = None,
+ ) -> Union[Tuple, CausalLMOutputWithPast]:
+ r"""
+ Args:
+            labels (`torch.LongTensor` of shape `(batch_size, sequence_length)`, *optional*):
+                Labels for computing the language modeling loss. Indices should either be in `[0, ...,
+                config.vocab_size]` or -100 (see `input_ids` docstring). Tokens with indices set to `-100` are ignored
+                (masked); the loss is only computed for the tokens with labels in `[0, ..., config.vocab_size]`.
+
+ Returns:
+
+ Example:
+
+ ```python
+ >>> from transformers import AutoTokenizer, DeepseekV3ForCausalLM
+
+ >>> model = DeepseekV3ForCausalLM.from_pretrained(PATH_TO_CONVERTED_WEIGHTS)
+ >>> tokenizer = AutoTokenizer.from_pretrained(PATH_TO_CONVERTED_TOKENIZER)
+
+ >>> prompt = "Hey, are you conscious? Can you talk to me?"
+ >>> inputs = tokenizer(prompt, return_tensors="pt")
+
+ >>> # Generate
+ >>> generate_ids = model.generate(inputs.input_ids, max_length=30)
+ >>> tokenizer.batch_decode(generate_ids, skip_special_tokens=True, clean_up_tokenization_spaces=False)[0]
+ "Hey, are you conscious? Can you talk to me?\nI'm not conscious, but I can talk to you."
+ ```"""
+ output_attentions = (
+ output_attentions
+ if output_attentions is not None
+ else self.config.output_attentions
+ )
+ output_hidden_states = (
+ output_hidden_states
+ if output_hidden_states is not None
+ else self.config.output_hidden_states
+ )
+ return_dict = (
+ return_dict if return_dict is not None else self.config.use_return_dict
+ )
+
+ # decoder outputs consists of (dec_features, layer_state, dec_hidden, dec_attn)
+ outputs = self.model(
+ input_ids=input_ids,
+ attention_mask=attention_mask,
+ position_ids=position_ids,
+ past_key_values=past_key_values,
+ inputs_embeds=inputs_embeds,
+ use_cache=use_cache,
+ output_attentions=output_attentions,
+ output_hidden_states=output_hidden_states,
+ return_dict=return_dict,
+ )
+
+ hidden_states = outputs[0]
+ logits = self.lm_head(hidden_states)
+ logits = logits.float()
+
+ loss = None
+ if labels is not None:
+ # Shift so that tokens < n predict n
+ shift_logits = logits[..., :-1, :].contiguous()
+ shift_labels = labels[..., 1:].contiguous()
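+            # Example: with labels [t0, t1, t2, t3], the logits at positions
+            # 0..2 are scored against t1..t3; the last position has no target.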
+ # Flatten the tokens
+ loss_fct = CrossEntropyLoss()
+ shift_logits = shift_logits.view(-1, self.config.vocab_size)
+ shift_labels = shift_labels.view(-1)
+ # Enable model parallelism
+ shift_labels = shift_labels.to(shift_logits.device)
+ loss = loss_fct(shift_logits, shift_labels)
+
+ if not return_dict:
+ output = (logits,) + outputs[1:]
+ return (loss,) + output if loss is not None else output
+
+ return CausalLMOutputWithPast(
+ loss=loss,
+ logits=logits,
+ past_key_values=outputs.past_key_values,
+ hidden_states=outputs.hidden_states,
+ attentions=outputs.attentions,
+ )
+
+ def prepare_inputs_for_generation(
+ self,
+ input_ids,
+ past_key_values=None,
+ attention_mask=None,
+ inputs_embeds=None,
+ **kwargs,
+ ):
+ if past_key_values is not None:
+ if isinstance(past_key_values, Cache):
+ cache_length = past_key_values.get_seq_length()
+ past_length = past_key_values.seen_tokens
+ max_cache_length = past_key_values.get_max_length()
+ else:
+ cache_length = past_length = past_key_values[0][0].shape[2]
+ max_cache_length = None
+
+ # Keep only the unprocessed tokens:
+            # 1 - If the length of the attention_mask exceeds the length of input_ids, then we are in a setting where
+            # some of the inputs are exclusively passed as part of the cache (e.g. when passing inputs_embeds as
+            # input)
+ if (
+ attention_mask is not None
+ and attention_mask.shape[1] > input_ids.shape[1]
+ ):
+ input_ids = input_ids[:, -(attention_mask.shape[1] - past_length) :]
+            # 2 - If past_length is smaller than the length of input_ids, then input_ids holds all input tokens. We
+            # can discard the already-processed prefix of input_ids based on past_length.
+ elif past_length < input_ids.shape[1]:
+ input_ids = input_ids[:, past_length:]
+ # 3 - Otherwise (past_length >= input_ids.shape[1]), let's assume input_ids only has unprocessed tokens.
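+            # Illustrative (hypothetical shapes): with past_length=10 and
+            # input_ids of shape (1, 11), branch 2 keeps only the newest token,
+            # input_ids[:, 10:].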
+
+ # If we are about to go beyond the maximum cache length, we need to crop the input attention mask.
+ if (
+ max_cache_length is not None
+ and attention_mask is not None
+ and cache_length + input_ids.shape[1] > max_cache_length
+ ):
+ attention_mask = attention_mask[:, -max_cache_length:]
+
+ position_ids = kwargs.get("position_ids", None)
+ if attention_mask is not None and position_ids is None:
+ # create position_ids on the fly for batch generation
+ position_ids = attention_mask.long().cumsum(-1) - 1
+ position_ids.masked_fill_(attention_mask == 0, 1)
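+            # cumsum over the mask numbers the real tokens 0, 1, 2, ...; padded
+            # slots get an arbitrary in-range index (1), which is harmless since
+            # they are masked out of attention anyway.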
+ if past_key_values:
+ position_ids = position_ids[:, -input_ids.shape[1] :]
+
+ # if `inputs_embeds` are passed, we only want to use them in the 1st generation step
+ if inputs_embeds is not None and past_key_values is None:
+ model_inputs = {"inputs_embeds": inputs_embeds}
+ else:
+ model_inputs = {"input_ids": input_ids}
+
+ model_inputs.update(
+ {
+ "position_ids": position_ids,
+ "past_key_values": past_key_values,
+ "use_cache": kwargs.get("use_cache"),
+ "attention_mask": attention_mask,
+ }
+ )
+ return model_inputs
+
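+    # Beam search bookkeeping: `generate` calls this to reorder each layer's
+    # cached key/value tensors along the batch dimension so the cache follows
+    # the surviving beams selected by `beam_idx`.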
+ @staticmethod
+ def _reorder_cache(past_key_values, beam_idx):
+ reordered_past = ()
+ for layer_past in past_key_values:
+ reordered_past += (
+ tuple(
+ past_state.index_select(0, beam_idx.to(past_state.device))
+ for past_state in layer_past
+ ),
+ )
+ return reordered_past
+
+
+@add_start_docstrings(
+ """
+ The DeepseekV3 Model transformer with a sequence classification head on top (linear layer).
+
+ [`DeepseekV3ForSequenceClassification`] uses the last token in order to do the classification, as other causal models
+ (e.g. GPT-2) do.
+
+    Since it does classification on the last token, it needs to know the position of the last token. If a
+    `pad_token_id` is defined in the configuration, it finds the last token that is not a padding token in each row.
+    If no `pad_token_id` is defined, it simply takes the last value in each row of the batch. Since it cannot guess
+    the padding tokens when `inputs_embeds` are passed instead of `input_ids`, it does the same (takes the last value
+    in each row of the batch).
+ """,
+ DeepseekV3_START_DOCSTRING,
+)
+class DeepseekV3ForSequenceClassification(DeepseekV3PreTrainedModel):
+ def __init__(self, config):
+ super().__init__(config)
+ self.num_labels = config.num_labels
+ self.model = DeepseekV3Model(config)
+ self.score = nn.Linear(config.hidden_size, self.num_labels, bias=False)
+
+ # Initialize weights and apply final processing
+ self.post_init()
+
+ def get_input_embeddings(self):
+ return self.model.embed_tokens
+
+ def set_input_embeddings(self, value):
+ self.model.embed_tokens = value
+
+ @add_start_docstrings_to_model_forward(DeepseekV3_INPUTS_DOCSTRING)
+ def forward(
+ self,
+ input_ids: torch.LongTensor = None,
+ attention_mask: Optional[torch.Tensor] = None,
+ position_ids: Optional[torch.LongTensor] = None,
+ past_key_values: Optional[List[torch.FloatTensor]] = None,
+ inputs_embeds: Optional[torch.FloatTensor] = None,
+ labels: Optional[torch.LongTensor] = None,
+ use_cache: Optional[bool] = None,
+ output_attentions: Optional[bool] = None,
+ output_hidden_states: Optional[bool] = None,
+ return_dict: Optional[bool] = None,
+ ) -> Union[Tuple, SequenceClassifierOutputWithPast]:
+        r"""
+        labels (`torch.LongTensor` of shape `(batch_size,)`, *optional*):
+            Labels for computing the sequence classification/regression loss. Indices should be in `[0, ...,
+            config.num_labels - 1]`. If `config.num_labels == 1` a regression loss is computed (Mean-Squared loss); if
+            `config.num_labels > 1` a classification loss is computed (Cross-Entropy).
+        """
+ return_dict = (
+ return_dict if return_dict is not None else self.config.use_return_dict
+ )
+
+ transformer_outputs = self.model(
+ input_ids,
+ attention_mask=attention_mask,
+ position_ids=position_ids,
+ past_key_values=past_key_values,
+ inputs_embeds=inputs_embeds,
+ use_cache=use_cache,
+ output_attentions=output_attentions,
+ output_hidden_states=output_hidden_states,
+ return_dict=return_dict,
+ )
+ hidden_states = transformer_outputs[0]
+ logits = self.score(hidden_states)
+
+ if input_ids is not None:
+ batch_size = input_ids.shape[0]
+ else:
+ batch_size = inputs_embeds.shape[0]
+
+ if self.config.pad_token_id is None and batch_size != 1:
+ raise ValueError(
+ "Cannot handle batch sizes > 1 if no padding token is defined."
+ )
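+        # Last-token pooling: with a pad token defined, the index of the first
+        # pad token minus one marks the last real token (this assumes right
+        # padding); without one, the final position of each row is used (-1).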
+ if self.config.pad_token_id is None:
+ sequence_lengths = -1
+ else:
+ if input_ids is not None:
+ sequence_lengths = (
+ torch.eq(input_ids, self.config.pad_token_id).int().argmax(-1) - 1
+ ).to(logits.device)
+ else:
+ sequence_lengths = -1
+
+ pooled_logits = logits[
+ torch.arange(batch_size, device=logits.device), sequence_lengths
+ ]
+
+ loss = None
+ if labels is not None:
+ labels = labels.to(logits.device)
+ if self.config.problem_type is None:
+ if self.num_labels == 1:
+ self.config.problem_type = "regression"
+ elif self.num_labels > 1 and (
+ labels.dtype == torch.long or labels.dtype == torch.int
+ ):
+ self.config.problem_type = "single_label_classification"
+ else:
+ self.config.problem_type = "multi_label_classification"
+
+ if self.config.problem_type == "regression":
+ loss_fct = MSELoss()
+ if self.num_labels == 1:
+ loss = loss_fct(pooled_logits.squeeze(), labels.squeeze())
+ else:
+ loss = loss_fct(pooled_logits, labels)
+ elif self.config.problem_type == "single_label_classification":
+ loss_fct = CrossEntropyLoss()
+ loss = loss_fct(
+ pooled_logits.view(-1, self.num_labels), labels.view(-1)
+ )
+ elif self.config.problem_type == "multi_label_classification":
+ loss_fct = BCEWithLogitsLoss()
+ loss = loss_fct(pooled_logits, labels)
+ if not return_dict:
+ output = (pooled_logits,) + transformer_outputs[1:]
+ return ((loss,) + output) if loss is not None else output
+
+ return SequenceClassifierOutputWithPast(
+ loss=loss,
+ logits=pooled_logits,
+ past_key_values=transformer_outputs.past_key_values,
+ hidden_states=transformer_outputs.hidden_states,
+ attentions=transformer_outputs.attentions,
+ )
diff --git a/special_tokens_map.json b/special_tokens_map.json
new file mode 100644
index 0000000..76ed2f8
--- /dev/null
+++ b/special_tokens_map.json
@@ -0,0 +1,23 @@
+{
+ "bos_token": {
+ "content": "<|begin▁of▁sentence|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "eos_token": {
+ "content": "<|end▁of▁sentence|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "pad_token": {
+ "content": "<|▁pad▁|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ }
+}
diff --git a/tokenizer.json b/tokenizer.json
new file mode 100644
index 0000000..0586ef5
--- /dev/null
+++ b/tokenizer.json
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a59031bae85a334b70c9f91582c76b3b11a9c7f7500c62a820f3f9433f92e877
+size 9977256
diff --git a/tokenizer_config.json b/tokenizer_config.json
new file mode 100644
index 0000000..cec3b66
--- /dev/null
+++ b/tokenizer_config.json
@@ -0,0 +1,6564 @@
+{
+ "add_bos_token": true,
+ "add_eos_token": false,
+ "add_prefix_space": null,
+ "added_tokens_decoder": {
+ "0": {
+ "content": "<|begin▁of▁sentence|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "1": {
+ "content": "<|end▁of▁sentence|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "2": {
+ "content": "<|▁pad▁|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128000": {
+ "content": "<|place▁holder▁no▁0|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128001": {
+ "content": "<|place▁holder▁no▁1|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128002": {
+ "content": "<|place▁holder▁no▁2|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128003": {
+ "content": "<|place▁holder▁no▁3|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128004": {
+ "content": "<|place▁holder▁no▁4|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128005": {
+ "content": "<|place▁holder▁no▁5|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128006": {
+ "content": "<|place▁holder▁no▁6|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128007": {
+ "content": "<|place▁holder▁no▁7|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128008": {
+ "content": "<|place▁holder▁no▁8|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128009": {
+ "content": "<|place▁holder▁no▁9|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128010": {
+ "content": "<|place▁holder▁no▁10|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128011": {
+ "content": "<|place▁holder▁no▁11|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128012": {
+ "content": "<|place▁holder▁no▁12|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128013": {
+ "content": "<|place▁holder▁no▁13|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128014": {
+ "content": "<|place▁holder▁no▁14|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128015": {
+ "content": "<|place▁holder▁no▁15|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128016": {
+ "content": "<|place▁holder▁no▁16|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128017": {
+ "content": "<|place▁holder▁no▁17|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128018": {
+ "content": "<|place▁holder▁no▁18|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128019": {
+ "content": "<|place▁holder▁no▁19|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128020": {
+ "content": "<|place▁holder▁no▁20|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128021": {
+ "content": "<|place▁holder▁no▁21|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128022": {
+ "content": "<|place▁holder▁no▁22|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128023": {
+ "content": "<|place▁holder▁no▁23|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128024": {
+ "content": "<|place▁holder▁no▁24|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128025": {
+ "content": "<|place▁holder▁no▁25|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128026": {
+ "content": "<|place▁holder▁no▁26|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128027": {
+ "content": "<|place▁holder▁no▁27|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128028": {
+ "content": "<|place▁holder▁no▁28|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128029": {
+ "content": "<|place▁holder▁no▁29|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128030": {
+ "content": "<|place▁holder▁no▁30|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128031": {
+ "content": "<|place▁holder▁no▁31|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128032": {
+ "content": "<|place▁holder▁no▁32|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128033": {
+ "content": "<|place▁holder▁no▁33|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128034": {
+ "content": "<|place▁holder▁no▁34|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128035": {
+ "content": "<|place▁holder▁no▁35|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128036": {
+ "content": "<|place▁holder▁no▁36|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128037": {
+ "content": "<|place▁holder▁no▁37|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128038": {
+ "content": "<|place▁holder▁no▁38|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128039": {
+ "content": "<|place▁holder▁no▁39|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128040": {
+ "content": "<|place▁holder▁no▁40|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128041": {
+ "content": "<|place▁holder▁no▁41|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128042": {
+ "content": "<|place▁holder▁no▁42|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128043": {
+ "content": "<|place▁holder▁no▁43|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128044": {
+ "content": "<|place▁holder▁no▁44|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128045": {
+ "content": "<|place▁holder▁no▁45|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128046": {
+ "content": "<|place▁holder▁no▁46|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128047": {
+ "content": "<|place▁holder▁no▁47|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128048": {
+ "content": "<|place▁holder▁no▁48|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128049": {
+ "content": "<|place▁holder▁no▁49|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128050": {
+ "content": "<|place▁holder▁no▁50|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128051": {
+ "content": "<|place▁holder▁no▁51|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128052": {
+ "content": "<|place▁holder▁no▁52|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128053": {
+ "content": "<|place▁holder▁no▁53|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128054": {
+ "content": "<|place▁holder▁no▁54|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128055": {
+ "content": "<|place▁holder▁no▁55|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128056": {
+ "content": "<|place▁holder▁no▁56|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128057": {
+ "content": "<|place▁holder▁no▁57|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128058": {
+ "content": "<|place▁holder▁no▁58|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128059": {
+ "content": "<|place▁holder▁no▁59|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128060": {
+ "content": "<|place▁holder▁no▁60|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128061": {
+ "content": "<|place▁holder▁no▁61|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128062": {
+ "content": "<|place▁holder▁no▁62|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128063": {
+ "content": "<|place▁holder▁no▁63|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128064": {
+ "content": "<|place▁holder▁no▁64|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128065": {
+ "content": "<|place▁holder▁no▁65|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128066": {
+ "content": "<|place▁holder▁no▁66|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128067": {
+ "content": "<|place▁holder▁no▁67|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128068": {
+ "content": "<|place▁holder▁no▁68|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128069": {
+ "content": "<|place▁holder▁no▁69|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128070": {
+ "content": "<|place▁holder▁no▁70|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128071": {
+ "content": "<|place▁holder▁no▁71|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128072": {
+ "content": "<|place▁holder▁no▁72|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128073": {
+ "content": "<|place▁holder▁no▁73|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128074": {
+ "content": "<|place▁holder▁no▁74|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128075": {
+ "content": "<|place▁holder▁no▁75|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128076": {
+ "content": "<|place▁holder▁no▁76|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128077": {
+ "content": "<|place▁holder▁no▁77|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128078": {
+ "content": "<|place▁holder▁no▁78|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128079": {
+ "content": "<|place▁holder▁no▁79|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128080": {
+ "content": "<|place▁holder▁no▁80|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128081": {
+ "content": "<|place▁holder▁no▁81|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128082": {
+ "content": "<|place▁holder▁no▁82|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128083": {
+ "content": "<|place▁holder▁no▁83|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128084": {
+ "content": "<|place▁holder▁no▁84|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128085": {
+ "content": "<|place▁holder▁no▁85|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128086": {
+ "content": "<|place▁holder▁no▁86|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128087": {
+ "content": "<|place▁holder▁no▁87|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128088": {
+ "content": "<|place▁holder▁no▁88|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128089": {
+ "content": "<|place▁holder▁no▁89|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128090": {
+ "content": "<|place▁holder▁no▁90|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128091": {
+ "content": "<|place▁holder▁no▁91|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128092": {
+ "content": "<|place▁holder▁no▁92|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128093": {
+ "content": "<|place▁holder▁no▁93|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128094": {
+ "content": "<|place▁holder▁no▁94|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128095": {
+ "content": "<|place▁holder▁no▁95|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128096": {
+ "content": "<|place▁holder▁no▁96|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128097": {
+ "content": "<|place▁holder▁no▁97|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128098": {
+ "content": "<|place▁holder▁no▁98|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128099": {
+ "content": "<|place▁holder▁no▁99|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128100": {
+ "content": "<|place▁holder▁no▁100|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128101": {
+ "content": "<|place▁holder▁no▁101|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128102": {
+ "content": "<|place▁holder▁no▁102|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128103": {
+ "content": "<|place▁holder▁no▁103|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128104": {
+ "content": "<|place▁holder▁no▁104|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128105": {
+ "content": "<|place▁holder▁no▁105|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128106": {
+ "content": "<|place▁holder▁no▁106|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128107": {
+ "content": "<|place▁holder▁no▁107|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128108": {
+ "content": "<|place▁holder▁no▁108|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128109": {
+ "content": "<|place▁holder▁no▁109|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128110": {
+ "content": "<|place▁holder▁no▁110|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128111": {
+ "content": "<|place▁holder▁no▁111|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128112": {
+ "content": "<|place▁holder▁no▁112|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128113": {
+ "content": "<|place▁holder▁no▁113|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128114": {
+ "content": "<|place▁holder▁no▁114|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128115": {
+ "content": "<|place▁holder▁no▁115|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128116": {
+ "content": "<|place▁holder▁no▁116|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128117": {
+ "content": "<|place▁holder▁no▁117|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128118": {
+ "content": "<|place▁holder▁no▁118|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128119": {
+ "content": "<|place▁holder▁no▁119|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128120": {
+ "content": "<|place▁holder▁no▁120|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128121": {
+ "content": "<|place▁holder▁no▁121|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128122": {
+ "content": "<|place▁holder▁no▁122|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128123": {
+ "content": "<|place▁holder▁no▁123|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128124": {
+ "content": "<|place▁holder▁no▁124|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128125": {
+ "content": "<|place▁holder▁no▁125|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128126": {
+ "content": "<|place▁holder▁no▁126|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128127": {
+ "content": "<|place▁holder▁no▁127|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128128": {
+ "content": "<|place▁holder▁no▁128|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128129": {
+ "content": "<|place▁holder▁no▁129|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128130": {
+ "content": "<|place▁holder▁no▁130|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128131": {
+ "content": "<|place▁holder▁no▁131|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128132": {
+ "content": "<|place▁holder▁no▁132|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128133": {
+ "content": "<|place▁holder▁no▁133|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128134": {
+ "content": "<|place▁holder▁no▁134|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128135": {
+ "content": "<|place▁holder▁no▁135|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128136": {
+ "content": "<|place▁holder▁no▁136|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128137": {
+ "content": "<|place▁holder▁no▁137|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128138": {
+ "content": "<|place▁holder▁no▁138|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128139": {
+ "content": "<|place▁holder▁no▁139|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128140": {
+ "content": "<|place▁holder▁no▁140|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128141": {
+ "content": "<|place▁holder▁no▁141|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128142": {
+ "content": "<|place▁holder▁no▁142|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128143": {
+ "content": "<|place▁holder▁no▁143|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128144": {
+ "content": "<|place▁holder▁no▁144|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128145": {
+ "content": "<|place▁holder▁no▁145|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128146": {
+ "content": "<|place▁holder▁no▁146|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128147": {
+ "content": "<|place▁holder▁no▁147|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128148": {
+ "content": "<|place▁holder▁no▁148|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128149": {
+ "content": "<|place▁holder▁no▁149|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128150": {
+ "content": "<|place▁holder▁no▁150|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128151": {
+ "content": "<|place▁holder▁no▁151|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128152": {
+ "content": "<|place▁holder▁no▁152|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128153": {
+ "content": "<|place▁holder▁no▁153|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128154": {
+ "content": "<|place▁holder▁no▁154|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128155": {
+ "content": "<|place▁holder▁no▁155|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128156": {
+ "content": "<|place▁holder▁no▁156|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128157": {
+ "content": "<|place▁holder▁no▁157|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128158": {
+ "content": "<|place▁holder▁no▁158|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128159": {
+ "content": "<|place▁holder▁no▁159|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128160": {
+ "content": "<|place▁holder▁no▁160|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128161": {
+ "content": "<|place▁holder▁no▁161|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128162": {
+ "content": "<|place▁holder▁no▁162|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128163": {
+ "content": "<|place▁holder▁no▁163|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128164": {
+ "content": "<|place▁holder▁no▁164|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128165": {
+ "content": "<|place▁holder▁no▁165|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128166": {
+ "content": "<|place▁holder▁no▁166|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128167": {
+ "content": "<|place▁holder▁no▁167|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128168": {
+ "content": "<|place▁holder▁no▁168|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128169": {
+ "content": "<|place▁holder▁no▁169|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128170": {
+ "content": "<|place▁holder▁no▁170|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128171": {
+ "content": "<|place▁holder▁no▁171|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128172": {
+ "content": "<|place▁holder▁no▁172|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128173": {
+ "content": "<|place▁holder▁no▁173|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128174": {
+ "content": "<|place▁holder▁no▁174|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128175": {
+ "content": "<|place▁holder▁no▁175|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128176": {
+ "content": "<|place▁holder▁no▁176|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128177": {
+ "content": "<|place▁holder▁no▁177|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128178": {
+ "content": "<|place▁holder▁no▁178|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128179": {
+ "content": "<|place▁holder▁no▁179|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128180": {
+ "content": "<|place▁holder▁no▁180|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128181": {
+ "content": "<|place▁holder▁no▁181|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128182": {
+ "content": "<|place▁holder▁no▁182|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128183": {
+ "content": "<|place▁holder▁no▁183|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128184": {
+ "content": "<|place▁holder▁no▁184|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128185": {
+ "content": "<|place▁holder▁no▁185|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128186": {
+ "content": "<|place▁holder▁no▁186|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128187": {
+ "content": "<|place▁holder▁no▁187|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128188": {
+ "content": "<|place▁holder▁no▁188|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128189": {
+ "content": "<|place▁holder▁no▁189|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128190": {
+ "content": "<|place▁holder▁no▁190|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128191": {
+ "content": "<|place▁holder▁no▁191|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128192": {
+ "content": "<|place▁holder▁no▁192|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128193": {
+ "content": "<|place▁holder▁no▁193|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128194": {
+ "content": "<|place▁holder▁no▁194|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128195": {
+ "content": "<|place▁holder▁no▁195|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128196": {
+ "content": "<|place▁holder▁no▁196|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128197": {
+ "content": "<|place▁holder▁no▁197|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128198": {
+ "content": "<|place▁holder▁no▁198|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128199": {
+ "content": "<|place▁holder▁no▁199|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128200": {
+ "content": "<|place▁holder▁no▁200|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128201": {
+ "content": "<|place▁holder▁no▁201|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128202": {
+ "content": "<|place▁holder▁no▁202|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128203": {
+ "content": "<|place▁holder▁no▁203|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128204": {
+ "content": "<|place▁holder▁no▁204|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128205": {
+ "content": "<|place▁holder▁no▁205|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128206": {
+ "content": "<|place▁holder▁no▁206|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128207": {
+ "content": "<|place▁holder▁no▁207|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128208": {
+ "content": "<|place▁holder▁no▁208|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128209": {
+ "content": "<|place▁holder▁no▁209|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128210": {
+ "content": "<|place▁holder▁no▁210|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128211": {
+ "content": "<|place▁holder▁no▁211|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128212": {
+ "content": "<|place▁holder▁no▁212|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128213": {
+ "content": "<|place▁holder▁no▁213|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128214": {
+ "content": "<|place▁holder▁no▁214|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128215": {
+ "content": "<|place▁holder▁no▁215|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128216": {
+ "content": "<|place▁holder▁no▁216|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128217": {
+ "content": "<|place▁holder▁no▁217|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128218": {
+ "content": "<|place▁holder▁no▁218|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128219": {
+ "content": "<|place▁holder▁no▁219|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128220": {
+ "content": "<|place▁holder▁no▁220|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128221": {
+ "content": "<|place▁holder▁no▁221|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128222": {
+ "content": "<|place▁holder▁no▁222|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128223": {
+ "content": "<|place▁holder▁no▁223|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128224": {
+ "content": "<|place▁holder▁no▁224|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128225": {
+ "content": "<|place▁holder▁no▁225|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128226": {
+ "content": "<|place▁holder▁no▁226|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128227": {
+ "content": "<|place▁holder▁no▁227|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128228": {
+ "content": "<|place▁holder▁no▁228|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128229": {
+ "content": "<|place▁holder▁no▁229|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128230": {
+ "content": "<|place▁holder▁no▁230|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128231": {
+ "content": "<|place▁holder▁no▁231|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128232": {
+ "content": "<|place▁holder▁no▁232|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128233": {
+ "content": "<|place▁holder▁no▁233|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128234": {
+ "content": "<|place▁holder▁no▁234|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128235": {
+ "content": "<|place▁holder▁no▁235|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128236": {
+ "content": "<|place▁holder▁no▁236|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128237": {
+ "content": "<|place▁holder▁no▁237|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128238": {
+ "content": "<|place▁holder▁no▁238|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128239": {
+ "content": "<|place▁holder▁no▁239|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128240": {
+ "content": "<|place▁holder▁no▁240|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128241": {
+ "content": "<|place▁holder▁no▁241|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128242": {
+ "content": "<|place▁holder▁no▁242|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128243": {
+ "content": "<|place▁holder▁no▁243|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128244": {
+ "content": "<|place▁holder▁no▁244|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128245": {
+ "content": "<|place▁holder▁no▁245|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128246": {
+ "content": "<|place▁holder▁no▁246|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128247": {
+ "content": "<|place▁holder▁no▁247|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128248": {
+ "content": "<|place▁holder▁no▁248|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128249": {
+ "content": "<|place▁holder▁no▁249|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128250": {
+ "content": "<|place▁holder▁no▁250|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128251": {
+ "content": "<|place▁holder▁no▁251|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128252": {
+ "content": "<|place▁holder▁no▁252|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128253": {
+ "content": "<|place▁holder▁no▁253|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128254": {
+ "content": "<|place▁holder▁no▁254|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128255": {
+ "content": "<|place▁holder▁no▁255|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128256": {
+ "content": "<|place▁holder▁no▁256|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128257": {
+ "content": "<|place▁holder▁no▁257|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128258": {
+ "content": "<|place▁holder▁no▁258|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128259": {
+ "content": "<|place▁holder▁no▁259|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128260": {
+ "content": "<|place▁holder▁no▁260|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128261": {
+ "content": "<|place▁holder▁no▁261|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128262": {
+ "content": "<|place▁holder▁no▁262|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128263": {
+ "content": "<|place▁holder▁no▁263|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128264": {
+ "content": "<|place▁holder▁no▁264|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128265": {
+ "content": "<|place▁holder▁no▁265|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128266": {
+ "content": "<|place▁holder▁no▁266|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128267": {
+ "content": "<|place▁holder▁no▁267|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128268": {
+ "content": "<|place▁holder▁no▁268|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128269": {
+ "content": "<|place▁holder▁no▁269|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128270": {
+ "content": "<|place▁holder▁no▁270|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128271": {
+ "content": "<|place▁holder▁no▁271|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128272": {
+ "content": "<|place▁holder▁no▁272|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128273": {
+ "content": "<|place▁holder▁no▁273|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128274": {
+ "content": "<|place▁holder▁no▁274|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128275": {
+ "content": "<|place▁holder▁no▁275|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128276": {
+ "content": "<|place▁holder▁no▁276|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128277": {
+ "content": "<|place▁holder▁no▁277|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128278": {
+ "content": "<|place▁holder▁no▁278|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128279": {
+ "content": "<|place▁holder▁no▁279|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128280": {
+ "content": "<|place▁holder▁no▁280|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128281": {
+ "content": "<|place▁holder▁no▁281|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128282": {
+ "content": "<|place▁holder▁no▁282|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128283": {
+ "content": "<|place▁holder▁no▁283|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128284": {
+ "content": "<|place▁holder▁no▁284|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128285": {
+ "content": "<|place▁holder▁no▁285|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128286": {
+ "content": "<|place▁holder▁no▁286|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128287": {
+ "content": "<|place▁holder▁no▁287|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128288": {
+ "content": "<|place▁holder▁no▁288|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128289": {
+ "content": "<|place▁holder▁no▁289|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128290": {
+ "content": "<|place▁holder▁no▁290|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128291": {
+ "content": "<|place▁holder▁no▁291|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128292": {
+ "content": "<|place▁holder▁no▁292|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128293": {
+ "content": "<|place▁holder▁no▁293|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128294": {
+ "content": "<|place▁holder▁no▁294|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128295": {
+ "content": "<|place▁holder▁no▁295|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128296": {
+ "content": "<|place▁holder▁no▁296|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128297": {
+ "content": "<|place▁holder▁no▁297|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128298": {
+ "content": "<|place▁holder▁no▁298|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128299": {
+ "content": "<|place▁holder▁no▁299|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128300": {
+ "content": "<|place▁holder▁no▁300|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128301": {
+ "content": "<|place▁holder▁no▁301|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128302": {
+ "content": "<|place▁holder▁no▁302|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128303": {
+ "content": "<|place▁holder▁no▁303|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128304": {
+ "content": "<|place▁holder▁no▁304|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128305": {
+ "content": "<|place▁holder▁no▁305|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128306": {
+ "content": "<|place▁holder▁no▁306|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128307": {
+ "content": "<|place▁holder▁no▁307|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128308": {
+ "content": "<|place▁holder▁no▁308|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128309": {
+ "content": "<|place▁holder▁no▁309|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128310": {
+ "content": "<|place▁holder▁no▁310|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128311": {
+ "content": "<|place▁holder▁no▁311|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128312": {
+ "content": "<|place▁holder▁no▁312|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128313": {
+ "content": "<|place▁holder▁no▁313|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128314": {
+ "content": "<|place▁holder▁no▁314|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128315": {
+ "content": "<|place▁holder▁no▁315|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128316": {
+ "content": "<|place▁holder▁no▁316|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128317": {
+ "content": "<|place▁holder▁no▁317|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128318": {
+ "content": "<|place▁holder▁no▁318|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128319": {
+ "content": "<|place▁holder▁no▁319|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128320": {
+ "content": "<|place▁holder▁no▁320|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128321": {
+ "content": "<|place▁holder▁no▁321|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128322": {
+ "content": "<|place▁holder▁no▁322|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128323": {
+ "content": "<|place▁holder▁no▁323|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128324": {
+ "content": "<|place▁holder▁no▁324|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128325": {
+ "content": "<|place▁holder▁no▁325|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128326": {
+ "content": "<|place▁holder▁no▁326|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128327": {
+ "content": "<|place▁holder▁no▁327|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128328": {
+ "content": "<|place▁holder▁no▁328|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128329": {
+ "content": "<|place▁holder▁no▁329|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128330": {
+ "content": "<|place▁holder▁no▁330|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128331": {
+ "content": "<|place▁holder▁no▁331|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128332": {
+ "content": "<|place▁holder▁no▁332|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128333": {
+ "content": "<|place▁holder▁no▁333|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128334": {
+ "content": "<|place▁holder▁no▁334|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128335": {
+ "content": "<|place▁holder▁no▁335|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128336": {
+ "content": "<|place▁holder▁no▁336|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128337": {
+ "content": "<|place▁holder▁no▁337|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128338": {
+ "content": "<|place▁holder▁no▁338|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128339": {
+ "content": "<|place▁holder▁no▁339|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128340": {
+ "content": "<|place▁holder▁no▁340|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128341": {
+ "content": "<|place▁holder▁no▁341|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128342": {
+ "content": "<|place▁holder▁no▁342|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128343": {
+ "content": "<|place▁holder▁no▁343|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128344": {
+ "content": "<|place▁holder▁no▁344|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128345": {
+ "content": "<|place▁holder▁no▁345|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128346": {
+ "content": "<|place▁holder▁no▁346|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128347": {
+ "content": "<|place▁holder▁no▁347|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128348": {
+ "content": "<|place▁holder▁no▁348|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128349": {
+ "content": "<|place▁holder▁no▁349|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128350": {
+ "content": "<|place▁holder▁no▁350|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128351": {
+ "content": "<|place▁holder▁no▁351|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128352": {
+ "content": "<|place▁holder▁no▁352|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128353": {
+ "content": "<|place▁holder▁no▁353|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128354": {
+ "content": "<|place▁holder▁no▁354|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128355": {
+ "content": "<|place▁holder▁no▁355|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128356": {
+ "content": "<|place▁holder▁no▁356|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128357": {
+ "content": "<|place▁holder▁no▁357|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128358": {
+ "content": "<|place▁holder▁no▁358|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128359": {
+ "content": "<|place▁holder▁no▁359|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128360": {
+ "content": "<|place▁holder▁no▁360|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128361": {
+ "content": "<|place▁holder▁no▁361|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128362": {
+ "content": "<|place▁holder▁no▁362|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128363": {
+ "content": "<|place▁holder▁no▁363|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128364": {
+ "content": "<|place▁holder▁no▁364|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128365": {
+ "content": "<|place▁holder▁no▁365|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128366": {
+ "content": "<|place▁holder▁no▁366|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128367": {
+ "content": "<|place▁holder▁no▁367|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128368": {
+ "content": "<|place▁holder▁no▁368|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128369": {
+ "content": "<|place▁holder▁no▁369|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128370": {
+ "content": "<|place▁holder▁no▁370|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128371": {
+ "content": "<|place▁holder▁no▁371|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128372": {
+ "content": "<|place▁holder▁no▁372|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128373": {
+ "content": "<|place▁holder▁no▁373|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128374": {
+ "content": "<|place▁holder▁no▁374|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128375": {
+ "content": "<|place▁holder▁no▁375|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128376": {
+ "content": "<|place▁holder▁no▁376|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128377": {
+ "content": "<|place▁holder▁no▁377|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128378": {
+ "content": "<|place▁holder▁no▁378|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128379": {
+ "content": "<|place▁holder▁no▁379|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128380": {
+ "content": "<|place▁holder▁no▁380|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128381": {
+ "content": "<|place▁holder▁no▁381|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128382": {
+ "content": "<|place▁holder▁no▁382|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128383": {
+ "content": "<|place▁holder▁no▁383|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128384": {
+ "content": "<|place▁holder▁no▁384|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128385": {
+ "content": "<|place▁holder▁no▁385|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128386": {
+ "content": "<|place▁holder▁no▁386|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128387": {
+ "content": "<|place▁holder▁no▁387|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128388": {
+ "content": "<|place▁holder▁no▁388|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128389": {
+ "content": "<|place▁holder▁no▁389|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128390": {
+ "content": "<|place▁holder▁no▁390|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128391": {
+ "content": "<|place▁holder▁no▁391|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128392": {
+ "content": "<|place▁holder▁no▁392|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128393": {
+ "content": "<|place▁holder▁no▁393|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128394": {
+ "content": "<|place▁holder▁no▁394|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128395": {
+ "content": "<|place▁holder▁no▁395|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128396": {
+ "content": "<|place▁holder▁no▁396|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128397": {
+ "content": "<|place▁holder▁no▁397|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128398": {
+ "content": "<|place▁holder▁no▁398|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128399": {
+ "content": "<|place▁holder▁no▁399|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128400": {
+ "content": "<|place▁holder▁no▁400|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128401": {
+ "content": "<|place▁holder▁no▁401|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128402": {
+ "content": "<|place▁holder▁no▁402|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128403": {
+ "content": "<|place▁holder▁no▁403|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128404": {
+ "content": "<|place▁holder▁no▁404|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128405": {
+ "content": "<|place▁holder▁no▁405|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128406": {
+ "content": "<|place▁holder▁no▁406|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128407": {
+ "content": "<|place▁holder▁no▁407|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128408": {
+ "content": "<|place▁holder▁no▁408|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128409": {
+ "content": "<|place▁holder▁no▁409|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128410": {
+ "content": "<|place▁holder▁no▁410|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128411": {
+ "content": "<|place▁holder▁no▁411|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128412": {
+ "content": "<|place▁holder▁no▁412|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128413": {
+ "content": "<|place▁holder▁no▁413|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128414": {
+ "content": "<|place▁holder▁no▁414|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128415": {
+ "content": "<|place▁holder▁no▁415|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128416": {
+ "content": "<|place▁holder▁no▁416|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128417": {
+ "content": "<|place▁holder▁no▁417|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128418": {
+ "content": "<|place▁holder▁no▁418|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128419": {
+ "content": "<|place▁holder▁no▁419|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128420": {
+ "content": "<|place▁holder▁no▁420|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128421": {
+ "content": "<|place▁holder▁no▁421|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128422": {
+ "content": "<|place▁holder▁no▁422|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128423": {
+ "content": "<|place▁holder▁no▁423|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128424": {
+ "content": "<|place▁holder▁no▁424|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128425": {
+ "content": "<|place▁holder▁no▁425|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128426": {
+ "content": "<|place▁holder▁no▁426|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128427": {
+ "content": "<|place▁holder▁no▁427|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128428": {
+ "content": "<|place▁holder▁no▁428|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128429": {
+ "content": "<|place▁holder▁no▁429|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128430": {
+ "content": "<|place▁holder▁no▁430|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128431": {
+ "content": "<|place▁holder▁no▁431|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128432": {
+ "content": "<|place▁holder▁no▁432|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128433": {
+ "content": "<|place▁holder▁no▁433|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128434": {
+ "content": "<|place▁holder▁no▁434|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128435": {
+ "content": "<|place▁holder▁no▁435|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128436": {
+ "content": "<|place▁holder▁no▁436|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128437": {
+ "content": "<|place▁holder▁no▁437|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128438": {
+ "content": "<|place▁holder▁no▁438|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128439": {
+ "content": "<|place▁holder▁no▁439|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128440": {
+ "content": "<|place▁holder▁no▁440|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128441": {
+ "content": "<|place▁holder▁no▁441|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128442": {
+ "content": "<|place▁holder▁no▁442|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128443": {
+ "content": "<|place▁holder▁no▁443|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128444": {
+ "content": "<|place▁holder▁no▁444|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128445": {
+ "content": "<|place▁holder▁no▁445|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128446": {
+ "content": "<|place▁holder▁no▁446|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128447": {
+ "content": "<|place▁holder▁no▁447|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128448": {
+ "content": "<|place▁holder▁no▁448|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128449": {
+ "content": "<|place▁holder▁no▁449|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128450": {
+ "content": "<|place▁holder▁no▁450|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128451": {
+ "content": "<|place▁holder▁no▁451|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128452": {
+ "content": "<|place▁holder▁no▁452|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128453": {
+ "content": "<|place▁holder▁no▁453|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128454": {
+ "content": "<|place▁holder▁no▁454|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128455": {
+ "content": "<|place▁holder▁no▁455|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128456": {
+ "content": "<|place▁holder▁no▁456|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128457": {
+ "content": "<|place▁holder▁no▁457|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128458": {
+ "content": "<|place▁holder▁no▁458|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128459": {
+ "content": "<|place▁holder▁no▁459|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128460": {
+ "content": "<|place▁holder▁no▁460|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128461": {
+ "content": "<|place▁holder▁no▁461|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128462": {
+ "content": "<|place▁holder▁no▁462|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128463": {
+ "content": "<|place▁holder▁no▁463|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128464": {
+ "content": "<|place▁holder▁no▁464|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128465": {
+ "content": "<|place▁holder▁no▁465|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128466": {
+ "content": "<|place▁holder▁no▁466|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128467": {
+ "content": "<|place▁holder▁no▁467|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128468": {
+ "content": "<|place▁holder▁no▁468|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128469": {
+ "content": "<|place▁holder▁no▁469|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128470": {
+ "content": "<|place▁holder▁no▁470|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128471": {
+ "content": "<|place▁holder▁no▁471|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128472": {
+ "content": "<|place▁holder▁no▁472|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128473": {
+ "content": "<|place▁holder▁no▁473|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128474": {
+ "content": "<|place▁holder▁no▁474|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128475": {
+ "content": "<|place▁holder▁no▁475|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128476": {
+ "content": "<|place▁holder▁no▁476|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128477": {
+ "content": "<|place▁holder▁no▁477|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128478": {
+ "content": "<|place▁holder▁no▁478|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128479": {
+ "content": "<|place▁holder▁no▁479|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128480": {
+ "content": "<|place▁holder▁no▁480|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128481": {
+ "content": "<|place▁holder▁no▁481|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128482": {
+ "content": "<|place▁holder▁no▁482|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128483": {
+ "content": "<|place▁holder▁no▁483|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128484": {
+ "content": "<|place▁holder▁no▁484|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128485": {
+ "content": "<|place▁holder▁no▁485|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128486": {
+ "content": "<|place▁holder▁no▁486|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128487": {
+ "content": "<|place▁holder▁no▁487|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128488": {
+ "content": "<|place▁holder▁no▁488|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128489": {
+ "content": "<|place▁holder▁no▁489|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128490": {
+ "content": "<|place▁holder▁no▁490|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128491": {
+ "content": "<|place▁holder▁no▁491|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128492": {
+ "content": "<|place▁holder▁no▁492|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128493": {
+ "content": "<|place▁holder▁no▁493|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128494": {
+ "content": "<|place▁holder▁no▁494|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128495": {
+ "content": "<|place▁holder▁no▁495|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128496": {
+ "content": "<|place▁holder▁no▁496|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128497": {
+ "content": "<|place▁holder▁no▁497|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128498": {
+ "content": "<|place▁holder▁no▁498|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128499": {
+ "content": "<|place▁holder▁no▁499|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128500": {
+ "content": "<|place▁holder▁no▁500|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128501": {
+ "content": "<|place▁holder▁no▁501|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128502": {
+ "content": "<|place▁holder▁no▁502|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128503": {
+ "content": "<|place▁holder▁no▁503|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128504": {
+ "content": "<|place▁holder▁no▁504|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128505": {
+ "content": "<|place▁holder▁no▁505|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128506": {
+ "content": "<|place▁holder▁no▁506|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128507": {
+ "content": "<|place▁holder▁no▁507|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128508": {
+ "content": "<|place▁holder▁no▁508|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128509": {
+ "content": "<|place▁holder▁no▁509|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128510": {
+ "content": "<|place▁holder▁no▁510|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128511": {
+ "content": "<|place▁holder▁no▁511|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128512": {
+ "content": "<|place▁holder▁no▁512|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128513": {
+ "content": "<|place▁holder▁no▁513|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128514": {
+ "content": "<|place▁holder▁no▁514|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128515": {
+ "content": "<|place▁holder▁no▁515|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128516": {
+ "content": "<|place▁holder▁no▁516|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128517": {
+ "content": "<|place▁holder▁no▁517|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128518": {
+ "content": "<|place▁holder▁no▁518|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128519": {
+ "content": "<|place▁holder▁no▁519|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128520": {
+ "content": "<|place▁holder▁no▁520|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128521": {
+ "content": "<|place▁holder▁no▁521|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128522": {
+ "content": "<|place▁holder▁no▁522|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128523": {
+ "content": "<|place▁holder▁no▁523|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128524": {
+ "content": "<|place▁holder▁no▁524|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128525": {
+ "content": "<|place▁holder▁no▁525|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128526": {
+ "content": "<|place▁holder▁no▁526|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128527": {
+ "content": "<|place▁holder▁no▁527|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128528": {
+ "content": "<|place▁holder▁no▁528|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128529": {
+ "content": "<|place▁holder▁no▁529|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128530": {
+ "content": "<|place▁holder▁no▁530|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128531": {
+ "content": "<|place▁holder▁no▁531|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128532": {
+ "content": "<|place▁holder▁no▁532|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128533": {
+ "content": "<|place▁holder▁no▁533|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128534": {
+ "content": "<|place▁holder▁no▁534|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128535": {
+ "content": "<|place▁holder▁no▁535|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128536": {
+ "content": "<|place▁holder▁no▁536|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128537": {
+ "content": "<|place▁holder▁no▁537|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128538": {
+ "content": "<|place▁holder▁no▁538|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128539": {
+ "content": "<|place▁holder▁no▁539|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128540": {
+ "content": "<|place▁holder▁no▁540|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128541": {
+ "content": "<|place▁holder▁no▁541|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128542": {
+ "content": "<|place▁holder▁no▁542|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128543": {
+ "content": "<|place▁holder▁no▁543|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128544": {
+ "content": "<|place▁holder▁no▁544|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128545": {
+ "content": "<|place▁holder▁no▁545|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128546": {
+ "content": "<|place▁holder▁no▁546|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128547": {
+ "content": "<|place▁holder▁no▁547|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128548": {
+ "content": "<|place▁holder▁no▁548|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128549": {
+ "content": "<|place▁holder▁no▁549|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128550": {
+ "content": "<|place▁holder▁no▁550|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128551": {
+ "content": "<|place▁holder▁no▁551|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128552": {
+ "content": "<|place▁holder▁no▁552|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128553": {
+ "content": "<|place▁holder▁no▁553|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128554": {
+ "content": "<|place▁holder▁no▁554|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128555": {
+ "content": "<|place▁holder▁no▁555|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128556": {
+ "content": "<|place▁holder▁no▁556|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128557": {
+ "content": "<|place▁holder▁no▁557|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128558": {
+ "content": "<|place▁holder▁no▁558|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128559": {
+ "content": "<|place▁holder▁no▁559|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128560": {
+ "content": "<|place▁holder▁no▁560|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128561": {
+ "content": "<|place▁holder▁no▁561|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128562": {
+ "content": "<|place▁holder▁no▁562|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128563": {
+ "content": "<|place▁holder▁no▁563|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128564": {
+ "content": "<|place▁holder▁no▁564|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128565": {
+ "content": "<|place▁holder▁no▁565|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128566": {
+ "content": "<|place▁holder▁no▁566|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128567": {
+ "content": "<|place▁holder▁no▁567|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128568": {
+ "content": "<|place▁holder▁no▁568|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128569": {
+ "content": "<|place▁holder▁no▁569|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128570": {
+ "content": "<|place▁holder▁no▁570|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128571": {
+ "content": "<|place▁holder▁no▁571|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128572": {
+ "content": "<|place▁holder▁no▁572|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128573": {
+ "content": "<|place▁holder▁no▁573|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128574": {
+ "content": "<|place▁holder▁no▁574|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128575": {
+ "content": "<|place▁holder▁no▁575|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128576": {
+ "content": "<|place▁holder▁no▁576|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128577": {
+ "content": "<|place▁holder▁no▁577|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128578": {
+ "content": "<|place▁holder▁no▁578|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128579": {
+ "content": "<|place▁holder▁no▁579|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128580": {
+ "content": "<|place▁holder▁no▁580|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128581": {
+ "content": "<|place▁holder▁no▁581|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128582": {
+ "content": "<|place▁holder▁no▁582|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128583": {
+ "content": "<|place▁holder▁no▁583|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128584": {
+ "content": "<|place▁holder▁no▁584|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128585": {
+ "content": "<|place▁holder▁no▁585|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128586": {
+ "content": "<|place▁holder▁no▁586|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128587": {
+ "content": "<|place▁holder▁no▁587|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128588": {
+ "content": "<|place▁holder▁no▁588|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128589": {
+ "content": "<|place▁holder▁no▁589|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128590": {
+ "content": "<|place▁holder▁no▁590|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128591": {
+ "content": "<|place▁holder▁no▁591|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128592": {
+ "content": "<|place▁holder▁no▁592|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128593": {
+ "content": "<|place▁holder▁no▁593|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128594": {
+ "content": "<|place▁holder▁no▁594|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128595": {
+ "content": "<|place▁holder▁no▁595|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128596": {
+ "content": "<|place▁holder▁no▁596|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128597": {
+ "content": "<|place▁holder▁no▁597|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128598": {
+ "content": "<|place▁holder▁no▁598|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128599": {
+ "content": "<|place▁holder▁no▁599|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128600": {
+ "content": "<|place▁holder▁no▁600|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128601": {
+ "content": "<|place▁holder▁no▁601|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128602": {
+ "content": "<|place▁holder▁no▁602|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128603": {
+ "content": "<|place▁holder▁no▁603|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128604": {
+ "content": "<|place▁holder▁no▁604|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128605": {
+ "content": "<|place▁holder▁no▁605|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128606": {
+ "content": "<|place▁holder▁no▁606|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128607": {
+ "content": "<|place▁holder▁no▁607|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128608": {
+ "content": "<|place▁holder▁no▁608|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128609": {
+ "content": "<|place▁holder▁no▁609|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128610": {
+ "content": "<|place▁holder▁no▁610|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128611": {
+ "content": "<|place▁holder▁no▁611|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128612": {
+ "content": "<|place▁holder▁no▁612|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128613": {
+ "content": "<|place▁holder▁no▁613|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128614": {
+ "content": "<|place▁holder▁no▁614|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128615": {
+ "content": "<|place▁holder▁no▁615|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128616": {
+ "content": "<|place▁holder▁no▁616|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128617": {
+ "content": "<|place▁holder▁no▁617|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128618": {
+ "content": "<|place▁holder▁no▁618|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128619": {
+ "content": "<|place▁holder▁no▁619|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128620": {
+ "content": "<|place▁holder▁no▁620|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128621": {
+ "content": "<|place▁holder▁no▁621|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128622": {
+ "content": "<|place▁holder▁no▁622|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128623": {
+ "content": "<|place▁holder▁no▁623|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128624": {
+ "content": "<|place▁holder▁no▁624|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128625": {
+ "content": "<|place▁holder▁no▁625|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128626": {
+ "content": "<|place▁holder▁no▁626|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128627": {
+ "content": "<|place▁holder▁no▁627|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128628": {
+ "content": "<|place▁holder▁no▁628|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128629": {
+ "content": "<|place▁holder▁no▁629|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128630": {
+ "content": "<|place▁holder▁no▁630|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128631": {
+ "content": "<|place▁holder▁no▁631|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128632": {
+ "content": "<|place▁holder▁no▁632|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128633": {
+ "content": "<|place▁holder▁no▁633|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128634": {
+ "content": "<|place▁holder▁no▁634|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128635": {
+ "content": "<|place▁holder▁no▁635|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128636": {
+ "content": "<|place▁holder▁no▁636|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128637": {
+ "content": "<|place▁holder▁no▁637|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128638": {
+ "content": "<|place▁holder▁no▁638|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128639": {
+ "content": "<|place▁holder▁no▁639|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128640": {
+ "content": "<|place▁holder▁no▁640|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128641": {
+ "content": "<|place▁holder▁no▁641|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128642": {
+ "content": "<|place▁holder▁no▁642|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128643": {
+ "content": "<|place▁holder▁no▁643|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128644": {
+ "content": "<|place▁holder▁no▁644|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128645": {
+ "content": "<|place▁holder▁no▁645|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128646": {
+ "content": "<|place▁holder▁no▁646|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128647": {
+ "content": "<|place▁holder▁no▁647|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128648": {
+ "content": "<|place▁holder▁no▁648|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128649": {
+ "content": "<|place▁holder▁no▁649|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128650": {
+ "content": "<|place▁holder▁no▁650|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128651": {
+ "content": "<|place▁holder▁no▁651|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128652": {
+ "content": "<|place▁holder▁no▁652|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128653": {
+ "content": "<|place▁holder▁no▁653|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128654": {
+ "content": "<|place▁holder▁no▁654|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128655": {
+ "content": "<|place▁holder▁no▁655|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128656": {
+ "content": "<|place▁holder▁no▁656|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128657": {
+ "content": "<|place▁holder▁no▁657|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128658": {
+ "content": "<|place▁holder▁no▁658|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128659": {
+ "content": "<|place▁holder▁no▁659|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128660": {
+ "content": "<|place▁holder▁no▁660|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128661": {
+ "content": "<|place▁holder▁no▁661|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128662": {
+ "content": "<|place▁holder▁no▁662|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128663": {
+ "content": "<|place▁holder▁no▁663|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128664": {
+ "content": "<|place▁holder▁no▁664|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128665": {
+ "content": "<|place▁holder▁no▁665|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128666": {
+ "content": "<|place▁holder▁no▁666|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128667": {
+ "content": "<|place▁holder▁no▁667|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128668": {
+ "content": "<|place▁holder▁no▁668|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128669": {
+ "content": "<|place▁holder▁no▁669|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128670": {
+ "content": "<|place▁holder▁no▁670|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128671": {
+ "content": "<|place▁holder▁no▁671|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128672": {
+ "content": "<|place▁holder▁no▁672|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128673": {
+ "content": "<|place▁holder▁no▁673|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128674": {
+ "content": "<|place▁holder▁no▁674|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128675": {
+ "content": "<|place▁holder▁no▁675|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128676": {
+ "content": "<|place▁holder▁no▁676|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128677": {
+ "content": "<|place▁holder▁no▁677|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128678": {
+ "content": "<|place▁holder▁no▁678|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128679": {
+ "content": "<|place▁holder▁no▁679|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128680": {
+ "content": "<|place▁holder▁no▁680|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128681": {
+ "content": "<|place▁holder▁no▁681|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128682": {
+ "content": "<|place▁holder▁no▁682|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128683": {
+ "content": "<|place▁holder▁no▁683|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128684": {
+ "content": "<|place▁holder▁no▁684|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128685": {
+ "content": "<|place▁holder▁no▁685|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128686": {
+ "content": "<|place▁holder▁no▁686|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128687": {
+ "content": "<|place▁holder▁no▁687|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128688": {
+ "content": "<|place▁holder▁no▁688|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128689": {
+ "content": "<|place▁holder▁no▁689|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128690": {
+ "content": "<|place▁holder▁no▁690|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128691": {
+ "content": "<|place▁holder▁no▁691|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128692": {
+ "content": "<|place▁holder▁no▁692|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128693": {
+ "content": "<|place▁holder▁no▁693|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128694": {
+ "content": "<|place▁holder▁no▁694|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128695": {
+ "content": "<|place▁holder▁no▁695|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128696": {
+ "content": "<|place▁holder▁no▁696|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128697": {
+ "content": "<|place▁holder▁no▁697|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128698": {
+ "content": "<|place▁holder▁no▁698|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128699": {
+ "content": "<|place▁holder▁no▁699|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128700": {
+ "content": "<|place▁holder▁no▁700|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128701": {
+ "content": "<|place▁holder▁no▁701|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128702": {
+ "content": "<|place▁holder▁no▁702|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128703": {
+ "content": "<|place▁holder▁no▁703|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128704": {
+ "content": "<|place▁holder▁no▁704|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128705": {
+ "content": "<|place▁holder▁no▁705|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128706": {
+ "content": "<|place▁holder▁no▁706|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128707": {
+ "content": "<|place▁holder▁no▁707|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128708": {
+ "content": "<|place▁holder▁no▁708|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128709": {
+ "content": "<|place▁holder▁no▁709|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128710": {
+ "content": "<|place▁holder▁no▁710|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128711": {
+ "content": "<|place▁holder▁no▁711|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128712": {
+ "content": "<|place▁holder▁no▁712|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128713": {
+ "content": "<|place▁holder▁no▁713|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128714": {
+ "content": "<|place▁holder▁no▁714|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128715": {
+ "content": "<|place▁holder▁no▁715|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128716": {
+ "content": "<|place▁holder▁no▁716|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128717": {
+ "content": "<|place▁holder▁no▁717|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128718": {
+ "content": "<|place▁holder▁no▁718|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128719": {
+ "content": "<|place▁holder▁no▁719|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128720": {
+ "content": "<|place▁holder▁no▁720|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128721": {
+ "content": "<|place▁holder▁no▁721|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128722": {
+ "content": "<|place▁holder▁no▁722|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128723": {
+ "content": "<|place▁holder▁no▁723|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128724": {
+ "content": "<|place▁holder▁no▁724|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128725": {
+ "content": "<|place▁holder▁no▁725|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128726": {
+ "content": "<|place▁holder▁no▁726|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128727": {
+ "content": "<|place▁holder▁no▁727|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128728": {
+ "content": "<|place▁holder▁no▁728|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128729": {
+ "content": "<|place▁holder▁no▁729|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128730": {
+ "content": "<|place▁holder▁no▁730|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128731": {
+ "content": "<|place▁holder▁no▁731|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128732": {
+ "content": "<|place▁holder▁no▁732|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128733": {
+ "content": "<|place▁holder▁no▁733|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128734": {
+ "content": "<|place▁holder▁no▁734|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128735": {
+ "content": "<|place▁holder▁no▁735|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128736": {
+ "content": "<|place▁holder▁no▁736|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128737": {
+ "content": "<|place▁holder▁no▁737|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128738": {
+ "content": "<|place▁holder▁no▁738|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128739": {
+ "content": "<|place▁holder▁no▁739|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128740": {
+ "content": "<|place▁holder▁no▁740|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128741": {
+ "content": "<|place▁holder▁no▁741|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128742": {
+ "content": "<|place▁holder▁no▁742|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128743": {
+ "content": "<|place▁holder▁no▁743|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128744": {
+ "content": "<|place▁holder▁no▁744|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128745": {
+ "content": "<|place▁holder▁no▁745|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128746": {
+ "content": "<|place▁holder▁no▁746|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128747": {
+ "content": "<|place▁holder▁no▁747|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128748": {
+ "content": "<|place▁holder▁no▁748|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128749": {
+ "content": "<|place▁holder▁no▁749|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128750": {
+ "content": "<|place▁holder▁no▁750|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128751": {
+ "content": "<|place▁holder▁no▁751|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128752": {
+ "content": "<|place▁holder▁no▁752|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128753": {
+ "content": "<|place▁holder▁no▁753|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128754": {
+ "content": "<|place▁holder▁no▁754|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128755": {
+ "content": "<|place▁holder▁no▁755|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128756": {
+ "content": "<|place▁holder▁no▁756|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128757": {
+ "content": "<|place▁holder▁no▁757|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128758": {
+ "content": "<|place▁holder▁no▁758|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128759": {
+ "content": "<|place▁holder▁no▁759|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128760": {
+ "content": "<|place▁holder▁no▁760|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128761": {
+ "content": "<|place▁holder▁no▁761|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128762": {
+ "content": "<|place▁holder▁no▁762|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128763": {
+ "content": "<|place▁holder▁no▁763|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128764": {
+ "content": "<|place▁holder▁no▁764|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128765": {
+ "content": "<|place▁holder▁no▁765|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128766": {
+ "content": "<|place▁holder▁no▁766|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128767": {
+ "content": "<|place▁holder▁no▁767|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128768": {
+ "content": "<|place▁holder▁no▁768|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128769": {
+ "content": "<|place▁holder▁no▁769|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128770": {
+ "content": "<|place▁holder▁no▁770|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128771": {
+ "content": "<|place▁holder▁no▁771|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128772": {
+ "content": "<|place▁holder▁no▁772|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128773": {
+ "content": "<|place▁holder▁no▁773|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128774": {
+ "content": "<|place▁holder▁no▁774|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128775": {
+ "content": "<|place▁holder▁no▁775|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128776": {
+ "content": "<|place▁holder▁no▁776|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128777": {
+ "content": "<|place▁holder▁no▁777|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128778": {
+ "content": "<|place▁holder▁no▁778|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128779": {
+ "content": "<|place▁holder▁no▁779|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128780": {
+ "content": "<|place▁holder▁no▁780|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128781": {
+ "content": "<|place▁holder▁no▁781|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128782": {
+ "content": "<|place▁holder▁no▁782|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128783": {
+ "content": "<|place▁holder▁no▁783|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128784": {
+ "content": "<|place▁holder▁no▁784|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128785": {
+ "content": "<|place▁holder▁no▁785|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128786": {
+ "content": "<|place▁holder▁no▁786|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128787": {
+ "content": "<|place▁holder▁no▁787|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128788": {
+ "content": "<|place▁holder▁no▁788|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128789": {
+ "content": "<|place▁holder▁no▁789|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128790": {
+ "content": "<|place▁holder▁no▁790|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128791": {
+ "content": "<|place▁holder▁no▁791|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128792": {
+ "content": "<|place▁holder▁no▁792|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128793": {
+ "content": "<|place▁holder▁no▁793|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128794": {
+ "content": "<|place▁holder▁no▁794|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128795": {
+ "content": "<|place▁holder▁no▁795|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128796": {
+ "content": "<|search▁begin|>",
+ "lstrip": false,
+ "normalized": true,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "128797": {
+ "content": "<|search▁end|>",
+ "lstrip": false,
+ "normalized": true,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "128798": {
+ "content": "",
+ "lstrip": false,
+ "normalized": true,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "128799": {
+ "content": "",
+ "lstrip": false,
+ "normalized": true,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "128800": {
+ "content": "<|fim▁hole|>",
+ "lstrip": false,
+ "normalized": true,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "128801": {
+ "content": "<|fim▁begin|>",
+ "lstrip": false,
+ "normalized": true,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "128802": {
+ "content": "<|fim▁end|>",
+ "lstrip": false,
+ "normalized": true,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "128803": {
+ "content": "<|User|>",
+ "lstrip": false,
+ "normalized": true,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "128804": {
+ "content": "<|Assistant|>",
+ "lstrip": false,
+ "normalized": true,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "128805": {
+ "content": "<|EOT|>",
+ "lstrip": false,
+ "normalized": true,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128806": {
+ "content": "<|tool▁calls▁begin|>",
+ "lstrip": false,
+ "normalized": true,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "128807": {
+ "content": "<|tool▁calls▁end|>",
+ "lstrip": false,
+ "normalized": true,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "128808": {
+ "content": "<|tool▁call▁begin|>",
+ "lstrip": false,
+ "normalized": true,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "128809": {
+ "content": "<|tool▁call▁end|>",
+ "lstrip": false,
+ "normalized": true,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "128810": {
+ "content": "<|tool▁outputs▁begin|>",
+ "lstrip": false,
+ "normalized": true,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "128811": {
+ "content": "<|tool▁outputs▁end|>",
+ "lstrip": false,
+ "normalized": true,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "128812": {
+ "content": "<|tool▁output▁begin|>",
+ "lstrip": false,
+ "normalized": true,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "128813": {
+ "content": "<|tool▁output▁end|>",
+ "lstrip": false,
+ "normalized": true,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "128814": {
+ "content": "<|tool▁sep|>",
+ "lstrip": false,
+ "normalized": true,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ }
+ },
+ "bos_token": "<|begin▁of▁sentence|>",
+ "clean_up_tokenization_spaces": false,
+ "eos_token": "<|end▁of▁sentence|>",
+ "extra_special_tokens": {},
+ "legacy": true,
+ "model_max_length": 163840,
+ "pad_token": "<|▁pad▁|>",
+ "padding_side": "left",
+ "sp_model_kwargs": {},
+ "tokenizer_class": "LlamaTokenizerFast",
+ "unk_token": null,
+ "use_default_system_prompt": false,
+ "chat_template": "{% if not add_generation_prompt is defined %}{% set add_generation_prompt = false %}{% endif %}{% if not thinking is defined %}{% set thinking = false %}{% endif %}{% set ns = namespace(is_first=false, is_tool=false, system_prompt='', is_first_sp=true, is_last_user=false) %}{%- for message in messages %}{%- if message['role'] == 'system' %}{%- if ns.is_first_sp %}{% set ns.system_prompt = ns.system_prompt + message['content'] %}{% set ns.is_first_sp = false %}{%- else %}{% set ns.system_prompt = ns.system_prompt + '\n\n' + message['content'] %}{%- endif %}{%- endif %}{%- endfor %}{{ bos_token }}{{ ns.system_prompt }}{%- for message in messages %}{%- if message['role'] == 'user' %}{%- set ns.is_tool = false -%}{%- set ns.is_first = false -%}{%- set ns.is_last_user = true -%}{{'<|User|>' + message['content']}}{%- endif %}{%- if message['role'] == 'assistant' and message['tool_calls'] is defined and message['tool_calls'] is not none %}{%- if ns.is_last_user %}{{'<|Assistant|>'}}{%- endif %}{%- set ns.is_last_user = false -%}{%- set ns.is_first = false %}{%- set ns.is_tool = false -%}{%- for tool in message['tool_calls'] %}{%- if not ns.is_first %}{%- if message['content'] is none %}{{'<|tool▁calls▁begin|><|tool▁call▁begin|>'+ tool['function']['name'] + '<|tool▁sep|>' + tool['function']['arguments'] + '<|tool▁call▁end|>'}}{%- else %}{{message['content'] + '<|tool▁calls▁begin|><|tool▁call▁begin|>' + tool['function']['name'] + '<|tool▁sep|>' + tool['function']['arguments'] + '<|tool▁call▁end|>'}}{%- endif %}{%- set ns.is_first = true -%}{%- else %}{{'<|tool▁call▁begin|>'+ tool['function']['name'] + '<|tool▁sep|>' + tool['function']['arguments'] + '<|tool▁call▁end|>'}}{%- endif %}{%- endfor %}{{'<|tool▁calls▁end|><|end▁of▁sentence|>'}}{%- endif %}{%- if message['role'] == 'assistant' and (message['tool_calls'] is not defined or message['tool_calls'] is none) %}{%- if ns.is_last_user %}{{'<|Assistant|>'}}{%- if message['prefix'] is defined and message['prefix'] and thinking %}{{''}} {%- else %}{{''}}{%- endif %}{%- endif %}{%- set ns.is_last_user = false -%}{%- if ns.is_tool %}{{message['content'] + '<|end▁of▁sentence|>'}}{%- set ns.is_tool = false -%}{%- else %}{%- set content = message['content'] -%}{%- if '' in content %}{%- set content = content.split('', 1)[1] -%}{%- endif %}{{content + '<|end▁of▁sentence|>'}}{%- endif %}{%- endif %}{%- if message['role'] == 'tool' %}{%- set ns.is_last_user = false -%}{%- set ns.is_tool = true -%}{{'<|tool▁output▁begin|>' + message['content'] + '<|tool▁output▁end|>'}}{%- endif %}{%- endfor -%}{%- if add_generation_prompt and ns.is_last_user and not ns.is_tool %}{{'<|Assistant|>'}}{%- if not thinking %}{{''}}{%- else %}{{''}}{%- endif %}{% endif %}"
+}
\ No newline at end of file
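
A minimal usage sketch (not part of the diff above): the `chat_template` added in `tokenizer_config.json` can be exercised through `transformers`' `apply_chat_template`, which forwards extra keyword arguments such as `thinking` into the Jinja context. The repo id below is the one named in this model card; the message contents are illustrative assumptions.

```python
# Sketch: render a prompt with the chat template added in this diff.
# Assumes `transformers` is installed and the repo id is reachable.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("unsloth/DeepSeek-V3.1-Base")

messages = [
    {"role": "system", "content": "You are a helpful assistant."},
    {"role": "user", "content": "Hello!"},
]

# add_generation_prompt=True makes the template append '<|Assistant|>';
# it then emits '</think>' when thinking=False, or '<think>' to open a
# reasoning block when thinking=True.
prompt = tokenizer.apply_chat_template(
    messages,
    tokenize=False,
    add_generation_prompt=True,
    thinking=False,
)
print(prompt)
```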