set 64000 to 65536

This commit is contained in:
zRzRzRzRzRzRzR
2025-07-03 16:38:32 +08:00
parent 591f89245d
commit ddd07b32b8
2 changed files with 2 additions and 2 deletions

View File

@@ -22,7 +22,7 @@
   "hidden_size": 4096,
   "initializer_range": 0.02,
   "intermediate_size": 13696,
-  "max_position_embeddings": 64000,
+  "max_position_embeddings": 65536,
   "num_attention_heads": 32,
   "num_hidden_layers": 40,
   "num_key_value_heads": 2,

View File

@@ -211,7 +211,7 @@
     "input_ids",
     "attention_mask"
   ],
-  "model_max_length": 64000,
+  "model_max_length": 65536,
  "padding_side": "left",
  "remove_space": false,
  "tokenizer_class": "PreTrainedTokenizer"