liuxz0801 committed on
Commit
f814882
·
verified ·
1 Parent(s): 2c8737c

Update config.json

Browse files
Files changed (1) hide show
  1. config.json +6 -6
config.json CHANGED
@@ -1,13 +1,13 @@
1
  {
2
  "architectures": [
3
- "Telechat3ForCausalLM"
4
  ],
5
  "attention_bias": false,
6
  "attention_dropout": 0.0,
7
  "auto_map": {
8
- "AutoConfig": "configuration_telechat3.Telechat3Config",
9
- "AutoModel": "modeling_telechat3.Telechat3Model",
10
- "AutoModelForCausalLM": "modeling_telechat3.Telechat3ForCausalLM"
11
  },
12
  "bos_token_id": 1,
13
  "eos_token_id": 2,
@@ -16,7 +16,7 @@
16
  "hidden_size": 6144,
17
  "initializer_range": 0.0048,
18
  "intermediate_size": 24576,
19
- "max_position_embeddings": 131072,
20
  "mlp_bias": false,
21
  "model_type": "telechat3",
22
  "num_attention_heads": 48,
@@ -28,7 +28,7 @@
28
  "rope_scaling": {
29
  "beta_fast": 32.0,
30
  "beta_slow": 1.0,
31
- "factor": 16.0,
32
  "original_max_position_embeddings": 8192,
33
  "rope_type": "telechat3-yarn",
34
  "type": "telechat3-yarn"
 
1
  {
2
  "architectures": [
3
+ "TeleChat3ForCausalLM"
4
  ],
5
  "attention_bias": false,
6
  "attention_dropout": 0.0,
7
  "auto_map": {
8
+ "AutoConfig": "configuration_telechat3.TeleChat3Config",
9
+ "AutoModel": "modeling_telechat3.TeleChat3Model",
10
+ "AutoModelForCausalLM": "modeling_telechat3.TeleChat3ForCausalLM"
11
  },
12
  "bos_token_id": 1,
13
  "eos_token_id": 2,
 
16
  "hidden_size": 6144,
17
  "initializer_range": 0.0048,
18
  "intermediate_size": 24576,
19
+ "max_position_embeddings": 32768,
20
  "mlp_bias": false,
21
  "model_type": "telechat3",
22
  "num_attention_heads": 48,
 
28
  "rope_scaling": {
29
  "beta_fast": 32.0,
30
  "beta_slow": 1.0,
31
+ "factor": 4.0,
32
  "original_max_position_embeddings": 8192,
33
  "rope_type": "telechat3-yarn",
34
  "type": "telechat3-yarn"