config.tiny.json
{
"architectures": [
"DiaForConditionalGeneration"
],
"bos_token_id": 1026,
"decoder_config": {
"cross_head_dim": 32,
"cross_hidden_size": 256,
"cross_num_attention_heads": 4,
"cross_num_key_value_heads": 4,
"head_dim": 32,
"hidden_act": "silu",
"hidden_size": 512,
"initializer_range": 0.02,
"intermediate_size": 2048,
"max_position_embeddings": 3072,
"model_type": "dia_decoder",
"norm_eps": 0.00001,
"num_attention_heads": 8,
"num_channels": 9,
"num_hidden_layers": 9,
"num_key_value_heads": 2,
"rope_scaling": null,
"rope_theta": 10000.0,
"vocab_size": 1028
},
"delay_pattern": [
0,
8,
9,
10,
11,
12,
13,
14,
15
],
"encoder_config": {
"head_dim": 32,
"hidden_act": "silu",
"hidden_size": 256,
"initializer_range": 0.02,
"intermediate_size": 1024,
"max_position_embeddings": 1024,
"model_type": "dia_encoder",
"norm_eps": 0.00001,
"num_attention_heads": 4,
"num_hidden_layers": 3,
"num_key_value_heads": 4,
"rope_scaling": null,
"rope_theta": 10000.0,
"vocab_size": 256
},
"eos_token_id": 1024,
"initializer_range": 0.02,
"is_encoder_decoder": true,
"model_type": "dia",
"norm_eps": 0.00001,
"pad_token_id": 1025,
"torch_dtype": "float32",
"transformers_version": "4.53.0.dev0"
}
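This is a tiny configuration for the Dia text-to-speech model in Hugging Face Transformers format: an encoder-decoder model (`"is_encoder_decoder": true`) whose decoder generates 9 audio codebook channels (`"num_channels": 9`), staggered by the 9-entry `"delay_pattern"` above. Below is a minimal sketch of loading this file and instantiating an untrained model from it. It assumes the `DiaConfig` and `DiaForConditionalGeneration` classes that ship with transformers >= 4.53 (the version recorded in `"transformers_version"`); the architecture name comes straight from the `"architectures"` field.

```python
# Sketch: load config.tiny.json and build a randomly initialized tiny Dia model.
# Assumption: transformers >= 4.53 with Dia support installed.
from transformers import DiaConfig, DiaForConditionalGeneration

# from_json_file is inherited from PretrainedConfig, so it parses this
# file directly without needing a model repo or directory.
config = DiaConfig.from_json_file("config.tiny.json")

# Instantiate the model from the config alone; this allocates fresh
# (untrained) weights and downloads nothing.
model = DiaForConditionalGeneration(config)
print(model.num_parameters())
```

With a hidden size of 512, 9 decoder layers, and a 3-layer encoder, this "tiny" variant is sized for tests and smoke runs rather than actual speech quality, which is consistent with the `.tiny` suffix in the filename.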