Auraithm committed
Commit 72f2668 · verified · 1 Parent(s): 8753171

Upload config.json with huggingface_hub
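The message above is the default commit message huggingface_hub generates for a file upload. A minimal sketch of how a commit like this one is typically produced, assuming a hypothetical repo id (the actual repo id is not shown on this page):

from huggingface_hub import upload_file

upload_file(
    path_or_fileobj="config.json",   # local file to push
    path_in_repo="config.json",      # destination path inside the repo
    repo_id="Auraithm/<repo-name>",  # hypothetical placeholder; real id not shown here
    commit_message="Upload config.json with huggingface_hub",
)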

Files changed (1)
  1. config.json +8 -6
config.json CHANGED
@@ -2,19 +2,21 @@
   "architectures": [
     "SDARForCausalLM"
   ],
-  "attention_bias": false,
-  "attention_dropout": 0.0,
   "auto_map": {
     "AutoConfig": "configuration_sdar.SDARConfig",
     "AutoModel": "modeling_sdar.SDARModel",
-    "AutoModelForCausalLM": "modeling_sdar.SDARForCausalLM",
-    "AutoTokenizer": "tokenization_qwen2.Qwen2Tokenizer"
+    "AutoModelForCausalLM": "modeling_sdar.SDARForCausalLM"
   },
+  "attention_bias": false,
+  "attention_dropout": 0.0,
+  "attn_implementation": "flex_attention",
   "bos_token_id": 151643,
   "debug": false,
   "eos_token_id": 151643,
+  "block_size": 4,
+  "mask_token_id": 151669,
   "ep_size": 1,
-  "fuse_cross_entropy": false,
+  "fuse_cross_entropy": true,
   "head_dim": 128,
   "hidden_act": "silu",
   "hidden_size": 4096,
@@ -39,4 +41,4 @@
   "use_deepep": false,
   "use_sliding_window": false,
   "vocab_size": 151936
-}
+}
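For context, the auto_map entries kept in this diff are what let transformers resolve the repo's custom SDAR classes at load time. A minimal sketch of reading the updated config, assuming a hypothetical repo id and that the repo ships configuration_sdar.py and modeling_sdar.py alongside config.json:

from transformers import AutoConfig, AutoModelForCausalLM

repo_id = "Auraithm/<repo-name>"  # hypothetical placeholder; real id not shown here

# trust_remote_code=True is required so transformers follows auto_map
# to configuration_sdar.SDARConfig and modeling_sdar.SDARForCausalLM.
config = AutoConfig.from_pretrained(repo_id, trust_remote_code=True)
print(config.block_size)           # 4, added in this commit
print(config.mask_token_id)        # 151669, added in this commit
print(config.attn_implementation)  # "flex_attention", added in this commit

model = AutoModelForCausalLM.from_pretrained(repo_id, trust_remote_code=True)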