{
"_name_or_path": "/share_data/data7/fanshengda/mcp-agent/minicpm4_sft/mcp_summary/checkpoint-25000",
"architectures": [
"MiniCPMForCausalLM"
],
"attention_bias": false,
"attention_dropout": 0.0,
"auto_map": {
"AutoConfig": "configuration_minicpm.MiniCPMConfig",
"AutoModel": "modeling_minicpm.MiniCPMForCausalLM",
"AutoModelForCausalLM": "modeling_minicpm.MiniCPMForCausalLM",
"AutoModelForSeq2SeqLM": "modeling_minicpm.MiniCPMForCausalLM",
"AutoModelForSequenceClassification": "modeling_minicpm.MiniCPMForSequenceClassification"
},
"bos_token_id": 1,
"dim_model_base": 256,
"eos_token_id": [
2,
73440
],
"hidden_act": "silu",
"hidden_size": 4096,
"initializer_range": 0.1,
"intermediate_size": 16384,
"max_position_embeddings": 32768,
"model_type": "minicpm",
"num_attention_heads": 32,
"num_hidden_layers": 32,
"num_key_value_heads": 2,
"pad_token_id": 2,
"pretraining_tp": 1,
"rms_norm_eps": 1e-06,
"rope_scaling": {
"long_factor": [
0.9977997200264581,
1.014658295992452,
1.0349680404997148,
1.059429246056193,
1.0888815016813513,
1.1243301355211495,
1.166977103606075,
1.2182568066927284,
1.2798772354275727,
1.3538666751582975,
1.4426259039919596,
1.5489853358570191,
1.6762658237220625,
1.8283407612492941,
2.0096956085876183,
2.225478927469756,
2.481536379650452,
2.784415934557119,
3.1413289096347365,
3.560047844772632,
4.048719380066383,
4.615569542115128,
5.2684819496549835,
6.014438591970396,
6.858830049237097,
7.804668263503327,
8.851768731513417,
9.99600492938444,
11.228766118181639,
12.536757560834843,
13.902257701387796,
15.303885189125953,
16.717837610115794,
18.119465097853947,
19.484965238406907,
20.792956681060105,
22.02571786985731,
23.16995406772833,
24.217054535738416,
25.16289275000465,
26.007284207271347,
26.753240849586767,
27.40615325712662,
27.973003419175363,
28.461674954469114,
28.880393889607006,
29.237306864684626,
29.540186419591297,
29.79624387177199,
30.01202719065413,
30.193382037992453,
30.34545697551969,
30.47273746338473,
30.579096895249787,
30.66785612408345,
30.741845563814174,
30.80346599254902,
30.85474569563567,
30.897392663720595,
30.932841297560394,
30.962293553185553,
30.986754758742034,
31.007064503249293,
31.02392307921529
],
"original_max_position_embeddings": 32768,
"rope_type": "longrope",
"short_factor": [
0.9977997200264581,
1.014658295992452,
1.0349680404997148,
1.059429246056193,
1.0888815016813513,
1.1243301355211495,
1.166977103606075,
1.2182568066927284,
1.2798772354275727,
1.3538666751582975,
1.4426259039919596,
1.5489853358570191,
1.6762658237220625,
1.8283407612492941,
2.0096956085876183,
2.225478927469756,
2.481536379650452,
2.784415934557119,
3.1413289096347365,
3.560047844772632,
4.048719380066383,
4.615569542115128,
5.2684819496549835,
6.014438591970396,
6.858830049237097,
7.804668263503327,
8.851768731513417,
9.99600492938444,
11.228766118181639,
12.536757560834843,
13.902257701387796,
15.303885189125953,
16.717837610115794,
18.119465097853947,
19.484965238406907,
20.792956681060105,
22.02571786985731,
23.16995406772833,
24.217054535738416,
25.16289275000465,
26.007284207271347,
26.753240849586767,
27.40615325712662,
27.973003419175363,
28.461674954469114,
28.880393889607006,
29.237306864684626,
29.540186419591297,
29.79624387177199,
30.01202719065413,
30.193382037992453,
30.34545697551969,
30.47273746338473,
30.579096895249787,
30.66785612408345,
30.741845563814174,
30.80346599254902,
30.85474569563567,
30.897392663720595,
30.932841297560394,
30.962293553185553,
30.986754758742034,
31.007064503249293,
31.02392307921529
]
},
"rope_theta": 10000.0,
"scale_depth": 1.4,
"scale_emb": 12,
"tie_word_embeddings": false,
"torch_dtype": "bfloat16",
"transformers_version": "4.49.0",
"use_cache": true,
"vocab_size": 73448
}