prince-canuma committed on
Commit
e0df3dd
·
verified ·
1 Parent(s): 39b44d9

Update config.json

Browse files
Files changed (1) hide show
  1. config.json +0 -84
config.json CHANGED
@@ -42,90 +42,6 @@
42
  "LABEL_1": 1
43
  },
44
  "length_penalty": 1.0,
45
- "llm_config": {
46
- "vocab_size": 151936,
47
- "max_position_embeddings": 32768,
48
- "hidden_size": 4096,
49
- "intermediate_size": 22016,
50
- "num_hidden_layers": 32,
51
- "num_attention_heads": 32,
52
- "use_sliding_window": false,
53
- "sliding_window": 4096,
54
- "max_window_layers": 28,
55
- "num_key_value_heads": 32,
56
- "hidden_act": "silu",
57
- "initializer_range": 0.02,
58
- "rms_norm_eps": 1e-06,
59
- "use_cache": true,
60
- "rope_theta": 10000.0,
61
- "rope_scaling": null,
62
- "attention_dropout": 0.0,
63
- "return_dict": true,
64
- "output_hidden_states": false,
65
- "output_attentions": false,
66
- "torchscript": false,
67
- "torch_dtype": null,
68
- "use_bfloat16": false,
69
- "tf_legacy_loss": false,
70
- "pruned_heads": {},
71
- "tie_word_embeddings": false,
72
- "chunk_size_feed_forward": 0,
73
- "is_encoder_decoder": false,
74
- "is_decoder": false,
75
- "cross_attention_hidden_size": null,
76
- "add_cross_attention": false,
77
- "tie_encoder_decoder": false,
78
- "max_length": 20,
79
- "min_length": 0,
80
- "do_sample": false,
81
- "early_stopping": false,
82
- "num_beams": 1,
83
- "num_beam_groups": 1,
84
- "diversity_penalty": 0.0,
85
- "temperature": 1.0,
86
- "top_k": 50,
87
- "top_p": 1.0,
88
- "typical_p": 1.0,
89
- "repetition_penalty": 1.0,
90
- "length_penalty": 1.0,
91
- "no_repeat_ngram_size": 0,
92
- "encoder_no_repeat_ngram_size": 0,
93
- "bad_words_ids": null,
94
- "num_return_sequences": 1,
95
- "output_scores": false,
96
- "return_dict_in_generate": false,
97
- "forced_bos_token_id": null,
98
- "forced_eos_token_id": null,
99
- "remove_invalid_values": false,
100
- "exponential_decay_length_penalty": null,
101
- "suppress_tokens": null,
102
- "begin_suppress_tokens": null,
103
- "architectures": [
104
- "Qwen2ForCausalLM"
105
- ],
106
- "finetuning_task": null,
107
- "id2label": {
108
- "0": "LABEL_0",
109
- "1": "LABEL_1"
110
- },
111
- "label2id": {
112
- "LABEL_0": 0,
113
- "LABEL_1": 1
114
- },
115
- "tokenizer_class": null,
116
- "prefix": null,
117
- "bos_token_id": null,
118
- "pad_token_id": null,
119
- "eos_token_id": null,
120
- "sep_token_id": null,
121
- "decoder_start_token_id": null,
122
- "task_specific_params": null,
123
- "problem_type": null,
124
- "_name_or_path": "",
125
- "_attn_implementation_autoset": false,
126
- "transformers_version": "4.51.3",
127
- "model_type": "qwen2"
128
- },
129
  "max_dynamic_patch": 12,
130
  "max_length": 20,
131
  "min_dynamic_patch": 1,
 
42
  "LABEL_1": 1
43
  },
44
  "length_penalty": 1.0,
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
45
  "max_dynamic_patch": 12,
46
  "max_length": 20,
47
  "min_dynamic_patch": 1,