export CUDA_VISIBLE_DEVICES=0,1,2,3,4,5,6,7

model_name_or_path="meta-llama/Llama-2-7b-chat-hf"
super_tokenizer_name_or_path="/home/baaiks/ninglu/code/PluginTransformer/data/outputs/90k_0104+12/super_tokenizer"
output_dir="data/outputs/90k_0104+12-longalpaca_0106"

mkdir -p "${output_dir}"

deepspeed --master_port 12345 --module main.train \
    --model_name_or_path "${model_name_or_path}" \
    --super_tokenizer_name_or_path "${super_tokenizer_name_or_path}" \
    --super_tokenizer_num_hidden_layers 12 \
    --dataset_list "longalpaca_0106" \
    --output_dir "${output_dir}" \
    --learning_rate 1e-5 \
    --num_train_epochs 1 \
    --per_device_train_batch_size 1 \
    --max_steps 850 \
    --logging_strategy "steps" \
    --logging_steps 50 \
    --save_strategy "steps" \
    --save_steps 850 \
    --gradient_checkpointing \
    --deepspeed "data/ds_config/ds_config_stage1.json" \
    | tee "${output_dir}/train.log"
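# The launch above reads its ZeRO config from data/ds_config/ds_config_stage1.json.
# If that file is missing, the heredoc below (run it before the deepspeed command)
# writes a minimal ZeRO stage-1 sketch for the HF Trainer integration; this is an
# assumed example, not the repo's actual config, which may set additional fields.
# The "auto" values are filled in from the TrainingArguments at runtime.
cat > data/ds_config/ds_config_stage1.json <<'EOF'
{
  "train_micro_batch_size_per_gpu": "auto",
  "gradient_accumulation_steps": "auto",
  "gradient_clipping": "auto",
  "zero_optimization": {
    "stage": 1
  },
  "bf16": {
    "enabled": "auto"
  }
}
EOF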