# finetune.sh
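# Four sequential LoRA fine-tuning rounds on the same safety data. Each round
# resumes from the adapter written by the previous round's --output_dir, so the
# commands must run in order. The flags assume an alpaca-lora style finetune.py,
# and round 0 assumes the base adapter 'lora-alpaca/alpaca_13B' already exists.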
# 0: resume from the base Alpaca-13B adapter (lora-alpaca/alpaca_13B)
python finetune.py \
--base_model='decapoda-research/llama-13b-hf' \
--num_epochs=20 \
--cutoff_len=512 \
--group_by_length \
--data_path 'finetune_data/30/alpaca_safety.json' \
--output_dir './lora-alpaca/30/alpaca_13B_finetune_without_regen' \
--lora_target_modules='[q_proj,k_proj,v_proj,o_proj]' \
--lora_r=16 \
--micro_batch_size=8 \
--val_set_size=0 \
--resume_from_checkpoint='lora-alpaca/alpaca_13B'
# 1: resume from the round-0 adapter
python finetune.py \
--base_model='decapoda-research/llama-13b-hf' \
--num_epochs=20 \
--cutoff_len=512 \
--group_by_length \
--data_path 'finetune_data/30/alpaca_safety.json' \
--output_dir './lora-alpaca/30/alpaca_13B_finetune1_without_regen' \
--lora_target_modules='[q_proj,k_proj,v_proj,o_proj]' \
--lora_r=16 \
--micro_batch_size=8 \
--val_set_size=0 \
--resume_from_checkpoint='lora-alpaca/30/alpaca_13B_finetune_without_regen'
# 2: resume from the round-1 adapter
python finetune.py \
--base_model='decapoda-research/llama-13b-hf' \
--num_epochs=20 \
--cutoff_len=512 \
--group_by_length \
--data_path 'finetune_data/30/alpaca_safety.json' \
--output_dir './lora-alpaca/30/alpaca_13B_finetune2_without_regen' \
--lora_target_modules='[q_proj,k_proj,v_proj,o_proj]' \
--lora_r=16 \
--micro_batch_size=8 \
--val_set_size=0 \
--resume_from_checkpoint='lora-alpaca/30/alpaca_13B_finetune1_without_regen'
# 3: resume from the round-2 adapter
python finetune.py \
--base_model='decapoda-research/llama-13b-hf' \
--num_epochs=20 \
--cutoff_len=512 \
--group_by_length \
--data_path 'finetune_data/30/alpaca_safety.json' \
--output_dir './lora-alpaca/30/alpaca_13B_finetune3_without_regen' \
--lora_target_modules='[q_proj,k_proj,v_proj,o_proj]' \
--lora_r=16 \
--micro_batch_size=8 \
--val_set_size=0 \
--resume_from_checkpoint='lora-alpaca/30/alpaca_13B_finetune2_without_regen'