-
Notifications
You must be signed in to change notification settings - Fork 3
/
Copy pathbert-uncased-k-fold.sh
33 lines (28 loc) · 6.96 KB
/
bert-uncased-k-fold.sh
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
# 10-fold bert-base-uncased, content = Question_Answer, seed 2020.
# One training run per fold; batch_size 4 with accumulation_steps 2 gives an
# effective batch of 8. Each fold runs regardless of earlier failures
# (matching the original flat command list — no set -e on purpose).
for fold in {0..9}; do
  python training-k-fold.py \
    --model_name "bert-base-uncased" \
    --content "Question_Answer" \
    --max_len 512 \
    --fold "$fold" \
    --seed 2020 \
    --split "GroupKfold" \
    --n_splits 10 \
    --batch_size 4 \
    --valid_batch_size 32 \
    --accumulation_steps 2 \
    --lr 1e-4 \
    --loss "bce" \
    --augment \
    --num_epoch 8 \
    --num_workers 4
done
# 5-fold bert-base-uncased, seed 2020: separate Answer-only and Question-only
# models (NOTE(review): the original header said "question + answer", but these
# runs actually pass --content "Answer" and --content "Question" individually).
# batch_size 8 with accumulation_steps 1 — same effective batch of 8 as the
# 10-fold Question_Answer runs above.
for content in "Answer" "Question"; do
  for fold in {0..4}; do
    python training-k-fold.py \
      --model_name "bert-base-uncased" \
      --content "$content" \
      --max_len 512 \
      --fold "$fold" \
      --seed 2020 \
      --n_splits 5 \
      --split "GroupKfold" \
      --batch_size 8 \
      --valid_batch_size 32 \
      --accumulation_steps 1 \
      --lr 1e-4 \
      --loss "bce" \
      --augment \
      --num_epoch 8 \
      --num_workers 4
  done
done
# Disabled: 5-fold bert-base-uncased, content = Question_Answer, seed 1010
# ("new version bert-uncased for Ivan").
# NOTE(review): intentionally commented out — presumably kept for reference /
# a second-seed ensemble member; uncomment to reproduce. Confirm before deleting.
# python training-k-fold.py --model_name "bert-base-uncased" --content "Question_Answer" --max_len 512 --fold 0 --seed 1010 --split "GroupKfold" --n_splits 5 --batch_size 8 --valid_batch_size 32 --accumulation_steps 1 --lr 1e-4 --loss "bce" --augment --num_epoch 8 --num_workers 4
# python training-k-fold.py --model_name "bert-base-uncased" --content "Question_Answer" --max_len 512 --fold 1 --seed 1010 --split "GroupKfold" --n_splits 5 --batch_size 8 --valid_batch_size 32 --accumulation_steps 1 --lr 1e-4 --loss "bce" --augment --num_epoch 8 --num_workers 4
# python training-k-fold.py --model_name "bert-base-uncased" --content "Question_Answer" --max_len 512 --fold 2 --seed 1010 --split "GroupKfold" --n_splits 5 --batch_size 8 --valid_batch_size 32 --accumulation_steps 1 --lr 1e-4 --loss "bce" --augment --num_epoch 8 --num_workers 4
# python training-k-fold.py --model_name "bert-base-uncased" --content "Question_Answer" --max_len 512 --fold 3 --seed 1010 --split "GroupKfold" --n_splits 5 --batch_size 8 --valid_batch_size 32 --accumulation_steps 1 --lr 1e-4 --loss "bce" --augment --num_epoch 8 --num_workers 4
# python training-k-fold.py --model_name "bert-base-uncased" --content "Question_Answer" --max_len 512 --fold 4 --seed 1010 --split "GroupKfold" --n_splits 5 --batch_size 8 --valid_batch_size 32 --accumulation_steps 1 --lr 1e-4 --loss "bce" --augment --num_epoch 8 --num_workers 4