gpt-code-clippy/run_clm_gpt_neo.sh

#!/bin/bash
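# Fine-tunes EleutherAI/gpt-neo-125M on the Python subset of CodeSearchNet
# with the repo's Flax causal-LM trainer (run_clm_flax.py).
# Note: --adafactor switches the optimizer to Adafactor; in that branch the
# --adam_beta1/--adam_beta2 and --weight_decay values below are typically
# ignored, as they only apply to the AdamW path (assumption based on the
# upstream Hugging Face Flax example script).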
./run_clm_flax.py \
--output_dir="$HOME/gpt-neo-125M-code-clippy" \
--model_name_or_path="EleutherAI/gpt-neo-125M" \
--dataset_name="code_search_net" \
--dataset_config_name="python" \
--text_column_name="func_code_string" \
--do_train --do_eval \
--block_size="1024" \
--per_device_train_batch_size="16" \
--per_device_eval_batch_size="16" \
--preprocessing_num_workers="8" \
--learning_rate="3e-4" \
--adafactor \
--warmup_steps="50" \
--adam_beta1="0.9" \
--adam_beta2="0.98" \
--weight_decay="0.01" \
--overwrite_output_dir \
--num_train_epochs="1" \
--logging_steps="100" \
--eval_steps="200" \
--push_to_hub="False" \
--report_to="all" \
--dtype="bfloat16" \
--skip_memory_metrics="True" \
--save_steps="200" \
--save_total_limit="2" \
--gradient_accumulation_steps="1"
# Optional flags; re-add a trailing backslash to the line above before
# uncommenting any of these:
# --resume_from_checkpoint $HOME/gpt-neo-125M-code-clippy/ckpt_201 \
# --max_train_samples="10000" \
# --max_eval_samples="1000"
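
# Effective global batch size = per_device_train_batch_size x local device
# count x gradient_accumulation_steps; e.g. 16 x 8 x 1 = 128 sequences of
# 1024 tokens per optimizer step (the 8-device count is an assumption for a
# typical single-host TPU v3-8 setup).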