mirror of
https://github.com/CodedotAl/gpt-code-clippy.git
synced 2024-10-26 09:17:45 +03:00
2736967001
* saves optimizer state together with model * enables to resume from saved checkpoint * removes old checkpoint up to `save_total_limit` Co-authored-by: arampacha <aruthart@gmail.com>
22 lines
708 B
Bash
Executable File
22 lines
708 B
Bash
Executable File
#!/bin/bash
# Fine-tunes EleutherAI/gpt-neo-2.7B as a causal LM on the CodeSearchNet
# (python config) dataset using the project's Flax training script.
#
# Requires: ./run_clm_flax.py in the current directory; writes checkpoints
# to $HOME/gpt-neo-2.7B-code-clippy (overwriting any existing output).
set -euo pipefail

./run_clm_flax.py \
  --output_dir "$HOME/gpt-neo-2.7B-code-clippy" \
  --model_name_or_path="EleutherAI/gpt-neo-2.7B" \
  --dataset_name="code_search_net" \
  --dataset_config_name="python" \
  --text_column_name="func_code_string" \
  --do_train --do_eval \
  --block_size="128" \
  --per_device_train_batch_size="1" \
  --per_device_eval_batch_size="1" \
  --preprocessing_num_workers="8" \
  --dtype="bfloat16" \
  --learning_rate="5e-3" \
  --warmup_steps="1000" \
  --adam_beta1="0.9" \
  --adam_beta2="0.98" \
  --weight_decay="0.01" \
  --overwrite_output_dir \
  --num_train_epochs="1" \
  --push_to_hub="False" \
  --resume_from_checkpoint="False"