
Commit

Minor updates
chiragjn committed Dec 8, 2024
1 parent 25d052d commit 4e853b1
Showing 3 changed files with 5 additions and 4 deletions.
4 changes: 2 additions & 2 deletions finetune.ipynb
@@ -265,7 +265,7 @@
"micro_batch_size = launch_parameters.batch_size\n",
"\n",
"# Learning rate\n",
"learning_rate = 0.0002\n",
"learning_rate = launch_parameters.learning_rate\n",
"\n",
"# How many epochs to run training for\n",
"num_epochs = 10\n",
@@ -394,7 +394,7 @@
"--lora_alpha {lora_alpha} \\\n",
"--lora_dropout 0.05 \\\n",
"--logging_steps 5 \\\n",
"--evaluation_strategy steps \\\n",
"--eval_strategy steps \\\n",
"--eval_steps {eval_steps} \\\n",
"--save_strategy steps \\\n",
"--save_steps {save_steps} \\\n",
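Both notebook edits track upstream Hugging Face transformers: the learning rate now comes from launch_parameters (see the utils.py change below), and --evaluation_strategy is renamed to --eval_strategy, matching the TrainingArguments rename (deprecated around v4.41, with the old name removed in later releases). A minimal sketch of the equivalent arguments under a recent transformers version; output_dir and the step counts are illustrative stand-ins for the notebook's {eval_steps}/{save_steps} placeholders:

```python
from transformers import TrainingArguments

# `eval_strategy` replaces the deprecated `evaluation_strategy`;
# recent transformers releases reject the old keyword entirely.
args = TrainingArguments(
    output_dir="./outputs",   # illustrative path
    learning_rate=2e-4,       # now sourced from launch_parameters
    logging_steps=5,
    eval_strategy="steps",
    eval_steps=100,           # stand-in for {eval_steps}
    save_strategy="steps",
    save_steps=100,           # stand-in for {save_steps}
)
```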
4 changes: 2 additions & 2 deletions reporting.py
@@ -55,13 +55,13 @@ class ReportingConfig(BaseModel):
 --max_steps 3
 --gradient_accumulation_steps 4
 --gradient_checkpointing unsloth
---learning_rate 0.00001
+--learning_rate 0.0001
 --output_dir ./outputs
 --train_on_inputs False
 --logging_steps 1
 --save_strategy steps
 --save_steps 0.5
---evaluation_strategy steps
+--eval_strategy steps
 --eval_steps 0.5
 --adapter qlora
 --lora_target_linear True
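Note the fractional --save_steps 0.5 and --eval_steps 0.5: when eval_steps/save_steps are floats in (0, 1), recent transformers releases interpret them as a ratio of total training steps rather than an absolute step count, so 0.5 means evaluate and checkpoint at the halfway point. A small sketch of the same behaviour expressed directly through TrainingArguments, with illustrative values:

```python
from transformers import TrainingArguments

# Floats below 1 for eval_steps/save_steps are ratios of total
# training steps; with max_steps=3, a ratio of 0.5 is resolved to
# an absolute interval of roughly every 2 optimizer steps.
args = TrainingArguments(
    output_dir="./outputs",
    max_steps=3,
    eval_strategy="steps",
    eval_steps=0.5,
    save_strategy="steps",
    save_steps=0.5,
)
```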
1 change: 1 addition & 0 deletions utils.py
@@ -97,6 +97,7 @@ class Config:
     model_id: str = "TinyLlama/TinyLlama-1.1B-Chat-v1.0"
     max_length: Optional[int] = 2048
     batch_size: int = 1
+    learning_rate: float = 0.0001


 def load_launch_parameters(path):
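This new field is what the notebook change above consumes: finetune.ipynb now reads launch_parameters.learning_rate instead of the hardcoded 0.0002. A minimal sketch of how the field could flow through, assuming Config is a plain dataclass and launch parameters live in a JSON file; the actual loader body in utils.py is not shown in this diff:

```python
import json
from dataclasses import dataclass
from typing import Optional


@dataclass
class Config:
    model_id: str = "TinyLlama/TinyLlama-1.1B-Chat-v1.0"
    max_length: Optional[int] = 2048
    batch_size: int = 1
    learning_rate: float = 0.0001  # field added by this commit


def load_launch_parameters(path):
    # Hypothetical loader: read JSON and fall back to the field
    # defaults for anything the file omits.
    with open(path) as f:
        data = json.load(f)
    return Config(**data)


# finetune.ipynb then picks the value up as:
# learning_rate = launch_parameters.learning_rate
```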
