1 parent 7018576 commit 1ac8998
docs/config.qmd
@@ -227,6 +227,12 @@ lora_modules_to_save:
 
 lora_fan_in_fan_out: false
 
+# LoRA+ hyperparameters
+# For more details about the following options, see:
+# https://arxiv.org/abs/2402.12354 and `src/axolotl/core/train_builder.py`
+loraplus_lr_ratio: # loraplus learning rate ratio lr_B / lr_A. Recommended value is 2^4.
+loraplus_lr_embedding: # loraplus learning rate for lora embedding layers. Default value is 1e-6.
+
 peft:
 # Configuration options for loftq initialization for LoRA
 # https://huggingface.co/docs/peft/developer_guides/quantization#loftq-initialization
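For context, a minimal sketch of how these options might appear in a full axolotl YAML config. The recommended ratio 2^4 = 16 and the 1e-6 embedding default come from the diff above; the surrounding adapter settings (adapter, lora_r, lora_alpha, learning_rate) are illustrative values, not part of this commit:

adapter: lora
lora_r: 32
lora_alpha: 16
learning_rate: 2e-4          # base rate, applied to the LoRA A matrices

# LoRA+ settings: lr_B = learning_rate * loraplus_lr_ratio
loraplus_lr_ratio: 16        # lr_B / lr_A; 2^4 as recommended in https://arxiv.org/abs/2402.12354
loraplus_lr_embedding: 1e-6  # learning rate for LoRA embedding layers (the default)

Per the LoRA+ paper, the zero-initialized B matrices benefit from a higher learning rate than the A matrices, which is what the ratio controls.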