assume empty lora dropout means 0.0 and add tests (#2243)
* assume empty lora dropout means 0.0 and add tests

* remove unnecessary arg

* refactor based on PR feedback

* chore: lint
winglian authored Jan 13, 2025
1 parent dd26cc3 commit bc1c9c2
Showing 2 changed files with 76 additions and 0 deletions.
7 changes: 7 additions & 0 deletions src/axolotl/utils/config/models/input/v0_4_1/__init__.py
@@ -367,6 +367,13 @@ def convert_loraplus_lr_embedding(cls, loraplus_lr_embedding):
        loraplus_lr_embedding = float(loraplus_lr_embedding)
        return loraplus_lr_embedding

    @model_validator(mode="before")
    @classmethod
    def validate_lora_dropout(cls, data):
        if data.get("adapter") is not None and data.get("lora_dropout") is None:
            data["lora_dropout"] = 0.0
        return data


class ReLoRAConfig(BaseModel):
"""ReLoRA configuration subset"""
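
Because the validator runs with mode="before", it operates on the raw config mapping and fills in the default ahead of field validation. A minimal standalone sketch of the resulting behaviour (illustrative only; the variable names are not from the repository):

# The same defaulting rule as the validator above, applied to a plain dict.
# With an adapter configured and lora_dropout missing or None, it becomes 0.0.
raw_cfg = {"adapter": "lora", "lora_r": 8, "lora_alpha": 16}
if raw_cfg.get("adapter") is not None and raw_cfg.get("lora_dropout") is None:
    raw_cfg["lora_dropout"] = 0.0
assert raw_cfg["lora_dropout"] == 0.0

# Without an adapter, lora_dropout is left untouched, so full fine-tune configs are unaffected.
full_ft_cfg = {"base_model": "HuggingFaceTB/SmolLM2-135M"}
if full_ft_cfg.get("adapter") is not None and full_ft_cfg.get("lora_dropout") is None:
    full_ft_cfg["lora_dropout"] = 0.0
assert "lora_dropout" not in full_ft_cfg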
69 changes: 69 additions & 0 deletions tests/test_lora.py
@@ -0,0 +1,69 @@
"""
tests for loading loras
"""
from axolotl.utils.config import normalize_config, validate_config
from axolotl.utils.dict import DictDefault
from axolotl.utils.models import load_model, load_tokenizer

# pylint: disable=duplicate-code
minimal_config = DictDefault(
    {
        "base_model": "HuggingFaceTB/SmolLM2-135M",
        "learning_rate": 0.000001,
        "datasets": [
            {
                "path": "mhenrichsen/alpaca_2k_test",
                "type": "alpaca",
            }
        ],
        "micro_batch_size": 1,
        "gradient_accumulation_steps": 1,
    }
)


class TestLoRALoad:
"""
Test class for loading LoRA weights
"""

    def test_load_lora_weights(self):
        cfg = DictDefault(
            {
                "base_model": "HuggingFaceTB/SmolLM2-135M",
                "adapter": "lora",
                "lora_r": 8,
                "lora_alpha": 16,
                "lora_dropout": 0.0,
                "lora_target_linear": True,
                "micro_batch_size": 1,
                "gradient_accumulation_steps": 1,
                "sequence_len": 1024,
            }
            | minimal_config
        )
        cfg = validate_config(cfg)
        normalize_config(cfg)
        tokenizer = load_tokenizer(cfg)
        load_model(cfg, tokenizer)

    def test_load_lora_weights_empty_dropout(self):
        cfg = DictDefault(
            {
                "base_model": "HuggingFaceTB/SmolLM2-135M",
                "adapter": "lora",
                "lora_r": 8,
                "lora_alpha": 16,
                "lora_dropout": None,
                "lora_target_linear": True,
                "micro_batch_size": 1,
                "gradient_accumulation_steps": 1,
                "sequence_len": 1024,
            }
            | minimal_config
        )
        cfg = validate_config(cfg)
        normalize_config(cfg)
        assert cfg.lora_dropout == 0.0
        tokenizer = load_tokenizer(cfg)
        load_model(cfg, tokenizer)
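
The second test mirrors the first but passes lora_dropout as None and asserts that validate_config / normalize_config have filled in 0.0 before the tokenizer and model are loaded. Assuming a standard pytest setup in the repository, the new tests would typically be run with something like: pytest tests/test_lora.py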
