Skip to content

Commit 3fd66bf

Browse files
dariocazzani and claude
committed
Include optimizer in experiment directory naming
- Add optimizer to TrainingDefaults (default: sgd)
- Include optimizer in path when non-default (e.g., adamw)
- Update get_experiment_dir callers in config.py and train_kd.py

Fixes issue where SGD and AdamW experiments at the same lr would overwrite each other. Now ConvNeXt AdamW experiments get paths like: results/raw/cifar10/convnext_tiny/std_adamw_lr0.004_s42/

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
1 parent afac6ef commit 3fd66bf

File tree

3 files changed

+8
-0
lines changed

3 files changed

+8
-0
lines changed

experiments/config.py

Lines changed: 1 addition & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -113,6 +113,7 @@ def __post_init__(self) -> None:
113113
augment=self.augment,
114114
ablation=self.ablation.value,
115115
lr=self.lr,
116+
optimizer=self.optimizer,
116117
)
117118
self.output_dir = str(experiment_dir)
118119

experiments/paths.py

Lines changed: 6 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -23,6 +23,7 @@ class TrainingDefaults:
2323
lr: float = 0.1
2424
augment: str = "basic"
2525
ablation: str = "none"
26+
optimizer: str = "sgd"
2627

2728

2829
@dataclass(frozen=True)
@@ -47,6 +48,7 @@ def get_experiment_dir(
4748
augment: str = TRAINING_DEFAULTS.augment,
4849
ablation: str = TRAINING_DEFAULTS.ablation,
4950
lr: float = TRAINING_DEFAULTS.lr,
51+
optimizer: str = TRAINING_DEFAULTS.optimizer,
5052
kd_temperature: float | None = None,
5153
kd_alpha: float | None = None,
5254
) -> Path:
@@ -78,12 +80,16 @@ def get_experiment_dir(
7880
parts.append(augment)
7981
if ablation != TRAINING_DEFAULTS.ablation:
8082
parts.append(ablation)
83+
if optimizer != TRAINING_DEFAULTS.optimizer:
84+
parts.append(optimizer)
8185
if lr != TRAINING_DEFAULTS.lr:
8286
parts.append(f"lr{lr:g}")
8387
else:
8488
parts.append("bit_kd")
8589
if ablation != TRAINING_DEFAULTS.ablation:
8690
parts.append(ablation)
91+
if optimizer != TRAINING_DEFAULTS.optimizer:
92+
parts.append(optimizer)
8793
if lr != TRAINING_DEFAULTS.lr:
8894
parts.append(f"lr{lr:g}")
8995
if kd_temperature is not None and kd_temperature != KD_DEFAULTS.temperature:

experiments/train_kd.py

Lines changed: 1 addition & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -254,6 +254,7 @@ def main() -> None:
254254
args.seed,
255255
ablation=args.ablation,
256256
lr=args.lr,
257+
optimizer=args.optimizer,
257258
kd_temperature=args.temperature,
258259
kd_alpha=args.alpha,
259260
)

0 commit comments

Comments (0)