Skip to content

Commit

Permalink
upgrade to latest torch shampoo commit
Browse files Browse the repository at this point in the history
  • Loading branch information
samsja committed Jan 6, 2025
1 parent a64ca4b commit ea23984
Show file tree
Hide file tree
Showing 2 changed files with 3 additions and 5 deletions.
6 changes: 2 additions & 4 deletions src/zeroband/optimizers/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@
import torch
from zeroband.optimizers.muon import Muon, AdamConfig, MuonConfig
from distributed_shampoo import (
EighEigenvalueCorrectionConfig,
DefaultSOAPConfig,
DistributedShampoo,
FullyShardShampooConfig,
ShampooPT2CompileConfig,
Expand Down Expand Up @@ -52,9 +52,7 @@ def get_optimizer(params: list[torch.nn.Parameter], config: OptimizersConfig) ->
max_preconditioner_dim=config.max_preconditioner_dim,
precondition_frequency=config.precondition_frequency,
use_decoupled_weight_decay=True,
# This can also be set to `QREigenvalueCorrectionConfig` which is less expensive
# and might therefore allow for a smaller `precondition_frequency`.
preconditioner_computation_config=EighEigenvalueCorrectionConfig(),
preconditioner_config=DefaultSOAPConfig,
distributed_config=FullyShardShampooConfig(),
shampoo_pt2_compile_config=ShampooPT2CompileConfig(enable_shampoo_pt2_dynamic_shape=False),
)
Expand Down
2 changes: 1 addition & 1 deletion uv.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

0 comments on commit ea23984

Please sign in to comment.