Fix mypy errors (#54)
Summary:
- Added one minor type hint.
- Ignored errors about instantiating a class with an `abstractmethod`, since this seems to be a shortcoming on mypy's side: it doesn't recognize that the `dataclass` decorator will take care of `__init__` (see the sketch after this list).
- Removed the `super().__init__()` call from the `__post_init__` method of `AdaGradGraftingConfig`. I believe it is unnecessary, and mypy complains about the call because the inherited `__init__` is an `abstractmethod` (see the sketch after the `shampoo_types.py` hunk below).
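
To illustrate the second bullet, a minimal sketch (hypothetical names, not this repository's actual class hierarchy): a base dataclass marks `__init__` as an `abstractmethod`, the subclass relies on the `dataclass` decorator to generate a concrete `__init__`, and mypy still flags instantiation with error code `[abstract]`:

    from abc import abstractmethod
    from dataclasses import dataclass

    @dataclass(init=False)
    class BaseConfig:
        # Marking __init__ abstract is meant to forbid direct
        # instantiation of the base class.
        @abstractmethod
        def __init__(self) -> None: ...

    @dataclass
    class ChildConfig(BaseConfig):
        epsilon: float = 1e-10

    # At runtime the dataclass decorator generates ChildConfig.__init__,
    # so this line works; mypy, however, does not model that and reports
    # 'Cannot instantiate abstract class "ChildConfig" with abstract
    # attribute "__init__"'.
    config = ChildConfig(epsilon=1e-8)  # type: ignore[abstract]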

Pull Request resolved: #54

Reviewed By: anana10c

Differential Revision: D66241155

Pulled By: tsunghsienlee

fbshipit-source-id: 5cd918d7c02693d1fd941b99e1b5ae9e06532cad
runame authored and facebook-github-bot committed Nov 20, 2024
1 parent b507d64 commit bfc71b1
Showing 7 changed files with 6 additions and 7 deletions.
2 changes: 1 addition & 1 deletion distributed_shampoo/examples/trainer_utils.py
@@ -529,7 +529,7 @@ def instantiate_grafting_config(
             epsilon=grafting_epsilon,
         )
     elif grafting_type == GraftingType.SGD:
-        return SGDGraftingConfig(
+        return SGDGraftingConfig(  # type: ignore[abstract]
             beta2=grafting_beta2,
             epsilon=grafting_epsilon,
         )
2 changes: 1 addition & 1 deletion distributed_shampoo/gpu_tests/shampoo_grafting_test.py
@@ -274,7 +274,7 @@ def test_sgd_grafting_on_quadratic(self) -> None:
                 start_preconditioning_step=math.inf,
                 use_nesterov=use_nesterov,
                 use_decoupled_weight_decay=False,
-                grafting_config=SGDGraftingConfig(),
+                grafting_config=SGDGraftingConfig(),  # type: ignore[abstract]
             ),
             device=device,
         )
1 change: 0 additions & 1 deletion distributed_shampoo/shampoo_types.py
@@ -246,7 +246,6 @@ class AdaGradGraftingConfig(GraftingConfig):
     epsilon: float = 1e-10
 
     def __post_init__(self) -> None:
-        super().__init__()
         if not self.epsilon > 0.0:
             raise ValueError(f"Invalid epsilon value: {self.epsilon}. Must be > 0.0.")
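
A quick standalone check of the `__post_init__` change above (a sketch with a hypothetical class name mirroring the hunk): the dataclass-generated `__init__` assigns all fields and then calls `__post_init__` on its own, so the validation still runs without any `super().__init__()` call:

    from dataclasses import dataclass

    @dataclass
    class AdaGradLikeConfig:
        epsilon: float = 1e-10

        def __post_init__(self) -> None:
            # Called automatically by the generated __init__;
            # no super().__init__() is needed.
            if not self.epsilon > 0.0:
                raise ValueError(f"Invalid epsilon value: {self.epsilon}. Must be > 0.0.")

    AdaGradLikeConfig(epsilon=1e-8)  # passes validation
    try:
        AdaGradLikeConfig(epsilon=0.0)
    except ValueError as err:
        print(err)  # Invalid epsilon value: 0.0. Must be > 0.0.
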
2 changes: 1 addition & 1 deletion distributed_shampoo/tests/distributed_shampoo_test.py
@@ -60,7 +60,7 @@ def test_invalid_grafting_config(self) -> None:
         ):
             DistributedShampoo(
                 self._model.parameters(),
-                grafting_config=SGDGraftingConfig(),
+                grafting_config=SGDGraftingConfig(),  # type: ignore[abstract]
             )
 
     def test_invalid_with_incorrect_hyperparameter_setting(self) -> None:
2 changes: 1 addition & 1 deletion (file name not captured in this page)
@@ -216,7 +216,7 @@ def _model_factory(
 
     @skip_if_lt_x_gpu(2)
     def test_fully_shard_shampoo_against_default_shampoo(self) -> None:
-        fully_shard_config = FullyShardShampooConfig()
+        fully_shard_config = FullyShardShampooConfig()  # type: ignore[abstract]
         ShampooFullyShardDistributorTest._test_two_configs(
             ShampooFullyShardDistributorTest._shampoo_optim_factory(
                 None,
2 changes: 1 addition & 1 deletion (file name not captured in this page)
@@ -282,7 +282,7 @@ def test_abstract_methods(self) -> None:
         # Basic setup for instantiating BaseShampooPreconditionerList.
         params = (torch.tensor([1.0, 2.0]),)
         block_list = (params[0],)
-        state = {params[0]: {}}
+        state: dict[Tensor, dict] = {params[0]: {}}
         block_info_list = (
             BlockInfo(
                 param=params[0],
2 changes: 1 addition & 1 deletion tests/matrix_functions_test.py
@@ -302,7 +302,7 @@ def test_matrix_inverse_root_with_invalid_root_inv_config(self) -> None:
             matrix_inverse_root(
                 A=A,
                 root=root,
-                root_inv_config=InvalidRootInvConfig(),
+                root_inv_config=InvalidRootInvConfig(),  # type: ignore[abstract]
                 is_diagonal=False,
             )

