Commit
Skip HSDP + TP pytests that require torch 2.3 or above (mosaicml#3426)
* test

* skip if torch version less than 2.3

* typo in ema

* add remote

* comments

---------

Co-authored-by: v-chen_data <[email protected]>
KuuCi and v-chen_data authored Jun 25, 2024
1 parent 9a6780b commit b27a6aa
Showing 1 changed file with 4 additions and 2 deletions.
6 changes: 4 additions & 2 deletions tests/trainer/test_fsdp_checkpoint.py
@@ -316,6 +316,8 @@ def test_fsdp_full_state_dict_load(
 ):
     if use_hsdp:
         pytest.xfail('Known Pytorch issue with HSDP, waiting for pytorch patch')
+    if (use_tp or use_hsdp) and version.parse(torch.__version__) < version.parse('2.3.0'):
+        pytest.skip('HSDP and TP require torch 2.3.0 or later')
     if autoresume:
         run_name = 'my-cool-autoresume-run'
     else:
@@ -833,8 +835,8 @@ def test_fsdp_partitioned_state_dict_load(
 ):
     if weights_only and autoresume:
         pytest.skip('Weights only with autoresume is not supported')
-    if use_tp and version.parse(torch.__version__) < version.parse('2.3.0'):
-        pytest.skip('TP requires torch 2.3.0 or later')
+    if (use_tp or use_hsdp) and version.parse(torch.__version__) < version.parse('2.3.0'):
+        pytest.skip('HSDP and TP require torch 2.3.0 or later')
 
     load_ignore_keys = [] if load_ignore_keys is None else load_ignore_keys
 
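For reference, the same version gate added in both tests could also be expressed as a reusable marker instead of an inline check. The following is a minimal sketch, not part of this commit; the helper name requires_torch_2_3 and the example test are hypothetical, and it assumes pytest, torch, and packaging are available as they are in the Composer test suite.

    # Hypothetical sketch (not from this commit): a reusable skipif marker that
    # gates a test on the installed torch version, mirroring the inline check
    # added in test_fsdp_checkpoint.py.
    import pytest
    import torch
    from packaging import version

    requires_torch_2_3 = pytest.mark.skipif(
        version.parse(torch.__version__) < version.parse('2.3.0'),
        reason='HSDP and TP require torch 2.3.0 or later',
    )

    @requires_torch_2_3
    def test_example_hsdp_or_tp():
        # Body elided; the marker skips this test when torch < 2.3.0 is installed.
        pass

A marker like this avoids repeating the version comparison in every HSDP/TP-parametrized test, at the cost of an extra import in the test module.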
