Fix compatibility with Python <= 3.8 (#172)
Summary: Pull Request resolved: #172

Differential Revision: D55627601
diego-urgell authored and facebook-github-bot committed Apr 2, 2024
1 parent 4575bf8 commit 433beaa
Showing 1 changed file with 2 additions and 2 deletions.
4 changes: 2 additions & 2 deletions torchsnapshot/io_preparers/sharded_tensor.py
@@ -17,7 +17,7 @@
 from dataclasses import dataclass
 from functools import reduce
 from operator import mul
-from typing import Callable, List, Optional, Tuple
+from typing import Callable, List, Optional, Tuple, Union

 import torch
 from torch.distributed._shard.sharded_tensor import (
@@ -199,7 +199,7 @@ def prepare_read(
         cls,
         entry: ShardedTensorEntry,
         obj_out: Optional[ShardedTensor] = None,
-    ) -> Tuple[List[ReadReq], Future[ShardedTensor | torch.Tensor]]:
+    ) -> Tuple[List[ReadReq], Future[Union[ShardedTensor, torch.Tensor]]]:
         # Note: in case obj_out is None, a Future[Tensor] will be returned
         if obj_out is None:
             obj_out = ShardedTensorIOPreparer.empty_tensor_from_sharded_tensor_entry(
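For context, the change swaps PEP 604 union syntax for typing.Union: the "X | Y" form in annotations only evaluates successfully on Python 3.10 and newer, so on Python 3.8 the old annotation raises "TypeError: unsupported operand type(s) for |" whenever the annotation expression is evaluated. A minimal sketch of the difference (the describe function below is hypothetical, for illustration only):

    from typing import Union

    # PEP 604 spelling -- fails at definition time on Python <= 3.9,
    # because "int | str" is evaluated as an expression:
    #     def describe(value: int | str) -> str: ...
    # typing.Union spelling -- works on Python 3.8 as well:
    def describe(value: Union[int, str]) -> str:
        # Hypothetical helper, for illustration only.
        return f"{type(value).__name__}: {value}"

    print(describe(3))      # prints "int: 3"
    print(describe("foo"))  # prints "str: foo"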
