remove constraints from capture_pre_autograd_graph (pytorch#120981)
Differential Revision: D54407296

Pull Request resolved: pytorch#120981
Approved by: https://github.com/zhxchen17
avikchaudhuri authored and pytorchmergebot committed Mar 2, 2024
1 parent 83d848e commit f351a71
Showing 2 changed files with 8 additions and 26 deletions.
torch/_export/__init__.py (1 addition, 20 deletions)

@@ -94,7 +94,6 @@ def capture_pre_autograd_graph(
     f: torch.nn.Module,
     args: Tuple[Any],
     kwargs: Optional[Dict[str, Any]] = None,
-    constraints: Optional[List[Constraint]] = None,
     dynamic_shapes: Optional[Union[Dict[str, Any], Tuple[Any]]] = None,
 ) -> torch.nn.Module:
     """
@@ -110,15 +109,6 @@ def capture_pre_autograd_graph(
         kwargs: optional example keyword inputs.
-        constraints: [DEPRECATED: use ``dynamic_shapes`` instead, see below]
-            An optional list of constraints on the dynamic arguments
-            that specify their possible range of shapes. By default, shapes of
-            input torch.Tensors are assumed to be static. If an input torch.Tensor
-            is expected to have dynamic shapes, please use :func:`dynamic_dim`
-            to define :class:`Constraint` objects that specify the dynamics and the possible
-            range of shapes. See :func:`dynamic_dim` docstring for examples on
-            how to use it.
         dynamic_shapes: Should either be:
             1) a dict from argument names of ``f`` to their dynamic shape specifications,
             2) a tuple that specifies dynamic shape specifications for each input in original order.
@@ -147,16 +137,7 @@ def capture_pre_autograd_graph(
     if kwargs is None:
         kwargs = {}

-    if constraints is not None:
-        warnings.warn(
-            "Using `constraints` to specify dynamic shapes for export is DEPRECATED "
-            "and will not be supported in the future. "
-            "Please use `dynamic_shapes` instead (see docs on `torch.export.export`).",
-            DeprecationWarning,
-            stacklevel=2,
-        )
-    else:
-        constraints = _process_dynamic_shapes(f, args, kwargs, dynamic_shapes)
+    constraints = _process_dynamic_shapes(f, args, kwargs, dynamic_shapes)

     # Do not decompose dropout for exported models, because in eval mode the dropout
     # op disappears from the graph, which makes it difficult to switch to train mode.
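For downstream callers, the migration path is from the removed `constraints` argument to `dynamic_shapes`. A minimal before/after sketch follows; the toy module, input shape, and dimension name are illustrative assumptions, not part of this commit, and it presumes a PyTorch build where `torch.export.Dim` and `capture_pre_autograd_graph` are both available:

import torch
from torch._export import capture_pre_autograd_graph

# Hypothetical toy module for illustration only.
class M(torch.nn.Module):
    def forward(self, x):
        return x + 1

example_inputs = (torch.randn(4, 3),)

# Before this commit (no longer accepted after this change):
#   from torch._export import dynamic_dim
#   m = capture_pre_autograd_graph(
#       M(), example_inputs,
#       constraints=[dynamic_dim(example_inputs[0], 0)],
#   )

# After: mark dim 0 of the first positional input as dynamic via Dim,
# passing one spec per positional input in order.
m = capture_pre_autograd_graph(
    M(),
    example_inputs,
    dynamic_shapes=({0: torch.export.Dim("batch")},),
)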
torch/testing/_internal/common_quantization.py (7 additions, 6 deletions)

@@ -14,10 +14,7 @@
 import torch.distributed as dist
 from torch.testing._internal.common_utils import TestCase, TEST_WITH_ROCM

-from torch._export import (
-    capture_pre_autograd_graph,
-    dynamic_dim,
-)
+from torch._export import capture_pre_autograd_graph
 from torch.ao.quantization import (
     QuantType,
     default_dynamic_qat_qconfig,
@@ -1185,10 +1182,14 @@ def _test_quantizer(

         # program capture
         m = copy.deepcopy(m_eager)
+        dynamic_shapes = tuple(
+            {0: torch.export.Dim("dim")} if i == 0 else None
+            for i in range(len(example_inputs))
+        )
         m = capture_pre_autograd_graph(
             m,
             example_inputs,
-            constraints=[dynamic_dim(example_inputs[0], 0)] if export_with_dynamic_shape else [],
+            dynamic_shapes=dynamic_shapes if export_with_dynamic_shape else None,
         )

         if is_qat:
@@ -1224,7 +1225,7 @@ def _test_quantizer(
             m_fx = capture_pre_autograd_graph(
                 m_fx,
                 example_inputs,
-                constraints=[dynamic_dim(example_inputs[0], 0)] if export_with_dynamic_shape else [],
+                dynamic_shapes=dynamic_shapes if export_with_dynamic_shape else None,
             )
             node_occurrence = {}
             for k, v in PT2EQuantizationTestCase._MAP_TO_FX_TRACED_OPS.items():
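The updated test uses the tuple form of `dynamic_shapes`: one spec per positional input, with None meaning that input stays static. Per the updated docstring, a dict keyed by the argument names of `f` is equally valid; a hedged sketch of that equivalent form, reusing the same illustrative toy module assumed in the earlier sketch:

import torch
from torch._export import capture_pre_autograd_graph

# Hypothetical toy module for illustration only.
class M(torch.nn.Module):
    def forward(self, x):
        return x + 1

# Dict form: key the spec by forward()'s argument name ("x") instead of
# by position; dim 0 of x is dynamic, all other dims stay static.
m = capture_pre_autograd_graph(
    M(),
    (torch.randn(4, 3),),
    dynamic_shapes={"x": {0: torch.export.Dim("batch")}},
)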
