From f4d795b983126b15453474744a7d33f8987a66d3 Mon Sep 17 00:00:00 2001
From: Zonglin Peng
Date: Thu, 16 Jan 2025 22:32:26 -0800
Subject: [PATCH] migrate facto utils to OSS (#7686)

Summary:
Pull Request resolved: https://github.com/pytorch/executorch/pull/7686

Migrates the FACTO test-case generation utility to OSS under
examples/cadence/operators and adds a python_library target for it.

Reviewed By: skrtskrtfb

Differential Revision: D68195666
---
 examples/cadence/operators/facto_util.py | 91 ++++++++++++++++++++++++
 examples/cadence/operators/targets.bzl   | 14 ++++
 2 files changed, 105 insertions(+)
 create mode 100644 examples/cadence/operators/facto_util.py

diff --git a/examples/cadence/operators/facto_util.py b/examples/cadence/operators/facto_util.py
new file mode 100644
index 0000000000..e9b16f8bf6
--- /dev/null
+++ b/examples/cadence/operators/facto_util.py
@@ -0,0 +1,91 @@
+# (c) Meta Platforms, Inc. and affiliates. Confidential and proprietary.
+
+# pyre-strict
+
+import copy
+from typing import List, OrderedDict, Tuple
+
+import torch
+from inputgen.argtuple.gen import ArgumentTupleGenerator
+from inputgen.specs.model import ConstraintProducer as cp
+from inputgen.utils.random_manager import random_manager
+from inputgen.variable.type import ScalarDtype
+from specdb.db import SpecDictDB
+
+# Fixed seed so every run generates identical cases (reproducible for bisects)
+random_manager.seed(1729)
+
+
+def apply_tensor_constraints(op_name: str, tensor_constraints: list[object]) -> None:
+    match op_name:
+        case (
+            "sigmoid.default"
+            | "_softmax.default"
+            | "rsqrt.default"
+            | "exp.default"
+            | "mul.Tensor"
+            | "div.Tensor"
+        ):
+            tensor_constraints.append(
+                cp.Dtype.In(lambda deps: [torch.float]),
+            )
+        case (
+            "add.Tensor"
+            | "sub.Tensor"
+            | "add.Scalar"
+            | "sub.Scalar"
+            | "mul.Scalar"
+            | "div.Scalar"
+        ):
+            tensor_constraints.append(
+                cp.Dtype.In(lambda deps: [torch.float, torch.int]),
+            )
+        case _:
+            tensor_constraints.append(
+                cp.Dtype.In(lambda deps: [torch.float, torch.int]),
+            )
+    tensor_constraints.extend(
+        [
+            cp.Value.Ge(lambda deps, dtype, struct: -(2**8)),
+            cp.Value.Le(lambda deps, dtype, struct: 2**8),
+            cp.Rank.Ge(lambda deps: 1),
+            cp.Rank.Le(lambda deps: 2**2),
+            cp.Size.Ge(lambda deps, r, d: 1),
+            cp.Size.Le(lambda deps, r, d: 2**2),
+        ]
+    )
+
+
+def facto_testcase_gen(op_name: str) -> List[Tuple[List[str], OrderedDict[str, str]]]:
+    # Look up the operator's input spec in FACTO's spec database.
+    spec = SpecDictDB[op_name]
+
+    for index, in_spec in enumerate(copy.deepcopy(spec.inspec)):
+        if in_spec.type.is_scalar():
+            if in_spec.name != "alpha":
+                spec.inspec[index].constraints.extend(
+                    [
+                        cp.Dtype.In(lambda deps: [ScalarDtype.float, ScalarDtype.int]),
+                        cp.Value.Ge(lambda deps, dtype: -(2**8)),
+                        cp.Value.Le(lambda deps, dtype: 2**2),
+                        cp.Size.Ge(lambda deps, r, d: 1),
+                        cp.Size.Le(lambda deps, r, d: 2**2),
+                    ]
+                )
+            else:
+                spec.inspec[index].constraints.extend(
+                    [
+                        cp.Value.Gt(lambda deps, dtype: 0),
+                        cp.Value.Le(lambda deps, dtype: 2),
+                    ]
+                )
+        elif in_spec.type.is_tensor():
+            tensor_constraints: list[object] = []
+            # Apply the common tensor constraints for this operator.
+            apply_tensor_constraints(op_name, tensor_constraints)
+            spec.inspec[index].constraints.extend(tensor_constraints)
+
+    return [
+        (posargs, inkwargs)
+        for posargs, inkwargs, _ in ArgumentTupleGenerator(spec).gen()
+    ]
diff --git a/examples/cadence/operators/targets.bzl b/examples/cadence/operators/targets.bzl
index e1fbeb9fdf..a646f0076b 100644
--- a/examples/cadence/operators/targets.bzl
+++ b/examples/cadence/operators/targets.bzl
@@ -5,6 +5,7 @@
 # LICENSE file in the root directory of this source tree.
 
load("@fbcode_macros//build_defs:python_unittest.bzl", "python_unittest") +load("@fbcode_macros//build_defs:python_library.bzl", "python_library") TESTS_LIST = [ "add_op", @@ -16,6 +17,19 @@ def define_common_targets(): for op in TESTS_LIST: _define_test_target(op) + python_library( + name = "facto_util", + srcs = [ + "facto_util.py", + ], + typing = True, + deps = [ + "fbcode//caffe2:torch", + "fbcode//pytorch/facto:inputgen", + "fbcode//pytorch/facto:specdb", + ], + ) + def _define_test_target(test_name): file_name = "test_{}".format(test_name)
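
Usage note (editorial, not part of the patch): a minimal sketch of how a
test could consume facto_testcase_gen. The import path, the choice of
"add.Tensor", and the eager-mode reference call through torch.ops are
assumptions for illustration, not the Cadence test flow in this repo.

# Hypothetical usage sketch. Assumes facto_util is importable via the
# :facto_util library target and that "add.Tensor" exists in SpecDictDB.
import torch

import facto_util

for posargs, inkwargs in facto_util.facto_testcase_gen("add.Tensor"):
    # Each generated case is a (positional-args, keyword-args) pair that
    # can be splatted directly into the operator under test.
    eager_out = torch.ops.aten.add.Tensor(*posargs, **inkwargs)
    assert eager_out is not None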