Skip to content

Commit

Permalink
migrate facto utils to OSS (pytorch#7686)
Browse files Browse the repository at this point in the history
Summary:
Pull Request resolved: pytorch#7686

as titled

Reviewed By: skrtskrtfb

Differential Revision: D68195666
  • Loading branch information
zonglinpeng authored and facebook-github-bot committed Jan 17, 2025
1 parent 1a6b7a6 commit b6bdf7b
Show file tree
Hide file tree
Showing 2 changed files with 105 additions and 0 deletions.
91 changes: 91 additions & 0 deletions examples/cadence/operators/facto_util.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,91 @@
# (c) Meta Platforms, Inc. and affiliates. Confidential and proprietary.

# pyre-strict

import copy
from typing import List, OrderedDict, Tuple

import torch
from inputgen.argtuple.gen import ArgumentTupleGenerator
from inputgen.specs.model import ConstraintProducer as cp
from inputgen.utils.random_manager import random_manager
from inputgen.variable.type import ScalarDtype
from specdb.db import SpecDictDB

# seed to generate identical cases every run to reproduce from bisect
random_manager.seed(1729)


def apply_tensor_contraints(op_name: str, tensor_constraints: list[object]) -> None:
match op_name:
case (
"sigmoid.default"
| "_softmax.default"
| "rsqrt.default"
| "exp.default"
| "mul.Tensor"
| "div.Tensor"
):
tensor_constraints.append(
cp.Dtype.In(lambda deps: [torch.float]),
)
case (
"add.Tensor"
| "sub.Tensor"
| "add.Scalar"
| "sub.Scalar"
| "mul.Scalar"
| "div.Scalar"
):
tensor_constraints.append(
cp.Dtype.In(lambda deps: [torch.float, torch.int]),
)
case _:
tensor_constraints.append(
cp.Dtype.In(lambda deps: [torch.float, torch.int]),
)
tensor_constraints.extend(
[
cp.Value.Ge(lambda deps, dtype, struct: -(2**8)),
cp.Value.Le(lambda deps, dtype, struct: 2**8),
cp.Rank.Ge(lambda deps: 1),
cp.Rank.Le(lambda deps: 2**2),
cp.Size.Ge(lambda deps, r, d: 1),
cp.Size.Le(lambda deps, r, d: 2**2),
]
)


def facto_testcase_gen(op_name: str) -> List[Tuple[List[str], OrderedDict[str, str]]]:
    """Generate (positional-args, keyword-args) test cases for ``op_name``.

    Looks the operator up in FACTO's SpecDictDB, tightens the constraints on
    each input spec (tensors via apply_tensor_contraints, scalars with small
    value/size bounds, and ``alpha`` restricted to (0, 2]), then runs the
    argument-tuple generator.

    Args:
        op_name: Operator name key into SpecDictDB (e.g. ``"add.Tensor"``).

    Returns:
        A list of (posargs, inkwargs) pairs ready to be fed to the operator.
    """
    spec = SpecDictDB[op_name]

    # Iterate over a deep copy so the constraint lists we are extending on
    # spec.inspec are not the ones being walked.
    for idx, arg_spec in enumerate(copy.deepcopy(spec.inspec)):
        if arg_spec.type.is_tensor():
            per_tensor: list = []
            # Shared tensor constraints (dtype, value/rank/size bounds).
            apply_tensor_contraints(op_name, per_tensor)
            spec.inspec[idx].constraints.extend(per_tensor)
        elif arg_spec.type.is_scalar():
            if arg_spec.name == "alpha":
                # Keep alpha strictly positive and small.
                alpha_constraints = [
                    cp.Value.Gt(lambda deps, dtype: 0),
                    cp.Value.Le(lambda deps, dtype: 2),
                ]
                spec.inspec[idx].constraints.extend(alpha_constraints)
            else:
                scalar_constraints = [
                    cp.Dtype.In(lambda deps: [ScalarDtype.float, ScalarDtype.int]),
                    cp.Value.Ge(lambda deps, dtype: -(2**8)),
                    cp.Value.Le(lambda deps, dtype: 2**2),
                    cp.Size.Ge(lambda deps, r, d: 1),
                    cp.Size.Le(lambda deps, r, d: 2**2),
                ]
                spec.inspec[idx].constraints.extend(scalar_constraints)

    generator = ArgumentTupleGenerator(spec)
    return [(posargs, inkwargs) for posargs, inkwargs, _ in generator.gen()]
14 changes: 14 additions & 0 deletions examples/cadence/operators/targets.bzl
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@
# LICENSE file in the root directory of this source tree.

load("@fbcode_macros//build_defs:python_unittest.bzl", "python_unittest")
load("@fbcode_macros//build_defs:python_library.bzl", "python_library")

TESTS_LIST = [
"add_op",
Expand All @@ -16,6 +17,19 @@ def define_common_targets():
for op in TESTS_LIST:
_define_test_target(op)

# Buck library for facto_util.py (type-checked); depends on torch and the
# FACTO inputgen/specdb packages it imports.
python_library(
name = "facto_util",
srcs = [
"facto_util.py",
],
typing = True,
deps = [
"fbcode//caffe2:torch",
"fbcode//pytorch/facto:inputgen",
"fbcode//pytorch/facto:specdb",
],
)


def _define_test_target(test_name):
file_name = "test_{}".format(test_name)
Expand Down

0 comments on commit b6bdf7b

Please sign in to comment.