Add conditional import in accelerator_lowering.py to avoid ImportError (#1633)

Summary:
Pull Request resolved: #1633

This is an alternative approach to #1631: the ImportError for accelerator is handled in accelerator_lowering.py instead of new_task.py.
The accelerator function is used both as a decorator on the AcceleratorTransformerLayers class and as a direct call in lower_modules_to_accelerator. In addition to wrapping the accelerator import in a try block, lower_modules_to_accelerator checks accelerator_lowering_supported so that unsupported environments fail with a clear RuntimeError rather than an ImportError.
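
As a self-contained sketch of the pattern (the import path mirrors the diff below, but lower_to_backend is an illustrative stand-in for lower_modules_to_accelerator, not the actual PyText API):

accelerator_lowering_supported = True
try:
    from accelerators.pytorch.lib.glow_decorator import accelerator
except ImportError:
    accelerator_lowering_supported = False

    # No-op fallback: classes decorated with @accelerator(...) still
    # import cleanly; the decorator simply returns the class unchanged.
    def accelerator(specs, inputs_function=None):
        def wrap(module):
            return module
        return wrap


def lower_to_backend(model):
    # Call sites that need the real accelerator now fail loudly at
    # lowering time instead of raising ImportError at module import.
    if not accelerator_lowering_supported:
        raise RuntimeError("Accelerator Lowering not supported!")
    return model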

Reviewed By: mikekgfb

Differential Revision: D26885302

fbshipit-source-id: 0630494f5d44fff9869e8d88e3bb23224fe4826f
Jiankun Liu authored and facebook-github-bot committed Mar 9, 2021
1 parent 8331191 commit 653a05e
Showing 3 changed files with 50 additions and 2 deletions.
17 changes: 16 additions & 1 deletion pytext/task/accelerator_lowering.py
@@ -4,7 +4,17 @@
 from typing import Iterable, List

 import torch
-from accelerators.pytorch.lib.glow_decorator import accelerator
+
+accelerator_lowering_supported = True
+try:
+    from accelerators.pytorch.lib.glow_decorator import accelerator
+except ImportError:
+    accelerator_lowering_supported = False
+
+    from .nop_decorator import accelerator
+
+    print("Accelerator Lowering not supported!")
+
 from pytext.config import ExportConfig
 from pytext.models.roberta import RoBERTaEncoder
 from pytext.utils.usage import log_accelerator_feature_usage
@@ -61,6 +71,7 @@ def accelerator_transformerLayers_inputs(
     return input_examples


+# accelerator imported from .nop_decorator to avoid ImportError when glow_decorator is not available
 @accelerator(
     [
         (
@@ -133,6 +144,10 @@ def swap_modules_for_accelerator(model):


 def lower_modules_to_accelerator(model: nn.Module, trace, export_options: ExportConfig):
+    # Raise error if accelerator could not be imported
+    if not accelerator_lowering_supported:
+        raise RuntimeError("Accelerator Lowering not supported!")
+
     import torch_glow

     log_accelerator_feature_usage("build.NNPI")
2 changes: 1 addition & 1 deletion pytext/task/new_task.py
@@ -21,7 +21,7 @@
     log_feature_usage,
     log_accelerator_feature_usage,
 )
-from torch import jit, sort
+from torch import sort

 from .accelerator_lowering import (
     lower_modules_to_accelerator,
33 changes: 33 additions & 0 deletions pytext/task/nop_decorator.py
@@ -0,0 +1,33 @@
+#!/usr/bin/env python3
+# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
+
+import functools
+
+# module decorator for specifying acceleration
+# The purpose is to avoid ImportError when glow_decorator is not available
+class accelerator:
+    def __init__(self, specs, inputs_function=None):
+        pass
+
+    def __call__(self, module):
+        @functools.wraps(module)
+        def wrapper(*args, **kwargs):
+            return module(*args, **kwargs)
+
+        return wrapper
+
+    @classmethod
+    def _dfs_modules(cls, node, backend, results, submod_path=""):
+        pass
+
+    @classmethod
+    def get_modules(cls, model, backend):
+        pass
+
+    @classmethod
+    def get_module_from_path(cls, model, prefixes):
+        pass
+
+    @classmethod
+    def get_embedding_module_from_path(cls, model, submod_path):
+        pass
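
Since the no-op accelerator accepts the same constructor signature as the Glow decorator, decorated classes keep working when glow_decorator is absent. A quick sketch of the expected behavior (the decorated class and spec list below are illustrative placeholders, not code from PyText):

from pytext.task.nop_decorator import accelerator

@accelerator([("NNPI", {})])  # specs are accepted but ignored by the no-op
class TinyLayer:
    def __init__(self, scale):
        self.scale = scale

    def forward(self, x):
        return x * self.scale

layer = TinyLayer(2)          # the wrapper forwards to the real constructor
assert layer.forward(3) == 6  # behavior is unchanged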
