
Commit 617b6ed

davidberard98 authored and facebook-github-bot committed
Update torchdynamo backend for torchbench
Summary: Previously the torchdynamo backend was not added to the TARGETS build file, so trying to run internally would fail when `--dynamo` was added to the torchbench command. Also removed `aot_autograd_speedup_strategy` to reflect changes in pytorch/torchdynamo#201 (i.e., use `--torchdynamo aot_nvfuser` instead).

Differential Revision: D38445665

fbshipit-source-id: 4385902594ff79a5fe2cf2f3366961fcd552befc
1 parent 97393f4 commit 617b6ed
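For illustration, the renamed backend goes through torchdynamo's standard optimize API, which is what the patched torchbench code wires up via `model.add_context` in the diff below. A minimal sketch, assuming the standalone torchdynamo package and a CUDA build; the toy model and input here are hypothetical, not from this commit:

```python
import torch
import torchdynamo

model = torch.nn.Linear(8, 8).cuda()   # hypothetical toy model
x = torch.randn(4, 8, device="cuda")   # hypothetical input

# "aot_nvfuser" is the backend name that replaces the removed
# aot_autograd_speedup_strategy (see pytorch/torchdynamo#201)
with torchdynamo.optimize("aot_nvfuser"):
    out = model(x)
```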

File tree

2 files changed (+2, -10 lines)


configs/torchdynamo/nvfuser-aot-speedup.yaml

Lines changed: 1 addition & 1 deletion
@@ -7,4 +7,4 @@ args:
   # empty argument means the default pytorch eager mode
   - ""
   - "--torchdynamo nvfuser"
-  - "--torchdynamo aot_autograd_speedup_strategy"
+  - "--torchdynamo aot_nvfuser"
torchbenchmark/util/backends/torchdynamo.py

Lines changed: 1 addition & 9 deletions
@@ -5,16 +5,10 @@
 import functools
 from typing import List
 import torchdynamo
-from torchdynamo.optimizations.training import aot_autograd_speedup_strategy
-
-EXTRA_BACKENDS = {
-    "aot_autograd_speedup_strategy": aot_autograd_speedup_strategy,
-}
 
 def parse_torchdynamo_args(model: 'torchbenchmark.util.model.BenchmarkModel', dyamo_args: List[str]) -> argparse.Namespace:
     parser = argparse.ArgumentParser()
     available_backends = torchdynamo.list_backends()
-    available_backends.extend(EXTRA_BACKENDS.keys())
     parser.add_argument(
         "--torchdynamo", choices=available_backends, help="Specify torchdynamo backends"
     )
@@ -23,9 +17,7 @@ def parse_torchdynamo_args(model: 'torchbenchmark.util.model.BenchmarkModel', dy
 
 
 def apply_torchdynamo_args(model: 'torchbenchmark.util.model.BenchmarkModel', args: argparse.Namespace, precision: str):
-    if args.torchdynamo in EXTRA_BACKENDS:
-        model.add_context(functools.partial(torchdynamo.optimize, EXTRA_BACKENDS[args.torchdynamo]))
-    elif args.torchdynamo == "fx2trt" and precision == "fp16":
+    if args.torchdynamo == "fx2trt" and precision == "fp16":
         model.add_context(functools.partial(torchdynamo.optimize, torchdynamo.optimizations.backends.fx2trt_compiler_fp16))
     else:
         model.add_context(functools.partial(torchdynamo.optimize, args.torchdynamo))
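After this change, the two functions reduce to the sketch below, reconstructed from the diff. The `import argparse` line and the lines between the two hunks that actually parse `dyamo_args` are not shown in the diff and are assumed here:

```python
import argparse
import functools
from typing import List

import torchdynamo


def parse_torchdynamo_args(model: 'torchbenchmark.util.model.BenchmarkModel', dyamo_args: List[str]) -> argparse.Namespace:
    parser = argparse.ArgumentParser()
    # valid choices now come solely from torchdynamo itself;
    # the EXTRA_BACKENDS indirection is gone
    available_backends = torchdynamo.list_backends()
    parser.add_argument(
        "--torchdynamo", choices=available_backends, help="Specify torchdynamo backends"
    )
    # assumed: the diff omits the lines that parse dyamo_args
    return parser.parse_args(dyamo_args)


def apply_torchdynamo_args(model: 'torchbenchmark.util.model.BenchmarkModel', args: argparse.Namespace, precision: str):
    # fx2trt keeps its special-cased fp16 compiler; every other backend,
    # including aot_nvfuser, is passed to torchdynamo.optimize by name
    if args.torchdynamo == "fx2trt" and precision == "fp16":
        model.add_context(functools.partial(torchdynamo.optimize, torchdynamo.optimizations.backends.fx2trt_compiler_fp16))
    else:
        model.add_context(functools.partial(torchdynamo.optimize, args.torchdynamo))
```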
