Commit d16c10b

Author: samdow (committed)
fix ci
1 parent 5170948 commit d16c10b

File tree

1 file changed: +6 additions, -1 deletion

test/test_ops.py

Lines changed: 6 additions & 1 deletion
@@ -623,6 +623,12 @@ def vjp_of_vjp(*args_and_cotangents):
 
     @ops(functorch_lagging_op_db + additional_op_db, allowed_dtypes=(torch.float,))
     @toleranceOverride({torch.float32: tol(atol=1e-04, rtol=1e-04)})
+    @opsToleranceOverride('TestOperators', 'test_vmapvjp', (
+        tol1('linalg.svd',
+             {torch.float32: tol(atol=1.5e-04, rtol=1e-04)}, device_type="cuda"),
+        tol1('svd',
+             {torch.float32: tol(atol=1.5e-04, rtol=1e-04)}, device_type="cuda"),
+    ))
     @skipOps('TestOperators', 'test_vmapvjp', vmapvjp_fail)
     def test_vmapvjp(self, device, dtype, op):
         if not op.supports_autograd:
@@ -1031,7 +1037,6 @@ def test():
         # fallback path doesn't work
         # All of the following are bugs and need to be fixed
         xfail('__getitem__', ''),
-        xfail('clamp', ''),
         xfail('index_put', ''),
         xfail('matrix_exp'),
         xfail('view_as_complex'),
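
For context on the tolerance change above: an atol/rtol pair like the one passed to tol() parameterizes an element-wise closeness check on the test outputs, so raising atol from 1e-04 to 1.5e-04 for svd and linalg.svd on CUDA admits the slightly larger numerical differences those decompositions produce on that backend. A minimal sketch of that kind of check, using torch.allclose; the helper name below is illustrative and not part of the functorch test suite:

import torch

# Sketch only: atol/rtol values like the ones in this diff feed an element-wise
# check of the form |actual - expected| <= atol + rtol * |expected|.
def outputs_close(expected, actual, atol=1.5e-04, rtol=1e-04):
    return torch.allclose(actual, expected, atol=atol, rtol=rtol)

expected = torch.randn(3, 3)
actual = expected + 1e-04  # small backend-vs-reference drift
print(outputs_close(expected, actual))  # True: within the loosened atol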
