Coverage for src/flag_gems/ops/sub.py: 87%
31 statements
« prev ^ index » next coverage.py v7.6.9, created at 2026-03-25 02:48 +0800
« prev ^ index » next coverage.py v7.6.9, created at 2026-03-25 02:48 +0800
1import logging
3import torch
4import triton
6from flag_gems.utils import pointwise_dynamic
8logger = logging.getLogger(__name__)
@pointwise_dynamic(is_tensor=[True, True, False], promotion_methods=[(0, 1, "DEFAULT")])
@triton.jit
def sub_func(x, y, alpha):
    # Elementwise subtraction of two tensors with a scalar scale on y.
    scaled = y * alpha
    return x - scaled
@pointwise_dynamic(
    is_tensor=[True, False, False], promotion_methods=[(0, 1, "DEFAULT")]
)
@triton.jit
def sub_func_tensor_scalar(x, y, alpha):
    # Tensor x minus a scaled scalar y.
    result = x - y * alpha
    return result
@pointwise_dynamic(
    is_tensor=[False, True, False], promotion_methods=[(0, 1, "DEFAULT")]
)
@triton.jit
def sub_func_scalar_tensor(x, y, alpha):
    # Scalar x minus a scaled tensor y.
    result = x - y * alpha
    return result
33def sub(A, B, *, alpha=1):
34 logger.debug("GEMS SUB")
35 if isinstance(A, torch.Tensor) and isinstance(B, torch.Tensor):
36 return sub_func(A, B, alpha)
37 elif isinstance(A, torch.Tensor):
38 return sub_func_tensor_scalar(A, B, alpha)
39 elif isinstance(B, torch.Tensor):
40 return sub_func_scalar_tensor(A, B, alpha)
41 else:
42 # Both scalar
43 return torch.tensor(A - B * alpha)
def sub_(A, B, *, alpha=1):
    """In-place subtraction: write ``A - alpha * B`` into ``A`` via ``out0=A``.

    NOTE(review): assumes A is a tensor (required for in-place semantics) —
    only B's type is inspected here.
    """
    logger.debug("GEMS SUB_")
    if not isinstance(B, torch.Tensor):
        # Scalar B: use the tensor/scalar kernel.
        return sub_func_tensor_scalar(A, B, alpha, out0=A)
    return sub_func(A, B, alpha, out0=A)