Coverage for src/flag_gems/runtime/backend/_hygon/ops/__init__.py: 0%

17 statements  

« prev     ^ index     » next       coverage.py v7.6.9, created at 2026-03-22 16:54 +0800

1from .all import all, all_dim, all_dims 

2from .any import any, any_dim, any_dims 

3from .attention import ( 

4 ScaleDotProductAttention, 

5 flash_attention_forward, 

6 flash_attn_varlen_func, 

7 scaled_dot_product_attention, 

8 scaled_dot_product_attention_backward, 

9 scaled_dot_product_attention_forward, 

10) 

11from .div import ( 

12 div_mode, 

13 div_mode_, 

14 floor_divide, 

15 floor_divide_, 

16 remainder, 

17 remainder_, 

18 true_divide, 

19 true_divide_, 

20) 

21from .exponential_ import exponential_ 

22from .fill import fill_scalar, fill_scalar_, fill_tensor, fill_tensor_ 

23from .gelu import gelu, gelu_ 

24from .isclose import allclose, isclose 

25from .isin import isin 

26from .mm import mm 

27from .pow import ( 

28 pow_scalar, 

29 pow_tensor_scalar, 

30 pow_tensor_scalar_, 

31 pow_tensor_tensor, 

32 pow_tensor_tensor_, 

33) 

34from .randperm import randperm 

35from .silu import silu, silu_, silu_backward 

36from .sort import sort, sort_stable 

37from .unique import _unique2 

38from .upsample_nearest2d import upsample_nearest2d 

39 

# Public re-export list for the Hygon backend ops package.
# Kept alphabetically sorted (ASCII order: CamelCase/underscore-prefixed
# names first) so additions produce minimal, conflict-free diffs.
# Fix: "isclose" must precede "isin" ("isc" < "isi"); they were swapped.
__all__ = [
    "_unique2",
    "ScaleDotProductAttention",
    "all",
    "all_dim",
    "all_dims",
    "allclose",
    "any",
    "any_dim",
    "any_dims",
    "div_mode",
    "div_mode_",
    "exponential_",
    "fill_scalar",
    "fill_scalar_",
    "fill_tensor",
    "fill_tensor_",
    "flash_attention_forward",
    "flash_attn_varlen_func",
    "floor_divide",
    "floor_divide_",
    "gelu",
    "gelu_",
    "isclose",
    "isin",
    "mm",
    "pow_scalar",
    "pow_tensor_scalar",
    "pow_tensor_scalar_",
    "pow_tensor_tensor",
    "pow_tensor_tensor_",
    "randperm",
    "remainder",
    "remainder_",
    "scaled_dot_product_attention",
    "scaled_dot_product_attention_backward",
    "scaled_dot_product_attention_forward",
    "silu",
    "silu_",
    "silu_backward",
    "sort",
    "sort_stable",
    "true_divide",
    "true_divide_",
    "upsample_nearest2d",
]