Skip to content

Commit c250004

Browse files
committed
format
1 parent 1b779df commit c250004

7 files changed

Lines changed: 10 additions & 5 deletions

File tree

python/infinicore/__init__.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -45,18 +45,18 @@
4545
from infinicore.ops.add import add
4646
from infinicore.ops.add_rms_norm import add_rms_norm, add_rms_norm_
4747
from infinicore.ops.attention import attention
48+
from infinicore.ops.gcd import gcd
49+
from infinicore.ops.gt import gt
4850
from infinicore.ops.matmul import matmul
4951
from infinicore.ops.mul import mul
5052
from infinicore.ops.narrow import narrow
5153
from infinicore.ops.paged_attention import paged_attention
5254
from infinicore.ops.paged_attention_prefill import paged_attention_prefill
5355
from infinicore.ops.paged_caching import paged_caching
5456
from infinicore.ops.rearrange import rearrange
57+
from infinicore.ops.select_scatter import select_scatter
5558
from infinicore.ops.squeeze import squeeze
5659
from infinicore.ops.unsqueeze import unsqueeze
57-
from infinicore.ops.gcd import gcd
58-
from infinicore.ops.gt import gt
59-
from infinicore.ops.select_scatter import select_scatter
6060
from infinicore.tensor import (
6161
Tensor,
6262
empty,

python/infinicore/nn/functional/__init__.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,13 +1,13 @@
11
from .causal_softmax import causal_softmax
22
from .embedding import embedding
3+
from .glu import glu
34
from .linear import linear
5+
from .nll_loss import nll_loss
46
from .random_sample import random_sample
57
from .rms_norm import rms_norm
68
from .rope import RopeAlgo, rope
79
from .silu import silu
810
from .swiglu import swiglu
9-
from .nll_loss import nll_loss
10-
from .glu import glu
1111

1212
__all__ = [
1313
"causal_softmax",

python/infinicore/nn/functional/glu.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -2,6 +2,7 @@
22
from infinicore.lib import _infinicore
33
from infinicore.tensor import Tensor
44

5+
56
def glu(input: Tensor, dim: int = -1) -> Tensor:
67

78
if infinicore.use_ntops and input.device.type in ("cuda", "musa"):

python/infinicore/nn/functional/nll_loss.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -2,6 +2,7 @@
22
from infinicore.lib import _infinicore
33
from infinicore.tensor import Tensor
44

5+
56
def nll_loss(
67
input: Tensor,
78
target: Tensor,

python/infinicore/ops/gcd.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -2,6 +2,7 @@
22
from infinicore.lib import _infinicore
33
from infinicore.tensor import Tensor
44

5+
56
def gcd(input: Tensor, other: Tensor, *, out=None) -> Tensor:
67
r"""Computes the element-wise greatest common divisor (GCD)."""
78

python/infinicore/ops/gt.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -2,6 +2,7 @@
22
from infinicore.lib import _infinicore
33
from infinicore.tensor import Tensor
44

5+
56
def gt(input: Tensor, other: Tensor | float, *, out: Tensor | None = None) -> Tensor:
67
if infinicore.use_ntops and input.device.type in ("cuda", "musa"):
78
return infinicore.ntops.torch.gt(input, other, out=out)

python/infinicore/ops/select_scatter.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -2,6 +2,7 @@
22
from infinicore.lib import _infinicore
33
from infinicore.tensor import Tensor
44

5+
56
def select_scatter(input: Tensor, src: Tensor, dim: int, index: int) -> Tensor:
67
if infinicore.use_ntops and input.device.type in ("cuda", "musa"):
78
return infinicore.ntops.torch.select_scatter(input, src, dim, index)

0 commit comments

Comments
 (0)