Skip to content

Commit b2b1e6a

Browse files
committed
style: apply clang-format
1 parent 9c04549 commit b2b1e6a

File tree

4 files changed: +23 additions, −22 deletions

src/operator.h

Lines changed: 5 additions & 6 deletions
```diff
@@ -95,12 +95,11 @@ struct ActiveImplementationsImpl<Key, kDev, 0> {
 };

 template <typename Key, Device::Type kDev>
-using ActiveImplementations =
-    typename Flatten<
-        typename ActiveImplementationsImpl<Key, kDev, 0>::type,
-        typename ActiveImplementationsImpl<Key, kDev, 1>::type,
-        typename ActiveImplementationsImpl<Key, kDev, 2>::type,
-        typename ActiveImplementationsImpl<Key, kDev, 3>::type>::type;
+using ActiveImplementations = typename Flatten<
+    typename ActiveImplementationsImpl<Key, kDev, 0>::type,
+    typename ActiveImplementationsImpl<Key, kDev, 1>::type,
+    typename ActiveImplementationsImpl<Key, kDev, 2>::type,
+    typename ActiveImplementationsImpl<Key, kDev, 3>::type>::type;

 class OperatorBase {
  public:
```

src/torch/add/add.cc

Lines changed: 9 additions & 10 deletions
```diff
@@ -7,20 +7,19 @@ namespace infini::ops {
 template <Device::Type kDev>
 Operator<Add, kDev, 1>::Operator(const Tensor input, const Tensor other,
                                  Tensor out)
-    : Add{input, other, out},
-      device_index_{out.device().index()} {}
+    : Add{input, other, out}, device_index_{out.device().index()} {}

 template <Device::Type kDev>
 void Operator<Add, kDev, 1>::operator()(const Tensor input, const Tensor other,
                                         Tensor out) const {
-  auto at_input = ToAtenTensor<kDev>(
-      const_cast<void*>(input.data()), input_shape_, input_strides_,
-      input_type_, device_index_);
-  auto at_other = ToAtenTensor<kDev>(
-      const_cast<void*>(other.data()), other_shape_, other_strides_,
-      other_type_, device_index_);
-  auto at_out = ToAtenTensor<kDev>(
-      out.data(), out_shape_, out_strides_, out_type_, device_index_);
+  auto at_input =
+      ToAtenTensor<kDev>(const_cast<void*>(input.data()), input_shape_,
+                         input_strides_, input_type_, device_index_);
+  auto at_other =
+      ToAtenTensor<kDev>(const_cast<void*>(other.data()), other_shape_,
+                         other_strides_, other_type_, device_index_);
+  auto at_out = ToAtenTensor<kDev>(out.data(), out_shape_, out_strides_,
+                                   out_type_, device_index_);

   at::add_out(at_out, at_input, at_other);
 }
```

src/torch/gemm/gemm.cc

Lines changed: 6 additions & 4 deletions
```diff
@@ -17,10 +17,12 @@ Operator<Gemm, kDev, 2>::Operator(const Tensor a, const Tensor b,
       device_index_{c.device().index()} {}

 template <Device::Type kDev>
-void Operator<Gemm, kDev, 2>::operator()(
-    const Tensor a, const Tensor b, std::optional<float> alpha,
-    std::optional<float> beta, std::optional<int> trans_a,
-    std::optional<int> trans_b, Tensor c) const {
+void Operator<Gemm, kDev, 2>::operator()(const Tensor a, const Tensor b,
+                                         std::optional<float> alpha,
+                                         std::optional<float> beta,
+                                         std::optional<int> trans_a,
+                                         std::optional<int> trans_b,
+                                         Tensor c) const {
   auto at_a = ToAtenTensor<kDev>(const_cast<void*>(a.data()), a_shape_,
                                  a_strides_, a_type_, device_index_);
   auto at_b = ToAtenTensor<kDev>(const_cast<void*>(b.data()), b_shape_,
```

src/torch/tensor_.h

Lines changed: 3 additions & 2 deletions
```diff
@@ -64,8 +64,9 @@ inline at::Tensor ToAtenTensor(void* data, const Tensor::Shape& shape,
   auto options = at::TensorOptions().dtype(ToAtenDtype(dtype));

   if constexpr (kDev != Device::Type::kCpu) {
-    std::string device_str = std::string(detail::TorchDeviceName<kDev>::kValue) +
-                             ":" + std::to_string(device_index);
+    std::string device_str =
+        std::string(detail::TorchDeviceName<kDev>::kValue) + ":" +
+        std::to_string(device_index);
     options = options.device(device_str);
   }
```

Comments: 0