Skip to content

Commit 6eee669

Browse files
authored
Fix semantic merge conflict in vortex-tensor (#7299)
## Summary

Fixes semantic merge conflict in vortex-tensor

## Testing

N/A

Signed-off-by: Connor Tsui <connor.tsui20@gmail.com>
1 parent e415157 commit 6eee669

File tree

6 files changed

+21
-21
lines changed

6 files changed

+21
-21
lines changed

vortex-tensor/src/encodings/turboquant/array/data.rs

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -14,8 +14,8 @@ use vortex_error::vortex_ensure_eq;
1414

1515
use crate::encodings::turboquant::array::slots::Slot;
1616
use crate::encodings::turboquant::vtable::TurboQuant;
17-
use crate::utils::extension_element_ptype;
18-
use crate::utils::extension_list_size;
17+
use crate::utils::tensor_element_ptype;
18+
use crate::utils::tensor_list_size;
1919

2020
/// TurboQuant array data.
2121
///
@@ -117,7 +117,7 @@ impl TurboQuantData {
117117

118118
let dimension = dtype
119119
.as_extension_opt()
120-
.and_then(|ext| extension_list_size(ext).ok())
120+
.and_then(|ext| tensor_list_size(ext).ok())
121121
.vortex_expect("dtype must be a Vector extension type with FixedSizeList storage");
122122

123123
let bit_width = if centroids.is_empty() {
@@ -154,7 +154,7 @@ impl TurboQuantData {
154154
rotation_signs: &ArrayRef,
155155
) -> VortexResult<()> {
156156
let ext = TurboQuant::validate_dtype(dtype)?;
157-
let dimension = extension_list_size(ext)?;
157+
let dimension = tensor_list_size(ext)?;
158158
let padded_dim = dimension.next_power_of_two();
159159

160160
// Codes must be a non-nullable FixedSizeList<u8> with list_size == padded_dim.
@@ -209,7 +209,7 @@ impl TurboQuantData {
209209

210210
// Norms dtype must match the element ptype of the Vector, with the parent's nullability.
211211
// Norms carry the validity of the entire TurboQuant array.
212-
let element_ptype = extension_element_ptype(ext)?;
212+
let element_ptype = tensor_element_ptype(ext)?;
213213
let expected_norms_dtype = DType::Primitive(element_ptype, dtype.nullability());
214214
vortex_ensure_eq!(
215215
*norms.dtype(),

vortex-tensor/src/encodings/turboquant/array/scheme.rs

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -14,8 +14,8 @@ use vortex_error::VortexResult;
1414
use crate::encodings::turboquant::TurboQuant;
1515
use crate::encodings::turboquant::TurboQuantConfig;
1616
use crate::encodings::turboquant::turboquant_encode;
17-
use crate::utils::extension_element_ptype;
18-
use crate::utils::extension_list_size;
17+
use crate::utils::tensor_element_ptype;
18+
use crate::utils::tensor_list_size;
1919

2020
/// TurboQuant compression scheme for [`Vector`] extension types.
2121
///
@@ -59,8 +59,8 @@ impl Scheme for TurboQuantScheme {
5959
let len = data.array().len();
6060

6161
let ext = TurboQuant::validate_dtype(dtype)?;
62-
let element_ptype = extension_element_ptype(ext)?;
63-
let dimension = extension_list_size(ext)?;
62+
let element_ptype = tensor_element_ptype(ext)?;
63+
let dimension = tensor_list_size(ext)?;
6464

6565
Ok(estimate_compression_ratio(
6666
element_ptype.bit_width(),

vortex-tensor/src/encodings/turboquant/compute/cosine_similarity.rs

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -43,7 +43,7 @@ use vortex_error::vortex_ensure_eq;
4343

4444
use crate::encodings::turboquant::TurboQuant;
4545
use crate::encodings::turboquant::array::float_from_f32;
46-
use crate::utils::extension_element_ptype;
46+
use crate::utils::tensor_element_ptype;
4747

4848
/// Compute the per-row unit-norm dot products in f32 (centroids are always f32).
4949
///
@@ -107,7 +107,7 @@ pub fn cosine_similarity_quantized_column(
107107
"TurboQuant quantized dot product requires matching dimensions",
108108
);
109109

110-
let element_ptype = extension_element_ptype(lhs.dtype().as_extension())?;
110+
let element_ptype = tensor_element_ptype(lhs.dtype().as_extension())?;
111111
let validity = lhs.norms().validity()?.and(rhs.norms().validity()?)?;
112112
let dots = compute_unit_dots(&lhs, &rhs, ctx)?;
113113

@@ -145,7 +145,7 @@ pub fn dot_product_quantized_column(
145145
"TurboQuant quantized dot product requires matching dimensions",
146146
);
147147

148-
let element_ptype = extension_element_ptype(lhs.dtype().as_extension())?;
148+
let element_ptype = tensor_element_ptype(lhs.dtype().as_extension())?;
149149
let validity = lhs.norms().validity()?.and(rhs.norms().validity()?)?;
150150
let dots = compute_unit_dots(&lhs, &rhs, ctx)?;
151151
let num_rows = lhs.norms().len();

vortex-tensor/src/encodings/turboquant/decompress.rs

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -22,7 +22,7 @@ use vortex_error::VortexResult;
2222
use crate::encodings::turboquant::TurboQuant;
2323
use crate::encodings::turboquant::array::float_from_f32;
2424
use crate::encodings::turboquant::array::rotation::RotationMatrix;
25-
use crate::utils::extension_element_ptype;
25+
use crate::utils::tensor_element_ptype;
2626

2727
/// Decompress a `TurboQuantArray` into a [`Vector`] extension array.
2828
///
@@ -38,7 +38,7 @@ pub fn execute_decompress(
3838
let padded_dim = array.padded_dim() as usize;
3939
let num_rows = array.norms().len();
4040
let ext_dtype = array.dtype().as_extension().clone();
41-
let element_ptype = extension_element_ptype(&ext_dtype)?;
41+
let element_ptype = tensor_element_ptype(&ext_dtype)?;
4242

4343
if num_rows == 0 {
4444
let fsl_validity = Validity::from(ext_dtype.storage_dtype().nullability());

vortex-tensor/src/encodings/turboquant/vtable.rs

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -39,8 +39,8 @@ use crate::encodings::turboquant::array::slots::Slot;
3939
use crate::encodings::turboquant::compute::rules::PARENT_KERNELS;
4040
use crate::encodings::turboquant::compute::rules::RULES;
4141
use crate::encodings::turboquant::decompress::execute_decompress;
42-
use crate::utils::extension_element_ptype;
43-
use crate::utils::extension_list_size;
42+
use crate::utils::tensor_element_ptype;
43+
use crate::utils::tensor_list_size;
4444
use crate::vector::Vector;
4545

4646
/// Encoding marker type for TurboQuant.
@@ -66,7 +66,7 @@ impl TurboQuant {
6666
vortex_err!("TurboQuant dtype must be a Vector extension type, got {dtype}")
6767
})?;
6868

69-
let dimension = extension_list_size(ext)?;
69+
let dimension = tensor_list_size(ext)?;
7070
vortex_ensure!(
7171
dimension >= Self::MIN_DIMENSION,
7272
"TurboQuant requires dimension >= {}, got {dimension}",
@@ -113,7 +113,7 @@ impl VTable for TurboQuant {
113113
vortex_err!("TurboQuant dtype must be a Vector extension type, got {dtype}")
114114
})?;
115115

116-
let dimension = extension_list_size(ext)?;
116+
let dimension = tensor_list_size(ext)?;
117117
vortex_ensure!(
118118
dimension >= Self::MIN_DIMENSION,
119119
"TurboQuant requires dimension >= {}, got {dimension}",
@@ -208,8 +208,8 @@ impl VTable for TurboQuant {
208208

209209
// Validate and derive dimension and element ptype from the Vector extension dtype.
210210
let ext = TurboQuant::validate_dtype(dtype)?;
211-
let dimension = extension_list_size(ext)?;
212-
let element_ptype = extension_element_ptype(ext)?;
211+
let dimension = tensor_list_size(ext)?;
212+
let element_ptype = tensor_element_ptype(ext)?;
213213

214214
let padded_dim = dimension.next_power_of_two();
215215

vortex-tensor/src/scalar_fns/l2_norm.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -133,7 +133,7 @@ impl ScalarFnVTable for L2Norm {
133133
// (e.g., if the input extension has f64 elements).
134134
if let Some(tq) = input_ref.as_opt::<TurboQuant>() {
135135
let ext = input_ref.dtype().as_extension();
136-
let target_ptype = extension_element_ptype(ext)?;
136+
let target_ptype = tensor_element_ptype(ext)?;
137137
let norms: PrimitiveArray = tq.norms().clone().execute(ctx)?;
138138
let target_dtype = DType::Primitive(target_ptype, input_ref.dtype().nullability());
139139
return norms.into_array().cast(target_dtype);

0 commit comments

Comments
 (0)