Skip to content

Commit 436df5a

Browse files
committed
refactor: move ndv_tests imports to module level
Move imports to module level in ndv_tests since they're in their own module anyway.
1 parent 79408c7 commit 436df5a

1 file changed

Lines changed: 4 additions & 5 deletions

File tree

datafusion/datasource-parquet/src/metadata.rs

Lines changed: 4 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -839,10 +839,14 @@ mod tests {
839839
mod ndv_tests {
840840
use super::*;
841841
use arrow::datatypes::Field;
842+
use parquet::arrow::parquet_to_arrow_schema;
842843
use parquet::basic::Type as PhysicalType;
843844
use parquet::file::metadata::{ColumnChunkMetaData, RowGroupMetaData};
845+
use parquet::file::reader::{FileReader, SerializedFileReader};
844846
use parquet::file::statistics::Statistics as ParquetStatistics;
845847
use parquet::schema::types::{SchemaDescriptor, Type as SchemaType};
848+
use std::fs::File;
849+
use std::path::PathBuf;
846850

847851
fn create_schema_descr(num_columns: usize) -> Arc<SchemaDescriptor> {
848852
let fields: Vec<Arc<SchemaType>> = (0..num_columns)
@@ -1135,11 +1139,6 @@ mod tests {
11351139
/// - name: 5 distinct values
11361140
#[test]
11371141
fn test_distinct_count_from_real_parquet_file() {
1138-
use parquet::arrow::parquet_to_arrow_schema;
1139-
use parquet::file::reader::{FileReader, SerializedFileReader};
1140-
use std::fs::File;
1141-
use std::path::PathBuf;
1142-
11431142
// Path to test file created by DuckDB with distinct_count statistics
11441143
let mut path = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
11451144
path.push("src/test_data/ndv_test.parquet");

0 commit comments

Comments (0)