File tree (1 file changed: +7 −9 lines)
Diff hunk: @@ -1613,17 +1613,15 @@ def _task_to_record_batches(
1613 1613
1614 1614     file_project_schema = prune_columns(file_schema, projected_field_ids, select_full_types=False)
1615 1615
1616      -     scanner_kwargs: dict[str, Any] = {
1617      -         "fragment": fragment,
1618      -         "schema": physical_schema,
     1616 +     fragment_scanner = ds.Scanner.from_fragment(
     1617 +         fragment=fragment,
     1618 +         schema=physical_schema,
1619 1619           # This will push down the query to Arrow.
1620 1620           # But in case there are positional deletes, we have to apply them first
1621      -         "filter": pyarrow_filter if not positional_deletes else None,
1622      -         "columns": [col.name for col in file_project_schema.columns],
1623      -     }
1624      -     if batch_size is not None:
1625      -         scanner_kwargs["batch_size"] = batch_size
1626      -     fragment_scanner = ds.Scanner.from_fragment(**scanner_kwargs)
     1621 +         filter=pyarrow_filter if not positional_deletes else None,
     1622 +         columns=[col.name for col in file_project_schema.columns],
     1623 +         batch_size=batch_size,
     1624 +     )
1627 1625
1628 1626     next_index = 0
1629 1627     batches = fragment_scanner.to_batches()
You can’t perform that action at this time.
0 commit comments