@@ -248,7 +248,7 @@ async def test_run_transfer_postgres_to_hdfs_with_full_strategy(
248248
249249 await run_transfer_and_verify(client, group_owner, postgres_to_hdfs.id)
250250
251- files = [file for file in hdfs_file_connection.list_dir(target_path) if file.is_file()]
251+ files = [file.name for file in hdfs_file_connection.list_dir(target_path) if file.is_file()]
252252 verify_file_name_template(files, expected_extension)
253253
254254 spark.catalog.clearCache()
@@ -304,7 +304,7 @@ async def test_run_transfer_postgres_to_hdfs_with_incremental_strategy(
304304 fill_with_data(first_transfer_df)
305305 await run_transfer_and_verify(client, group_owner, postgres_to_hdfs.id)
306306
307- files = [file for file in hdfs_file_connection.list_dir(target_path) if file.is_file()]
307+ files = [file.name for file in hdfs_file_connection.list_dir(target_path) if file.is_file()]
308308 verify_file_name_template(files, expected_extension)
309309
310310 spark.catalog.clearCache()
@@ -323,7 +323,7 @@ async def test_run_transfer_postgres_to_hdfs_with_incremental_strategy(
323323 fill_with_data(second_transfer_df)
324324 await run_transfer_and_verify(client, group_owner, postgres_to_hdfs.id)
325325
326- files = [file for file in hdfs_file_connection.list_dir(target_path) if file.is_file()]
326+ files = [file.name for file in hdfs_file_connection.list_dir(target_path) if file.is_file()]
327327 verify_file_name_template(files, expected_extension)
328328
329329 spark.catalog.clearCache()
0 commit comments