@@ -1053,10 +1053,12 @@ def append(self, df: pa.Table) -> None:
         if len(self.spec().fields) > 0:
             raise ValueError("Cannot write to partitioned tables")
 
+        from pyiceberg.io.pyarrow import schema_to_pyarrow
+
         _check_schema_compatible(self.schema(), other_schema=df.schema)
         # cast if the two schemas are compatible but not equal
-        if self.schema().as_arrow() != df.schema:
-            df = df.cast(self.schema().as_arrow())
+        if schema_to_pyarrow(self.schema()) != df.schema:
+            df = df.cast(schema_to_pyarrow(self.schema()))
 
         merge = _MergingSnapshotProducer(operation=Operation.APPEND, table=self)
 
@@ -1091,10 +1093,12 @@ def overwrite(self, df: pa.Table, overwrite_filter: BooleanExpression = ALWAYS_T
         if len(self.spec().fields) > 0:
             raise ValueError("Cannot write to partitioned tables")
 
+        from pyiceberg.io.pyarrow import schema_to_pyarrow
+
         _check_schema_compatible(self.schema(), other_schema=df.schema)
         # cast if the two schemas are compatible but not equal
-        if self.schema().as_arrow() != df.schema:
-            df = df.cast(self.schema().as_arrow())
+        if schema_to_pyarrow(self.schema()) != df.schema:
+            df = df.cast(schema_to_pyarrow(self.schema()))
 
         merge = _MergingSnapshotProducer(
             operation=Operation.OVERWRITE if self.current_snapshot() is not None else Operation.APPEND,
0 commit comments