Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
19 commits
Select commit Hold shift + click to select a range
7b13a81
test: enables assorted tests
chalmerlowe Apr 10, 2026
1b30c5f
test: revise mypy session to python 3.14
chalmerlowe Apr 10, 2026
cbafca8
test: enables system.sh script to accept NOX_SESSIONS from configs li…
chalmerlowe Apr 10, 2026
3df718a
test: adds core_deps_from_source session
chalmerlowe Apr 10, 2026
9f09980
test: adds parametrization for system & system_noextras
chalmerlowe Apr 10, 2026
bafff39
test: adds constant to match expectations of core_deps_from_source
chalmerlowe Apr 10, 2026
0450b6b
test: adds re (regex) import to support core_deps_from_source nox ses…
chalmerlowe Apr 10, 2026
314a2b3
Merge branch 'main' into test-enable-assorted-tests
chalmerlowe Apr 10, 2026
4106be3
test: restore Python runtimes to match split repo
chalmerlowe Apr 10, 2026
be47e99
test: remove 3.9 from noxfile.py
chalmerlowe Apr 10, 2026
920be87
test: add 3.9 back in cause CI/CD pipeline expects it, even if we ski…
chalmerlowe Apr 10, 2026
f90c49b
chore: filters out fiona
chalmerlowe Apr 10, 2026
fbeffee
chore: adjusts type hints to account for complications with mypy
chalmerlowe Apr 10, 2026
aa6e4d8
chore: adjusts type hints to account for complications with mypy part 2
chalmerlowe Apr 10, 2026
32d0c23
chore: adjusts type hints to account for complications with mypy part 3
chalmerlowe Apr 10, 2026
c714005
chore: adjusts type hints to account for complications with mypy part 4
chalmerlowe Apr 10, 2026
48f9953
chore: adjusts type hints to account for complications with mypy part 5
chalmerlowe Apr 10, 2026
cc20328
chore: adjusts test fixture to use mock object
chalmerlowe Apr 10, 2026
638f083
chore: adjusts type hints to account for complications with mypy part 6
chalmerlowe Apr 10, 2026
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 4 additions & 2 deletions .kokoro/system.sh
Original file line number Diff line number Diff line change
Expand Up @@ -43,7 +43,8 @@ run_package_test() {
local PROJECT_ID
local GOOGLE_APPLICATION_CREDENTIALS
local NOX_FILE
local NOX_SESSION
# Inherit NOX_SESSION from environment to allow configs (like prerelease.cfg) to pass it in
local NOX_SESSION="${NOX_SESSION}"

echo "------------------------------------------------------------"
echo "Configuring environment for: ${package_name}"
Expand All @@ -66,7 +67,8 @@ run_package_test() {
PROJECT_ID=$(cat "${KOKORO_GFILE_DIR}/project-id.json")
GOOGLE_APPLICATION_CREDENTIALS="${KOKORO_GFILE_DIR}/service-account.json"
NOX_FILE="noxfile.py"
NOX_SESSION="system-3.12"
# Use inherited NOX_SESSION if set, otherwise fallback to system-3.12
NOX_SESSION="${NOX_SESSION:-system-3.12}"
;;
esac

Expand Down
1 change: 1 addition & 0 deletions packages/bigframes/bigframes/core/blocks.py
Original file line number Diff line number Diff line change
Expand Up @@ -2796,6 +2796,7 @@ def _is_monotonic(
)
block = block.drop_columns([equal_monotonic_id, strict_monotonic_id])

assert last_result_id is not None
block, monotonic_result_id = block.apply_binary_op(
last_result_id,
last_notna_id,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -528,8 +528,9 @@ def _(
column: ibis_types.Column,
window=None,
) -> ibis_types.Value:
# Ibis FirstNonNullValue expects Value[Any, Columnar], Mypy struggles to see Column as compatible.
return _apply_window_if_present(
ibis_ops.FirstNonNullValue(column).to_expr(),
ibis_ops.FirstNonNullValue(column).to_expr(), # type: ignore[arg-type]
window, # type: ignore
)

Expand All @@ -549,8 +550,9 @@ def _(
column: ibis_types.Column,
window=None,
) -> ibis_types.Value:
# Ibis LastNonNullValue expects Value[Any, Columnar], Mypy struggles to see Column as compatible.
return _apply_window_if_present(
ibis_ops.LastNonNullValue(column).to_expr(),
ibis_ops.LastNonNullValue(column).to_expr(), # type: ignore[arg-type]
window, # type: ignore
)

Expand Down Expand Up @@ -803,8 +805,9 @@ def _to_ibis_boundary(
) -> Optional[ibis_expr_window.WindowBoundary]:
if boundary is None:
return None
# WindowBoundary expects Value[Any, Any], ibis_types.literal returns Scalar which Mypy doesn't see as compatible.
return ibis_expr_window.WindowBoundary(
abs(boundary),
ibis_types.literal(boundary if boundary >= 0 else -boundary), # type: ignore[arg-type]
preceding=boundary <= 0, # type:ignore
)

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -49,8 +49,8 @@ def compile_sql(request: configs.CompileRequest) -> configs.CompileResult:
# Can only pullup slice if we are doing ORDER BY in outermost SELECT
# Need to do this before replacing unsupported ops, as that will rewrite slice ops
result_node = rewrites.pull_up_limits(result_node)
result_node = _replace_unsupported_ops(result_node)
result_node = result_node.bottom_up(rewrites.simplify_join)
result_node = cast(nodes.ResultNode, _replace_unsupported_ops(result_node))
result_node = cast(nodes.ResultNode, result_node.bottom_up(rewrites.simplify_join))
# prune before pulling up order to avoid unnecessary row_number() ops
result_node = cast(nodes.ResultNode, rewrites.column_pruning(result_node))
result_node = rewrites.defer_order(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -182,7 +182,7 @@ def st_buffer(

@ibis_udf.scalar.builtin
def st_distance(
a: ibis_dtypes.geography, b: ibis_dtypes.geography, use_spheroid: bool
a: ibis_dtypes.geography, b: ibis_dtypes.geography, use_spheroid: bool # type: ignore
) -> ibis_dtypes.float: # type: ignore
"""Return the distance between two geography values."""

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -2168,9 +2168,12 @@ def obj_make_ref_json(objectref_json: ibis_dtypes.JSON) -> _OBJ_REF_IBIS_DTYPE:


@ibis_udf.scalar.builtin(name="OBJ.GET_ACCESS_URL")
def obj_get_access_url(
obj_ref: _OBJ_REF_IBIS_DTYPE, mode: ibis_dtypes.String
) -> ibis_dtypes.JSON: # type: ignore
# Stub for BigQuery UDF, empty body is intentional.
# _OBJ_REF_IBIS_DTYPE is a variable holding a type, Mypy complains about it being used as type hint.
def obj_get_access_url( # type: ignore[empty-body]
obj_ref: _OBJ_REF_IBIS_DTYPE, # type: ignore[valid-type]
mode: ibis_dtypes.String
) -> ibis_dtypes.JSON:
"""Get access url (as ObjectRefRuntime JSON) from ObjectRef."""


Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -53,8 +53,8 @@ def compile_sql(request: configs.CompileRequest) -> configs.CompileResult:
# Can only pullup slice if we are doing ORDER BY in outermost SELECT
# Need to do this before replacing unsupported ops, as that will rewrite slice ops
result_node = rewrite.pull_up_limits(result_node)
result_node = _replace_unsupported_ops(result_node)
result_node = result_node.bottom_up(rewrite.simplify_join)
result_node = typing.cast(nodes.ResultNode, _replace_unsupported_ops(result_node))
result_node = typing.cast(nodes.ResultNode, result_node.bottom_up(rewrite.simplify_join))
# prune before pulling up order to avoid unnecessary row_number() ops
result_node = typing.cast(nodes.ResultNode, rewrite.column_pruning(result_node))
result_node = rewrite.defer_order(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -243,7 +243,7 @@ def factor_aggregation(root: nodes.ColumnDef) -> FactoredAggregation:
}

root_scalar_expr = nodes.ColumnDef(
sub_expressions(root.expression, agg_outputs_dict),
sub_expressions(root.expression, cast(Mapping[expression.Expression, expression.Expression], agg_outputs_dict)),
root.id, # type: ignore
)

Expand Down
3 changes: 2 additions & 1 deletion packages/bigframes/bigframes/core/local_data.py
Original file line number Diff line number Diff line change
Expand Up @@ -33,6 +33,7 @@

import bigframes.core.schema as schemata
import bigframes.dtypes
from bigframes.core import identifiers
from bigframes.core import pyarrow_utils


Expand Down Expand Up @@ -155,7 +156,7 @@ def to_arrow(
return schema, batches

def is_nullable(self, column_id: identifiers.ColumnId) -> bool:
return self.data.column(column_id).null_count > 0
return self.data.column(column_id.name).null_count > 0

def to_pyarrow_table(
self,
Expand Down
2 changes: 1 addition & 1 deletion packages/bigframes/bigframes/core/nodes.py
Original file line number Diff line number Diff line change
Expand Up @@ -674,7 +674,7 @@ def fields(self) -> Sequence[Field]:
Field(
col_id,
self.local_data_source.schema.get_type(source_id),
nullable=self.local_data_source.is_nullable(source_id),
nullable=self.local_data_source.is_nullable(identifiers.ColumnId(source_id)),
)
for col_id, source_id in self.scan_list.items
)
Expand Down
5 changes: 3 additions & 2 deletions packages/bigframes/bigframes/core/rewrite/as_sql.py
Original file line number Diff line number Diff line change
Expand Up @@ -291,8 +291,9 @@ def _extract_ctes_to_with_expr(
root.top_down(lambda x: mapping.get(x, x)),
cte_names,
tuple(
cte_node.child.top_down(lambda x: mapping.get(x, x))
for cte_node in topological_ctes # type: ignore
# Mypy loses context that cte_node is a CteNode with a child attribute, despite the isinstance filter above.
cte_node.child.top_down(lambda x: mapping.get(x, x)) # type: ignore[attr-defined]
for cte_node in topological_ctes
),
)

Expand Down
9 changes: 5 additions & 4 deletions packages/bigframes/bigframes/dataframe.py
Original file line number Diff line number Diff line change
Expand Up @@ -3926,12 +3926,13 @@ def round(self, decimals: Union[int, dict[Hashable, int]] = 0) -> DataFrame:
bigframes.dtypes.BOOL_DTYPE
}:
if is_mapping:
if label in decimals: # type: ignore
decimals_dict = typing.cast(dict[typing.Hashable, int], decimals)
if label in decimals_dict:
exprs.append(
ops.round_op.as_expr(
col_id,
ex.const(
decimals[label],
decimals_dict[label],
dtype=bigframes.dtypes.INT_DTYPE, # type: ignore
),
)
Expand Down Expand Up @@ -4447,8 +4448,8 @@ def to_latex(
) -> str | None:
return self.to_pandas(allow_large_results=allow_large_results).to_latex(
buf,
columns=columns,
header=header,
columns=typing.cast(typing.Optional[list[str]], columns),
header=typing.cast(typing.Union[bool, list[str]], header),
index=index,
**kwargs, # type: ignore
)
Expand Down
3 changes: 2 additions & 1 deletion packages/bigframes/bigframes/series.py
Original file line number Diff line number Diff line change
Expand Up @@ -2308,9 +2308,10 @@ def to_json(
)
else:
pd_series = self.to_pandas(allow_large_results=allow_large_results)
# Pandas Series.to_json only supports a subset of orients, but bigframes Series.to_json allows all of them.
return pd_series.to_json(
path_or_buf=path_or_buf,
orient=orient,
orient=orient, # type: ignore[arg-type]
lines=lines,
index=index, # type: ignore
)
Expand Down
5 changes: 3 additions & 2 deletions packages/bigframes/bigframes/session/iceberg.py
Original file line number Diff line number Diff line change
Expand Up @@ -98,9 +98,10 @@ def _extract_location_from_catalog_extension_data(data):


class SchemaVisitor(pyiceberg.schema.SchemaVisitorPerPrimitiveType[bq.SchemaField]):
def schema(
# Override returns a tuple of fields instead of a single field, violating supertype signature but intentional for this visitor.
def schema( # type: ignore[override]
self, schema: pyiceberg.schema.Schema, struct_result: bq.SchemaField
) -> tuple[bq.SchemaField, ...]: # type: ignore
) -> tuple[bq.SchemaField, ...]:
return tuple(f for f in struct_result.fields)

def struct(
Expand Down
Loading
Loading