Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
21 commits
Select commit Hold shift + click to select a range
3e9c51f
refactor: unify ResultSet implementations on Arrow-backed path
mkaufmann Apr 24, 2026
9863083
refactor: address PR review — share vector builder and use in-memory …
mkaufmann Apr 24, 2026
3a37fe2
fix: close allocator on StreamingResultSet.of failure in query path
mkaufmann May 11, 2026
3bfd821
fix: range-check narrowing integer setters in VectorPopulator
mkaufmann May 11, 2026
99d9b55
refactor: namespace jdbc type-name field-metadata key
mkaufmann May 11, 2026
00894a5
test: pin jdbc type-name fallback path
mkaufmann May 11, 2026
ed477c1
test: pin getObject(int, Map) null/empty fast path
mkaufmann May 11, 2026
a538fe6
fix: validate row arity in MetadataResultSets.of
mkaufmann May 11, 2026
272b0f8
refactor: drop empty-batch workaround in MetadataResultSets
mkaufmann May 11, 2026
72d96ee
test: merge DataCloudMetadataResultSetTest into MetadataResultSetsTest
mkaufmann May 11, 2026
1a31efb
refactor: drop StreamingResultSet getObject(Class) bandaid
mkaufmann May 11, 2026
8bf45a2
refactor: trim docstrings and prune redundant ResultSet-shape tests
mkaufmann May 11, 2026
c826ae7
refactor: collapse StreamingResultSet.of factories into one
mkaufmann May 11, 2026
7baf67d
refactor: rename StreamingResultSet to DataCloudResultSet, drop marke…
mkaufmann May 11, 2026
4c52910
fix: declare TYPE_INFO boolean columns as BOOL and tighten VarCharVec…
mkaufmann May 15, 2026
4029987
fix: chain reader and allocator close exceptions via addSuppressed
mkaufmann May 15, 2026
974efa7
fix: hoist queryId before allocator construction in DataCloudStatement
mkaufmann May 15, 2026
dd484f2
fix: close allocator on ArrowStreamReader constructor failure and uni…
mkaufmann May 15, 2026
006cbda
fix: implement INTEGER → boolean coercion in BaseIntVectorAccessor
mkaufmann May 15, 2026
f9633a1
fix: make DataCloudResultSet.close idempotent across cursor.close fai…
mkaufmann May 15, 2026
a9bb197
fix: range-check unscaled value in DecimalVectorSetter
mkaufmann May 15, 2026
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -17,25 +17,37 @@
import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;
import lombok.val;
import org.apache.arrow.memory.BufferAllocator;
import org.apache.arrow.vector.FieldVector;
import org.apache.arrow.vector.VectorSchemaRoot;
import org.apache.arrow.vector.ipc.ArrowStreamReader;

/**
* Row cursor over an {@link ArrowStreamReader} that drives the {@link DataCloudResultSet}.
*
* <p>The cursor owns the supplied {@link BufferAllocator} alongside the reader: closing the
* cursor closes the reader (which releases ArrowBuf accounting) and then the allocator (which
* returns its budget). This is the single place that guarantees root-allocator hygiene for the
* driver; callers of {@link DataCloudResultSet#of} hand ownership over and do not close the
* allocator themselves.
*/
@Slf4j
class ArrowStreamReaderCursor implements AutoCloseable {

private static final int INIT_ROW_NUMBER = -1;

private final ArrowStreamReader reader;
private final BufferAllocator allocator;
private final ZoneId sessionZone;

@lombok.Getter
private int rowsSeen = 0;

private final AtomicInteger currentIndex = new AtomicInteger(INIT_ROW_NUMBER);

/**
 * Creates a cursor that takes ownership of both the reader and its allocator.
 *
 * @param reader      Arrow IPC stream this cursor iterates; closed by {@link #close()}
 * @param allocator   allocator backing the reader's buffers; closed by {@link #close()}
 *                    after the reader, per the ownership contract on the class Javadoc
 * @param sessionZone session time zone, presumably used for temporal value decoding in
 *                    parts of this class not visible here — confirm against full source
 */
ArrowStreamReaderCursor(ArrowStreamReader reader, BufferAllocator allocator, ZoneId sessionZone) {
    this.reader = reader;
    this.allocator = allocator;
    this.sessionZone = sessionZone;
}

Expand Down Expand Up @@ -91,6 +103,13 @@ public boolean next() {
@SneakyThrows
@Override
public void close() {
    // Releases the cursor's resources exactly once each. A stale pre-refactor
    // `reader.close()` call used to precede this block, which would have closed the
    // reader a second time inside the try-with-resources; it has been removed.
    //
    // try-with-resources closes in reverse declaration order: reader first (releases the
    // buffers accounted against the allocator so its closing budget check passes), then
    // allocator. If both throw, Java attaches the allocator's exception as suppressed
    // onto the reader's instead of dropping it, unlike plain try/finally.
    try (BufferAllocator a = allocator;
            ArrowStreamReader r = reader) {
        // intentionally empty — the try exists solely for ordered, exception-safe cleanup
    }
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -48,6 +48,7 @@
import java.sql.Statement;
import java.sql.Struct;
import java.time.Duration;
import java.time.ZoneId;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
Expand Down Expand Up @@ -220,7 +221,7 @@ public DataCloudResultSet getRowBasedResultSet(String queryId, long offset, long
QueryResultArrowStream.OUTPUT_FORMAT);
val arrowStream = SQLExceptionQueryResultIterator.createSqlExceptionArrowStreamReader(
iterator, connectionProperties.isIncludeCustomerDetailInReason(), queryId, null);
return StreamingResultSet.of(arrowStream, queryId);
return DataCloudResultSet.of(arrowStream, queryId, ZoneId.systemDefault());
} catch (StatusRuntimeException ex) {
throw QueryExceptionHandler.createException(
connectionProperties.isIncludeCustomerDetailInReason(), null, queryId, ex);
Expand Down Expand Up @@ -263,7 +264,7 @@ public DataCloudResultSet getChunkBasedResultSet(String queryId, long chunkId, l
QueryResultArrowStream.OUTPUT_FORMAT);
val arrowStream = SQLExceptionQueryResultIterator.createSqlExceptionArrowStreamReader(
iterator, connectionProperties.isIncludeCustomerDetailInReason(), queryId, null);
return StreamingResultSet.of(arrowStream, queryId);
return DataCloudResultSet.of(arrowStream, queryId, ZoneId.systemDefault());
} catch (StatusRuntimeException ex) {
throw QueryExceptionHandler.createException(
connectionProperties.isIncludeCustomerDetailInReason(), null, queryId, ex);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@

import com.google.common.collect.ImmutableList;
import com.salesforce.datacloud.jdbc.config.DriverVersion;
import com.salesforce.datacloud.jdbc.core.metadata.DataCloudResultSetMetaData;
import com.salesforce.datacloud.jdbc.core.metadata.MetadataResultSets;
import com.salesforce.datacloud.jdbc.core.types.HyperTypes;
import com.salesforce.datacloud.jdbc.util.JdbcURL;
import com.salesforce.datacloud.jdbc.util.ThrowingJdbcSupplier;
Expand Down Expand Up @@ -706,39 +706,39 @@ public ResultSet getColumns(String catalog, String schemaPattern, String tableNa
@Override
public ResultSet getColumnPrivileges(String catalog, String schema, String table, String columnNamePattern)
        throws SQLException {
    // Column-level privileges are not modeled by this driver; per JDBC convention an
    // empty result set signals "no privilege information available".
    return MetadataResultSets.emptyNoColumns();
}

@Override
public ResultSet getTablePrivileges(String catalog, String schemaPattern, String tableNamePattern)
        throws SQLException {
    // Table-level privileges are not modeled by this driver; return an empty result set.
    return MetadataResultSets.emptyNoColumns();
}

@Override
public ResultSet getBestRowIdentifier(String catalog, String schema, String table, int scope, boolean nullable)
        throws SQLException {
    // No optimal row-identifier metadata is exposed by this driver; return an empty result set.
    return MetadataResultSets.emptyNoColumns();
}

@Override
public ResultSet getVersionColumns(String catalog, String schema, String table) throws SQLException {
    // Auto-updated ("version") columns are not tracked by this driver; return an empty result set.
    return MetadataResultSets.emptyNoColumns();
}

@Override
public ResultSet getPrimaryKeys(String catalog, String schema, String table) throws SQLException {
    // Primary-key metadata is not exposed by this driver; return an empty result set.
    return MetadataResultSets.emptyNoColumns();
}

@Override
public ResultSet getImportedKeys(String catalog, String schema, String table) throws SQLException {
    // Foreign keys imported by a table are not modeled by this driver; return an empty result set.
    return MetadataResultSets.emptyNoColumns();
}

@Override
public ResultSet getExportedKeys(String catalog, String schema, String table) throws SQLException {
    // Foreign keys exported by a table are not modeled by this driver; return an empty result set.
    return MetadataResultSets.emptyNoColumns();
}

@Override
Expand All @@ -750,19 +750,18 @@ public ResultSet getCrossReference(
String foreignSchema,
String foreignTable)
throws SQLException {
return DataCloudMetadataResultSet.empty();
return MetadataResultSets.emptyNoColumns();
}

@Override
public ResultSet getTypeInfo() throws SQLException {
    // Materialize the static Hyper type catalog as a TYPE_INFO-shaped metadata result
    // set; MetadataResultSets.ofRawRows pairs the fixed schema with the raw rows.
    return MetadataResultSets.ofRawRows(MetadataSchemas.TYPE_INFO, HyperTypes.typeInfoRows());
}

@Override
public ResultSet getIndexInfo(String catalog, String schema, String table, boolean unique, boolean approximate)
        throws SQLException {
    // Index statistics are not exposed by this driver; return an empty result set.
    return MetadataResultSets.emptyNoColumns();
}

@Override
Expand Down

This file was deleted.

Loading
Loading