Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions sandbox/plugins/dsl-query-executor/build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -39,6 +39,8 @@ dependencies {

testImplementation project(':test:framework')
testImplementation "org.mockito:mockito-core:${versions.mockito}"
testImplementation "com.fasterxml.jackson.core:jackson-databind:${versions.jackson_databind}"
testImplementation "com.fasterxml.jackson.core:jackson-annotations:${versions.jackson_annotations}"

internalClusterTestImplementation project(':server')
internalClusterTestImplementation project(':test:framework')
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -9,21 +9,39 @@
package org.opensearch.dsl.converter;

import org.apache.calcite.jdbc.CalciteSchema;
import org.apache.calcite.rel.RelNode;
import org.apache.calcite.rel.logical.LogicalSort;
import org.apache.calcite.rel.logical.LogicalTableScan;
import org.apache.calcite.rel.type.RelDataType;
import org.apache.calcite.rel.type.RelDataTypeFactory;
import org.apache.calcite.schema.SchemaPlus;
import org.apache.calcite.schema.impl.AbstractTable;
import org.apache.calcite.sql.type.SqlTypeName;
import org.opensearch.common.settings.Settings;
import org.opensearch.common.xcontent.json.JsonXContent;
import org.opensearch.core.xcontent.DeprecationHandler;
import org.opensearch.core.xcontent.NamedXContentRegistry;
import org.opensearch.core.xcontent.XContentParser;
import org.opensearch.dsl.executor.QueryPlans;
import org.opensearch.dsl.golden.CalciteTestInfra;
import org.opensearch.dsl.golden.GoldenFileLoader;
import org.opensearch.dsl.golden.GoldenTestCase;
import org.opensearch.search.SearchModule;
import org.opensearch.search.aggregations.BucketOrder;
import org.opensearch.search.aggregations.bucket.terms.TermsAggregationBuilder;
import org.opensearch.search.aggregations.metrics.AvgAggregationBuilder;
import org.opensearch.search.builder.SearchSourceBuilder;
import org.opensearch.test.OpenSearchTestCase;

import java.io.IOException;
import java.net.URL;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

public class SearchSourceConverterTests extends OpenSearchTestCase {

Expand Down Expand Up @@ -128,4 +146,78 @@ public void testMetricOnlyAggPlanHasNoPostAggSort() throws ConversionException {
// Metric-only agg has no bucket orders, so no LogicalSort wrapper
assertFalse(plans.get(QueryPlans.Type.AGGREGATION).get(0).relNode() instanceof LogicalSort);
}

// ---- Golden file driven RelNode generation tests ----

/**
* Auto-discovers all golden JSON files and validates that each inputDsl
* produces the expected RelNode plan via SearchSourceConverter.convert().
* Adding a new test case only requires adding a new JSON file — no new
* Java method needed.
*/
/**
 * Golden-file driven test: walks every JSON file under the {@code golden}
 * test resource directory and checks that each file's inputDsl converts to
 * the expected RelNode plan via SearchSourceConverter.convert(). Adding a
 * new case only requires dropping in a new JSON file — no new Java method.
 */
public void testGoldenFileRelNodeGeneration() throws Exception {
    URL goldenDir = getClass().getClassLoader().getResource("golden");
    assertNotNull("Golden file resource directory not found", goldenDir);

    // Discover every *.json case file. NOTE(review): Files.list over a
    // classpath URL assumes the test resources are exploded on disk, not
    // packaged in a jar — confirm this holds for all build configurations.
    List<Path> caseFiles;
    try (var paths = Files.list(Path.of(goldenDir.toURI()))) {
        caseFiles = paths.filter(p -> p.toString().endsWith(".json")).collect(Collectors.toList());
    }
    assertFalse("No golden files found", caseFiles.isEmpty());

    // Accumulate every mismatch so one run reports all broken cases at once.
    List<String> failures = new ArrayList<>();
    for (Path caseFile : caseFiles) {
        String fileName = caseFile.getFileName().toString();
        try {
            GoldenTestCase tc = GoldenFileLoader.load(fileName);
            CalciteTestInfra.InfraResult infra = CalciteTestInfra.buildFromMapping(tc.getIndexName(), tc.getIndexMapping());

            SearchSourceBuilder searchSource = parseSearchSource(tc.getInputDsl());
            QueryPlans plans = new SearchSourceConverter(infra.schema()).convert(searchSource, tc.getIndexName());

            QueryPlans.Type expectedType = QueryPlans.Type.valueOf(tc.getPlanType());
            List<QueryPlans.QueryPlan> matchingPlans = plans.get(expectedType);
            if (matchingPlans.isEmpty()) {
                failures.add(fileName + ": No " + expectedType + " plan produced");
                continue;
            }

            RelNode relNode = matchingPlans.get(0).relNode();

            // Compare the textual plan against the golden expectation.
            String expectedPlan = String.join("\n", tc.getExpectedRelNodePlan());
            String actualPlan = relNode.explain().trim();
            if (!expectedPlan.equals(actualPlan)) {
                failures.add(fileName + ": RelNode plan mismatch\n Expected: " + expectedPlan + "\n Actual: " + actualPlan);
            }

            // The plan's output row type must also match the mock result schema.
            List<String> actualFields = relNode.getRowType().getFieldNames();
            if (!tc.getMockResultFieldNames().equals(actualFields)) {
                failures.add(
                    fileName + ": Field names mismatch\n Expected: " + tc.getMockResultFieldNames() + "\n Actual: " + actualFields
                );
            }
        } catch (Exception e) {
            failures.add(fileName + ": " + e.getClass().getSimpleName() + " - " + e.getMessage());
        }
    }

    if (!failures.isEmpty()) {
        fail("Golden file RelNode generation failures:\n" + String.join("\n", failures));
    }
}

/**
 * Renders the golden file's inputDsl map as JSON and parses it back into a
 * {@link SearchSourceBuilder} using the full {@link SearchModule} registry.
 *
 * @param inputDsl the raw DSL map taken from the golden file
 * @return the parsed search source
 * @throws IOException if JSON rendering or parsing fails
 */
private SearchSourceBuilder parseSearchSource(Map<String, Object> inputDsl) throws IOException {
    final String dslJson;
    try (var jsonBuilder = JsonXContent.contentBuilder()) {
        jsonBuilder.map(inputDsl);
        dslJson = jsonBuilder.toString();
    }
    // Register the named xcontent parsers (queries, aggregations, ...) so
    // that SearchSourceBuilder.fromXContent can resolve every DSL element.
    SearchModule searchModule = new SearchModule(Settings.EMPTY, Collections.emptyList());
    NamedXContentRegistry registry = new NamedXContentRegistry(searchModule.getNamedXContents());
    try (XContentParser parser = JsonXContent.jsonXContent.createParser(registry, DeprecationHandler.IGNORE_DEPRECATIONS, dslJson)) {
        return SearchSourceBuilder.fromXContent(parser);
    }
}
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,115 @@
/*
* SPDX-License-Identifier: Apache-2.0
*
* The OpenSearch Contributors require contributions made to
* this file be licensed under the Apache-2.0 license or a
* compatible open source license.
*/

package org.opensearch.dsl.golden;

import org.apache.calcite.config.CalciteConnectionConfigImpl;
import org.apache.calcite.jdbc.CalciteSchema;
import org.apache.calcite.plan.RelOptCluster;
import org.apache.calcite.plan.RelOptTable;
import org.apache.calcite.plan.hep.HepPlanner;
import org.apache.calcite.plan.hep.HepProgram;
import org.apache.calcite.prepare.CalciteCatalogReader;
import org.apache.calcite.rel.type.RelDataType;
import org.apache.calcite.rel.type.RelDataTypeFactory;
import org.apache.calcite.rel.type.RelDataTypeSystem;
import org.apache.calcite.rex.RexBuilder;
import org.apache.calcite.schema.SchemaPlus;
import org.apache.calcite.schema.impl.AbstractTable;
import org.apache.calcite.sql.type.SqlTypeFactoryImpl;
import org.apache.calcite.sql.type.SqlTypeName;

import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Properties;

/**
 * Builds Calcite planning infrastructure from a golden file's index mapping.
 *
 * <p>Follows the same setup pattern as {@code TestUtils} and the
 * {@code SearchSourceConverter} constructor, except that the schema is
 * assembled dynamically from the golden file's {@code indexMapping} field
 * rather than a hardcoded one.
 */
public class CalciteTestInfra {

    private CalciteTestInfra() {}

    /**
     * Builds a complete Calcite infrastructure from a golden file's index mapping.
     *
     * @param indexName the index name to register in the schema
     * @param indexMapping field name → SQL type name (e.g. "VARCHAR", "INTEGER")
     * @return an {@link InfraResult} containing the cluster, table, and schema
     * @throws IllegalArgumentException if indexMapping contains an unsupported type
     */
    public static InfraResult buildFromMapping(String indexName, Map<String, String> indexMapping) {
        Objects.requireNonNull(indexName, "indexName must not be null");
        Objects.requireNonNull(indexMapping, "indexMapping must not be null");

        RelDataTypeFactory typeFactory = new SqlTypeFactoryImpl(RelDataTypeSystem.DEFAULT);
        RelOptCluster cluster = RelOptCluster.create(new HepPlanner(HepProgram.builder().build()), new RexBuilder(typeFactory));

        // Register a synthetic table whose row type mirrors the golden mapping.
        // Every column is created nullable; field order follows the map's
        // iteration order (golden files load into an order-preserving map).
        SchemaPlus rootSchema = CalciteSchema.createRootSchema(true).plus();
        rootSchema.add(indexName, new AbstractTable() {
            @Override
            public RelDataType getRowType(RelDataTypeFactory factory) {
                RelDataTypeFactory.Builder rowBuilder = factory.builder();
                indexMapping.forEach((field, typeName) -> {
                    RelDataType columnType = factory.createSqlType(resolveSqlType(typeName));
                    rowBuilder.add(field, factory.createTypeWithNullability(columnType, true));
                });
                return rowBuilder.build();
            }
        });

        CalciteCatalogReader catalogReader = new CalciteCatalogReader(
            CalciteSchema.from(rootSchema),
            Collections.singletonList(""),
            typeFactory,
            new CalciteConnectionConfigImpl(new Properties())
        );
        RelOptTable table = Objects.requireNonNull(
            catalogReader.getTable(List.of(indexName)),
            "Table not found in schema: " + indexName
        );

        return new InfraResult(cluster, table, rootSchema);
    }

    /**
     * Translates a golden-file type string into a Calcite {@link SqlTypeName}.
     *
     * @throws IllegalArgumentException for unsupported type strings
     */
    private static SqlTypeName resolveSqlType(String goldenType) {
        return switch (goldenType) {
            case "VARCHAR" -> SqlTypeName.VARCHAR;
            case "INTEGER" -> SqlTypeName.INTEGER;
            case "BIGINT" -> SqlTypeName.BIGINT;
            case "DOUBLE" -> SqlTypeName.DOUBLE;
            case "FLOAT" -> SqlTypeName.FLOAT;
            case "BOOLEAN" -> SqlTypeName.BOOLEAN;
            case "DATE" -> SqlTypeName.DATE;
            case "TIMESTAMP" -> SqlTypeName.TIMESTAMP;
            default -> throw new IllegalArgumentException("Unsupported SQL type in golden file indexMapping: " + goldenType);
        };
    }

    /** Result record containing the Calcite infrastructure built from a golden file mapping. */
    public record InfraResult(RelOptCluster cluster, RelOptTable table, SchemaPlus schema) {
    }
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,102 @@
/*
* SPDX-License-Identifier: Apache-2.0
*
* The OpenSearch Contributors require contributions made to
* this file be licensed under the Apache-2.0 license or a
* compatible open source license.
*/

package org.opensearch.dsl.golden;

import com.fasterxml.jackson.databind.ObjectMapper;

import org.opensearch.dsl.executor.QueryPlans;

import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;

/**
 * Loads and validates golden file test cases.
 *
 * <p>Each golden file is a self-contained JSON document parsed into a
 * {@link GoldenTestCase}. Required fields are validated after parsing;
 * the {@code planType} must name a valid {@link QueryPlans.Type}.
 */
public class GoldenFileLoader {

    private static final ObjectMapper MAPPER = new ObjectMapper();
    private static final String RESOURCE_DIR = "golden/";

    private GoldenFileLoader() {}

    /**
     * Loads a golden file by name from the classpath resource directory
     * {@code src/test/resources/golden/}.
     *
     * @param goldenFileName file name (e.g. {@code "term_query_hits.json"})
     * @return parsed and validated test case
     * @throws IllegalArgumentException if the file is missing, malformed, or
     *                                  has missing required fields
     */
    public static GoldenTestCase load(String goldenFileName) {
        String resourcePath = RESOURCE_DIR + goldenFileName;
        try (InputStream is = GoldenFileLoader.class.getClassLoader().getResourceAsStream(resourcePath)) {
            if (is == null) {
                throw new IllegalArgumentException("Golden file not found on classpath: " + resourcePath);
            }
            return parseAndValidate(is, Path.of(resourcePath));
        } catch (IOException e) {
            throw new IllegalArgumentException("Failed to parse golden file: " + resourcePath, e);
        }
    }

    /**
     * Loads a golden file from an absolute or relative file-system path.
     *
     * @param goldenFilePath path to the JSON golden file
     * @return parsed and validated test case
     * @throws IllegalArgumentException if the file is malformed or has missing
     *                                  required fields
     */
    public static GoldenTestCase load(Path goldenFilePath) {
        try (InputStream is = Files.newInputStream(goldenFilePath)) {
            return parseAndValidate(is, goldenFilePath);
        } catch (IOException e) {
            throw new IllegalArgumentException("Failed to parse golden file: " + goldenFilePath, e);
        }
    }

    /**
     * Shared deserialize-then-validate step for both {@code load} overloads.
     * The path is used only to identify the file in validation error messages.
     */
    private static GoldenTestCase parseAndValidate(InputStream is, Path filePath) throws IOException {
        GoldenTestCase testCase = MAPPER.readValue(is, GoldenTestCase.class);
        validate(testCase, filePath);
        return testCase;
    }

    /**
     * Validates that all required fields are present in the parsed test case.
     * Throws {@link IllegalArgumentException} identifying the file and the
     * missing field.
     */
    private static void validate(GoldenTestCase testCase, Path filePath) {
        requireNonNull(testCase.getTestName(), "testName", filePath);
        requireNonNull(testCase.getIndexName(), "indexName", filePath);
        requireNonNull(testCase.getIndexMapping(), "indexMapping", filePath);
        requireNonNull(testCase.getInputDsl(), "inputDsl", filePath);
        requireNonNull(testCase.getExpectedRelNodePlan(), "expectedRelNodePlan", filePath);
        requireNonNull(testCase.getMockResultFieldNames(), "mockResultFieldNames", filePath);
        requireNonNull(testCase.getMockResultRows(), "mockResultRows", filePath);
        requireNonNull(testCase.getExpectedOutputDsl(), "expectedOutputDsl", filePath);
        requireNonNull(testCase.getPlanType(), "planType", filePath);
        try {
            QueryPlans.Type.valueOf(testCase.getPlanType());
        } catch (IllegalArgumentException e) {
            // Preserve the original cause so the rejected enum value is traceable.
            throw new IllegalArgumentException("Golden file " + filePath + " has invalid planType: " + testCase.getPlanType(), e);
        }
    }

    private static void requireNonNull(Object value, String fieldName, Path filePath) {
        if (value == null) {
            throw new IllegalArgumentException("Golden file " + filePath + " missing required field: " + fieldName);
        }
    }
}
Loading
Loading