From 1fe7848b84903871df895de7ebf036c4f511b255 Mon Sep 17 00:00:00 2001
From: qianmoQ
Date: Wed, 28 Jan 2026 16:48:07 +0800
Subject: [PATCH 1/5] feat(driver): Rewrite Redis Driver
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 .../io/edurt/datacap/sql/parser/SqlBase.g4    |    9 +-
 driver/datacap-driver-redis/pom.xml           |   44 +-
 .../main/java/io/edurt/datacap/core/Hint.java |   22 -
 .../java/io/edurt/datacap/core/HintKey.java   |   19 -
 .../java/io/edurt/datacap/core/Logger.java    |   26 -
 .../main/java/io/edurt/datacap/core/Op.java   |   36 -
 .../io/edurt/datacap/core/RedisClient.java    |   14 -
 .../datacap/core/RedisDatabaseMetadata.java   | 1382 --------------
 .../io/edurt/datacap/core/RedisResultSet.java | 1627 -----------------
 .../io/edurt/datacap/core/RedisStatement.java |  407 -----
 .../io/edurt/datacap/core/ServerVersion.java  |   33 -
 .../java/io/edurt/datacap/core/Utils.java     |   96 -
 .../datacap/driver/AbstractRedisClient.java   |   92 -
 .../{core => driver}/RedisConnection.java     |  338 ++--
 .../edurt/datacap/driver/RedisJdbcDriver.java |   72 +
 .../RedisResultSet.java}                      |  899 ++++-----
 .../RedisResultSetMetaData.java               |  126 +-
 .../edurt/datacap/driver/RedisStatement.java  |  487 +++++
 .../edurt/datacap/driver/RedisTypeHelper.java |  105 ++
 .../cluster/JedisRedisClusterClient.java      |   64 -
 .../driver/cluster/RedisClusterDriver.java    |  107 --
 .../driver/conf/BaseConnectionInfo.java       |   31 -
 .../conf/RedisClusterConnectionInfo.java      |   55 -
 .../driver/conf/RedisConnectionInfo.java      |   44 -
 .../datacap/driver/parser/RedisParser.java    |   78 +
 .../driver/parser/RedisSelectParser.java      |   80 +
 .../driver/parser/RedisShowParser.java        |   77 +
 .../driver/redis/JedisRedisClient.java        |   60 -
 .../datacap/driver/redis/RedisDriver.java     |  116 --
 .../META-INF/services/java.sql.Driver         |    3 +-
 .../test/java/io/edurt/datacap/RedisTest.java |   88 -
 pom.xml                                       |    6 +
 test/datacap-test-driver/pom.xml              |    6 +
 .../datacap/test/redis/RedisJdbcBaseTest.java |  114 ++
 .../redis/RedisJdbcDriverConnectionTest.java  |   64 +
 .../test/redis/RedisJdbcDriverSelectTest.java |   86 +
 .../test/redis/RedisJdbcDriverShowTest.java   |   73 +
 .../datacap/test/redis/RedisParserTest.java   |   79 +
 38 files changed, 2047 insertions(+), 5018 deletions(-)
 delete mode 100644 driver/datacap-driver-redis/src/main/java/io/edurt/datacap/core/Hint.java
 delete mode 100644 driver/datacap-driver-redis/src/main/java/io/edurt/datacap/core/HintKey.java
 delete mode 100644 driver/datacap-driver-redis/src/main/java/io/edurt/datacap/core/Logger.java
 delete mode 100644 driver/datacap-driver-redis/src/main/java/io/edurt/datacap/core/Op.java
 delete mode 100644 driver/datacap-driver-redis/src/main/java/io/edurt/datacap/core/RedisClient.java
 delete mode 100644 driver/datacap-driver-redis/src/main/java/io/edurt/datacap/core/RedisDatabaseMetadata.java
 delete mode 100644 driver/datacap-driver-redis/src/main/java/io/edurt/datacap/core/RedisResultSet.java
 delete mode 100644 driver/datacap-driver-redis/src/main/java/io/edurt/datacap/core/RedisStatement.java
 delete mode 100644 driver/datacap-driver-redis/src/main/java/io/edurt/datacap/core/ServerVersion.java
 delete mode 100644 driver/datacap-driver-redis/src/main/java/io/edurt/datacap/core/Utils.java
 delete mode 100644 driver/datacap-driver-redis/src/main/java/io/edurt/datacap/driver/AbstractRedisClient.java
 rename driver/datacap-driver-redis/src/main/java/io/edurt/datacap/{core => driver}/RedisConnection.java (52%)
 create mode 100644 driver/datacap-driver-redis/src/main/java/io/edurt/datacap/driver/RedisJdbcDriver.java
 rename driver/datacap-driver-redis/src/main/java/io/edurt/datacap/{core/EmptyResultSet.java => driver/RedisResultSet.java} (70%)
 rename driver/datacap-driver-redis/src/main/java/io/edurt/datacap/{core => driver}/RedisResultSetMetaData.java (59%)
 create mode 100644 driver/datacap-driver-redis/src/main/java/io/edurt/datacap/driver/RedisStatement.java
 create mode 100644 driver/datacap-driver-redis/src/main/java/io/edurt/datacap/driver/RedisTypeHelper.java
 delete mode 100644 driver/datacap-driver-redis/src/main/java/io/edurt/datacap/driver/cluster/JedisRedisClusterClient.java
 delete mode 100644 driver/datacap-driver-redis/src/main/java/io/edurt/datacap/driver/cluster/RedisClusterDriver.java
 delete mode 100644 driver/datacap-driver-redis/src/main/java/io/edurt/datacap/driver/conf/BaseConnectionInfo.java
 delete mode 100644 driver/datacap-driver-redis/src/main/java/io/edurt/datacap/driver/conf/RedisClusterConnectionInfo.java
 delete mode 100644 driver/datacap-driver-redis/src/main/java/io/edurt/datacap/driver/conf/RedisConnectionInfo.java
 create mode 100644 driver/datacap-driver-redis/src/main/java/io/edurt/datacap/driver/parser/RedisParser.java
 create mode 100644 driver/datacap-driver-redis/src/main/java/io/edurt/datacap/driver/parser/RedisSelectParser.java
 create mode 100644 driver/datacap-driver-redis/src/main/java/io/edurt/datacap/driver/parser/RedisShowParser.java
 delete mode 100644 driver/datacap-driver-redis/src/main/java/io/edurt/datacap/driver/redis/JedisRedisClient.java
 delete mode 100644 driver/datacap-driver-redis/src/main/java/io/edurt/datacap/driver/redis/RedisDriver.java
 delete mode 100644 driver/datacap-driver-redis/src/test/java/io/edurt/datacap/RedisTest.java
 create mode 100644 test/datacap-test-driver/src/test/java/io/edurt/datacap/test/redis/RedisJdbcBaseTest.java
 create mode 100644 test/datacap-test-driver/src/test/java/io/edurt/datacap/test/redis/RedisJdbcDriverConnectionTest.java
 create mode 100644 test/datacap-test-driver/src/test/java/io/edurt/datacap/test/redis/RedisJdbcDriverSelectTest.java
 create mode 100644 test/datacap-test-driver/src/test/java/io/edurt/datacap/test/redis/RedisJdbcDriverShowTest.java
 create mode 100644 test/datacap-test-driver/src/test/java/io/edurt/datacap/test/redis/RedisParserTest.java

diff --git a/core/datacap-parser/src/main/antlr4/io/edurt/datacap/sql/parser/SqlBase.g4 b/core/datacap-parser/src/main/antlr4/io/edurt/datacap/sql/parser/SqlBase.g4
index eb1a641801..1581a871ff 100644
--- a/core/datacap-parser/src/main/antlr4/io/edurt/datacap/sql/parser/SqlBase.g4
+++ b/core/datacap-parser/src/main/antlr4/io/edurt/datacap/sql/parser/SqlBase.g4
@@ -692,12 +692,13 @@ defaultValue
 // Names and Identifiers
 catalogName: identifier;
 columnName: identifier;
-tableName: (databaseName '.')? identifier;
+tableName: (databaseName '.')? identifier | patternIdentifier;
 databaseName: identifier;
 indexName: identifier;
 constraintName: identifier;
 alias: identifier;
 functionName: identifier;
+patternIdentifier: PATTERN_IDENTIFIER;

 identifier
     : IDENTIFIER
@@ -818,6 +819,12 @@ STRING
     | '"' ( ~('"'|'\\') | ('\\' .)
)* '"' ; +PATTERN_IDENTIFIER + : '*' + | (LETTER | DIGIT | '_')+ (':' (LETTER | DIGIT | '_' | '*' | '?')+)+ + | (LETTER | DIGIT | '_')+ ':' '*' + ; + IDENTIFIER : (LETTER | '_') (LETTER | DIGIT | '_')* ; diff --git a/driver/datacap-driver-redis/pom.xml b/driver/datacap-driver-redis/pom.xml index a8338a4a69..084c2c7739 100644 --- a/driver/datacap-driver-redis/pom.xml +++ b/driver/datacap-driver-redis/pom.xml @@ -1,49 +1,31 @@ - 4.0.0 - datacap io.edurt.datacap + datacap 2025.1.2 ../../pom.xml datacap-driver-redis datacap-driver-redis - DataCap - Driver - - - 4.3.1 - + DataCap - Redis - Driver - redis.clients - jedis - ${redis.version} + org.redisson + redisson + + + io.edurt.datacap + datacap-parser + + + ch.qos.logback + logback-classic + ${logback.version} - - - - - org.apache.maven.plugins - maven-checkstyle-plugin - ${plugin.maven.checkstyle.version} - - true - - - - com.github.spotbugs - spotbugs-maven-plugin - ${plugin.maven.findbugs.version} - - true - - - - diff --git a/driver/datacap-driver-redis/src/main/java/io/edurt/datacap/core/Hint.java b/driver/datacap-driver-redis/src/main/java/io/edurt/datacap/core/Hint.java deleted file mode 100644 index cb04dd8f3e..0000000000 --- a/driver/datacap-driver-redis/src/main/java/io/edurt/datacap/core/Hint.java +++ /dev/null @@ -1,22 +0,0 @@ -package io.edurt.datacap.core; - -import lombok.AllArgsConstructor; -import lombok.Builder; -import lombok.Data; -import lombok.NoArgsConstructor; - -@Data -@Builder -@NoArgsConstructor -@AllArgsConstructor -public class Hint -{ - /** - * hint key - */ - private HintKey key; - /** - * hint value - */ - private String value; -} diff --git a/driver/datacap-driver-redis/src/main/java/io/edurt/datacap/core/HintKey.java b/driver/datacap-driver-redis/src/main/java/io/edurt/datacap/core/HintKey.java deleted file mode 100644 index e5a1142071..0000000000 --- a/driver/datacap-driver-redis/src/main/java/io/edurt/datacap/core/HintKey.java +++ /dev/null @@ -1,19 +0,0 @@ -package io.edurt.datacap.core; - -import java.util.Arrays; -import java.util.Objects; - -public enum HintKey -{ - decoder, - sample_key, - noop; - - public static HintKey fromString(String string) - { - return Arrays.stream(values()) - .filter(t -> Objects.equals(t.toString(), string)) - .findFirst() - .orElse(noop); - } -} diff --git a/driver/datacap-driver-redis/src/main/java/io/edurt/datacap/core/Logger.java b/driver/datacap-driver-redis/src/main/java/io/edurt/datacap/core/Logger.java deleted file mode 100644 index d80017740d..0000000000 --- a/driver/datacap-driver-redis/src/main/java/io/edurt/datacap/core/Logger.java +++ /dev/null @@ -1,26 +0,0 @@ -package io.edurt.datacap.core; - -import java.util.Arrays; - -public class Logger -{ - private final Class mark; - - public Logger(Class mark) - { - this.mark = mark; - } - - public synchronized void log(String format, Object... 
arguments) - { - Object[] objs = Arrays.stream(arguments) - .map(t -> { - if (t instanceof Throwable) { - Throwable th = (Throwable) t; - return th.getMessage(); - } - return t; - }).toArray(Object[]::new); - System.out.printf(mark + ":::" + format + "\n", objs); - } -} diff --git a/driver/datacap-driver-redis/src/main/java/io/edurt/datacap/core/Op.java b/driver/datacap-driver-redis/src/main/java/io/edurt/datacap/core/Op.java deleted file mode 100644 index e89bb76e9a..0000000000 --- a/driver/datacap-driver-redis/src/main/java/io/edurt/datacap/core/Op.java +++ /dev/null @@ -1,36 +0,0 @@ -package io.edurt.datacap.core; - -import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; -import lombok.AllArgsConstructor; -import lombok.Data; -import lombok.NoArgsConstructor; - -import java.util.List; - -@Data -@NoArgsConstructor -@AllArgsConstructor -@SuppressFBWarnings(value = {"EI_EXPOSE_REP", "EI_EXPOSE_REP2"}, - justification = "I prefer to suppress these FindBugs warnings") -public class Op -{ - /** - * Executed original SQL - */ - private String originSql; - - /** - * Hint parsed from SQL - */ - private List hints; - - /** - * COMMAND parsed from SQL - */ - private String command; - - /** - * Parameters parsed by SQL - */ - private String[] params; -} diff --git a/driver/datacap-driver-redis/src/main/java/io/edurt/datacap/core/RedisClient.java b/driver/datacap-driver-redis/src/main/java/io/edurt/datacap/core/RedisClient.java deleted file mode 100644 index f425cb1b0e..0000000000 --- a/driver/datacap-driver-redis/src/main/java/io/edurt/datacap/core/RedisClient.java +++ /dev/null @@ -1,14 +0,0 @@ -package io.edurt.datacap.core; - -import java.sql.SQLException; - -public interface RedisClient -{ - String[] sendCommand(String sql) - throws SQLException; - - void select(int dbIndex) - throws SQLException; - - void close(); -} diff --git a/driver/datacap-driver-redis/src/main/java/io/edurt/datacap/core/RedisDatabaseMetadata.java b/driver/datacap-driver-redis/src/main/java/io/edurt/datacap/core/RedisDatabaseMetadata.java deleted file mode 100644 index e8d54a969c..0000000000 --- a/driver/datacap-driver-redis/src/main/java/io/edurt/datacap/core/RedisDatabaseMetadata.java +++ /dev/null @@ -1,1382 +0,0 @@ -package io.edurt.datacap.core; - -import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; - -import java.io.BufferedReader; -import java.io.StringReader; -import java.sql.*; -import java.util.stream.IntStream; - -@SuppressFBWarnings(value = {"NP_NONNULL_RETURN_VIOLATION", "OBL_UNSATISFIED_OBLIGATION", "ODR_OPEN_DATABASE_RESOURCE", "SQL_BAD_RESULTSET_ACCESS"}, - justification = "I prefer to suppress these FindBugs warnings") -public class RedisDatabaseMetadata - implements DatabaseMetaData -{ - private final static Logger LOGGER = new Logger(RedisDatabaseMetadata.class); - - private final RedisConnection connection; - private final String dbIndex; - - public RedisDatabaseMetadata(RedisConnection connection, String dbIndex) - { - this.connection = connection; - this.dbIndex = dbIndex; - } - - @Override - public boolean allProceduresAreCallable() - throws SQLException - { - return false; - } - - @Override - public boolean allTablesAreSelectable() - throws SQLException - { - return false; - } - - @Override - public String getURL() - throws SQLException - { - return null; - } - - @Override - public String getUserName() - throws SQLException - { - return null; - } - - @Override - public boolean isReadOnly() - throws SQLException - { - return false; - } - - @Override - public boolean nullsAreSortedHigh() - 
throws SQLException - { - return false; - } - - @Override - public boolean nullsAreSortedLow() - throws SQLException - { - return false; - } - - @Override - public boolean nullsAreSortedAtStart() - throws SQLException - { - return false; - } - - @Override - public boolean nullsAreSortedAtEnd() - throws SQLException - { - return false; - } - - @Override - public String getDatabaseProductName() - throws SQLException - { - return "Redis"; - } - - @Override - public String getDatabaseProductVersion() - throws SQLException - { - return this.getVersion().getOrigin(); - } - - private ServerVersion getVersion() - throws SQLException - { - Statement statement = connection.createStatement(); - String version = "0.0.1"; - try { - ResultSet rs = statement.executeQuery("INFO server"); - while (rs.next()) { - String result = rs.getString(0); - String versionX = new BufferedReader(new StringReader(result)) - .lines() - .filter(line -> line.startsWith("redis_version")) - .findFirst() - .map(line -> line.substring(line.indexOf(":") + 1)) - .orElse(null); - if (versionX != null) { - version = versionX; - break; - } - } - } - catch (Exception e) { - LOGGER.log("getDatabaseProductVersion exception occurs, " + e.getMessage()); - } - return new ServerVersion(version); - } - - @Override - public String getDriverName() - throws SQLException - { - return "Redis JDBC Connector/J"; - } - - @Override - public String getDriverVersion() - throws SQLException - { - return "redis-jdbc-driver 1.0.0"; - } - - @Override - public int getDriverMajorVersion() - { - return 1; - } - - @Override - public int getDriverMinorVersion() - { - return 0; - } - - @Override - public boolean usesLocalFiles() - throws SQLException - { - return false; - } - - @Override - public boolean usesLocalFilePerTable() - throws SQLException - { - return false; - } - - @Override - public boolean supportsMixedCaseIdentifiers() - throws SQLException - { - return false; - } - - @Override - public boolean storesUpperCaseIdentifiers() - throws SQLException - { - return false; - } - - @Override - public boolean storesLowerCaseIdentifiers() - throws SQLException - { - return false; - } - - @Override - public boolean storesMixedCaseIdentifiers() - throws SQLException - { - return false; - } - - @Override - public boolean supportsMixedCaseQuotedIdentifiers() - throws SQLException - { - return false; - } - - @Override - public boolean storesUpperCaseQuotedIdentifiers() - throws SQLException - { - return false; - } - - @Override - public boolean storesLowerCaseQuotedIdentifiers() - throws SQLException - { - return false; - } - - @Override - public boolean storesMixedCaseQuotedIdentifiers() - throws SQLException - { - return false; - } - - @Override - public String getIdentifierQuoteString() - throws SQLException - { - return null; - } - - @Override - public String getSQLKeywords() - throws SQLException - { - return null; - } - - @Override - public String getNumericFunctions() - throws SQLException - { - return null; - } - - @Override - public String getStringFunctions() - throws SQLException - { - return null; - } - - @Override - public String getSystemFunctions() - throws SQLException - { - return null; - } - - @Override - public String getTimeDateFunctions() - throws SQLException - { - return null; - } - - @Override - public String getSearchStringEscape() - throws SQLException - { - return null; - } - - @Override - public String getExtraNameCharacters() - throws SQLException - { - return null; - } - - @Override - public boolean supportsAlterTableWithAddColumn() 
- throws SQLException - { - return false; - } - - @Override - public boolean supportsAlterTableWithDropColumn() - throws SQLException - { - return false; - } - - @Override - public boolean supportsColumnAliasing() - throws SQLException - { - return false; - } - - @Override - public boolean nullPlusNonNullIsNull() - throws SQLException - { - return false; - } - - @Override - public boolean supportsConvert() - throws SQLException - { - return false; - } - - @Override - public boolean supportsConvert(int fromType, int toType) - throws SQLException - { - return false; - } - - @Override - public boolean supportsTableCorrelationNames() - throws SQLException - { - return false; - } - - @Override - public boolean supportsDifferentTableCorrelationNames() - throws SQLException - { - return false; - } - - @Override - public boolean supportsExpressionsInOrderBy() - throws SQLException - { - return false; - } - - @Override - public boolean supportsOrderByUnrelated() - throws SQLException - { - return false; - } - - @Override - public boolean supportsGroupBy() - throws SQLException - { - return false; - } - - @Override - public boolean supportsGroupByUnrelated() - throws SQLException - { - return false; - } - - @Override - public boolean supportsGroupByBeyondSelect() - throws SQLException - { - return false; - } - - @Override - public boolean supportsLikeEscapeClause() - throws SQLException - { - return false; - } - - @Override - public boolean supportsMultipleResultSets() - throws SQLException - { - return false; - } - - @Override - public boolean supportsMultipleTransactions() - throws SQLException - { - return false; - } - - @Override - public boolean supportsNonNullableColumns() - throws SQLException - { - return false; - } - - @Override - public boolean supportsMinimumSQLGrammar() - throws SQLException - { - return false; - } - - @Override - public boolean supportsCoreSQLGrammar() - throws SQLException - { - return false; - } - - @Override - public boolean supportsExtendedSQLGrammar() - throws SQLException - { - return false; - } - - @Override - public boolean supportsANSI92EntryLevelSQL() - throws SQLException - { - return false; - } - - @Override - public boolean supportsANSI92IntermediateSQL() - throws SQLException - { - return false; - } - - @Override - public boolean supportsANSI92FullSQL() - throws SQLException - { - return false; - } - - @Override - public boolean supportsIntegrityEnhancementFacility() - throws SQLException - { - return false; - } - - @Override - public boolean supportsOuterJoins() - throws SQLException - { - return false; - } - - @Override - public boolean supportsFullOuterJoins() - throws SQLException - { - return false; - } - - @Override - public boolean supportsLimitedOuterJoins() - throws SQLException - { - return false; - } - - @Override - public String getSchemaTerm() - throws SQLException - { - return "SCHEMA"; - } - - @Override - public String getProcedureTerm() - throws SQLException - { - return "PROCEDURE"; - } - - @Override - public String getCatalogTerm() - throws SQLException - { - return "CATALOG"; - } - - @Override - public boolean isCatalogAtStart() - throws SQLException - { - return false; - } - - @Override - public String getCatalogSeparator() - throws SQLException - { - return "."; - } - - @Override - public boolean supportsSchemasInDataManipulation() - throws SQLException - { - return false; - } - - @Override - public boolean supportsSchemasInProcedureCalls() - throws SQLException - { - return false; - } - - @Override - public boolean 
supportsSchemasInTableDefinitions() - throws SQLException - { - return false; - } - - @Override - public boolean supportsSchemasInIndexDefinitions() - throws SQLException - { - return false; - } - - @Override - public boolean supportsSchemasInPrivilegeDefinitions() - throws SQLException - { - return false; - } - - @Override - public boolean supportsCatalogsInDataManipulation() - throws SQLException - { - return false; - } - - @Override - public boolean supportsCatalogsInProcedureCalls() - throws SQLException - { - return false; - } - - @Override - public boolean supportsCatalogsInTableDefinitions() - throws SQLException - { - return false; - } - - @Override - public boolean supportsCatalogsInIndexDefinitions() - throws SQLException - { - return false; - } - - @Override - public boolean supportsCatalogsInPrivilegeDefinitions() - throws SQLException - { - return false; - } - - @Override - public boolean supportsPositionedDelete() - throws SQLException - { - return false; - } - - @Override - public boolean supportsPositionedUpdate() - throws SQLException - { - return false; - } - - @Override - public boolean supportsSelectForUpdate() - throws SQLException - { - return false; - } - - @Override - public boolean supportsStoredProcedures() - throws SQLException - { - return false; - } - - @Override - public boolean supportsSubqueriesInComparisons() - throws SQLException - { - return false; - } - - @Override - public boolean supportsSubqueriesInExists() - throws SQLException - { - return false; - } - - @Override - public boolean supportsSubqueriesInIns() - throws SQLException - { - return false; - } - - @Override - public boolean supportsSubqueriesInQuantifieds() - throws SQLException - { - return false; - } - - @Override - public boolean supportsCorrelatedSubqueries() - throws SQLException - { - return false; - } - - @Override - public boolean supportsUnion() - throws SQLException - { - return false; - } - - @Override - public boolean supportsUnionAll() - throws SQLException - { - return false; - } - - @Override - public boolean supportsOpenCursorsAcrossCommit() - throws SQLException - { - return false; - } - - @Override - public boolean supportsOpenCursorsAcrossRollback() - throws SQLException - { - return false; - } - - @Override - public boolean supportsOpenStatementsAcrossCommit() - throws SQLException - { - return false; - } - - @Override - public boolean supportsOpenStatementsAcrossRollback() - throws SQLException - { - return false; - } - - @Override - public int getMaxBinaryLiteralLength() - throws SQLException - { - return 100; - } - - @Override - public int getMaxCharLiteralLength() - throws SQLException - { - return 100; - } - - @Override - public int getMaxColumnNameLength() - throws SQLException - { - return 100; - } - - @Override - public int getMaxColumnsInGroupBy() - throws SQLException - { - return 100; - } - - @Override - public int getMaxColumnsInIndex() - throws SQLException - { - return 100; - } - - @Override - public int getMaxColumnsInOrderBy() - throws SQLException - { - return 100; - } - - @Override - public int getMaxColumnsInSelect() - throws SQLException - { - return 100; - } - - @Override - public int getMaxColumnsInTable() - throws SQLException - { - return 100; - } - - @Override - public int getMaxConnections() - throws SQLException - { - return 0; - } - - @Override - public int getMaxCursorNameLength() - throws SQLException - { - return 64; - } - - @Override - public int getMaxIndexLength() - throws SQLException - { - return 100; - } - - @Override - public int 
getMaxSchemaNameLength() - throws SQLException - { - return 100; - } - - @Override - public int getMaxProcedureNameLength() - throws SQLException - { - return 100; - } - - @Override - public int getMaxCatalogNameLength() - throws SQLException - { - return 16; - } - - @Override - public int getMaxRowSize() - throws SQLException - { - return Integer.MAX_VALUE; - } - - @Override - public boolean doesMaxRowSizeIncludeBlobs() - throws SQLException - { - return false; - } - - @Override - public int getMaxStatementLength() - throws SQLException - { - return 65535; - } - - @Override - public int getMaxStatements() - throws SQLException - { - return 64; - } - - @Override - public int getMaxTableNameLength() - throws SQLException - { - return 64; - } - - @Override - public int getMaxTablesInSelect() - throws SQLException - { - return 64; - } - - @Override - public int getMaxUserNameLength() - throws SQLException - { - return 0; - } - - @Override - public int getDefaultTransactionIsolation() - throws SQLException - { - return 0; - } - - @Override - public boolean supportsTransactions() - throws SQLException - { - LOGGER.log("supportsTransactions() = false"); - return false; - } - - @Override - public boolean supportsTransactionIsolationLevel(int level) - throws SQLException - { - LOGGER.log("supportsTransactionIsolationLevel() = false"); - return false; - } - - @Override - public boolean supportsDataDefinitionAndDataManipulationTransactions() - throws SQLException - { - LOGGER.log("supportsDataDefinitionAndDataManipulationTransactions() = false"); - return false; - } - - @Override - public boolean supportsDataManipulationTransactionsOnly() - throws SQLException - { - LOGGER.log("supportsDataManipulationTransactionsOnly() = false"); - return false; - } - - @Override - public boolean dataDefinitionCausesTransactionCommit() - throws SQLException - { - LOGGER.log("dataDefinitionCausesTransactionCommit() = false"); - return false; - } - - @Override - public boolean dataDefinitionIgnoredInTransactions() - throws SQLException - { - LOGGER.log("dataDefinitionIgnoredInTransactions() = false"); - return false; - } - - @Override - public ResultSet getProcedures(String catalog, String schemaPattern, String procedureNamePattern) - throws SQLException - { - LOGGER.log("getProcedures()"); - return new EmptyResultSet(); - } - - @Override - public ResultSet getProcedureColumns(String catalog, String schemaPattern, String procedureNamePattern, String columnNamePattern) - throws SQLException - { - LOGGER.log("getProcedureColumns()"); - return new EmptyResultSet(); - } - - @Override - public ResultSet getTables(String catalog, String schemaPattern, String tableNamePattern, String[] types) - throws SQLException - { - LOGGER.log("getTables(%s, %s, %s, %s)", catalog, schemaPattern, tableNamePattern, types); - return new RedisResultSet(new String[] {"haha"}, connection.createStatement()); - } - - @Override - public ResultSet getSchemas() - throws SQLException - { - LOGGER.log("getSchemas()"); - ResultSet rs; - Statement statement = connection.createStatement(); - - String[] databases = IntStream.range(0, 16) - .mapToObj(i -> i + "") - .toArray(String[]::new); - rs = new RedisResultSet(databases, statement); - return rs; - } - - @Override - public ResultSet getCatalogs() - throws SQLException - { - LOGGER.log("DatabaseMetadata getCatalogs()"); - return new RedisResultSet(new String[0], connection.createStatement()); - } - - @Override - public ResultSet getTableTypes() - throws SQLException - { - 
LOGGER.log("getTableTypes()"); - return new EmptyResultSet(); - } - - @Override - public ResultSet getColumns(String catalog, String schemaPattern, String tableNamePattern, String columnNamePattern) - throws SQLException - { - LOGGER.log("getColumns(%s, %s, %s, %s)", catalog, schemaPattern, tableNamePattern, columnNamePattern); - return new EmptyResultSet(); - } - - @Override - public ResultSet getColumnPrivileges(String catalog, String schema, String table, String columnNamePattern) - throws SQLException - { - LOGGER.log("getColumnPrivileges()"); - return new EmptyResultSet(); - } - - @Override - public ResultSet getTablePrivileges(String catalog, String schemaPattern, String tableNamePattern) - throws SQLException - { - LOGGER.log("getTablePrivileges()"); - return new EmptyResultSet(); - } - - @Override - public ResultSet getBestRowIdentifier(String catalog, String schema, String table, int scope, boolean nullable) - throws SQLException - { - LOGGER.log("getBestRowIdentifier()"); - return new EmptyResultSet(); - } - - @Override - public ResultSet getVersionColumns(String catalog, String schema, String table) - throws SQLException - { - LOGGER.log("getVersionColumns()"); - return new EmptyResultSet(); - } - - @Override - public ResultSet getPrimaryKeys(String catalog, String schema, String table) - throws SQLException - { - LOGGER.log("getPrimaryKeys()"); - return new EmptyResultSet(); - } - - @Override - public ResultSet getImportedKeys(String catalog, String schema, String table) - throws SQLException - { - LOGGER.log("getImportedKeys()"); - return new EmptyResultSet(); - } - - @Override - public ResultSet getExportedKeys(String catalog, String schema, String table) - throws SQLException - { - LOGGER.log("getExportedKeys()"); - return new EmptyResultSet(); - } - - @Override - public ResultSet getCrossReference(String parentCatalog, String parentSchema, String parentTable, String foreignCatalog, String foreignSchema, String foreignTable) - throws SQLException - { - LOGGER.log("getCrossReference()"); - return new EmptyResultSet(); - } - - @Override - public ResultSet getTypeInfo() - throws SQLException - { - LOGGER.log("getTypeInfo()"); - return new EmptyResultSet(); - } - - @Override - public ResultSet getIndexInfo(String catalog, String schema, String table, boolean unique, boolean approximate) - throws SQLException - { - LOGGER.log("getIndexInfo()"); - return new EmptyResultSet(); - } - - @Override - public boolean supportsResultSetType(int type) - throws SQLException - { - LOGGER.log("supportsResultSetType(%s)", type); - return type == ResultSet.TYPE_FORWARD_ONLY; - } - - @Override - public boolean supportsResultSetConcurrency(int type, int concurrency) - throws SQLException - { - LOGGER.log("supportsResultSetConcurrency(%s, concurrency)", type, concurrency); - return false; - } - - @Override - public boolean ownUpdatesAreVisible(int type) - throws SQLException - { - LOGGER.log("ownUpdatesAreVisible(%s)", type); - return false; - } - - @Override - public boolean ownDeletesAreVisible(int type) - throws SQLException - { - LOGGER.log("ownDeletesAreVisible(%s)", type); - return false; - } - - @Override - public boolean ownInsertsAreVisible(int type) - throws SQLException - { - LOGGER.log("ownInsertsAreVisible(%s)", type); - return false; - } - - @Override - public boolean othersUpdatesAreVisible(int type) - throws SQLException - { - LOGGER.log("othersUpdatesAreVisible(%s)", type); - return false; - } - - @Override - public boolean othersDeletesAreVisible(int type) - throws SQLException - 
{ - LOGGER.log("othersDeletesAreVisible(%s)", type); - return false; - } - - @Override - public boolean othersInsertsAreVisible(int type) - throws SQLException - { - LOGGER.log("othersInsertsAreVisible(%s)", type); - return false; - } - - @Override - public boolean updatesAreDetected(int type) - throws SQLException - { - LOGGER.log("updatesAreDetected(%s)", type); - return false; - } - - @Override - public boolean deletesAreDetected(int type) - throws SQLException - { - LOGGER.log("deletesAreDetected(%s)", type); - return false; - } - - @Override - public boolean insertsAreDetected(int type) - throws SQLException - { - LOGGER.log("insertsAreDetected(%s)", type); - return false; - } - - @Override - public boolean supportsBatchUpdates() - throws SQLException - { - LOGGER.log("supportsBatchUpdates()"); - return false; - } - - @Override - public ResultSet getUDTs(String catalog, String schemaPattern, String typeNamePattern, int[] types) - throws SQLException - { - LOGGER.log("getUDTs()"); - return new EmptyResultSet(); - } - - @Override - public Connection getConnection() - throws SQLException - { - LOGGER.log("getConnection()"); - return this.connection; - } - - @Override - public boolean supportsSavepoints() - throws SQLException - { - LOGGER.log("supportsSavepoints()"); - return false; - } - - @Override - public boolean supportsNamedParameters() - throws SQLException - { - LOGGER.log("supportsNamedParameters()"); - return false; - } - - @Override - public boolean supportsMultipleOpenResults() - throws SQLException - { - LOGGER.log("supportsMultipleOpenResults()"); - return false; - } - - @Override - public boolean supportsGetGeneratedKeys() - throws SQLException - { - LOGGER.log("supportsGetGeneratedKeys()"); - return false; - } - - @Override - public ResultSet getSuperTypes(String catalog, String schemaPattern, String typeNamePattern) - throws SQLException - { - LOGGER.log("getSuperTypes()"); - return new EmptyResultSet(); - } - - @Override - public ResultSet getSuperTables(String catalog, String schemaPattern, String tableNamePattern) - throws SQLException - { - LOGGER.log("getSuperTables()"); - return new EmptyResultSet(); - } - - @Override - public ResultSet getAttributes(String catalog, String schemaPattern, String typeNamePattern, String attributeNamePattern) - throws SQLException - { - LOGGER.log("getAttributes()"); - return new EmptyResultSet(); - } - - @Override - public boolean supportsResultSetHoldability(int holdability) - throws SQLException - { - LOGGER.log("supportsResultSetHoldability()"); - return holdability == ResultSet.HOLD_CURSORS_OVER_COMMIT; - } - - @Override - public int getResultSetHoldability() - throws SQLException - { - LOGGER.log("getResultSetHoldability()"); - return ResultSet.HOLD_CURSORS_OVER_COMMIT; - } - - @Override - public int getDatabaseMajorVersion() - throws SQLException - { - LOGGER.log("getDatabaseMajorVersion()"); - return this.getVersion().getMajor(); - } - - @Override - public int getDatabaseMinorVersion() - throws SQLException - { - LOGGER.log("getDatabaseMinorVersion()"); - return this.getVersion().getMinor(); - } - - @Override - public int getJDBCMajorVersion() - throws SQLException - { - LOGGER.log("getJDBCMajorVersion()"); - return 4; - } - - @Override - public int getJDBCMinorVersion() - throws SQLException - { - LOGGER.log("getJDBCMinorVersion()"); - return 0; - } - - @Override - public int getSQLStateType() - throws SQLException - { - LOGGER.log("getSQLStateType()"); - return DatabaseMetaData.sqlStateSQL; - } - - @Override - public boolean 
locatorsUpdateCopy() - throws SQLException - { - LOGGER.log("locatorsUpdateCopy()"); - return false; - } - - @Override - public boolean supportsStatementPooling() - throws SQLException - { - LOGGER.log("supportsStatementPooling()"); - return false; - } - - @Override - public RowIdLifetime getRowIdLifetime() - throws SQLException - { - LOGGER.log("getRowIdLifetime()"); - return null; - } - - @Override - public ResultSet getSchemas(String catalog, String schemaPattern) - throws SQLException - { - LOGGER.log("getSchemas(%s, %s)", catalog, schemaPattern); - if (schemaPattern.equals("%")) { - return this.getSchemas(); - } - - schemaPattern = Utils.isNumber(schemaPattern) ? schemaPattern : dbIndex; - - return new RedisResultSet(new String[] {schemaPattern}, this.connection.createStatement()); - } - - @Override - public boolean supportsStoredFunctionsUsingCallSyntax() - throws SQLException - { - LOGGER.log("supportsStoredFunctionsUsingCallSyntax()"); - return false; - } - - @Override - public boolean autoCommitFailureClosesAllResultSets() - throws SQLException - { - LOGGER.log("autoCommitFailureClosesAllResultSets()"); - return false; - } - - @Override - public ResultSet getClientInfoProperties() - throws SQLException - { - LOGGER.log("autoCommitFailureClosesAllResultSets()"); - //TODO https://docs.oracle.com/javase/8/docs/api/java/sql/Connection.html#setClientInfo-java.lang.String-java.lang.String- - return new EmptyResultSet(); - } - - @Override - public ResultSet getFunctions(String catalog, String schemaPattern, String functionNamePattern) - throws SQLException - { - LOGGER.log("getFunctions()"); - return new EmptyResultSet(); - } - - @Override - public ResultSet getFunctionColumns(String catalog, String schemaPattern, String functionNamePattern, String columnNamePattern) - throws SQLException - { - LOGGER.log("getFunctionColumns()"); - return new EmptyResultSet(); - } - - @Override - public ResultSet getPseudoColumns(String catalog, String schemaPattern, String tableNamePattern, String columnNamePattern) - throws SQLException - { - LOGGER.log("getPseudoColumns()"); - return new EmptyResultSet(); - } - - @Override - public boolean generatedKeyAlwaysReturned() - throws SQLException - { - LOGGER.log("generatedKeyAlwaysReturned()"); - return false; - } - - @Override - public long getMaxLogicalLobSize() - throws SQLException - { - LOGGER.log("getMaxLogicalLobSize()"); - return DatabaseMetaData.super.getMaxLogicalLobSize(); - } - - @Override - public boolean supportsRefCursors() - throws SQLException - { - LOGGER.log("supportsRefCursors()"); - return DatabaseMetaData.super.supportsRefCursors(); - } - - @Override - public T unwrap(Class iface) - throws SQLException - { - try { - LOGGER.log("unwrap()"); - return iface.cast(this); - } - catch (ClassCastException cce) { - LOGGER.log("Unable to unwrap to %s", iface); - throw new SQLException("Unable to unwrap to " + iface); - } - } - - @Override - public boolean isWrapperFor(Class iface) - throws SQLException - { - LOGGER.log("isWrapperFor()"); - return iface.isInstance(this); - } -} diff --git a/driver/datacap-driver-redis/src/main/java/io/edurt/datacap/core/RedisResultSet.java b/driver/datacap-driver-redis/src/main/java/io/edurt/datacap/core/RedisResultSet.java deleted file mode 100644 index 44288feda6..0000000000 --- a/driver/datacap-driver-redis/src/main/java/io/edurt/datacap/core/RedisResultSet.java +++ /dev/null @@ -1,1627 +0,0 @@ -package io.edurt.datacap.core; - -import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; - -import 
java.io.InputStream; -import java.io.Reader; -import java.math.BigDecimal; -import java.net.MalformedURLException; -import java.net.URL; -import java.sql.*; -import java.util.Calendar; -import java.util.Map; - -@SuppressFBWarnings(value = {"DM_DEFAULT_ENCODING", "EI_EXPOSE_REP2"}, - justification = "I prefer to suppress these FindBugs warnings") -public class RedisResultSet - implements ResultSet -{ - private final static Logger LOGGER = new Logger(RedisResultSet.class); - - private final String[] result; - private final Statement owningStatement; - - private int position = -1; - private boolean isClosed = false; - - public RedisResultSet(final String[] result, final Statement owningStatement) - { - this.result = result; - this.owningStatement = owningStatement; - } - - private void checkClosed() - throws SQLException - { - if (isClosed()) { - LOGGER.log("ResultSet is closed."); - throw new SQLException("RedisSet is closed."); - } - } - - @Override - public boolean next() - throws SQLException - { - this.checkClosed(); - - if (position < result.length - 1) { - position++; - return true; - } - else { - return false; - } - } - - @Override - public void close() - throws SQLException - { - LOGGER.log("ResultSet close"); - this.isClosed = true; - } - - @Override - public boolean wasNull() - throws SQLException - { - this.checkClosed(); - return result[position] == null; - } - - @Override - public String getString(int columnIndex) - throws SQLException - { - this.checkClosed(); - LOGGER.log("getString(%s)", columnIndex); - return result[position]; - } - - @Override - public boolean getBoolean(int columnIndex) - throws SQLException - { - this.checkClosed(); - - String r = this.getString(0); - - if ("0".equals(r) || "false".equals(r)) { - return false; - } - else if ("1".equals(r) || "true".equals(r)) { - return true; - } - else { - LOGGER.log("Cannot convert " + r + " into a boolean."); - throw new SQLException("Cannot convert " + r + " into a boolean."); - } - } - - @Override - public byte getByte(int columnIndex) - throws SQLException - { - this.checkClosed(); - - String string = this.getString(0); - - if (string == null) { - return 0; - } - return string.getBytes()[0]; - } - - @Override - public short getShort(int columnIndex) - throws SQLException - { - this.checkClosed(); - - String string = this.getString(0); - if (string == null) { - return 0; - } - - return Short.parseShort(string); - } - - @Override - public int getInt(int columnIndex) - throws SQLException - { - this.checkClosed(); - - String string = this.getString(0); - if (string == null) { - return 0; - } - - return Integer.parseInt(string); - } - - @Override - public long getLong(int columnIndex) - throws SQLException - { - this.checkClosed(); - - String string = this.getString(0); - if (string == null) { - return 0; - } - - return Long.parseLong(string); - } - - @Override - public float getFloat(int columnIndex) - throws SQLException - { - this.checkClosed(); - - String string = this.getString(0); - if (string == null) { - return 0; - } - - return Float.parseFloat(string); - } - - @Override - public double getDouble(int columnIndex) - throws SQLException - { - this.checkClosed(); - - String string = this.getString(0); - if (string == null) { - return 0; - } - - return Double.parseDouble(string); - } - - @Override - public BigDecimal getBigDecimal(int columnIndex, int scale) - throws SQLException - { - return this.getBigDecimal(0); - } - - @Override - public byte[] getBytes(int columnIndex) - throws SQLException - { - 
this.checkClosed(); - - String string = this.getString(0); - if (string == null) { - return null; - } - - return string.getBytes(); - } - - @Override - public Date getDate(int columnIndex) - throws SQLException - { - LOGGER.log("getDate not implemented"); - throw new SQLFeatureNotSupportedException("getDate not implemented"); - } - - @Override - public Time getTime(int columnIndex) - throws SQLException - { - LOGGER.log("getTime not implemented"); - throw new SQLFeatureNotSupportedException("getTime not implemented"); - } - - @Override - public Timestamp getTimestamp(int columnIndex) - throws SQLException - { - LOGGER.log("getTimestamp not implemented"); - throw new SQLFeatureNotSupportedException("getTimestamp not implemented"); - } - - @Override - public InputStream getAsciiStream(int columnIndex) - throws SQLException - { - LOGGER.log("getAsciiStream not implemented"); - throw new SQLFeatureNotSupportedException("getAsciiStream not implemented"); - } - - @Override - public InputStream getUnicodeStream(int columnIndex) - throws SQLException - { - LOGGER.log("getAsciiStream not implemented"); - throw new SQLFeatureNotSupportedException("getAsciiStream not implemented"); - } - - @Override - public InputStream getBinaryStream(int columnIndex) - throws SQLException - { - LOGGER.log("getAsciiStream not implemented"); - throw new SQLFeatureNotSupportedException("getAsciiStream not implemented"); - } - - @Override - public String getString(String columnLabel) - throws SQLException - { - return this.getString(0); - } - - @Override - public boolean getBoolean(String columnLabel) - throws SQLException - { - return this.getBoolean(0); - } - - @Override - public byte getByte(String columnLabel) - throws SQLException - { - return this.getByte(0); - } - - @Override - public short getShort(String columnLabel) - throws SQLException - { - return this.getShort(0); - } - - @Override - public int getInt(String columnLabel) - throws SQLException - { - return this.getInt(0); - } - - @Override - public long getLong(String columnLabel) - throws SQLException - { - return this.getLong(0); - } - - @Override - public float getFloat(String columnLabel) - throws SQLException - { - return this.getFloat(0); - } - - @Override - public double getDouble(String columnLabel) - throws SQLException - { - return this.getDouble(0); - } - - @Override - public BigDecimal getBigDecimal(String columnLabel, int scale) - throws SQLException - { - return this.getBigDecimal(0); - } - - @Override - public byte[] getBytes(String columnLabel) - throws SQLException - { - return this.getBytes(0); - } - - @Override - public Date getDate(String columnLabel) - throws SQLException - { - return this.getDate(0); - } - - @Override - public Time getTime(String columnLabel) - throws SQLException - { - return this.getTime(0); - } - - @Override - public Timestamp getTimestamp(String columnLabel) - throws SQLException - { - return this.getTimestamp(0); - } - - @Override - public InputStream getAsciiStream(String columnLabel) - throws SQLException - { - return this.getAsciiStream(0); - } - - @Override - public InputStream getUnicodeStream(String columnLabel) - throws SQLException - { - return this.getUnicodeStream(0); - } - - @Override - public InputStream getBinaryStream(String columnLabel) - throws SQLException - { - return this.getBinaryStream(0); - } - - @Override - public SQLWarning getWarnings() - throws SQLException - { - return null; - } - - @Override - public void clearWarnings() - throws SQLException - { - } - - @Override - public String 
getCursorName() - throws SQLException - { - LOGGER.log("getCursorName not implemented"); - throw new SQLFeatureNotSupportedException("getCursorName not implemented"); - } - - @Override - public ResultSetMetaData getMetaData() - throws SQLException - { - return new RedisResultSetMetaData(); - } - - @Override - public Object getObject(int columnIndex) - throws SQLException - { - LOGGER.log("getObject not implemented"); - throw new SQLFeatureNotSupportedException("getObject not implemented"); - } - - @Override - public Object getObject(String columnLabel) - throws SQLException - { - return this.getObject(0); - } - - @Override - public int findColumn(String columnLabel) - throws SQLException - { - return 0; - } - - @Override - public Reader getCharacterStream(int columnIndex) - throws SQLException - { - LOGGER.log("getCharacterStream not implemented"); - throw new SQLFeatureNotSupportedException("getCharacterStream not implemented"); - } - - @Override - public Reader getCharacterStream(String columnLabel) - throws SQLException - { - LOGGER.log("getCharacterStream not implemented"); - throw new SQLFeatureNotSupportedException("getCharacterStream not implemented"); - } - - @Override - public BigDecimal getBigDecimal(int columnIndex) - throws SQLException - { - this.checkClosed(); - - String string = this.getString(0); - if (string == null) { - return null; - } - - return new BigDecimal(string); - } - - @Override - public BigDecimal getBigDecimal(String columnLabel) - throws SQLException - { - return this.getBigDecimal(0); - } - - @Override - public boolean isBeforeFirst() - throws SQLException - { - this.checkClosed(); - return position < 0; - } - - @Override - public boolean isAfterLast() - throws SQLException - { - this.checkClosed(); - return position >= result.length; - } - - @Override - public boolean isFirst() - throws SQLException - { - this.checkClosed(); - return position == 0; - } - - @Override - public boolean isLast() - throws SQLException - { - this.checkClosed(); - return position == result.length - 1; - } - - @Override - public void beforeFirst() - throws SQLException - { - this.checkClosed(); - position = -1; - } - - @Override - public void afterLast() - throws SQLException - { - this.checkClosed(); - position = result.length; - } - - @Override - public boolean first() - throws SQLException - { - this.checkClosed(); - position = 0; - return result.length > 0; - } - - @Override - public boolean last() - throws SQLException - { - this.checkClosed(); - position = result.length - 1; - return result.length > 0; - } - - @Override - public int getRow() - throws SQLException - { - this.checkClosed(); - return this.position + 1; - } - - @Override - public boolean absolute(int row) - throws SQLException - { - // TODO - return false; - } - - @Override - public boolean relative(int rows) - throws SQLException - { - LOGGER.log("relative not implemented"); - throw new SQLFeatureNotSupportedException("relative not implemented"); - } - - @Override - public boolean previous() - throws SQLException - { - LOGGER.log("previous not implemented"); - throw new SQLFeatureNotSupportedException("previous not implemented"); - } - - @Override - public void setFetchDirection(int direction) - throws SQLException - { - LOGGER.log("setFetchDirection not implemented"); - throw new SQLFeatureNotSupportedException("setFetchDirection not implemented"); - } - - @Override - public int getFetchDirection() - throws SQLException - { - this.checkClosed(); - return FETCH_FORWARD; - } - - @Override - public void 
setFetchSize(int rows) - throws SQLException - { - LOGGER.log("setFetchSize not implemented"); - throw new SQLFeatureNotSupportedException("setFetchSize not implemented"); - } - - @Override - public int getFetchSize() - throws SQLException - { - this.checkClosed(); - return result.length; - } - - @Override - public int getType() - throws SQLException - { - this.checkClosed(); - return TYPE_FORWARD_ONLY; - } - - @Override - public int getConcurrency() - throws SQLException - { - this.checkClosed(); - return ResultSet.CONCUR_READ_ONLY; - } - - @Override - public boolean rowUpdated() - throws SQLException - { - LOGGER.log("rowUpdated not implemented"); - throw new SQLFeatureNotSupportedException("rowUpdated not implemented"); - } - - @Override - public boolean rowInserted() - throws SQLException - { - LOGGER.log("rowInserted not implemented"); - throw new SQLFeatureNotSupportedException("rowInserted not implemented"); - } - - @Override - public boolean rowDeleted() - throws SQLException - { - LOGGER.log("rowDeleted not implemented"); - throw new SQLFeatureNotSupportedException("rowDeleted not implemented"); - } - - @Override - public void updateNull(int columnIndex) - throws SQLException - { - LOGGER.log("updateNull not implemented"); - throw new SQLFeatureNotSupportedException("updateNull not implemented"); - } - - @Override - public void updateBoolean(int columnIndex, boolean x) - throws SQLException - { - LOGGER.log("updateBoolean not implemented"); - throw new SQLFeatureNotSupportedException("updateBoolean not implemented"); - } - - @Override - public void updateByte(int columnIndex, byte x) - throws SQLException - { - LOGGER.log("updateByte not implemented"); - throw new SQLFeatureNotSupportedException("updateByte not implemented"); - } - - @Override - public void updateShort(int columnIndex, short x) - throws SQLException - { - LOGGER.log("updateShort not implemented"); - throw new SQLFeatureNotSupportedException("updateShort not implemented"); - } - - @Override - public void updateInt(int columnIndex, int x) - throws SQLException - { - LOGGER.log("updateInt not implemented"); - throw new SQLFeatureNotSupportedException("updateInt not implemented"); - } - - @Override - public void updateLong(int columnIndex, long x) - throws SQLException - { - LOGGER.log("updateLong not implemented"); - throw new SQLFeatureNotSupportedException("updateLong not implemented"); - } - - @Override - public void updateFloat(int columnIndex, float x) - throws SQLException - { - LOGGER.log("updateFloat not implemented"); - throw new SQLFeatureNotSupportedException("updateFloat not implemented"); - } - - @Override - public void updateDouble(int columnIndex, double x) - throws SQLException - { - LOGGER.log("updateDouble not implemented"); - throw new SQLFeatureNotSupportedException("updateDouble not implemented"); - } - - @Override - public void updateBigDecimal(int columnIndex, BigDecimal x) - throws SQLException - { - LOGGER.log("updateBigDecimal not implemented"); - throw new SQLFeatureNotSupportedException("updateBigDecimal not implemented"); - } - - @Override - public void updateString(int columnIndex, String x) - throws SQLException - { - LOGGER.log("updateString not implemented"); - throw new SQLFeatureNotSupportedException("updateString not implemented"); - } - - @Override - public void updateBytes(int columnIndex, byte[] x) - throws SQLException - { - LOGGER.log("updateBytes not implemented"); - throw new SQLFeatureNotSupportedException("updateBytes not implemented"); - } - - @Override - public void 
updateDate(int columnIndex, Date x) - throws SQLException - { - LOGGER.log("updateDate not implemented"); - throw new SQLFeatureNotSupportedException("updateDate not implemented"); - } - - @Override - public void updateTime(int columnIndex, Time x) - throws SQLException - { - LOGGER.log("updateTime not implemented"); - throw new SQLFeatureNotSupportedException("updateTime not implemented"); - } - - @Override - public void updateTimestamp(int columnIndex, Timestamp x) - throws SQLException - { - LOGGER.log("updateTimestamp not implemented"); - throw new SQLFeatureNotSupportedException("updateTimestamp not implemented"); - } - - @Override - public void updateAsciiStream(int columnIndex, InputStream x, int length) - throws SQLException - { - LOGGER.log("updateAsciiStream not implemented"); - throw new SQLFeatureNotSupportedException("updateAsciiStream not implemented"); - } - - @Override - public void updateBinaryStream(int columnIndex, InputStream x, int length) - throws SQLException - { - LOGGER.log("updateBinaryStream not implemented"); - throw new SQLFeatureNotSupportedException("updateBinaryStream not implemented"); - } - - @Override - public void updateCharacterStream(int columnIndex, Reader x, int length) - throws SQLException - { - LOGGER.log("updateCharacterStream not implemented"); - throw new SQLFeatureNotSupportedException("updateCharacterStream not implemented"); - } - - @Override - public void updateObject(int columnIndex, Object x, int scaleOrLength) - throws SQLException - { - LOGGER.log("updateObject not implemented"); - throw new SQLFeatureNotSupportedException("updateObject not implemented"); - } - - @Override - public void updateObject(int columnIndex, Object x) - throws SQLException - { - LOGGER.log("updateObject not implemented"); - throw new SQLFeatureNotSupportedException("updateObject not implemented"); - } - - @Override - public void updateNull(String columnLabel) - throws SQLException - { - LOGGER.log("updateNull not implemented"); - throw new SQLFeatureNotSupportedException("updateNull not implemented"); - } - - @Override - public void updateBoolean(String columnLabel, boolean x) - throws SQLException - { - LOGGER.log("updateBoolean not implemented"); - throw new SQLFeatureNotSupportedException("updateBoolean not implemented"); - } - - @Override - public void updateByte(String columnLabel, byte x) - throws SQLException - { - LOGGER.log("updateByte not implemented"); - throw new SQLFeatureNotSupportedException("updateByte not implemented"); - } - - @Override - public void updateShort(String columnLabel, short x) - throws SQLException - { - LOGGER.log("updateShort not implemented"); - throw new SQLFeatureNotSupportedException("updateShort not implemented"); - } - - @Override - public void updateInt(String columnLabel, int x) - throws SQLException - { - LOGGER.log("updateInt not implemented"); - throw new SQLFeatureNotSupportedException("updateInt not implemented"); - } - - @Override - public void updateLong(String columnLabel, long x) - throws SQLException - { - LOGGER.log("updateLong not implemented"); - throw new SQLFeatureNotSupportedException("updateLong not implemented"); - } - - @Override - public void updateFloat(String columnLabel, float x) - throws SQLException - { - LOGGER.log("updateFloat not implemented"); - throw new SQLFeatureNotSupportedException("updateFloat not implemented"); - } - - @Override - public void updateDouble(String columnLabel, double x) - throws SQLException - { - LOGGER.log("updateDouble not implemented"); - throw new 
SQLFeatureNotSupportedException("updateDouble not implemented"); - } - - @Override - public void updateBigDecimal(String columnLabel, BigDecimal x) - throws SQLException - { - LOGGER.log("updateBigDecimal not implemented"); - throw new SQLFeatureNotSupportedException("updateBigDecimal not implemented"); - } - - @Override - public void updateString(String columnLabel, String x) - throws SQLException - { - LOGGER.log("updateString not implemented"); - throw new SQLFeatureNotSupportedException("updateString not implemented"); - } - - @Override - public void updateBytes(String columnLabel, byte[] x) - throws SQLException - { - LOGGER.log("updateBytes not implemented"); - throw new SQLFeatureNotSupportedException("updateBytes not implemented"); - } - - @Override - public void updateDate(String columnLabel, Date x) - throws SQLException - { - LOGGER.log("updateDate not implemented"); - throw new SQLFeatureNotSupportedException("updateDate not implemented"); - } - - @Override - public void updateTime(String columnLabel, Time x) - throws SQLException - { - LOGGER.log("updateTime not implemented"); - throw new SQLFeatureNotSupportedException("updateTime not implemented"); - } - - @Override - public void updateTimestamp(String columnLabel, Timestamp x) - throws SQLException - { - LOGGER.log("updateTimestamp not implemented"); - throw new SQLFeatureNotSupportedException("updateTimestamp not implemented"); - } - - @Override - public void updateAsciiStream(String columnLabel, InputStream x, int length) - throws SQLException - { - LOGGER.log("updateAsciiStream not implemented"); - throw new SQLFeatureNotSupportedException("updateAsciiStream not implemented"); - } - - @Override - public void updateBinaryStream(String columnLabel, InputStream x, int length) - throws SQLException - { - LOGGER.log("updateBinaryStream not implemented"); - throw new SQLFeatureNotSupportedException("updateBinaryStream not implemented"); - } - - @Override - public void updateCharacterStream(String columnLabel, Reader reader, int length) - throws SQLException - { - LOGGER.log("updateCharacterStream not implemented"); - throw new SQLFeatureNotSupportedException("updateCharacterStream not implemented"); - } - - @Override - public void updateObject(String columnLabel, Object x, int scaleOrLength) - throws SQLException - { - LOGGER.log("updateObject not implemented"); - throw new SQLFeatureNotSupportedException("updateObject not implemented"); - } - - @Override - public void updateObject(String columnLabel, Object x) - throws SQLException - { - LOGGER.log("updateObject not implemented"); - throw new SQLFeatureNotSupportedException("updateObject not implemented"); - } - - @Override - public void insertRow() - throws SQLException - { - LOGGER.log("insertRow not implemented"); - throw new SQLFeatureNotSupportedException("insertRow not implemented"); - } - - @Override - public void updateRow() - throws SQLException - { - LOGGER.log("updateRow not implemented"); - throw new SQLFeatureNotSupportedException("updateRow not implemented"); - } - - @Override - public void deleteRow() - throws SQLException - { - LOGGER.log("deleteRow not implemented"); - throw new SQLFeatureNotSupportedException("deleteRow not implemented"); - } - - @Override - public void refreshRow() - throws SQLException - { - LOGGER.log("refreshRow not implemented"); - throw new SQLFeatureNotSupportedException("refreshRow not implemented"); - } - - @Override - public void cancelRowUpdates() - throws SQLException - { - LOGGER.log("cancelRowUpdates not implemented"); - throw 
new SQLFeatureNotSupportedException("cancelRowUpdates not implemented"); - } - - @Override - public void moveToInsertRow() - throws SQLException - { - LOGGER.log("moveToInsertRow not implemented"); - throw new SQLFeatureNotSupportedException("moveToInsertRow not implemented"); - } - - @Override - public void moveToCurrentRow() - throws SQLException - { - LOGGER.log("moveToCurrentRow not implemented"); - throw new SQLFeatureNotSupportedException("moveToCurrentRow not implemented"); - } - - @Override - public Statement getStatement() - throws SQLException - { - this.checkClosed(); - return this.owningStatement; - } - - @Override - public Object getObject(int columnIndex, Map> map) - throws SQLException - { - LOGGER.log("getObject not implemented"); - throw new SQLFeatureNotSupportedException("getObject not implemented"); - } - - @Override - public Ref getRef(int columnIndex) - throws SQLException - { - LOGGER.log("getRef not implemented"); - throw new SQLFeatureNotSupportedException("getRef not implemented"); - } - - @Override - public Blob getBlob(int columnIndex) - throws SQLException - { - LOGGER.log("getBlob not implemented"); - throw new SQLFeatureNotSupportedException("getBlob not implemented"); - } - - @Override - public Clob getClob(int columnIndex) - throws SQLException - { - LOGGER.log("getClob not implemented"); - throw new SQLFeatureNotSupportedException("getClob not implemented"); - } - - @Override - public Array getArray(int columnIndex) - throws SQLException - { - LOGGER.log("getArray not implemented"); - throw new SQLFeatureNotSupportedException("getArray not implemented"); - } - - @Override - public Object getObject(String columnLabel, Map> map) - throws SQLException - { - LOGGER.log("getObject not implemented"); - throw new SQLFeatureNotSupportedException("getObject not implemented"); - } - - @Override - public Ref getRef(String columnLabel) - throws SQLException - { - LOGGER.log("getRef not implemented"); - throw new SQLFeatureNotSupportedException("getRef not implemented"); - } - - @Override - public Blob getBlob(String columnLabel) - throws SQLException - { - LOGGER.log("getBlob not implemented"); - throw new SQLFeatureNotSupportedException("getBlob not implemented"); - } - - @Override - public Clob getClob(String columnLabel) - throws SQLException - { - LOGGER.log("getClob not implemented"); - throw new SQLFeatureNotSupportedException("getClob not implemented"); - } - - @Override - public Array getArray(String columnLabel) - throws SQLException - { - LOGGER.log("getArray not implemented"); - throw new SQLFeatureNotSupportedException("getArray not implemented"); - } - - @Override - public Date getDate(int columnIndex, Calendar cal) - throws SQLException - { - LOGGER.log("getDate not implemented"); - throw new SQLFeatureNotSupportedException("getDate not implemented"); - } - - @Override - public Date getDate(String columnLabel, Calendar cal) - throws SQLException - { - LOGGER.log("getDate not implemented"); - throw new SQLFeatureNotSupportedException("getDate not implemented"); - } - - @Override - public Time getTime(int columnIndex, Calendar cal) - throws SQLException - { - LOGGER.log("getTime not implemented"); - throw new SQLFeatureNotSupportedException("getTime not implemented"); - } - - @Override - public Time getTime(String columnLabel, Calendar cal) - throws SQLException - { - LOGGER.log("getTime not implemented"); - throw new SQLFeatureNotSupportedException("getTime not implemented"); - } - - @Override - public Timestamp getTimestamp(int columnIndex, Calendar cal) 
- throws SQLException - { - LOGGER.log("getTimestamp not implemented"); - throw new SQLFeatureNotSupportedException("getTimestamp not implemented"); - } - - @Override - public Timestamp getTimestamp(String columnLabel, Calendar cal) - throws SQLException - { - LOGGER.log("getTimestamp not implemented"); - throw new SQLFeatureNotSupportedException("getTimestamp not implemented"); - } - - @Override - public URL getURL(int columnIndex) - throws SQLException - { - this.checkClosed(); - String string = this.getString(columnIndex); - if (string == null) { - return null; - } - - try { - return new URL(string); - } - catch (MalformedURLException e) { - throw new SQLException(e); - } - } - - @Override - public URL getURL(String columnLabel) - throws SQLException - { - return this.getURL(0); - } - - @Override - public void updateRef(int columnIndex, Ref x) - throws SQLException - { - LOGGER.log("updateRef not implemented"); - throw new SQLFeatureNotSupportedException("updateRef not implemented"); - } - - @Override - public void updateRef(String columnLabel, Ref x) - throws SQLException - { - LOGGER.log("updateRef not implemented"); - throw new SQLFeatureNotSupportedException("updateRef not implemented"); - } - - @Override - public void updateBlob(int columnIndex, Blob x) - throws SQLException - { - LOGGER.log("updateBlob not implemented"); - throw new SQLFeatureNotSupportedException("updateBlob not implemented"); - } - - @Override - public void updateBlob(String columnLabel, Blob x) - throws SQLException - { - LOGGER.log("updateBlob not implemented"); - throw new SQLFeatureNotSupportedException("updateBlob not implemented"); - } - - @Override - public void updateClob(int columnIndex, Clob x) - throws SQLException - { - LOGGER.log("updateClob not implemented"); - throw new SQLFeatureNotSupportedException("updateClob not implemented"); - } - - @Override - public void updateClob(String columnLabel, Clob x) - throws SQLException - { - LOGGER.log("updateClob not implemented"); - throw new SQLFeatureNotSupportedException("updateClob not implemented"); - } - - @Override - public void updateArray(int columnIndex, Array x) - throws SQLException - { - LOGGER.log("updateArray not implemented"); - throw new SQLFeatureNotSupportedException("updateArray not implemented"); - } - - @Override - public void updateArray(String columnLabel, Array x) - throws SQLException - { - LOGGER.log("updateArray not implemented"); - throw new SQLFeatureNotSupportedException("updateArray not implemented"); - } - - @Override - public RowId getRowId(int columnIndex) - throws SQLException - { - LOGGER.log("getRowId not implemented"); - throw new SQLFeatureNotSupportedException("getRowId not implemented"); - } - - @Override - public RowId getRowId(String columnLabel) - throws SQLException - { - LOGGER.log("getRowId not implemented"); - throw new SQLFeatureNotSupportedException("getRowId not implemented"); - } - - @Override - public void updateRowId(int columnIndex, RowId x) - throws SQLException - { - LOGGER.log("updateRowId not implemented"); - throw new SQLFeatureNotSupportedException("updateRowId not implemented"); - } - - @Override - public void updateRowId(String columnLabel, RowId x) - throws SQLException - { - LOGGER.log("updateRowId not implemented"); - throw new SQLFeatureNotSupportedException("updateRowId not implemented"); - } - - @Override - public int getHoldability() - throws SQLException - { - LOGGER.log("getHoldability not implemented"); - throw new SQLFeatureNotSupportedException("getHoldability not implemented"); - } 
- - @Override - public boolean isClosed() - throws SQLException - { - LOGGER.log("isClosed = %s", isClosed); - return this.isClosed; - } - - @Override - public void updateNString(int columnIndex, String nString) - throws SQLException - { - LOGGER.log("updateNString not implemented"); - throw new SQLFeatureNotSupportedException("updateNString not implemented"); - } - - @Override - public void updateNString(String columnLabel, String nString) - throws SQLException - { - LOGGER.log("updateNString not implemented"); - throw new SQLFeatureNotSupportedException("updateNString not implemented"); - } - - @Override - public void updateNClob(int columnIndex, NClob nClob) - throws SQLException - { - LOGGER.log("updateNClob not implemented"); - throw new SQLFeatureNotSupportedException("updateNClob not implemented"); - } - - @Override - public void updateNClob(String columnLabel, NClob nClob) - throws SQLException - { - LOGGER.log("updateNClob not implemented"); - throw new SQLFeatureNotSupportedException("updateNClob not implemented"); - } - - @Override - public NClob getNClob(int columnIndex) - throws SQLException - { - LOGGER.log("getNClob not implemented"); - throw new SQLFeatureNotSupportedException("getNClob not implemented"); - } - - @Override - public NClob getNClob(String columnLabel) - throws SQLException - { - LOGGER.log("getNClob not implemented"); - throw new SQLFeatureNotSupportedException("getNClob not implemented"); - } - - @Override - public SQLXML getSQLXML(int columnIndex) - throws SQLException - { - LOGGER.log("getSQLXML not implemented"); - throw new SQLFeatureNotSupportedException("getSQLXML not implemented"); - } - - @Override - public SQLXML getSQLXML(String columnLabel) - throws SQLException - { - LOGGER.log("getSQLXML not implemented"); - throw new SQLFeatureNotSupportedException("getSQLXML not implemented"); - } - - @Override - public void updateSQLXML(int columnIndex, SQLXML xmlObject) - throws SQLException - { - LOGGER.log("updateSQLXML not implemented"); - throw new SQLFeatureNotSupportedException("updateSQLXML not implemented"); - } - - @Override - public void updateSQLXML(String columnLabel, SQLXML xmlObject) - throws SQLException - { - LOGGER.log("updateSQLXML not implemented"); - throw new SQLFeatureNotSupportedException("updateSQLXML not implemented"); - } - - @Override - public String getNString(int columnIndex) - throws SQLException - { - this.checkClosed(); - return result[position]; - } - - @Override - public String getNString(String columnLabel) - throws SQLException - { - return this.getNString(0); - } - - @Override - public Reader getNCharacterStream(int columnIndex) - throws SQLException - { - LOGGER.log("getNCharacterStream not implemented"); - throw new SQLFeatureNotSupportedException("getNCharacterStream not implemented"); - } - - @Override - public Reader getNCharacterStream(String columnLabel) - throws SQLException - { - LOGGER.log("getNCharacterStream not implemented"); - throw new SQLFeatureNotSupportedException("getNCharacterStream not implemented"); - } - - @Override - public void updateNCharacterStream(int columnIndex, Reader x, long length) - throws SQLException - { - LOGGER.log("updateNCharacterStream not implemented"); - throw new SQLFeatureNotSupportedException("updateNCharacterStream not implemented"); - } - - @Override - public void updateNCharacterStream(String columnLabel, Reader reader, long length) - throws SQLException - { - LOGGER.log("updateNCharacterStream not implemented"); - throw new 
SQLFeatureNotSupportedException("updateNCharacterStream not implemented"); - } - - @Override - public void updateAsciiStream(int columnIndex, InputStream x, long length) - throws SQLException - { - LOGGER.log("updateAsciiStream not implemented"); - throw new SQLFeatureNotSupportedException("updateAsciiStream not implemented"); - } - - @Override - public void updateBinaryStream(int columnIndex, InputStream x, long length) - throws SQLException - { - LOGGER.log("updateBinaryStream not implemented"); - throw new SQLFeatureNotSupportedException("updateBinaryStream not implemented"); - } - - @Override - public void updateCharacterStream(int columnIndex, Reader x, long length) - throws SQLException - { - LOGGER.log("updateCharacterStream not implemented"); - throw new SQLFeatureNotSupportedException("updateCharacterStream not implemented"); - } - - @Override - public void updateAsciiStream(String columnLabel, InputStream x, long length) - throws SQLException - { - LOGGER.log("updateAsciiStream not implemented"); - throw new SQLFeatureNotSupportedException("updateAsciiStream not implemented"); - } - - @Override - public void updateBinaryStream(String columnLabel, InputStream x, long length) - throws SQLException - { - LOGGER.log("updateBinaryStream not implemented"); - throw new SQLFeatureNotSupportedException("updateBinaryStream not implemented"); - } - - @Override - public void updateCharacterStream(String columnLabel, Reader reader, long length) - throws SQLException - { - LOGGER.log("updateCharacterStream not implemented"); - throw new SQLFeatureNotSupportedException("updateCharacterStream not implemented"); - } - - @Override - public void updateBlob(int columnIndex, InputStream inputStream, long length) - throws SQLException - { - LOGGER.log("updateBlob not implemented"); - throw new SQLFeatureNotSupportedException("updateBlob not implemented"); - } - - @Override - public void updateBlob(String columnLabel, InputStream inputStream, long length) - throws SQLException - { - LOGGER.log("updateBlob not implemented"); - throw new SQLFeatureNotSupportedException("updateBlob not implemented"); - } - - @Override - public void updateClob(int columnIndex, Reader reader, long length) - throws SQLException - { - LOGGER.log("updateClob not implemented"); - throw new SQLFeatureNotSupportedException("updateClob not implemented"); - } - - @Override - public void updateClob(String columnLabel, Reader reader, long length) - throws SQLException - { - LOGGER.log("updateClob not implemented"); - throw new SQLFeatureNotSupportedException("updateClob not implemented"); - } - - @Override - public void updateNClob(int columnIndex, Reader reader, long length) - throws SQLException - { - LOGGER.log("updateNClob not implemented"); - throw new SQLFeatureNotSupportedException("updateNClob not implemented"); - } - - @Override - public void updateNClob(String columnLabel, Reader reader, long length) - throws SQLException - { - LOGGER.log("updateNClob not implemented"); - throw new SQLFeatureNotSupportedException("updateNClob not implemented"); - } - - @Override - public void updateNCharacterStream(int columnIndex, Reader x) - throws SQLException - { - LOGGER.log("updateNCharacterStream not implemented"); - throw new SQLFeatureNotSupportedException("updateNCharacterStream not implemented"); - } - - @Override - public void updateNCharacterStream(String columnLabel, Reader reader) - throws SQLException - { - LOGGER.log("updateNCharacterStream not implemented"); - throw new 
SQLFeatureNotSupportedException("updateNCharacterStream not implemented"); - } - - @Override - public void updateAsciiStream(int columnIndex, InputStream x) - throws SQLException - { - LOGGER.log("updateAsciiStream not implemented"); - throw new SQLFeatureNotSupportedException("updateAsciiStream not implemented"); - } - - @Override - public void updateBinaryStream(int columnIndex, InputStream x) - throws SQLException - { - LOGGER.log("updateBinaryStream not implemented"); - throw new SQLFeatureNotSupportedException("updateBinaryStream not implemented"); - } - - @Override - public void updateCharacterStream(int columnIndex, Reader x) - throws SQLException - { - LOGGER.log("updateCharacterStream not implemented"); - throw new SQLFeatureNotSupportedException("updateCharacterStream not implemented"); - } - - @Override - public void updateAsciiStream(String columnLabel, InputStream x) - throws SQLException - { - LOGGER.log("updateAsciiStream not implemented"); - throw new SQLFeatureNotSupportedException("updateAsciiStream not implemented"); - } - - @Override - public void updateBinaryStream(String columnLabel, InputStream x) - throws SQLException - { - LOGGER.log("updateBinaryStream not implemented"); - throw new SQLFeatureNotSupportedException("updateBinaryStream not implemented"); - } - - @Override - public void updateCharacterStream(String columnLabel, Reader reader) - throws SQLException - { - LOGGER.log("updateCharacterStream not implemented"); - throw new SQLFeatureNotSupportedException("updateCharacterStream not implemented"); - } - - @Override - public void updateBlob(int columnIndex, InputStream inputStream) - throws SQLException - { - LOGGER.log("updateBlob not implemented"); - throw new SQLFeatureNotSupportedException("updateBlob not implemented"); - } - - @Override - public void updateBlob(String columnLabel, InputStream inputStream) - throws SQLException - { - LOGGER.log("updateBlob not implemented"); - throw new SQLFeatureNotSupportedException("updateBlob not implemented"); - } - - @Override - public void updateClob(int columnIndex, Reader reader) - throws SQLException - { - LOGGER.log("updateClob not implemented"); - throw new SQLFeatureNotSupportedException("updateClob not implemented"); - } - - @Override - public void updateClob(String columnLabel, Reader reader) - throws SQLException - { - LOGGER.log("updateClob not implemented"); - throw new SQLFeatureNotSupportedException("updateClob not implemented"); - } - - @Override - public void updateNClob(int columnIndex, Reader reader) - throws SQLException - { - LOGGER.log("updateNClob not implemented"); - throw new SQLFeatureNotSupportedException("updateNClob not implemented"); - } - - @Override - public void updateNClob(String columnLabel, Reader reader) - throws SQLException - { - LOGGER.log("updateNClob not implemented"); - throw new SQLFeatureNotSupportedException("updateNClob not implemented"); - } - - @Override - public T getObject(int columnIndex, Class type) - throws SQLException - { - LOGGER.log("getObject not implemented"); - throw new SQLFeatureNotSupportedException("getObject not implemented"); - } - - @Override - public T getObject(String columnLabel, Class type) - throws SQLException - { - LOGGER.log("getObject not implemented"); - throw new SQLFeatureNotSupportedException("getObject not implemented"); - } - - @Override - public T unwrap(Class iface) - throws SQLException - { - try { - return iface.cast(this); - } - catch (ClassCastException cce) { - LOGGER.log("Unable to unwrap to %s" + iface); - throw new 
SQLException("Unable to unwrap to " + iface); - } - } - - @Override - public boolean isWrapperFor(Class iface) - throws SQLException - { - return iface.isInstance(this); - } -} diff --git a/driver/datacap-driver-redis/src/main/java/io/edurt/datacap/core/RedisStatement.java b/driver/datacap-driver-redis/src/main/java/io/edurt/datacap/core/RedisStatement.java deleted file mode 100644 index de24a25e2a..0000000000 --- a/driver/datacap-driver-redis/src/main/java/io/edurt/datacap/core/RedisStatement.java +++ /dev/null @@ -1,407 +0,0 @@ -package io.edurt.datacap.core; - -import java.sql.*; - -public class RedisStatement - implements Statement -{ - private final static Logger LOGGER = new Logger(RedisStatement.class); - - private final RedisConnection connection; - private final RedisClient redisClient; - - private ResultSet resultSet; - private boolean isClosed = false; - private int fetchSize = 1; - - public RedisStatement(RedisConnection connection, RedisClient redisClient) - { - this.connection = connection; - this.redisClient = redisClient; - } - - @Override - public ResultSet executeQuery(String sql) - throws SQLException - { - LOGGER.log("executeQuery(%s)", sql); - - this.checkClosed(); - - String[] result = this.redisClient.sendCommand(sql); - return new RedisResultSet(result, this); - } - - @Override - public int executeUpdate(String sql) - throws SQLException - { - this.checkClosed(); - - String[] result = this.redisClient.sendCommand(sql); - return result.length; - } - - @Override - public void close() - throws SQLException - { - LOGGER.log("close()"); - if (isClosed) { - LOGGER.log("Statement has been closed."); - return; - } - if (this.resultSet != null && !this.resultSet.isClosed()) { - this.resultSet.close(); - } - isClosed = true; - } - - @Override - public int getMaxFieldSize() - throws SQLException - { - this.checkClosed(); - return Integer.MAX_VALUE; - } - - @Override - public void setMaxFieldSize(int max) - throws SQLException - { - this.checkClosed(); - // do nothing - } - - @Override - public int getMaxRows() - throws SQLException - { - this.checkClosed(); - return 0; - } - - @Override - public void setMaxRows(int max) - throws SQLException - { - this.checkClosed(); - // do nothing - } - - @Override - public void setEscapeProcessing(boolean enable) - throws SQLException - { - this.checkClosed(); - // do nothing - } - - @Override - public int getQueryTimeout() - throws SQLException - { - this.checkClosed(); - return 0; - } - - @Override - public void setQueryTimeout(int seconds) - throws SQLException - { - LOGGER.log("setQueryTimeout.."); - } - - @Override - public void cancel() - throws SQLException - { - throw new SQLFeatureNotSupportedException("cancel not implemented"); - } - - @Override - public SQLWarning getWarnings() - throws SQLException - { - LOGGER.log("getWarnings returns null"); - return null; - } - - @Override - public void clearWarnings() - throws SQLException - { - this.checkClosed(); - } - - @Override - public void setCursorName(String name) - throws SQLException - { - LOGGER.log("setCursorName not implemented"); - throw new SQLFeatureNotSupportedException("setCursorName not implemented"); - } - - @Override - public boolean execute(String sql) - throws SQLException - { - this.checkClosed(); - - String[] result = this.redisClient.sendCommand(sql); - this.resultSet = new RedisResultSet(result, this); - - return true; - } - - @Override - public ResultSet getResultSet() - throws SQLException - { - this.checkClosed(); - return this.resultSet; - } - - @Override - 
public int getUpdateCount() - throws SQLException - { - this.checkClosed(); - LOGGER.log("getUpdateCount()"); - // 原因在父类 - return -1; - } - - @Override - public boolean getMoreResults() - throws SQLException - { - this.checkClosed(); - LOGGER.log("getMoreResults()"); - return this.getMoreResults(CLOSE_CURRENT_RESULT); - } - - @Override - public void setFetchDirection(int direction) - throws SQLException - { - this.checkClosed(); - LOGGER.log("setFetchDirection not implemented"); - throw new SQLFeatureNotSupportedException("setFetchDirection not implemented"); - } - - @Override - public int getFetchDirection() - throws SQLException - { - this.checkClosed(); - return java.sql.ResultSet.FETCH_FORWARD; - } - - @Override - public void setFetchSize(int rows) - throws SQLException - { - this.checkClosed(); - this.fetchSize = rows; - } - - @Override - public int getFetchSize() - throws SQLException - { - this.checkClosed(); - return this.fetchSize; - } - - @Override - public int getResultSetConcurrency() - throws SQLException - { - this.checkClosed(); - return ResultSet.CONCUR_READ_ONLY; - } - - @Override - public int getResultSetType() - throws SQLException - { - this.checkClosed(); - return ResultSet.TYPE_FORWARD_ONLY; - } - - @Override - public void addBatch(String sql) - throws SQLException - { - this.checkClosed(); - LOGGER.log("addBatch not implemented"); - throw new SQLFeatureNotSupportedException("addBatch not implemented"); - } - - @Override - public void clearBatch() - throws SQLException - { - this.checkClosed(); - LOGGER.log("clearBatch not implemented"); - throw new SQLFeatureNotSupportedException("addBatch not implemented"); - } - - @Override - public int[] executeBatch() - throws SQLException - { - this.checkClosed(); - LOGGER.log("executeBatch not implemented"); - throw new SQLFeatureNotSupportedException("executeBatch not implemented"); - } - - @Override - public Connection getConnection() - throws SQLException - { - this.checkClosed(); - return this.connection; - } - - @Override - public boolean getMoreResults(int current) - throws SQLException - { - this.checkClosed(); - LOGGER.log("getMoreResults(%s)", current); - return false; - } - - @Override - public ResultSet getGeneratedKeys() - throws SQLException - { - LOGGER.log("getGeneratedKeys not implemented"); - throw new SQLFeatureNotSupportedException("getGeneratedKeys not implemented"); - } - - @Override - public int executeUpdate(String sql, int autoGeneratedKeys) - throws SQLException - { - this.executeUpdate(sql); - return 0; - } - - @Override - public int executeUpdate(String sql, int[] columnIndexes) - throws SQLException - { - return this.executeUpdate(sql, 0); - } - - @Override - public int executeUpdate(String sql, String[] columnNames) - throws SQLException - { - return this.executeUpdate(sql, 0); - } - - @Override - public boolean execute(String sql, int autoGeneratedKeys) - throws SQLException - { - this.executeUpdate(sql); - return true; - } - - @Override - public boolean execute(String sql, int[] columnIndexes) - throws SQLException - { - return this.execute(sql, 0); - } - - @Override - public boolean execute(String sql, String[] columnNames) - throws SQLException - { - return this.execute(sql, 0); - } - - @Override - public int getResultSetHoldability() - throws SQLException - { - this.checkClosed(); - return ResultSet.HOLD_CURSORS_OVER_COMMIT; - } - - @Override - public boolean isClosed() - throws SQLException - { - LOGGER.log("Statement isClosed = %s", isClosed); - return this.isClosed; - } - - @Override - 
public void setPoolable(boolean poolable) - throws SQLException - { - LOGGER.log("setPoolable not implemented"); - throw new SQLFeatureNotSupportedException("setPoolable not implemented"); - } - - @Override - public boolean isPoolable() - throws SQLException - { - this.checkClosed(); - return false; - } - - @Override - public void closeOnCompletion() - throws SQLException - { - this.checkClosed(); - LOGGER.log("closeOnCompletion not implemented"); - throw new SQLFeatureNotSupportedException("closeOnCompletion not implemented"); - } - - @Override - public boolean isCloseOnCompletion() - throws SQLException - { - this.checkClosed(); - LOGGER.log("isCloseOnCompletion not implemented"); - return false; - } - - private void checkClosed() - throws SQLException - { - if (isClosed()) { - LOGGER.log("Statement is closed."); - throw new SQLException("Statement is closed."); - } - } - - @Override - public T unwrap(Class iface) - throws SQLException - { - try { - return iface.cast(this); - } - catch (ClassCastException cce) { - LOGGER.log("Unable to unwrap to %s", iface); - throw new SQLException("Unable to unwrap to " + iface); - } - } - - @Override - public boolean isWrapperFor(Class iface) - throws SQLException - { - return iface.isInstance(this); - } -} diff --git a/driver/datacap-driver-redis/src/main/java/io/edurt/datacap/core/ServerVersion.java b/driver/datacap-driver-redis/src/main/java/io/edurt/datacap/core/ServerVersion.java deleted file mode 100644 index 93eb71b1a0..0000000000 --- a/driver/datacap-driver-redis/src/main/java/io/edurt/datacap/core/ServerVersion.java +++ /dev/null @@ -1,33 +0,0 @@ -package io.edurt.datacap.core; - -import lombok.Data; - -@Data -public class ServerVersion -{ - private String origin; - - private Integer major; - private Integer minor; - private Integer patch; - - public ServerVersion(String origin) - { - this.origin = origin; - - String[] arr = origin.split("\\."); - for (int i = 0; i < arr.length; i++) { - String str = arr[i]; - int v = Utils.isNumber(str) ? 
Integer.parseInt(str) : 0; - if (i == 0) { - this.major = v; - } - else if (i == 1) { - minor = v; - } - else if (i == 2) { - patch = v; - } - } - } -} diff --git a/driver/datacap-driver-redis/src/main/java/io/edurt/datacap/core/Utils.java b/driver/datacap-driver-redis/src/main/java/io/edurt/datacap/core/Utils.java deleted file mode 100644 index d0df24bbfb..0000000000 --- a/driver/datacap-driver-redis/src/main/java/io/edurt/datacap/core/Utils.java +++ /dev/null @@ -1,96 +0,0 @@ -package io.edurt.datacap.core; - -import java.io.BufferedReader; -import java.io.StringReader; -import java.util.*; -import java.util.stream.Collectors; - -public class Utils -{ - public static boolean isNumber(String str) - { - if (str == null || str.length() == 0) { - return false; - } - for (int i = 0; i < str.length(); i++) { - boolean digit = Character.isDigit(str.charAt(i)); - if (!digit) { - return false; - } - } - return true; - } - - public static List toList(T[] arr) - { - if (arr == null) { - return null; - } - return Arrays.stream(arr) - .collect(Collectors.toList()); - } - - public static Op parseSql(String rawSql) - { - // for IDEA database tool only - if (rawSql.contains("SELECT 'keep alive'")) { - return new Op(rawSql, null, "PING", new String[0]); - } - - // hints - List lines = new BufferedReader(new StringReader(rawSql)) - .lines() - .collect(Collectors.toList()); - - List hintLines = new ArrayList<>(); - List sqlLines = new ArrayList<>(); - lines.forEach(line -> { - if (line.startsWith("--")) { - hintLines.add(line); - } - else { - sqlLines.add(line); - } - }); - - List hints = hintLines - .stream() - .map(line -> { - String hintStr = line.replace("--", "") - .replaceAll(" ", ""); - String[] arr = hintStr.split(":"); - return new Hint(HintKey.fromString(arr[0]), arr[1]); - }).collect(Collectors.toList()); - - // sql to execute - StringBuilder sb = new StringBuilder(); - sqlLines.forEach(sb::append); - - String sql = sb.toString(); - - String[] arr = sql.split(" "); - - String commandString = arr[0]; - - if (arr.length == 1) { - return new Op(rawSql, hints, commandString, new String[0]); - } - else { - String[] commandParams = Arrays.copyOfRange(arr, 1, arr.length); - return new Op(rawSql, hints, commandString, commandParams); - } - } - - public static Map parseQueryStringToMap(String queryString) - { - String[] params = queryString.split("&"); - Map map = new HashMap<>(); - for (String param : params) { - String[] p = param.split("="); - if (p.length == 2) { - map.put(p[0], p[1]); - } - } - return map; - } -} diff --git a/driver/datacap-driver-redis/src/main/java/io/edurt/datacap/driver/AbstractRedisClient.java b/driver/datacap-driver-redis/src/main/java/io/edurt/datacap/driver/AbstractRedisClient.java deleted file mode 100644 index f947a64038..0000000000 --- a/driver/datacap-driver-redis/src/main/java/io/edurt/datacap/driver/AbstractRedisClient.java +++ /dev/null @@ -1,92 +0,0 @@ -package io.edurt.datacap.driver; - -import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; -import io.edurt.datacap.core.Hint; -import io.edurt.datacap.core.Logger; -import io.edurt.datacap.core.Op; -import io.edurt.datacap.core.RedisClient; -import io.edurt.datacap.core.Utils; -import redis.clients.jedis.Protocol; -import redis.clients.jedis.util.SafeEncoder; - -import java.sql.SQLException; -import java.util.Arrays; -import java.util.Collection; -import java.util.List; -import java.util.stream.Stream; - -@SuppressFBWarnings(value = {"BC_BAD_CAST_TO_ABSTRACT_COLLECTION"}, - justification = "I prefer to suppress 
these FindBugs warnings") -public abstract class AbstractRedisClient - implements RedisClient -{ - public static final Logger LOGGER = new Logger(AbstractRedisClient.class); - - @Override - public String[] sendCommand(String sql) - throws SQLException - { - try { - Op op = Utils.parseSql(sql); - - Object result = this.sendCommand(op); - - return this.decodeResult(sql, result, op.getHints()); - } - catch (Throwable e) { - throw new SQLException(e); - } - } - - protected abstract Object sendCommand(Op op); - - protected Protocol.Command convertCommand(String commandString) - { - return Arrays.stream(Protocol.Command.values()) - .filter(t -> { - String string = t.toString(); - return string.equalsIgnoreCase(commandString); - }) - .findFirst() - .orElseThrow(() -> new RuntimeException( - String.format("command invalided. commandString = %s", commandString) - )); - } - - /** - * hint: - * -- decoder:jdk - * TODO - * - * @param sql - * @param originResult - * @param hints - * @return - */ - protected String[] decodeResult(String sql, Object originResult, List hints) - { - String[] decodedResult; - if (originResult == null) { - decodedResult = new String[] {null}; - } - else if (originResult.getClass().isArray()) { - String decoded = SafeEncoder.encode((byte[]) originResult); - decodedResult = Stream.of(decoded) - .toArray(String[]::new); - } - else if (originResult instanceof Collection) { - List list = (List) originResult; - decodedResult = list.stream() - .map(t -> SafeEncoder.encode((byte[]) t)) - .toArray(String[]::new); - } - else { - LOGGER.log("cannot decode result. originResult = %s", originResult); - decodedResult = Stream.of(originResult.toString()) - .toArray(String[]::new); - } - LOGGER.log("decode success. sql = %s, originResult = %s, decodedResult = %s", - sql, originResult, Utils.toList(decodedResult)); - return decodedResult; - } -} diff --git a/driver/datacap-driver-redis/src/main/java/io/edurt/datacap/core/RedisConnection.java b/driver/datacap-driver-redis/src/main/java/io/edurt/datacap/driver/RedisConnection.java similarity index 52% rename from driver/datacap-driver-redis/src/main/java/io/edurt/datacap/core/RedisConnection.java rename to driver/datacap-driver-redis/src/main/java/io/edurt/datacap/driver/RedisConnection.java index 2f38f39ab0..072c3050bb 100644 --- a/driver/datacap-driver-redis/src/main/java/io/edurt/datacap/core/RedisConnection.java +++ b/driver/datacap-driver-redis/src/main/java/io/edurt/datacap/driver/RedisConnection.java @@ -1,120 +1,169 @@ -package io.edurt.datacap.core; - -import java.sql.*; +package io.edurt.datacap.driver; + +import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; +import org.redisson.Redisson; +import org.redisson.api.RedissonClient; +import org.redisson.config.Config; + +import java.sql.Array; +import java.sql.Blob; +import java.sql.CallableStatement; +import java.sql.Clob; +import java.sql.Connection; +import java.sql.DatabaseMetaData; +import java.sql.NClob; +import java.sql.PreparedStatement; +import java.sql.SQLClientInfoException; +import java.sql.SQLException; +import java.sql.SQLFeatureNotSupportedException; +import java.sql.SQLWarning; +import java.sql.SQLXML; +import java.sql.Savepoint; +import java.sql.Statement; +import java.sql.Struct; import java.util.Map; import java.util.Properties; import java.util.concurrent.Executor; +@SuppressFBWarnings(value = {"CT_CONSTRUCTOR_THROW", "NP_NONNULL_RETURN_VIOLATION"}) public class RedisConnection implements Connection { - private final static Logger LOGGER = new 
Logger(RedisConnection.class); - - private final RedisClient redisClient; - private final Properties properties; - - private String dbIndex; - - public RedisConnection(RedisClient redisClient, String dbIndex, Properties properties) - { - this.redisClient = redisClient; - this.dbIndex = dbIndex; - this.properties = properties; - } - + private final RedissonClient client; + private final int database; private boolean isClosed = false; - @Override - public Statement createStatement() + public RedisConnection(String url, Properties info) throws SQLException { - this.checkClosed(); - return new RedisStatement(this, this.redisClient); + try { + String redisUrl = url.substring(5); + if (redisUrl.startsWith("redis:")) { + redisUrl = redisUrl.substring(6); + } + while (redisUrl.startsWith("/")) { + redisUrl = redisUrl.substring(1); + } + + String host = "localhost"; + int port = 6379; + this.database = Integer.parseInt(info.getProperty("database", "0")); + + if (!redisUrl.isEmpty()) { + String[] parts = redisUrl.split(":"); + host = parts[0]; + if (parts.length > 1) { + port = Integer.parseInt(parts[1].split("/")[0]); + } + } + + String username = info.getProperty("user"); + String password = info.getProperty("password"); + + Config config = new Config(); + String address = "redis://" + host + ":" + port; + + if (username != null && password != null) { + config.useSingleServer() + .setAddress(address) + .setDatabase(database) + .setUsername(username) + .setPassword(password); + } + else if (password != null) { + config.useSingleServer() + .setAddress(address) + .setDatabase(database) + .setPassword(password); + } + else { + config.useSingleServer() + .setAddress(address) + .setDatabase(database); + } + + this.client = Redisson.create(config); + client.getKeys().count(); + } + catch (Exception e) { + throw new SQLException("Failed to connect to Redis: " + e.getMessage(), e); + } } - @Override - public PreparedStatement prepareStatement(String sql) - throws SQLException + public RedissonClient getClient() { - // TODO 暂不实现,感觉意义不大,未来看是否需要实现 - LOGGER.log("prepareStatement not implemented"); - throw new SQLFeatureNotSupportedException("prepareStatement not implemented"); + return client; } - @Override - public CallableStatement prepareCall(String sql) - throws SQLException + public int getDatabase() { - // TODO 暂不实现,感觉无意义,因为主要是执行存储过程的,redis没这玩意 - LOGGER.log("prepareCall not implemented"); - throw new SQLFeatureNotSupportedException("prepareCall not implemented"); + return database; } @Override - public String nativeSQL(String sql) + public Statement createStatement() throws SQLException { - LOGGER.log("nativeSQL not implemented"); - throw new SQLFeatureNotSupportedException("nativeSQL not implemented"); + checkClosed(); + return new RedisStatement(this); } @Override - public void setAutoCommit(boolean autoCommit) + public PreparedStatement prepareStatement(String sql) throws SQLException { + return null; } @Override - public boolean getAutoCommit() + public CallableStatement prepareCall(String sql) throws SQLException { - return true; + return null; } @Override - public void commit() + public String nativeSQL(String sql) throws SQLException { - // TODO 待支持事务 + return ""; } - @Override - public void rollback() + private void checkClosed() throws SQLException { - // TODO + if (isClosed) { + throw new SQLException("Connection is closed"); + } } @Override public void close() - throws SQLException { - this.redisClient.close(); - LOGGER.log("Connection close"); - this.isClosed = true; + if (!isClosed) { + 
client.shutdown(); + isClosed = true; + } } @Override public boolean isClosed() throws SQLException { - LOGGER.log("Connection isClosed = %s", isClosed); - return this.isClosed; + return isClosed; } @Override public DatabaseMetaData getMetaData() throws SQLException { - return new RedisDatabaseMetadata(this, this.dbIndex); + throw new SQLFeatureNotSupportedException("Method not supported"); } @Override public void setReadOnly(boolean readOnly) throws SQLException - { - // do nothing - } + {} @Override public boolean isReadOnly() @@ -126,346 +175,287 @@ public boolean isReadOnly() @Override public void setCatalog(String catalog) throws SQLException - { - LOGGER.log("setCatalog(%s)", catalog); - // do nothing - } + {} @Override public String getCatalog() throws SQLException { - return null; + return ""; } @Override public void setTransactionIsolation(int level) throws SQLException - { - } + {} @Override public int getTransactionIsolation() throws SQLException { - return Connection.TRANSACTION_NONE; + return 0; } @Override public SQLWarning getWarnings() throws SQLException { - LOGGER.log("getWarnings returns null"); return null; } @Override public void clearWarnings() throws SQLException - { - } + {} @Override public Statement createStatement(int resultSetType, int resultSetConcurrency) throws SQLException { - return this.createStatement(); + return null; } @Override public PreparedStatement prepareStatement(String sql, int resultSetType, int resultSetConcurrency) throws SQLException { - return this.prepareStatement(sql); + return null; } @Override public CallableStatement prepareCall(String sql, int resultSetType, int resultSetConcurrency) throws SQLException { - return this.prepareCall(sql); + return null; } @Override public Map> getTypeMap() throws SQLException { - LOGGER.log("getTypeMap not implemented"); - throw new SQLFeatureNotSupportedException("getTypeMap not implemented"); + return Map.of(); } @Override public void setTypeMap(Map> map) throws SQLException - { - LOGGER.log("setTypeMap not implemented"); - throw new SQLFeatureNotSupportedException("setTypeMap not implemented"); - } + {} @Override public void setHoldability(int holdability) throws SQLException - { - // do nothing - } + {} @Override public int getHoldability() throws SQLException { - return ResultSet.HOLD_CURSORS_OVER_COMMIT; + return 0; } @Override public Savepoint setSavepoint() throws SQLException { - LOGGER.log("setSavepoint not implemented"); - throw new SQLFeatureNotSupportedException("setSavepoint not implemented"); + return null; } @Override public Savepoint setSavepoint(String name) throws SQLException { - LOGGER.log("setSavepoint not implemented"); - throw new SQLFeatureNotSupportedException("setSavepoint not implemented"); + return null; } @Override public void rollback(Savepoint savepoint) throws SQLException - { - LOGGER.log("rollback not implemented"); - throw new SQLFeatureNotSupportedException("rollback not implemented"); - } + {} @Override public void releaseSavepoint(Savepoint savepoint) throws SQLException - { - LOGGER.log("releaseSavepoint not implemented"); - throw new SQLFeatureNotSupportedException("releaseSavepoint not implemented"); - } + {} @Override public Statement createStatement(int resultSetType, int resultSetConcurrency, int resultSetHoldability) throws SQLException { - return this.createStatement(); + return null; } @Override public PreparedStatement prepareStatement(String sql, int resultSetType, int resultSetConcurrency, int resultSetHoldability) throws SQLException { - return 
this.prepareStatement(sql); + return null; } @Override public CallableStatement prepareCall(String sql, int resultSetType, int resultSetConcurrency, int resultSetHoldability) throws SQLException { - return this.prepareCall(sql); + return null; } @Override public PreparedStatement prepareStatement(String sql, int autoGeneratedKeys) throws SQLException { - return this.prepareStatement(sql); + return null; } @Override public PreparedStatement prepareStatement(String sql, int[] columnIndexes) throws SQLException { - return this.prepareStatement(sql); + return null; } @Override public PreparedStatement prepareStatement(String sql, String[] columnNames) throws SQLException { - return this.prepareStatement(sql); + return null; } @Override public Clob createClob() throws SQLException { - LOGGER.log("createClob not implemented"); - throw new SQLFeatureNotSupportedException("createClob not implemented"); + return null; } @Override public Blob createBlob() throws SQLException { - LOGGER.log("createBlob not implemented"); - throw new SQLFeatureNotSupportedException("createBlob not implemented"); + return null; } @Override public NClob createNClob() throws SQLException { - LOGGER.log("createNClob not implemented"); - throw new SQLFeatureNotSupportedException("createNClob not implemented"); + return null; } @Override public SQLXML createSQLXML() throws SQLException { - LOGGER.log("createSQLXML not implemented"); - throw new SQLFeatureNotSupportedException("createSQLXML not implemented"); + return null; } @Override public boolean isValid(int timeout) throws SQLException { - LOGGER.log("isValid = %s", isClosed); - if (this.isClosed) { - return false; - } - // TODO 超时时间 - try { - this.redisClient.sendCommand("PING"); - return true; - } - catch (Exception e) { - return false; - } + return false; } @Override public void setClientInfo(String name, String value) throws SQLClientInfoException - { - this.properties.put(name, value); - } + {} @Override public void setClientInfo(Properties properties) throws SQLClientInfoException - { - this.properties.putAll(properties); - } + {} @Override public String getClientInfo(String name) throws SQLException { - this.checkClosed(); - - String property = this.properties.getProperty(name); - property = property == null ? 
"" : property; - LOGGER.log("getClientInfo(%s) = %s, properties = %s", name, property, properties); - return property; + return ""; } @Override public Properties getClientInfo() throws SQLException { - LOGGER.log("getClientInfo() = %s", properties); - return this.properties; + return null; } @Override public Array createArrayOf(String typeName, Object[] elements) throws SQLException { - LOGGER.log("createArrayOf not implemented"); - throw new SQLFeatureNotSupportedException("createArrayOf not implemented"); + return null; } @Override public Struct createStruct(String typeName, Object[] attributes) throws SQLException { - LOGGER.log("createStruct not implemented"); - throw new SQLFeatureNotSupportedException("createStruct not implemented"); + return null; } @Override public void setSchema(String schema) throws SQLException - { - synchronized (RedisConnection.class) { - LOGGER.log("setSchema(%s)", schema); - this.checkClosed(); - - this.redisClient.select(Integer.parseInt(schema)); - - this.dbIndex = schema; - } - } + {} @Override public String getSchema() throws SQLException { - synchronized (RedisConnection.class) { - this.checkClosed(); - LOGGER.log("getSchema() = %s", this.dbIndex); - return this.dbIndex; - } + return ""; } @Override public void abort(Executor executor) throws SQLException - { - LOGGER.log("abort not implemented"); - throw new SQLFeatureNotSupportedException("abort not implemented"); - } + {} @Override public void setNetworkTimeout(Executor executor, int milliseconds) throws SQLException + {} + + @Override + public int getNetworkTimeout() + throws SQLException { - LOGGER.log("setNetworkTimeout not implemented"); - throw new SQLFeatureNotSupportedException("setNetworkTimeout not implemented"); + return 0; } @Override - public int getNetworkTimeout() + public void setAutoCommit(boolean autoCommit) throws SQLException { - LOGGER.log("getNetworkTimeout not implemented"); - throw new SQLFeatureNotSupportedException("getNetworkTimeout not implemented"); + throw new UnsupportedOperationException("Redis doesn't support transactions in the same way as relational databases"); } @Override - public T unwrap(Class iface) + public boolean getAutoCommit() throws SQLException { - try { - return iface.cast(this); - } - catch (ClassCastException cce) { - LOGGER.log("Unable to unwrap to %s", iface); - throw new SQLException("Unable to unwrap to " + iface); - } + return true; } @Override - public boolean isWrapperFor(Class iface) + public void commit() + throws SQLException + {} + + @Override + public void rollback() + throws SQLException + {} + + @Override + public T unwrap(Class iface) throws SQLException { - this.checkClosed(); - return iface.isInstance(this); + return null; } - private void checkClosed() + @Override + public boolean isWrapperFor(Class iface) throws SQLException { - if (isClosed()) { - LOGGER.log("Connection is closed."); - throw new SQLException("Connection is closed."); - } + return false; } } diff --git a/driver/datacap-driver-redis/src/main/java/io/edurt/datacap/driver/RedisJdbcDriver.java b/driver/datacap-driver-redis/src/main/java/io/edurt/datacap/driver/RedisJdbcDriver.java new file mode 100644 index 0000000000..a26099d78e --- /dev/null +++ b/driver/datacap-driver-redis/src/main/java/io/edurt/datacap/driver/RedisJdbcDriver.java @@ -0,0 +1,72 @@ +package io.edurt.datacap.driver; + +import java.sql.Connection; +import java.sql.Driver; +import java.sql.DriverManager; +import java.sql.DriverPropertyInfo; +import java.sql.SQLException; +import 
java.sql.SQLFeatureNotSupportedException; +import java.util.Properties; +import java.util.logging.Logger; + +public class RedisJdbcDriver + implements Driver +{ + static { + try { + DriverManager.registerDriver(new RedisJdbcDriver()); + } + catch (SQLException e) { + throw new RuntimeException("Can't register Redis JDBC Driver", e); + } + } + + @Override + public boolean acceptsURL(String url) + throws SQLException + { + return url != null && url.startsWith("jdbc:redis:"); + } + + @Override + public Connection connect(String url, Properties info) + throws SQLException + { + if (!acceptsURL(url)) { + return null; + } + return new RedisConnection(url, info); + } + + @Override + public int getMajorVersion() + { + return 1; + } + + @Override + public int getMinorVersion() + { + return 0; + } + + @Override + public DriverPropertyInfo[] getPropertyInfo(String url, Properties info) + throws SQLException + { + return new DriverPropertyInfo[0]; + } + + @Override + public boolean jdbcCompliant() + { + return false; + } + + @Override + public Logger getParentLogger() + throws SQLFeatureNotSupportedException + { + throw new SQLFeatureNotSupportedException("Parent logger is not supported"); + } +} diff --git a/driver/datacap-driver-redis/src/main/java/io/edurt/datacap/core/EmptyResultSet.java b/driver/datacap-driver-redis/src/main/java/io/edurt/datacap/driver/RedisResultSet.java similarity index 70% rename from driver/datacap-driver-redis/src/main/java/io/edurt/datacap/core/EmptyResultSet.java rename to driver/datacap-driver-redis/src/main/java/io/edurt/datacap/driver/RedisResultSet.java index 79334dc398..d8018b151f 100644 --- a/driver/datacap-driver-redis/src/main/java/io/edurt/datacap/core/EmptyResultSet.java +++ b/driver/datacap-driver-redis/src/main/java/io/edurt/datacap/driver/RedisResultSet.java @@ -1,240 +1,271 @@ -package io.edurt.datacap.core; +package io.edurt.datacap.driver; + +import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; import java.io.InputStream; import java.io.Reader; import java.math.BigDecimal; import java.net.URL; -import java.sql.*; +import java.sql.Array; +import java.sql.Blob; +import java.sql.Clob; +import java.sql.Date; +import java.sql.NClob; +import java.sql.Ref; +import java.sql.ResultSet; +import java.sql.ResultSetMetaData; +import java.sql.RowId; +import java.sql.SQLException; +import java.sql.SQLWarning; +import java.sql.SQLXML; +import java.sql.Statement; +import java.sql.Time; +import java.sql.Timestamp; +import java.util.ArrayList; import java.util.Calendar; +import java.util.List; import java.util.Map; -public class EmptyResultSet +@SuppressFBWarnings(value = {"DM_DEFAULT_ENCODING", "EI_EXPOSE_REP2"}) +public class RedisResultSet implements ResultSet { - private final static Logger LOGGER = new Logger(EmptyResultSet.class); - - @Override - public boolean next() - throws SQLException - { - return false; - } - - @Override - public void close() - throws SQLException - { - LOGGER.log("close()"); - } - - @Override - public boolean wasNull() - throws SQLException - { - return false; + private final List> rows; + private final List columnNames; + private Map current; + private int position = -1; + private boolean isClosed = false; + private ResultSetMetaData metadata; + + public RedisResultSet(List> rows) + { + this.rows = rows; + this.columnNames = new ArrayList<>(); + this.current = null; + this.metadata = null; + + if (!rows.isEmpty()) { + Map first = rows.get(0); + columnNames.addAll(first.keySet()); + this.metadata = new RedisResultSetMetaData(columnNames, 
first); + } } @Override - public String getString(int columnIndex) + public boolean next() throws SQLException { - return null; - } + checkClosed(); - @Override - public boolean getBoolean(int columnIndex) - throws SQLException - { + position++; + if (position < rows.size()) { + current = rows.get(position); + return true; + } + current = null; return false; } - @Override - public byte getByte(int columnIndex) - throws SQLException - { - return 0; - } - - @Override - public short getShort(int columnIndex) - throws SQLException - { - return 0; - } - - @Override - public int getInt(int columnIndex) - throws SQLException - { - return 0; - } - - @Override - public long getLong(int columnIndex) - throws SQLException - { - return 0; - } - - @Override - public float getFloat(int columnIndex) - throws SQLException - { - return 0; - } - - @Override - public double getDouble(int columnIndex) - throws SQLException - { - return 0; - } - - @Override - public BigDecimal getBigDecimal(int columnIndex, int scale) - throws SQLException - { - return null; - } - - @Override - public byte[] getBytes(int columnIndex) - throws SQLException - { - return new byte[0]; - } - - @Override - public Date getDate(int columnIndex) - throws SQLException - { - return null; - } - - @Override - public Time getTime(int columnIndex) - throws SQLException - { - return null; - } - - @Override - public Timestamp getTimestamp(int columnIndex) - throws SQLException - { - return null; - } - - @Override - public InputStream getAsciiStream(int columnIndex) - throws SQLException - { - return null; - } - - @Override - public InputStream getUnicodeStream(int columnIndex) - throws SQLException - { - return null; - } - - @Override - public InputStream getBinaryStream(int columnIndex) - throws SQLException - { - return null; - } - @Override public String getString(String columnLabel) throws SQLException { - return null; + checkClosed(); + + if (current == null) { + throw new SQLException("No current row"); + } + Object value = current.get(columnLabel); + return value == null ? 
null : value.toString(); } @Override public boolean getBoolean(String columnLabel) throws SQLException { - return false; + checkClosed(); + + Object value = current.get(columnLabel); + if (value == null) { + return false; + } + if (value instanceof Boolean) { + return (Boolean) value; + } + return Boolean.parseBoolean(value.toString()); } @Override public byte getByte(String columnLabel) throws SQLException { - return 0; + checkClosed(); + + Object value = current.get(columnLabel); + if (value == null) { + return 0; + } + if (value instanceof Number) { + return ((Number) value).byteValue(); + } + return Byte.parseByte(value.toString()); } @Override public short getShort(String columnLabel) throws SQLException { - return 0; + checkClosed(); + + Object value = current.get(columnLabel); + if (value == null) { + return 0; + } + if (value instanceof Number) { + return ((Number) value).shortValue(); + } + return Short.parseShort(value.toString()); } @Override public int getInt(String columnLabel) throws SQLException { - return 0; + checkClosed(); + + Object value = current.get(columnLabel); + if (value == null) { + return 0; + } + if (value instanceof Number) { + return ((Number) value).intValue(); + } + return Integer.parseInt(value.toString()); } @Override public long getLong(String columnLabel) throws SQLException { - return 0; + checkClosed(); + + Object value = current.get(columnLabel); + if (value == null) { + return 0; + } + if (value instanceof Number) { + return ((Number) value).longValue(); + } + return Long.parseLong(value.toString()); } @Override public float getFloat(String columnLabel) throws SQLException { - return 0; + checkClosed(); + + Object value = current.get(columnLabel); + if (value == null) { + return 0; + } + if (value instanceof Number) { + return ((Number) value).floatValue(); + } + return Float.parseFloat(value.toString()); } @Override public double getDouble(String columnLabel) throws SQLException { - return 0; + checkClosed(); + + Object value = current.get(columnLabel); + if (value == null) { + return 0; + } + if (value instanceof Number) { + return ((Number) value).doubleValue(); + } + return Double.parseDouble(value.toString()); } @Override public BigDecimal getBigDecimal(String columnLabel, int scale) throws SQLException { - return null; + checkClosed(); + + Object value = current.get(columnLabel); + if (value == null) { + return null; + } + return new BigDecimal(value.toString()); } @Override public byte[] getBytes(String columnLabel) throws SQLException { - return new byte[0]; + checkClosed(); + + Object value = current.get(columnLabel); + if (value == null) { + return null; + } + if (value instanceof byte[]) { + return (byte[]) value; + } + return value.toString().getBytes(); } @Override public Date getDate(String columnLabel) throws SQLException { - return null; + checkClosed(); + + Object value = current.get(columnLabel); + if (value == null) { + return null; + } + if (value instanceof Date) { + return (Date) value; + } + return Date.valueOf(value.toString()); } @Override public Time getTime(String columnLabel) throws SQLException { - return null; + checkClosed(); + + Object value = current.get(columnLabel); + if (value == null) { + return null; + } + if (value instanceof Time) { + return (Time) value; + } + return Time.valueOf(value.toString()); } @Override public Timestamp getTimestamp(String columnLabel) throws SQLException { - return null; + checkClosed(); + + Object value = current.get(columnLabel); + if (value == null) { + return null; + } + if (value 
instanceof Timestamp) { + return (Timestamp) value; + } + return Timestamp.valueOf(value.toString()); } @Override @@ -268,43 +299,52 @@ public SQLWarning getWarnings() @Override public void clearWarnings() throws SQLException - { - - } + {} @Override public String getCursorName() throws SQLException { - return null; + return ""; } @Override public ResultSetMetaData getMetaData() throws SQLException { - return null; + checkClosed(); + + return metadata; } @Override public Object getObject(int columnIndex) throws SQLException { - return null; + checkClosed(); + + String columnName = getColumnName(columnIndex); + return current.get(columnName); } @Override public Object getObject(String columnLabel) throws SQLException { - return null; + checkClosed(); + + return current.get(columnLabel); } @Override public int findColumn(String columnLabel) throws SQLException { - return 0; + int index = columnNames.indexOf(columnLabel); + if (index == -1) { + throw new SQLException("Column not found: " + columnLabel); + } + return index + 1; } @Override @@ -325,83 +365,100 @@ public Reader getCharacterStream(String columnLabel) public BigDecimal getBigDecimal(int columnIndex) throws SQLException { - return null; + return getBigDecimal(getColumnName(columnIndex), 0); } @Override public BigDecimal getBigDecimal(String columnLabel) throws SQLException { - return null; + return getBigDecimal(columnLabel, 0); } @Override public boolean isBeforeFirst() throws SQLException { - return false; + return position < 0; } @Override public boolean isAfterLast() throws SQLException { - return false; + return position >= rows.size(); } @Override public boolean isFirst() throws SQLException { - return false; + return position == 0; } @Override public boolean isLast() throws SQLException { - return false; + return position == rows.size() - 1; } @Override public void beforeFirst() throws SQLException { - + position = -1; + current = null; } @Override public void afterLast() throws SQLException { - + position = rows.size(); + current = null; } @Override public boolean first() throws SQLException { - return false; + if (rows.isEmpty()) { + return false; + } + position = 0; + current = rows.get(0); + return true; } @Override public boolean last() throws SQLException { - return false; + if (rows.isEmpty()) { + return false; + } + position = rows.size() - 1; + current = rows.get(position); + return true; } @Override public int getRow() throws SQLException { - return 0; + return position + 1; } @Override public boolean absolute(int row) throws SQLException { + if (row > 0 && row <= rows.size()) { + position = row - 1; + current = rows.get(position); + return true; + } return false; } @@ -409,36 +466,37 @@ public boolean absolute(int row) public boolean relative(int rows) throws SQLException { - return false; + return absolute(position + 1 + rows); } @Override public boolean previous() throws SQLException { + if (position > 0) { + position--; + current = rows.get(position); + return true; + } return false; } @Override public void setFetchDirection(int direction) throws SQLException - { - - } + {} @Override public int getFetchDirection() throws SQLException { - return 0; + return ResultSet.FETCH_FORWARD; } @Override public void setFetchSize(int rows) throws SQLException - { - - } + {} @Override public int getFetchSize() @@ -451,14 +509,14 @@ public int getFetchSize() public int getType() throws SQLException { - return 0; + return ResultSet.TYPE_SCROLL_INSENSITIVE; } @Override public int getConcurrency() throws SQLException { - return 
0; + return ResultSet.CONCUR_READ_ONLY; } @Override @@ -485,317 +543,227 @@ public boolean rowDeleted() @Override public void updateNull(int columnIndex) throws SQLException - { - - } + {} @Override public void updateBoolean(int columnIndex, boolean x) throws SQLException - { - - } + {} @Override public void updateByte(int columnIndex, byte x) throws SQLException - { - - } + {} @Override public void updateShort(int columnIndex, short x) throws SQLException - { - - } + {} @Override public void updateInt(int columnIndex, int x) throws SQLException - { - - } + {} @Override public void updateLong(int columnIndex, long x) throws SQLException - { - - } + {} @Override public void updateFloat(int columnIndex, float x) throws SQLException - { - - } + {} @Override public void updateDouble(int columnIndex, double x) throws SQLException - { - - } + {} @Override public void updateBigDecimal(int columnIndex, BigDecimal x) throws SQLException - { - - } + {} @Override public void updateString(int columnIndex, String x) throws SQLException - { - - } + {} @Override public void updateBytes(int columnIndex, byte[] x) throws SQLException - { - - } + {} @Override public void updateDate(int columnIndex, Date x) throws SQLException - { - - } + {} @Override public void updateTime(int columnIndex, Time x) throws SQLException - { - - } + {} @Override public void updateTimestamp(int columnIndex, Timestamp x) throws SQLException - { - - } + {} @Override public void updateAsciiStream(int columnIndex, InputStream x, int length) throws SQLException - { - - } + {} @Override public void updateBinaryStream(int columnIndex, InputStream x, int length) throws SQLException - { - - } + {} @Override public void updateCharacterStream(int columnIndex, Reader x, int length) throws SQLException - { - - } + {} @Override public void updateObject(int columnIndex, Object x, int scaleOrLength) throws SQLException - { - - } + {} @Override public void updateObject(int columnIndex, Object x) throws SQLException - { - - } + {} @Override public void updateNull(String columnLabel) throws SQLException - { - - } + {} @Override public void updateBoolean(String columnLabel, boolean x) throws SQLException - { - - } + {} @Override public void updateByte(String columnLabel, byte x) throws SQLException - { - - } + {} @Override public void updateShort(String columnLabel, short x) throws SQLException - { - - } + {} @Override public void updateInt(String columnLabel, int x) throws SQLException - { - - } + {} @Override public void updateLong(String columnLabel, long x) throws SQLException - { - - } + {} @Override public void updateFloat(String columnLabel, float x) throws SQLException - { - - } + {} @Override public void updateDouble(String columnLabel, double x) throws SQLException - { - - } + {} @Override public void updateBigDecimal(String columnLabel, BigDecimal x) throws SQLException - { - - } + {} @Override public void updateString(String columnLabel, String x) throws SQLException - { - - } + {} @Override public void updateBytes(String columnLabel, byte[] x) throws SQLException - { - - } + {} @Override public void updateDate(String columnLabel, Date x) throws SQLException - { - - } + {} @Override public void updateTime(String columnLabel, Time x) throws SQLException - { - - } + {} @Override public void updateTimestamp(String columnLabel, Timestamp x) throws SQLException - { - - } + {} @Override public void updateAsciiStream(String columnLabel, InputStream x, int length) throws SQLException - { - - } + {} @Override public void 
updateBinaryStream(String columnLabel, InputStream x, int length) throws SQLException - { - - } + {} @Override public void updateCharacterStream(String columnLabel, Reader reader, int length) throws SQLException - { - - } + {} @Override public void updateObject(String columnLabel, Object x, int scaleOrLength) - throws SQLException - { - - } + throws SQLException + {} @Override public void updateObject(String columnLabel, Object x) throws SQLException - { - - } + {} @Override public void insertRow() throws SQLException - { - - } + {} @Override public void updateRow() throws SQLException - { - - } + {} @Override public void deleteRow() throws SQLException - { - - } + {} @Override public void refreshRow() throws SQLException - { - - } + {} @Override public void cancelRowUpdates() throws SQLException - { - - } + {} @Override public void moveToInsertRow() throws SQLException - { - - } + {} @Override public void moveToCurrentRow() throws SQLException - { - - } + {} @Override public Statement getStatement() @@ -808,7 +776,7 @@ public Statement getStatement() public Object getObject(int columnIndex, Map> map) throws SQLException { - return null; + return getObject(columnIndex); } @Override @@ -843,7 +811,7 @@ public Array getArray(int columnIndex) public Object getObject(String columnLabel, Map> map) throws SQLException { - return null; + return getObject(columnLabel); } @Override @@ -878,42 +846,42 @@ public Array getArray(String columnLabel) public Date getDate(int columnIndex, Calendar cal) throws SQLException { - return null; + return getDate(getColumnName(columnIndex)); } @Override public Date getDate(String columnLabel, Calendar cal) throws SQLException { - return null; + return getDate(columnLabel); } @Override public Time getTime(int columnIndex, Calendar cal) throws SQLException { - return null; + return getTime(getColumnName(columnIndex)); } @Override public Time getTime(String columnLabel, Calendar cal) throws SQLException { - return null; + return getTime(columnLabel); } @Override public Timestamp getTimestamp(int columnIndex, Calendar cal) throws SQLException { - return null; + return getTimestamp(getColumnName(columnIndex)); } @Override public Timestamp getTimestamp(String columnLabel, Calendar cal) throws SQLException { - return null; + return getTimestamp(columnLabel); } @Override @@ -933,58 +901,42 @@ public URL getURL(String columnLabel) @Override public void updateRef(int columnIndex, Ref x) throws SQLException - { - - } + {} @Override public void updateRef(String columnLabel, Ref x) throws SQLException - { - - } + {} @Override public void updateBlob(int columnIndex, Blob x) throws SQLException - { - - } + {} @Override public void updateBlob(String columnLabel, Blob x) throws SQLException - { - - } + {} @Override public void updateClob(int columnIndex, Clob x) throws SQLException - { - - } + {} @Override public void updateClob(String columnLabel, Clob x) throws SQLException - { - - } + {} @Override public void updateArray(int columnIndex, Array x) throws SQLException - { - - } + {} @Override public void updateArray(String columnLabel, Array x) throws SQLException - { - - } + {} @Override public RowId getRowId(int columnIndex) @@ -1003,59 +955,46 @@ public RowId getRowId(String columnLabel) @Override public void updateRowId(int columnIndex, RowId x) throws SQLException - { - - } + {} @Override public void updateRowId(String columnLabel, RowId x) throws SQLException - { - - } + {} @Override public int getHoldability() throws SQLException { - return 0; + return 
ResultSet.HOLD_CURSORS_OVER_COMMIT; } @Override public boolean isClosed() throws SQLException { - LOGGER.log("isClosed = %s", false); - return false; + return isClosed; } @Override public void updateNString(int columnIndex, String nString) throws SQLException - { - - } + {} @Override public void updateNString(String columnLabel, String nString) throws SQLException - { - - } + {} @Override public void updateNClob(int columnIndex, NClob nClob) throws SQLException - { - - } + {} @Override public void updateNClob(String columnLabel, NClob nClob) throws SQLException - { - - } + {} @Override public NClob getNClob(int columnIndex) @@ -1088,29 +1027,25 @@ public SQLXML getSQLXML(String columnLabel) @Override public void updateSQLXML(int columnIndex, SQLXML xmlObject) throws SQLException - { - - } + {} @Override public void updateSQLXML(String columnLabel, SQLXML xmlObject) throws SQLException - { - - } + {} @Override public String getNString(int columnIndex) throws SQLException { - return null; + return getString(columnIndex); } @Override public String getNString(String columnLabel) throws SQLException { - return null; + return getString(columnLabel); } @Override @@ -1130,208 +1065,286 @@ public Reader getNCharacterStream(String columnLabel) @Override public void updateNCharacterStream(int columnIndex, Reader x, long length) throws SQLException - { - - } + {} @Override public void updateNCharacterStream(String columnLabel, Reader reader, long length) throws SQLException - { - - } + {} @Override public void updateAsciiStream(int columnIndex, InputStream x, long length) throws SQLException - { - - } + {} @Override public void updateBinaryStream(int columnIndex, InputStream x, long length) throws SQLException - { - - } + {} @Override public void updateCharacterStream(int columnIndex, Reader x, long length) throws SQLException - { - - } + {} @Override public void updateAsciiStream(String columnLabel, InputStream x, long length) throws SQLException - { - - } + {} @Override public void updateBinaryStream(String columnLabel, InputStream x, long length) throws SQLException - { - - } + {} @Override public void updateCharacterStream(String columnLabel, Reader reader, long length) throws SQLException - { - - } + {} @Override public void updateBlob(int columnIndex, InputStream inputStream, long length) throws SQLException - { - - } + {} @Override public void updateBlob(String columnLabel, InputStream inputStream, long length) throws SQLException - { - - } + {} @Override public void updateClob(int columnIndex, Reader reader, long length) throws SQLException - { - - } + {} @Override public void updateClob(String columnLabel, Reader reader, long length) throws SQLException - { - - } + {} @Override public void updateNClob(int columnIndex, Reader reader, long length) throws SQLException - { - - } + {} @Override public void updateNClob(String columnLabel, Reader reader, long length) throws SQLException - { - - } + {} @Override public void updateNCharacterStream(int columnIndex, Reader x) throws SQLException - { - - } + {} @Override public void updateNCharacterStream(String columnLabel, Reader reader) throws SQLException - { - - } + {} @Override public void updateAsciiStream(int columnIndex, InputStream x) throws SQLException - { - - } + {} @Override public void updateBinaryStream(int columnIndex, InputStream x) throws SQLException - { - - } + {} @Override public void updateCharacterStream(int columnIndex, Reader x) throws SQLException - { + {} + + @Override + public void updateAsciiStream(String columnLabel, 
InputStream x) + throws SQLException + {} + + @Override + public void updateBinaryStream(String columnLabel, InputStream x) + throws SQLException + {} + + @Override + public void updateCharacterStream(String columnLabel, Reader reader) + throws SQLException + {} + + @Override + public void updateBlob(int columnIndex, InputStream inputStream) + throws SQLException + {} + + @Override + public void updateBlob(String columnLabel, InputStream inputStream) + throws SQLException + {} + @Override + public void updateClob(int columnIndex, Reader reader) + throws SQLException + {} + + @Override + public void updateClob(String columnLabel, Reader reader) + throws SQLException + {} + + @Override + public void updateNClob(int columnIndex, Reader reader) + throws SQLException + {} + + @Override + public void updateNClob(String columnLabel, Reader reader) + throws SQLException + {} + + @Override + public T getObject(int columnIndex, Class type) + throws SQLException + { + return type.cast(getObject(columnIndex)); } @Override - public void updateAsciiStream(String columnLabel, InputStream x) + public T getObject(String columnLabel, Class type) throws SQLException { + return type.cast(getObject(columnLabel)); + } + private void checkClosed() + throws SQLException + { + if (isClosed) { + throw new SQLException("ResultSet is closed"); + } } @Override - public void updateBinaryStream(String columnLabel, InputStream x) + public void close() throws SQLException { + isClosed = true; + } + @Override + public boolean wasNull() + throws SQLException + { + return false; } @Override - public void updateCharacterStream(String columnLabel, Reader reader) + public String getString(int columnIndex) throws SQLException { + return getString(getColumnName(columnIndex)); + } + @Override + public boolean getBoolean(int columnIndex) + throws SQLException + { + return getBoolean(getColumnName(columnIndex)); } @Override - public void updateBlob(int columnIndex, InputStream inputStream) + public byte getByte(int columnIndex) throws SQLException { + return getByte(getColumnName(columnIndex)); + } + @Override + public short getShort(int columnIndex) + throws SQLException + { + return getShort(getColumnName(columnIndex)); } @Override - public void updateBlob(String columnLabel, InputStream inputStream) + public int getInt(int columnIndex) throws SQLException { + return getInt(getColumnName(columnIndex)); + } + @Override + public long getLong(int columnIndex) + throws SQLException + { + return getLong(getColumnName(columnIndex)); } @Override - public void updateClob(int columnIndex, Reader reader) + public float getFloat(int columnIndex) throws SQLException { + return getFloat(getColumnName(columnIndex)); + } + @Override + public double getDouble(int columnIndex) + throws SQLException + { + return getDouble(getColumnName(columnIndex)); } @Override - public void updateClob(String columnLabel, Reader reader) + public BigDecimal getBigDecimal(int columnIndex, int scale) throws SQLException { + return getBigDecimal(getColumnName(columnIndex), scale); + } + @Override + public byte[] getBytes(int columnIndex) + throws SQLException + { + return getBytes(getColumnName(columnIndex)); } @Override - public void updateNClob(int columnIndex, Reader reader) + public Date getDate(int columnIndex) throws SQLException { + return getDate(getColumnName(columnIndex)); + } + @Override + public Time getTime(int columnIndex) + throws SQLException + { + return getTime(getColumnName(columnIndex)); } @Override - public void updateNClob(String columnLabel, 
Reader reader) + public Timestamp getTimestamp(int columnIndex) throws SQLException { + return getTimestamp(getColumnName(columnIndex)); + } + @Override + public InputStream getAsciiStream(int columnIndex) + throws SQLException + { + return null; } @Override - public T getObject(int columnIndex, Class type) + public InputStream getUnicodeStream(int columnIndex) throws SQLException { return null; } @Override - public T getObject(String columnLabel, Class type) + public InputStream getBinaryStream(int columnIndex) throws SQLException { return null; @@ -1341,19 +1354,21 @@ public T getObject(String columnLabel, Class type) public T unwrap(Class iface) throws SQLException { - try { - return iface.cast(this); - } - catch (ClassCastException cce) { - LOGGER.log("Unable to unwrap to %s", iface); - throw new SQLException("Unable to unwrap to " + iface); - } + return null; } @Override public boolean isWrapperFor(Class iface) throws SQLException { - return iface.isInstance(this); + return false; + } + + private String getColumnName(int columnIndex) + { + if (columnIndex < 1 || columnIndex > columnNames.size()) { + throw new IllegalArgumentException("Invalid column index: " + columnIndex); + } + return columnNames.get(columnIndex - 1); } } diff --git a/driver/datacap-driver-redis/src/main/java/io/edurt/datacap/core/RedisResultSetMetaData.java b/driver/datacap-driver-redis/src/main/java/io/edurt/datacap/driver/RedisResultSetMetaData.java similarity index 59% rename from driver/datacap-driver-redis/src/main/java/io/edurt/datacap/core/RedisResultSetMetaData.java rename to driver/datacap-driver-redis/src/main/java/io/edurt/datacap/driver/RedisResultSetMetaData.java index a52fffadf9..241cdb0567 100644 --- a/driver/datacap-driver-redis/src/main/java/io/edurt/datacap/core/RedisResultSetMetaData.java +++ b/driver/datacap-driver-redis/src/main/java/io/edurt/datacap/driver/RedisResultSetMetaData.java @@ -1,119 +1,144 @@ -package io.edurt.datacap.core; +package io.edurt.datacap.driver; + +import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; import java.sql.ResultSetMetaData; import java.sql.SQLException; import java.sql.Types; +import java.util.List; +import java.util.Map; +@SuppressFBWarnings(value = {"NP_NONNULL_RETURN_VIOLATION", "EI_EXPOSE_REP2"}) public class RedisResultSetMetaData implements ResultSetMetaData { - private final static Logger LOGGER = new Logger(RedisResultSetMetaData.class); + private final List columnNames; + private final Map sampleRow; - public static final int MAX_SIZE = 1024; + public RedisResultSetMetaData(List columnNames, Map sampleRow) + { + this.columnNames = columnNames; + this.sampleRow = sampleRow; + } @Override - public T unwrap(Class iface) + public int getColumnCount() throws SQLException { - try { - return iface.cast(this); - } - catch (ClassCastException cce) { - LOGGER.log("Unable to unwrap to %s", iface); - throw new SQLException("Unable to unwrap to " + iface); - } + return columnNames.size(); } @Override - public boolean isWrapperFor(Class iface) + public String getColumnName(int column) throws SQLException { - return iface.isInstance(this); + checkColumnIndex(column); + return columnNames.get(column - 1); } @Override - public int getColumnCount() + public String getColumnLabel(int column) throws SQLException { - return 1; + return getColumnName(column); } @Override - public boolean isAutoIncrement(int column) + public int getColumnType(int column) throws SQLException { - return false; + checkColumnIndex(column); + + String columnName = columnNames.get(column - 
1); + Object value = sampleRow.get(columnName); + return RedisTypeHelper.getJdbcType(value); } @Override - public boolean isCaseSensitive(int column) + public String getColumnTypeName(int column) throws SQLException { - return true; + checkColumnIndex(column); + + String columnName = columnNames.get(column - 1); + Object value = sampleRow.get(columnName); + return RedisTypeHelper.getTypeName(value); } @Override - public boolean isSearchable(int column) + public String getColumnClassName(int column) throws SQLException { - return false; + checkColumnIndex(column); + + String columnName = columnNames.get(column - 1); + Object value = sampleRow.get(columnName); + return RedisTypeHelper.getJavaClassName(value); } @Override - public boolean isCurrency(int column) + public int isNullable(int column) throws SQLException { - return false; + return columnNullable; } @Override - public int isNullable(int column) + public boolean isAutoIncrement(int column) throws SQLException { - return ResultSetMetaData.columnNoNulls; + return false; } @Override - public boolean isSigned(int column) + public boolean isCaseSensitive(int column) throws SQLException { - return false; + return getColumnType(column) == Types.VARCHAR; } @Override - public int getColumnDisplaySize(int column) + public boolean isSearchable(int column) throws SQLException { - return MAX_SIZE; + return true; } @Override - public String getColumnLabel(int column) + public boolean isCurrency(int column) throws SQLException { - return "RESULTS"; + return false; + } + + private void checkColumnIndex(int column) + throws SQLException + { + if (column < 1 || column > columnNames.size()) { + throw new SQLException("Invalid column index: " + column); + } } @Override - public String getColumnName(int column) + public boolean isSigned(int column) throws SQLException { - return "RESULTS"; + int type = getColumnType(column); + return type == Types.INTEGER || type == Types.BIGINT || type == Types.DOUBLE; } @Override - public String getSchemaName(int column) + public int getColumnDisplaySize(int column) throws SQLException { - LOGGER.log("getSchemaName(%s)", column); - return "9"; + return 0; } @Override public int getPrecision(int column) throws SQLException { - return MAX_SIZE; + return 0; } @Override @@ -131,51 +156,54 @@ public String getTableName(int column) } @Override - public String getCatalogName(int column) + public String getSchemaName(int column) throws SQLException { return ""; } @Override - public int getColumnType(int column) + public String getCatalogName(int column) throws SQLException { - return Types.NVARCHAR; + return ""; } @Override - public String getColumnTypeName(int column) + public boolean isReadOnly(int column) throws SQLException { - return "String"; + return false; } @Override - public boolean isReadOnly(int column) + public boolean isWritable(int column) throws SQLException { return true; } @Override - public boolean isWritable(int column) + public boolean isDefinitelyWritable(int column) throws SQLException { - return false; + return true; } @Override - public boolean isDefinitelyWritable(int column) + public T unwrap(Class iface) throws SQLException { - return false; + if (iface.isAssignableFrom(getClass())) { + return iface.cast(this); + } + throw new SQLException("Cannot unwrap to " + iface.getName()); } @Override - public String getColumnClassName(int column) + public boolean isWrapperFor(Class iface) throws SQLException { - return "java.lang.String"; + return iface.isAssignableFrom(getClass()); } } diff --git 
a/driver/datacap-driver-redis/src/main/java/io/edurt/datacap/driver/RedisStatement.java b/driver/datacap-driver-redis/src/main/java/io/edurt/datacap/driver/RedisStatement.java new file mode 100644 index 0000000000..e01fcd271d --- /dev/null +++ b/driver/datacap-driver-redis/src/main/java/io/edurt/datacap/driver/RedisStatement.java @@ -0,0 +1,487 @@ +package io.edurt.datacap.driver; + +import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; +import io.edurt.datacap.driver.parser.RedisParser; +import io.edurt.datacap.driver.parser.RedisShowParser; +import lombok.extern.slf4j.Slf4j; +import org.redisson.api.RKeys; +import org.redisson.api.RType; +import org.redisson.api.RedissonClient; + +import java.sql.Connection; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.sql.SQLWarning; +import java.sql.Statement; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +@Slf4j +@SuppressFBWarnings(value = {"EI_EXPOSE_REP", "EI_EXPOSE_REP2", "NP_NULL_PARAM_DEREF"}) +public class RedisStatement + implements Statement +{ + private final RedisConnection connection; + private boolean isClosed = false; + + public RedisStatement(RedisConnection connection) + { + this.connection = connection; + } + + @Override + public ResultSet executeQuery(String sql) + throws SQLException + { + checkClosed(); + + try { + RedisParser parser = RedisParser.createParser(sql); + if (parser instanceof RedisShowParser) { + return executeShowStatement((RedisShowParser) parser); + } + + Map<String, Object> query = parser.getQuery(); + if (query != null && query.containsKey("serverInfo")) { + return handleServerInfo(); + } + + return executeSelectStatement(parser); + } + catch (Exception e) { + throw new SQLException("Failed to execute query", e); + } + } + + private ResultSet handleServerInfo() + { + List<Map<String, Object>> rows = new ArrayList<>(); + Map<String, Object> row = new HashMap<>(); + row.put("info", "Redis Server"); + rows.add(row); + return new RedisResultSet(rows); + } + + private ResultSet executeSelectStatement(RedisParser parser) + { + RedissonClient client = connection.getClient(); + String collection = parser.getCollection(); + List<Map<String, Object>> rows = new ArrayList<>(); + + if (collection == null || collection.equals("*")) { + RKeys keys = client.getKeys(); + for (String key : keys.getKeys()) { + Map<String, Object> row = new HashMap<>(); + row.put("key", key); + row.put("type", getKeyType(client, key)); + row.put("value", getKeyValue(client, key)); + rows.add(row); + } + } + else { + RKeys keys = client.getKeys(); + String pattern = collection.contains("*") || collection.contains("?") ? collection : collection + "*"; + for (String key : keys.getKeysByPattern(pattern)) { + Map<String, Object> row = new HashMap<>(); + row.put("key", key); + row.put("type", getKeyType(client, key)); + row.put("value", getKeyValue(client, key)); + rows.add(row); + } + } + + return new RedisResultSet(rows); + } + + private String getKeyType(RedissonClient client, String key) + { + RType type = client.getKeys().getType(key); + return type != null ? type.name() : "NONE"; + } + + private Object getKeyValue(RedissonClient client, String key) + { + RType type = client.getKeys().getType(key); + if (type == null) { + return null; + } + + String typeName = type.name(); + if ("STRING".equals(typeName) || "OBJECT".equals(typeName)) { + return client.getBucket(key).get(); + } + else if ("LIST".equals(typeName)) { + return client.getList(key).readAll(); + } + else if ("SET".equals(typeName)) { + return client.getSet(key).readAll(); + } + else if ("ZSET".equals(typeName)) { + return client.getScoredSortedSet(key).readAll(); + } + else if ("MAP".equals(typeName) || "HASH".equals(typeName)) { + return client.getMap(key).readAllMap(); + } + else { + return "[" + typeName + "]"; + } + } + + private ResultSet executeShowStatement(RedisShowParser parser) + throws SQLException + { + try { + switch (parser.getShowType()) { + case DATABASES: + return handleShowDatabases(); + case TABLES: + return handleShowTables(parser); + case COLUMNS: + return handleShowColumns(parser); + default: + throw new SQLException("Unsupported SHOW command type"); + } + } + catch (Exception e) { + throw new SQLException("Failed to execute SHOW command", e); + } + } + + private ResultSet handleShowDatabases() + { + List<Map<String, Object>> rows = new ArrayList<>(); + for (int i = 0; i < 16; i++) { + Map<String, Object> row = new HashMap<>(); + row.put("name", String.valueOf(i)); + rows.add(row); + } + return new RedisResultSet(rows); + } + + private ResultSet handleShowTables(RedisShowParser parser) + { + RedissonClient client = connection.getClient(); + List<Map<String, Object>> rows = new ArrayList<>(); + + String pattern = parser.getPattern() != null ? parser.getPattern() : "*"; + RKeys keys = client.getKeys(); + + for (String key : keys.getKeysByPattern(pattern)) { + Map<String, Object> row = new HashMap<>(); + row.put("name", key); + rows.add(row); + } + + return new RedisResultSet(rows); + } + + private ResultSet handleShowColumns(RedisShowParser parser) + { + RedissonClient client = connection.getClient(); + String tableName = parser.getCollection(); + List<Map<String, Object>> rows = new ArrayList<>(); + + if (tableName != null) { + RType type = client.getKeys().getType(tableName); + if (type != null && ("MAP".equals(type.name()) || "HASH".equals(type.name()))) { + for (Object field : client.getMap(tableName).keySet()) { + Map<String, Object> row = new HashMap<>(); + row.put("name", field.toString()); + rows.add(row); + } + } + else { + Map<String, Object> row = new HashMap<>(); + row.put("name", "value"); + rows.add(row); + } + } + + return new RedisResultSet(rows); + } + + @Override + public int executeUpdate(String sql) + throws SQLException + { + throw new UnsupportedOperationException("Update operation not supported"); + } + + private void checkClosed() + throws SQLException + { + if (isClosed) { + throw new SQLException("Statement is closed"); + } + } + + @Override + public void close() + throws SQLException + { + isClosed = true; + } + + @Override + public int getMaxFieldSize() + throws SQLException + { + return 0; + } + + @Override + public void setMaxFieldSize(int max) + throws SQLException + {} + + @Override + public int getMaxRows() + throws SQLException + { + return 0; + } + + @Override + public void setMaxRows(int max) + throws SQLException + {} + + @Override + public void setEscapeProcessing(boolean enable) + throws SQLException + {} + + @Override + public int getQueryTimeout() + throws SQLException + { + return 0; + } + + @Override + public void setQueryTimeout(int seconds) + throws SQLException + {} + + @Override + public void cancel() + throws SQLException + {} + + 
@Override + public SQLWarning getWarnings() + throws SQLException + { + return null; + } + + @Override + public void clearWarnings() + throws SQLException + {} + + @Override + public void setCursorName(String name) + throws SQLException + {} + + @Override + public boolean execute(String sql) + throws SQLException + { + return false; + } + + @Override + public ResultSet getResultSet() + throws SQLException + { + return null; + } + + @Override + public int getUpdateCount() + throws SQLException + { + return 0; + } + + @Override + public boolean getMoreResults() + throws SQLException + { + return false; + } + + @Override + public void setFetchDirection(int direction) + throws SQLException + {} + + @Override + public int getFetchDirection() + throws SQLException + { + return 0; + } + + @Override + public void setFetchSize(int rows) + throws SQLException + {} + + @Override + public int getFetchSize() + throws SQLException + { + return 0; + } + + @Override + public int getResultSetConcurrency() + throws SQLException + { + return 0; + } + + @Override + public int getResultSetType() + throws SQLException + { + return 0; + } + + @Override + public void addBatch(String sql) + throws SQLException + {} + + @Override + public void clearBatch() + throws SQLException + {} + + @Override + public int[] executeBatch() + throws SQLException + { + return new int[0]; + } + + @Override + public Connection getConnection() + throws SQLException + { + return null; + } + + @Override + public boolean getMoreResults(int current) + throws SQLException + { + return false; + } + + @Override + public ResultSet getGeneratedKeys() + throws SQLException + { + return null; + } + + @Override + public int executeUpdate(String sql, int autoGeneratedKeys) + throws SQLException + { + return 0; + } + + @Override + public int executeUpdate(String sql, int[] columnIndexes) + throws SQLException + { + return 0; + } + + @Override + public int executeUpdate(String sql, String[] columnNames) + throws SQLException + { + return 0; + } + + @Override + public boolean execute(String sql, int autoGeneratedKeys) + throws SQLException + { + return false; + } + + @Override + public boolean execute(String sql, int[] columnIndexes) + throws SQLException + { + return false; + } + + @Override + public boolean execute(String sql, String[] columnNames) + throws SQLException + { + return false; + } + + @Override + public int getResultSetHoldability() + throws SQLException + { + return 0; + } + + @Override + public boolean isClosed() + throws SQLException + { + return false; + } + + @Override + public void setPoolable(boolean poolable) + throws SQLException + {} + + @Override + public boolean isPoolable() + throws SQLException + { + return false; + } + + @Override + public void closeOnCompletion() + throws SQLException + {} + + @Override + public boolean isCloseOnCompletion() + throws SQLException + { + return false; + } + + @Override + public T unwrap(Class iface) + throws SQLException + { + return null; + } + + @Override + public boolean isWrapperFor(Class iface) + throws SQLException + { + return false; + } +} diff --git a/driver/datacap-driver-redis/src/main/java/io/edurt/datacap/driver/RedisTypeHelper.java b/driver/datacap-driver-redis/src/main/java/io/edurt/datacap/driver/RedisTypeHelper.java new file mode 100644 index 0000000000..e0650c7997 --- /dev/null +++ b/driver/datacap-driver-redis/src/main/java/io/edurt/datacap/driver/RedisTypeHelper.java @@ -0,0 +1,105 @@ +package io.edurt.datacap.driver; + +import java.sql.Types; +import 
java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; + +public class RedisTypeHelper +{ + private static final Map<String, RedisDataType> TYPE_MAP = new HashMap<>(); + + private RedisTypeHelper() {} + + public static RedisDataType getType(String className) + { + RedisDataType type = TYPE_MAP.get(className); + return type != null ? type : new RedisDataType(Types.OTHER, "OTHER", Object.class); + } + + public static RedisDataType getType(Object value) + { + if (value == null) { + return TYPE_MAP.get("NULL"); + } + return getType(value.getClass().getSimpleName()); + } + + public static int getJdbcType(String className) + { + return getType(className).getJdbcType(); + } + + public static int getJdbcType(Object value) + { + return getType(value).getJdbcType(); + } + + public static String getTypeName(String className) + { + return getType(className).getTypeName(); + } + + public static String getTypeName(Object value) + { + return getType(value).getTypeName(); + } + + public static String getJavaClassName(String className) + { + return getType(className).getJavaClassName(); + } + + public static String getJavaClassName(Object value) + { + return getType(value).getJavaClassName(); + } + + public static class RedisDataType + { + private final int jdbcType; + private final String typeName; + private final Class<?> javaClass; + + public RedisDataType(int jdbcType, String typeName, Class<?> javaClass) + { + this.jdbcType = jdbcType; + this.typeName = typeName; + this.javaClass = javaClass; + } + + public int getJdbcType() + { + return jdbcType; + } + + public String getTypeName() + { + return typeName; + } + + public String getJavaClassName() + { + return javaClass.getName(); + } + } + + static { + TYPE_MAP.put("String", new RedisDataType(Types.VARCHAR, "VARCHAR", String.class)); + TYPE_MAP.put("Integer", new RedisDataType(Types.INTEGER, "INTEGER", Integer.class)); + TYPE_MAP.put("Long", new RedisDataType(Types.BIGINT, "BIGINT", Long.class)); + TYPE_MAP.put("Double", new RedisDataType(Types.DOUBLE, "DOUBLE", Double.class)); + TYPE_MAP.put("Float", new RedisDataType(Types.FLOAT, "FLOAT", Float.class)); + TYPE_MAP.put("Boolean", new RedisDataType(Types.BOOLEAN, "BOOLEAN", Boolean.class)); + TYPE_MAP.put("ArrayList", new RedisDataType(Types.ARRAY, "ARRAY", List.class)); + TYPE_MAP.put("LinkedList", new RedisDataType(Types.ARRAY, "ARRAY", List.class)); + TYPE_MAP.put("HashSet", new RedisDataType(Types.ARRAY, "SET", Set.class)); + TYPE_MAP.put("LinkedHashSet", new RedisDataType(Types.ARRAY, "SET", Set.class)); + TYPE_MAP.put("TreeSet", new RedisDataType(Types.ARRAY, "ZSET", Set.class)); + TYPE_MAP.put("HashMap", new RedisDataType(Types.OTHER, "HASH", Map.class)); + TYPE_MAP.put("LinkedHashMap", new RedisDataType(Types.OTHER, "HASH", Map.class)); + TYPE_MAP.put("byte[]", new RedisDataType(Types.BINARY, "BINARY", byte[].class)); + TYPE_MAP.put("NULL", new RedisDataType(Types.NULL, "NULL", Object.class)); + } +} diff --git a/driver/datacap-driver-redis/src/main/java/io/edurt/datacap/driver/cluster/JedisRedisClusterClient.java b/driver/datacap-driver-redis/src/main/java/io/edurt/datacap/driver/cluster/JedisRedisClusterClient.java deleted file mode 100644 index 1ecc1540bb..0000000000 --- a/driver/datacap-driver-redis/src/main/java/io/edurt/datacap/driver/cluster/JedisRedisClusterClient.java +++ /dev/null @@ -1,64 +0,0 @@ -package io.edurt.datacap.driver.cluster; - -import io.edurt.datacap.driver.AbstractRedisClient; -import io.edurt.datacap.core.Hint; -import io.edurt.datacap.core.HintKey; -import 
io.edurt.datacap.core.Op; -import lombok.RequiredArgsConstructor; -import redis.clients.jedis.JedisCluster; -import redis.clients.jedis.Protocol; - -import java.sql.SQLException; -import java.util.List; - -@RequiredArgsConstructor -public class JedisRedisClusterClient - extends AbstractRedisClient -{ - private final JedisCluster jedisCluster; - - @Override - protected Object sendCommand(Op op) - { - String rawSql = op.getOriginSql(); - String commandString = op.getCommand(); - String[] params = op.getParams(); - List hints = op.getHints(); - - try { - Protocol.Command command = this.convertCommand(commandString); - - String sampleKey = hints.stream() - .filter(hint -> hint.getKey().equals(HintKey.sample_key)) - .findFirst() - .map(Hint::getValue) - .orElse(null); - - Object result; - if (params == null || params.length == 0) { - result = this.jedisCluster.sendCommand(sampleKey, command); - } - else { - result = this.jedisCluster.sendCommand(sampleKey, command, params); - } - return result; - } - catch (Throwable e) { - LOGGER.log("command `%s` cannot execute.", rawSql); - throw new RuntimeException(String.format("command `%s` cannot execute.", rawSql)); - } - } - - @Override - public void select(int dbIndex) - throws SQLException - { - throw new SQLException("Redis Cluster does not support this operation"); - } - - @Override - public void close() - { - this.jedisCluster.close(); - } -} diff --git a/driver/datacap-driver-redis/src/main/java/io/edurt/datacap/driver/cluster/RedisClusterDriver.java b/driver/datacap-driver-redis/src/main/java/io/edurt/datacap/driver/cluster/RedisClusterDriver.java deleted file mode 100644 index 38fa966cee..0000000000 --- a/driver/datacap-driver-redis/src/main/java/io/edurt/datacap/driver/cluster/RedisClusterDriver.java +++ /dev/null @@ -1,107 +0,0 @@ -package io.edurt.datacap.driver.cluster; - -import io.edurt.datacap.core.Logger; -import io.edurt.datacap.core.RedisConnection; -import io.edurt.datacap.driver.conf.RedisClusterConnectionInfo; -import io.edurt.datacap.driver.redis.RedisDriver; -import org.apache.commons.pool2.impl.GenericObjectPoolConfig; -import redis.clients.jedis.JedisCluster; - -import java.net.URI; -import java.net.URISyntaxException; -import java.sql.*; -import java.util.Properties; - -public class RedisClusterDriver - implements Driver -{ - private final static Logger LOGGER = new Logger(RedisDriver.class); - - private static final String REDIS_CLUSTER_JDBC_PREFIX = "jdbc:redis-cluster:"; - - static { - try { - DriverManager.registerDriver(new RedisClusterDriver()); - } - catch (Exception e) { - LOGGER.log("Can't register driver!"); - throw new RuntimeException("Can't register driver!", e); - } - } - - @Override - public Connection connect(String url, Properties info) - throws SQLException - { - if (!this.acceptsURL(url)) { - LOGGER.log("wrong url. 
url is %s", url); - return null; - } - if (info == null) { - info = new Properties(); - } - - String rawUrl = url.replaceFirst("jdbc:", ""); - RedisClusterConnectionInfo connectionInfo = new RedisClusterConnectionInfo(rawUrl, info); - - JedisCluster jedisCluster = new JedisCluster( - connectionInfo.getNodes(), - connectionInfo.getTimeout(), - connectionInfo.getTimeout(), - connectionInfo.getMaxAttempts(), - connectionInfo.getUsername(), - connectionInfo.getPassword(), - null, - new GenericObjectPoolConfig<>() - ); - JedisRedisClusterClient jedisRedisClusterClient = new JedisRedisClusterClient(jedisCluster); - - return new RedisConnection(jedisRedisClusterClient, "0", info); - } - - @Override - public boolean acceptsURL(String url) - throws SQLException - { - return url.toLowerCase().startsWith(REDIS_CLUSTER_JDBC_PREFIX); - } - - @Override - public DriverPropertyInfo[] getPropertyInfo(String url, Properties info) - throws SQLException - { - return new DriverPropertyInfo[0]; - } - - @Override - public int getMajorVersion() - { - return 0; - } - - @Override - public int getMinorVersion() - { - return 0; - } - - @Override - public boolean jdbcCompliant() - { - return false; - } - - @Override - public java.util.logging.Logger getParentLogger() - throws SQLFeatureNotSupportedException - { - return null; - } - - public static void main(String[] args) - throws URISyntaxException - { - URI uri = new URI("cluster:///?hosts=localhost:3306"); - System.out.println(uri.getQuery()); - } -} diff --git a/driver/datacap-driver-redis/src/main/java/io/edurt/datacap/driver/conf/BaseConnectionInfo.java b/driver/datacap-driver-redis/src/main/java/io/edurt/datacap/driver/conf/BaseConnectionInfo.java deleted file mode 100644 index e898d760df..0000000000 --- a/driver/datacap-driver-redis/src/main/java/io/edurt/datacap/driver/conf/BaseConnectionInfo.java +++ /dev/null @@ -1,31 +0,0 @@ -package io.edurt.datacap.driver.conf; - -import lombok.Data; -import lombok.NoArgsConstructor; - -import java.util.Properties; - -@Data -@NoArgsConstructor -public class BaseConnectionInfo -{ - protected String username; - protected String password; - protected boolean ssl; - protected int timeout; - - public BaseConnectionInfo(Properties info) - { - String username = info.getProperty("user"); - String password = info.getProperty("password"); - String sslString = info.getProperty("ssl"); - Object timeoutString = info.getOrDefault("timeout", "1000"); - - boolean ssl = "on".equalsIgnoreCase(sslString) || "true".equalsIgnoreCase(sslString); - - this.username = username; - this.password = password; - this.timeout = Integer.parseInt(timeoutString.toString()); - this.ssl = ssl; - } -} diff --git a/driver/datacap-driver-redis/src/main/java/io/edurt/datacap/driver/conf/RedisClusterConnectionInfo.java b/driver/datacap-driver-redis/src/main/java/io/edurt/datacap/driver/conf/RedisClusterConnectionInfo.java deleted file mode 100644 index 5bcd791dc1..0000000000 --- a/driver/datacap-driver-redis/src/main/java/io/edurt/datacap/driver/conf/RedisClusterConnectionInfo.java +++ /dev/null @@ -1,55 +0,0 @@ -package io.edurt.datacap.driver.conf; - -import io.edurt.datacap.core.Logger; -import io.edurt.datacap.core.Utils; -import lombok.Data; -import lombok.EqualsAndHashCode; -import lombok.NoArgsConstructor; -import redis.clients.jedis.HostAndPort; -import redis.clients.jedis.JedisCluster; - -import java.net.URI; -import java.util.Arrays; -import java.util.Map; -import java.util.Properties; -import java.util.Set; -import java.util.stream.Collectors; - 
-@Data -@NoArgsConstructor -@EqualsAndHashCode(callSuper = true) -public class RedisClusterConnectionInfo - extends BaseConnectionInfo -{ - public static final Logger LOGGER = new Logger(RedisClusterConnectionInfo.class); - - private Set nodes; - private int maxAttempts; - - public RedisClusterConnectionInfo(String rawUrl, Properties info) - { - super((info)); - try { - URI uri = new URI(rawUrl); - Object maxAttemptsString = info.getOrDefault("maxAttempts", JedisCluster.DEFAULT_MAX_ATTEMPTS); - int maxAttempts = Integer.parseInt(maxAttemptsString.toString()); - - String query = uri.getQuery(); - Map paramMap = Utils.parseQueryStringToMap(query); - String host = paramMap.get("host"); - String[] hosts = host.split(";"); - - this.nodes = Arrays.stream(hosts) - .map(h -> { - String[] split = h.split(":"); - return new HostAndPort(split[0], Integer.parseInt(split[1])); - }).collect(Collectors.toSet()); - - this.maxAttempts = maxAttempts; - } - catch (Exception e) { - LOGGER.log("Cannot parse JDBC URL %s", rawUrl); - throw new RuntimeException("Cannot parse JDBC URL: " + rawUrl, e); - } - } -} diff --git a/driver/datacap-driver-redis/src/main/java/io/edurt/datacap/driver/conf/RedisConnectionInfo.java b/driver/datacap-driver-redis/src/main/java/io/edurt/datacap/driver/conf/RedisConnectionInfo.java deleted file mode 100644 index 1094fd4624..0000000000 --- a/driver/datacap-driver-redis/src/main/java/io/edurt/datacap/driver/conf/RedisConnectionInfo.java +++ /dev/null @@ -1,44 +0,0 @@ -package io.edurt.datacap.driver.conf; - -import io.edurt.datacap.core.Logger; -import lombok.*; - -import java.net.URI; -import java.util.Properties; - -@Data -@EqualsAndHashCode(callSuper = true) -public class RedisConnectionInfo - extends BaseConnectionInfo -{ - private final static Logger LOGGER = new Logger(RedisConnectionInfo.class); - - private String host; - private int port; - private int dbIndex; - - public RedisConnectionInfo(String rawUrl, Properties info) - { - super(info); - try { - URI uri = new URI(rawUrl); - - String host = uri.getHost() != null ? uri.getHost() : "localhost"; - int port = uri.getPort() > 0 ? 
uri.getPort() : 6379; - - String path = uri.getPath(); - int dbIndex = 0; - if (path != null && path.length() > 1) { - dbIndex = Integer.parseInt(path.replaceAll("/", "")); - } - - this.host = host; - this.port = port; - this.dbIndex = dbIndex; - } - catch (Exception e) { - LOGGER.log("Cannot parse JDBC URL %s", rawUrl); - throw new RuntimeException("Cannot parse JDBC URL: " + rawUrl, e); - } - } -} diff --git a/driver/datacap-driver-redis/src/main/java/io/edurt/datacap/driver/parser/RedisParser.java b/driver/datacap-driver-redis/src/main/java/io/edurt/datacap/driver/parser/RedisParser.java new file mode 100644 index 0000000000..f08acca753 --- /dev/null +++ b/driver/datacap-driver-redis/src/main/java/io/edurt/datacap/driver/parser/RedisParser.java @@ -0,0 +1,78 @@ +package io.edurt.datacap.driver.parser; + +import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; +import io.edurt.datacap.sql.SQLParser; +import io.edurt.datacap.sql.statement.SQLStatement; +import io.edurt.datacap.sql.statement.SelectStatement; +import io.edurt.datacap.sql.statement.ShowStatement; + +import java.util.List; +import java.util.Map; + +@SuppressFBWarnings(value = {"EI_EXPOSE_REP", "EI_EXPOSE_REP2"}) +public class RedisParser +{ + protected Map<String, Object> filter; + protected List<String> fields; + protected String command; + protected Map<String, Object> query; + protected String collection; + protected ShowStatement.ShowType showType; + protected String database; + + public static RedisParser createParser(String sql) + { + if (sql == null || sql.trim().isEmpty()) { + throw new IllegalArgumentException("SQL query cannot be null or empty"); + } + + SQLStatement statement = SQLParser.parse(sql.trim()); + if (statement instanceof SelectStatement) { + return new RedisSelectParser((SelectStatement) statement); + } + else if (statement instanceof ShowStatement) { + return new RedisShowParser((ShowStatement) statement); + } + throw new IllegalArgumentException("Unsupported SQL operation: " + sql); + } + + public Map<String, Object> getFilter() + { + return filter; + } + + public List<String> getFields() + { + return fields; + } + + public String getCommand() + { + return command; + } + + public Map<String, Object> getQuery() + { + return query; + } + + public String getCollection() + { + return collection; + } + + public ShowStatement.ShowType getShowType() + { + return showType; + } + + public String getDatabase() + { + return database; + } + + public void setDatabase(String database) + { + this.database = database; + } +} diff --git a/driver/datacap-driver-redis/src/main/java/io/edurt/datacap/driver/parser/RedisSelectParser.java b/driver/datacap-driver-redis/src/main/java/io/edurt/datacap/driver/parser/RedisSelectParser.java new file mode 100644 index 0000000000..afa5e2602a --- /dev/null +++ b/driver/datacap-driver-redis/src/main/java/io/edurt/datacap/driver/parser/RedisSelectParser.java @@ -0,0 +1,80 @@ +package io.edurt.datacap.driver.parser; + +import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; +import io.edurt.datacap.sql.node.Expression; +import io.edurt.datacap.sql.node.element.SelectElement; +import io.edurt.datacap.sql.node.element.TableElement; +import io.edurt.datacap.sql.statement.SelectStatement; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; + +@SuppressFBWarnings(value = {"EI_EXPOSE_REP", "EI_EXPOSE_REP2", "CT_CONSTRUCTOR_THROW"}) +public class RedisSelectParser + extends RedisParser +{ + public RedisSelectParser(SelectStatement statement) + { + parseSelectStatement(statement); + } + + private void 
parseSelectStatement(SelectStatement select) + { + this.query = new HashMap<>(); + this.fields = new ArrayList<>(); + + if (isVersionQuery(select)) { + this.query.put("serverInfo", true); + return; + } + + List fromSources = select.getFromSources(); + if (fromSources != null && !fromSources.isEmpty()) { + TableElement mainTable = fromSources.get(0); + this.collection = mainTable.getTableName(); + } + + List selectElements = select.getSelectElements(); + if (selectElements != null) { + for (SelectElement element : selectElements) { + if (element.getExpression() != null) { + String fieldName = element.getExpression().getValue().toString(); + if (element.getAlias() != null) { + fieldName = element.getAlias(); + } + fields.add(fieldName); + } + } + } + + if (select.getWhereClause() != null) { + this.filter = new HashMap<>(); + parseWhereExpression(select.getWhereClause()); + } + } + + private boolean isVersionQuery(SelectStatement select) + { + List elements = select.getSelectElements(); + if (elements != null && elements.size() == 1) { + SelectElement element = elements.get(0); + if (element.getExpression() != null) { + Expression expr = element.getExpression(); + if (expr.getType() == Expression.ExpressionType.FUNCTION && + "VERSION".equalsIgnoreCase(expr.getValue().toString())) { + return true; + } + } + } + return false; + } + + private void parseWhereExpression(Expression expression) + { + if (expression == null || filter == null) { + return; + } + filter.put("condition", expression.getValue().toString()); + } +} diff --git a/driver/datacap-driver-redis/src/main/java/io/edurt/datacap/driver/parser/RedisShowParser.java b/driver/datacap-driver-redis/src/main/java/io/edurt/datacap/driver/parser/RedisShowParser.java new file mode 100644 index 0000000000..f02073b92a --- /dev/null +++ b/driver/datacap-driver-redis/src/main/java/io/edurt/datacap/driver/parser/RedisShowParser.java @@ -0,0 +1,77 @@ +package io.edurt.datacap.driver.parser; + +import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; +import io.edurt.datacap.sql.statement.ShowStatement; + +@SuppressFBWarnings(value = {"EI_EXPOSE_REP", "EI_EXPOSE_REP2", "CT_CONSTRUCTOR_THROW"}) +public class RedisShowParser + extends RedisParser +{ + private String pattern; + + public RedisShowParser(ShowStatement statement) + { + parseShowStatement(statement); + } + + public void parseShowStatement(ShowStatement show) + { + this.showType = show.getShowType(); + switch (show.getShowType()) { + case DATABASES: + this.command = "listDatabases"; + if (show.getPattern() != null) { + this.pattern = convertLikeToPattern(show.getPattern()); + } + break; + + case TABLES: + this.command = "listKeys"; + if (show.getDatabaseName() != null) { + this.database = show.getDatabaseName(); + } + if (show.getPattern() != null) { + this.pattern = convertLikeToPattern(show.getPattern()); + } + else { + this.pattern = "*"; + } + break; + + case COLUMNS: + this.command = "listFields"; + if (show.getDatabaseName() != null) { + this.database = show.getDatabaseName(); + } + if (show.getTableName() != null) { + this.collection = show.getTableName(); + } + if (show.getPattern() != null) { + this.pattern = convertLikeToPattern(show.getPattern()); + } + break; + + default: + throw new IllegalArgumentException("Unsupported SHOW type: " + show.getShowType()); + } + } + + private String convertLikeToPattern(String likePattern) + { + if (likePattern.startsWith("'") && likePattern.endsWith("'")) { + likePattern = likePattern.substring(1, likePattern.length() - 1); + } + else if 
(likePattern.startsWith("\"") && likePattern.endsWith("\"")) { + likePattern = likePattern.substring(1, likePattern.length() - 1); + } + + return likePattern + .replace("%", "*") + .replace("_", "?"); + } + + public String getPattern() + { + return pattern; + } +} diff --git a/driver/datacap-driver-redis/src/main/java/io/edurt/datacap/driver/redis/JedisRedisClient.java b/driver/datacap-driver-redis/src/main/java/io/edurt/datacap/driver/redis/JedisRedisClient.java deleted file mode 100644 index 8b8a6e03c0..0000000000 --- a/driver/datacap-driver-redis/src/main/java/io/edurt/datacap/driver/redis/JedisRedisClient.java +++ /dev/null @@ -1,60 +0,0 @@ -package io.edurt.datacap.driver.redis; - -import io.edurt.datacap.driver.AbstractRedisClient; -import io.edurt.datacap.core.Logger; -import io.edurt.datacap.core.Op; -import redis.clients.jedis.Jedis; -import redis.clients.jedis.Protocol; - -public class JedisRedisClient - extends AbstractRedisClient -{ - public static final Logger LOGGER = new Logger(JedisRedisClient.class); - - private final Jedis jedis; - - public JedisRedisClient(Jedis jedis) - { - this.jedis = jedis; - } - - @Override - protected synchronized Object sendCommand(Op op) - { - String rawSql = op.getOriginSql(); - String commandString = op.getCommand(); - String[] params = op.getParams(); - - int db = -1; - try { - db = jedis.getDB(); - Protocol.Command command = this.convertCommand(commandString); - - Object result; - if (params == null || params.length == 0) { - result = this.jedis.sendCommand(command); - } - else { - result = this.jedis.sendCommand(command, params); - } - return result; - } - catch (Throwable e) { - LOGGER.log("command on db %s `%s` cannot execute.", db, rawSql); - throw new RuntimeException(String.format("command on db %s `%s` cannot execute.", db, rawSql)); - } - } - - @Override - public synchronized void select(int dbIndex) - { - this.jedis.select(dbIndex); - } - - @Override - public synchronized void close() - { - LOGGER.log("close()"); - this.jedis.close(); - } -} diff --git a/driver/datacap-driver-redis/src/main/java/io/edurt/datacap/driver/redis/RedisDriver.java b/driver/datacap-driver-redis/src/main/java/io/edurt/datacap/driver/redis/RedisDriver.java deleted file mode 100644 index e465fd230b..0000000000 --- a/driver/datacap-driver-redis/src/main/java/io/edurt/datacap/driver/redis/RedisDriver.java +++ /dev/null @@ -1,116 +0,0 @@ -package io.edurt.datacap.driver.redis; - -import io.edurt.datacap.core.Logger; -import io.edurt.datacap.core.RedisConnection; -import io.edurt.datacap.driver.conf.RedisConnectionInfo; -import redis.clients.jedis.Jedis; - -import java.sql.*; -import java.util.Properties; - -public class RedisDriver - implements Driver -{ - private final static Logger LOGGER = new Logger(RedisDriver.class); - - private static final String REDIS_JDBC_PREFIX = "jdbc:redis:"; - - static { - try { - DriverManager.registerDriver(new RedisDriver()); - } - catch (Exception e) { - LOGGER.log("Can't register driver!"); - throw new RuntimeException("Can't register driver!", e); - } - } - - @Override - public Connection connect(String url, Properties info) - throws SQLException - { - if (!this.acceptsURL(url)) { - LOGGER.log("wrong url. 
url is %s", url); - return null; - } - if (info == null) { - info = new Properties(); - } - - String rawUrl = url.replaceFirst("jdbc:", ""); - RedisConnectionInfo redisConnectionInfo = new RedisConnectionInfo(rawUrl, info); - - String host = redisConnectionInfo.getHost(); - int port = redisConnectionInfo.getPort(); - int dbIndex = redisConnectionInfo.getDbIndex(); - int timeout = redisConnectionInfo.getTimeout(); - boolean ssl = redisConnectionInfo.isSsl(); - String username = redisConnectionInfo.getUsername(); - String password = redisConnectionInfo.getPassword(); - - try { - final Jedis jedis = new Jedis(host, port, timeout, timeout, ssl); - jedis.connect(); - - if (username != null) { - jedis.auth(username, password); - } - else if (password != null) { - jedis.auth(password); - } - if (dbIndex != 0) { - jedis.select(dbIndex); - } -// if (clientName != null) { -// jedis.clientSetname(clientName); -// } - - return new RedisConnection(new JedisRedisClient(jedis), dbIndex + "", info); - } - catch (Exception e) { - LOGGER.log("Cannot init RedisConnection %s", e); - throw new SQLException("Cannot init RedisConnection", e); - } - } - - @Override - public boolean acceptsURL(String url) - throws SQLException - { - return url.toLowerCase().startsWith(REDIS_JDBC_PREFIX); - } - - @Override - public DriverPropertyInfo[] getPropertyInfo(String url, Properties info) - throws SQLException - { - return new DriverPropertyInfo[0]; - } - - @Override - public int getMajorVersion() - { - return 1; - } - - @Override - public int getMinorVersion() - { - return 0; - } - - @Override - public boolean jdbcCompliant() - { - return false; - } - - @Override - public java.util.logging.Logger getParentLogger() - throws SQLFeatureNotSupportedException - { - // ref: com.mysql.cj.jdbc.NonRegisteringDriver.getParentLogger - LOGGER.log("getParentLogger not implemented"); - throw new SQLFeatureNotSupportedException("getParentLogger not implemented"); - } -} diff --git a/driver/datacap-driver-redis/src/main/resources/META-INF/services/java.sql.Driver b/driver/datacap-driver-redis/src/main/resources/META-INF/services/java.sql.Driver index c1aa638315..f296b09ca2 100644 --- a/driver/datacap-driver-redis/src/main/resources/META-INF/services/java.sql.Driver +++ b/driver/datacap-driver-redis/src/main/resources/META-INF/services/java.sql.Driver @@ -1,2 +1 @@ -io.edurt.datacap.driver.redis.RedisDriver -io.edurt.datacap.driver.cluster.RedisClusterDriver \ No newline at end of file +io.edurt.datacap.driver.RedisJdbcDriver diff --git a/driver/datacap-driver-redis/src/test/java/io/edurt/datacap/RedisTest.java b/driver/datacap-driver-redis/src/test/java/io/edurt/datacap/RedisTest.java deleted file mode 100644 index a38bae159d..0000000000 --- a/driver/datacap-driver-redis/src/test/java/io/edurt/datacap/RedisTest.java +++ /dev/null @@ -1,88 +0,0 @@ -package io.edurt.datacap; - -import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; -import io.edurt.datacap.core.Logger; -import io.edurt.datacap.core.RedisStatement; -import lombok.AllArgsConstructor; -import lombok.Builder; -import lombok.Data; -import lombok.NoArgsConstructor; - -import java.math.BigDecimal; -import java.sql.*; - -@SuppressFBWarnings(value = {"OBL_UNSATISFIED_OBLIGATION", "SQL_BAD_RESULTSET_ACCESS"}, - justification = "I prefer to suppress these FindBugs warnings") -public class RedisTest -{ - private final static Logger LOGGER = new Logger(RedisStatement.class); - - public static void main(String[] args) - throws SQLException, ClassNotFoundException - { - 
Class.forName("com.itmuch.redis.jdbc.redis.RedisDriver"); - - Connection connection = DriverManager.getConnection("jdbc:redis://localhost:6379/0"); - Statement statement = connection.createStatement(); - - connection.setSchema("11"); - ResultSet rs = statement.executeQuery("get a"); - while (rs.next()) { - String string = rs.getString(0); - System.out.println(string); - } - -// statement.execute("set a b"); -// ResultSet rs = statement.executeQuery("get a"); -// while (rs.next()) { -// LOGGER.log("rs1:" + rs.getString(0)); -// } -// - ResultSet resultSet = statement.executeQuery("keys *"); - while (resultSet.next()) { - LOGGER.log(resultSet.getString(0)); - } - - connection.setSchema("11"); - ResultSet resultSet2 = statement.executeQuery("set ab99 ab88"); - while (resultSet2.next()) { - LOGGER.log(resultSet.getString(0)); - } - - resultSet.close(); - statement.close(); - connection.close(); - -// statement.execute("ZADD runoobkey 2 mongodb"); -// statement.execute("ZADD runoobkey 3 elasticsearch"); -// statement.execute("ZADD runoobkey 4 mysql"); -// -// ResultSet rs2 = statement.executeQuery("ZRANGE runoobkey 0 10 WITHSCORES"); -// while (rs2.next()) { -// LOGGER.log("rs2:" + rs2.getString(0)); -// } -// -// statement.execute("HMSET myhash field1 field2"); -// ResultSet rs3 = statement.executeQuery("HGETALL myhash"); -// while (rs3.next()) { -// LOGGER.log("rs3:" + rs3.getString(0)); -// } - -// ResultSet rs4 = statement.executeQuery("get user"); -// while (rs4.next()) { -// LOGGER.log("rs4:" + rs4.getString(0)); -// } - } -} - -@Data -@Builder -@AllArgsConstructor -@NoArgsConstructor -class User -{ - private String name; - private Short age; - private String email; - private BigDecimal money; -} diff --git a/pom.xml b/pom.xml index a9579ab309..c40999a2d9 100644 --- a/pom.xml +++ b/pom.xml @@ -205,6 +205,7 @@ 2.3.2 2.1.214 4.11.1 + 3.27.0 3.6.0 0.10.2 @@ -426,6 +427,11 @@ mongodb-driver-sync ${mongodb-driver.version} + + org.redisson + redisson + ${redisson.version} + diff --git a/test/datacap-test-driver/pom.xml b/test/datacap-test-driver/pom.xml index b4219f1ed8..b1e1887132 100644 --- a/test/datacap-test-driver/pom.xml +++ b/test/datacap-test-driver/pom.xml @@ -24,5 +24,11 @@ ${project.version} test + + io.edurt.datacap + datacap-driver-redis + ${project.version} + test + diff --git a/test/datacap-test-driver/src/test/java/io/edurt/datacap/test/redis/RedisJdbcBaseTest.java b/test/datacap-test-driver/src/test/java/io/edurt/datacap/test/redis/RedisJdbcBaseTest.java new file mode 100644 index 0000000000..5c37e03317 --- /dev/null +++ b/test/datacap-test-driver/src/test/java/io/edurt/datacap/test/redis/RedisJdbcBaseTest.java @@ -0,0 +1,114 @@ +package io.edurt.datacap.test.redis; + +import org.junit.After; +import org.junit.Before; +import org.junit.ClassRule; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.redisson.Redisson; +import org.redisson.api.RBucket; +import org.redisson.api.RList; +import org.redisson.api.RMap; +import org.redisson.api.RSet; +import org.redisson.api.RedissonClient; +import org.redisson.config.Config; +import org.testcontainers.containers.GenericContainer; +import org.testcontainers.containers.wait.strategy.Wait; +import org.testcontainers.utility.DockerImageName; + +import java.sql.Connection; +import java.sql.DriverManager; +import java.sql.Statement; +import java.time.Duration; +import java.util.List; +import java.util.Properties; + +public abstract class RedisJdbcBaseTest +{ + private static final Logger log = 
LoggerFactory.getLogger(RedisJdbcBaseTest.class); + @ClassRule + public static final GenericContainer REDIS_CONTAINER = new GenericContainer<>(DockerImageName.parse("redis:7-alpine")) + .withExposedPorts(6379) + .waitingFor(Wait.forListeningPort() + .withStartupTimeout(Duration.ofSeconds(30))); + + protected Statement statement; + protected Connection connection; + protected RedissonClient redissonClient; + + @Before + public void init() + { + try { + initializeRedisData(); + initializeJdbcConnection(); + } + catch (Exception e) { + throw new RuntimeException("Failed to initialize test environment", e); + } + } + + private void initializeRedisData() + { + Config config = new Config(); + config.useSingleServer() + .setAddress(String.format("redis://%s:%d", + REDIS_CONTAINER.getHost(), + REDIS_CONTAINER.getFirstMappedPort())); + redissonClient = Redisson.create(config); + + redissonClient.getKeys().flushdb(); + + RBucket stringKey = redissonClient.getBucket("test:string"); + stringKey.set("hello world"); + + RList listKey = redissonClient.getList("test:list"); + listKey.addAll(List.of("item1", "item2", "item3")); + + RSet setKey = redissonClient.getSet("test:set"); + setKey.addAll(List.of("member1", "member2", "member3")); + + RMap hashKey = redissonClient.getMap("test:hash"); + hashKey.put("field1", "value1"); + hashKey.put("field2", "value2"); + } + + private void initializeJdbcConnection() + throws Exception + { + Class.forName("io.edurt.datacap.driver.RedisJdbcDriver"); + Properties props = new Properties(); + props.setProperty("database", "0"); + + String jdbcUrl = String.format("jdbc:redis://%s:%d", + REDIS_CONTAINER.getHost(), + REDIS_CONTAINER.getFirstMappedPort() + ); + connection = DriverManager.getConnection(jdbcUrl, props); + statement = connection.createStatement(); + } + + @After + public void cleanup() + { + try { + if (statement != null) { + statement.close(); + } + if (connection != null) { + connection.close(); + } + if (redissonClient != null) { + redissonClient.shutdown(); + } + } + catch (Exception e) { + log.error("Failed to cleanup test environment", e); + } + } + + static { + REDIS_CONTAINER.setPortBindings(List.of("6379:6379")); + REDIS_CONTAINER.start(); + } +} diff --git a/test/datacap-test-driver/src/test/java/io/edurt/datacap/test/redis/RedisJdbcDriverConnectionTest.java b/test/datacap-test-driver/src/test/java/io/edurt/datacap/test/redis/RedisJdbcDriverConnectionTest.java new file mode 100644 index 0000000000..9d7207c1da --- /dev/null +++ b/test/datacap-test-driver/src/test/java/io/edurt/datacap/test/redis/RedisJdbcDriverConnectionTest.java @@ -0,0 +1,64 @@ +package io.edurt.datacap.test.redis; + +import lombok.extern.slf4j.Slf4j; +import org.junit.Test; + +import java.sql.Connection; +import java.sql.DriverManager; +import java.sql.SQLException; +import java.util.Properties; + +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertTrue; + +@Slf4j +public class RedisJdbcDriverConnectionTest + extends RedisJdbcBaseTest +{ + @Test + public void testConnection() + { + assertNotNull("Connection should not be null", connection); + } + + @Test + public void testConnectionNotClosed() + throws SQLException + { + assertFalse("Connection should not be closed", connection.isClosed()); + } + + @Test + public void testCreateStatement() + throws SQLException + { + assertNotNull("Statement should not be null", statement); + } + + @Test + public void testCloseConnection() + throws Exception + { + 
Properties props = new Properties(); + props.setProperty("database", "0"); + + String jdbcUrl = String.format("jdbc:redis://%s:%d", + REDIS_CONTAINER.getHost(), + REDIS_CONTAINER.getFirstMappedPort() + ); + + Connection conn = DriverManager.getConnection(jdbcUrl, props); + assertFalse("Connection should be open", conn.isClosed()); + + conn.close(); + assertTrue("Connection should be closed", conn.isClosed()); + } + + @Test + public void testAutoCommit() + throws SQLException + { + assertTrue("Auto commit should be true", connection.getAutoCommit()); + } +} diff --git a/test/datacap-test-driver/src/test/java/io/edurt/datacap/test/redis/RedisJdbcDriverSelectTest.java b/test/datacap-test-driver/src/test/java/io/edurt/datacap/test/redis/RedisJdbcDriverSelectTest.java new file mode 100644 index 0000000000..b7dbfceb4a --- /dev/null +++ b/test/datacap-test-driver/src/test/java/io/edurt/datacap/test/redis/RedisJdbcDriverSelectTest.java @@ -0,0 +1,86 @@ +package io.edurt.datacap.test.redis; + +import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.sql.ResultSet; +import java.sql.SQLException; + +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertTrue; + +public class RedisJdbcDriverSelectTest + extends RedisJdbcBaseTest +{ + private static final Logger log = LoggerFactory.getLogger(RedisJdbcDriverSelectTest.class); + + @Test + public void testSelectAllKeys() + throws SQLException + { + try (ResultSet rs = statement.executeQuery("SELECT * FROM test")) { + assertTrue("Should have at least one row", rs.next()); + + String key = rs.getString("key"); + String type = rs.getString("type"); + Object value = rs.getObject("value"); + + assertNotNull("Key should not be null", key); + assertNotNull("Type should not be null", type); + log.info("Key: {}, Type: {}, Value: {}", key, type, value); + + int count = 1; + while (rs.next()) { + log.info("Key: {}, Type: {}, Value: {}", + rs.getString("key"), + rs.getString("type"), + rs.getObject("value")); + count++; + } + log.info("Total keys: {}", count); + } + } + + @Test + public void testSelectByPattern() + throws SQLException + { + try (ResultSet rs = statement.executeQuery("SELECT * FROM test:*")) { + assertTrue("Should have at least one row", rs.next()); + + String key = rs.getString("key"); + assertTrue("Key should start with 'test:'", key.startsWith("test:")); + log.info("Key: {}, Type: {}, Value: {}", + key, + rs.getString("type"), + rs.getObject("value")); + + int count = 1; + while (rs.next()) { + log.info("Key: {}, Type: {}, Value: {}", + rs.getString("key"), + rs.getString("type"), + rs.getObject("value")); + count++; + } + log.info("Total matching keys: {}", count); + } + } + + @Test + public void testSelectStringKey() + throws SQLException + { + try (ResultSet rs = statement.executeQuery("SELECT * FROM test:string")) { + assertTrue("Should have at least one row", rs.next()); + + String key = rs.getString("key"); + String type = rs.getString("type"); + Object value = rs.getObject("value"); + + log.info("Key: {}, Type: {}, Value: {}", key, type, value); + assertTrue("Key should contain 'test:string'", key.contains("test:string")); + } + } +} diff --git a/test/datacap-test-driver/src/test/java/io/edurt/datacap/test/redis/RedisJdbcDriverShowTest.java b/test/datacap-test-driver/src/test/java/io/edurt/datacap/test/redis/RedisJdbcDriverShowTest.java new file mode 100644 index 0000000000..bde81bef44 --- /dev/null +++ 
b/test/datacap-test-driver/src/test/java/io/edurt/datacap/test/redis/RedisJdbcDriverShowTest.java @@ -0,0 +1,73 @@ +package io.edurt.datacap.test.redis; + +import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.sql.ResultSet; +import java.sql.SQLException; + +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertTrue; + +public class RedisJdbcDriverShowTest + extends RedisJdbcBaseTest +{ + private static final Logger log = LoggerFactory.getLogger(RedisJdbcDriverShowTest.class); + + @Test + public void testShowDatabases() + throws SQLException + { + try (ResultSet rs = statement.executeQuery("SHOW DATABASES")) { + assertTrue("Should have at least one database", rs.next()); + String name = rs.getString("name"); + assertNotNull("Database name should not be null", name); + log.info("Database: {}", name); + + int count = 1; + while (rs.next()) { + count++; + } + log.info("Total databases: {}", count); + } + } + + @Test + public void testShowTables() + throws SQLException + { + try (ResultSet rs = statement.executeQuery("SHOW TABLES")) { + assertTrue("Should have at least one key", rs.next()); + String name = rs.getString("name"); + assertNotNull("Key name should not be null", name); + log.info("Key: {}", name); + + int count = 1; + while (rs.next()) { + log.info("Key: {}", rs.getString("name")); + count++; + } + log.info("Total keys: {}", count); + } + } + + @Test + public void testShowColumns() + throws SQLException + { + try (ResultSet rs = statement.executeQuery("SHOW COLUMNS FROM test:hash")) { + assertTrue("Should have at least one field", rs.next()); + String name = rs.getString("name"); + assertNotNull("Field name should not be null", name); + log.info("Field: {}", name); + + int count = 1; + while (rs.next()) { + log.info("Field: {}", rs.getString("name")); + count++; + } + log.info("Total fields: {}", count); + } + } +} diff --git a/test/datacap-test-driver/src/test/java/io/edurt/datacap/test/redis/RedisParserTest.java b/test/datacap-test-driver/src/test/java/io/edurt/datacap/test/redis/RedisParserTest.java new file mode 100644 index 0000000000..c1713211fd --- /dev/null +++ b/test/datacap-test-driver/src/test/java/io/edurt/datacap/test/redis/RedisParserTest.java @@ -0,0 +1,79 @@ +package io.edurt.datacap.test.redis; + +import io.edurt.datacap.driver.parser.RedisParser; +import io.edurt.datacap.driver.parser.RedisSelectParser; +import io.edurt.datacap.driver.parser.RedisShowParser; +import io.edurt.datacap.sql.statement.ShowStatement; +import lombok.extern.slf4j.Slf4j; +import org.junit.Test; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertTrue; + +@Slf4j +public class RedisParserTest +{ + @Test + public void testParseShowDatabases() + { + RedisParser parser = RedisParser.createParser("SHOW DATABASES"); + assertTrue("Parser should be RedisShowParser", parser instanceof RedisShowParser); + assertEquals("Show type should be DATABASES", ShowStatement.ShowType.DATABASES, parser.getShowType()); + } + + @Test + public void testParseShowTables() + { + RedisParser parser = RedisParser.createParser("SHOW TABLES"); + assertTrue("Parser should be RedisShowParser", parser instanceof RedisShowParser); + assertEquals("Show type should be TABLES", ShowStatement.ShowType.TABLES, parser.getShowType()); + } + + @Test + public void testParseShowTablesWithPattern() + { + RedisParser parser = RedisParser.createParser("SHOW TABLES LIKE 'test%'"); + 
assertTrue("Parser should be RedisShowParser", parser instanceof RedisShowParser); + RedisShowParser showParser = (RedisShowParser) parser; + assertNotNull("Pattern should not be null", showParser.getPattern()); + log.info("Pattern: {}", showParser.getPattern()); + } + + @Test + public void testParseShowColumns() + { + RedisParser parser = RedisParser.createParser("SHOW COLUMNS FROM test:hash"); + assertTrue("Parser should be RedisShowParser", parser instanceof RedisShowParser); + assertEquals("Show type should be COLUMNS", ShowStatement.ShowType.COLUMNS, parser.getShowType()); + assertEquals("Collection should be test:hash", "test:hash", parser.getCollection()); + } + + @Test + public void testParseSelect() + { + RedisParser parser = RedisParser.createParser("SELECT * FROM test:*"); + assertTrue("Parser should be RedisSelectParser", parser instanceof RedisSelectParser); + assertEquals("Collection should be test:*", "test:*", parser.getCollection()); + } + + @Test + public void testParseSelectAll() + { + RedisParser parser = RedisParser.createParser("SELECT * FROM test"); + assertTrue("Parser should be RedisSelectParser", parser instanceof RedisSelectParser); + assertEquals("Collection should be test", "test", parser.getCollection()); + } + + @Test(expected = IllegalArgumentException.class) + public void testParseInvalidSql() + { + RedisParser.createParser(""); + } + + @Test(expected = IllegalArgumentException.class) + public void testParseNullSql() + { + RedisParser.createParser(null); + } +} From f32d817a7f138e88103cc9325c045d19e77cbeba Mon Sep 17 00:00:00 2001 From: qianmoQ Date: Fri, 6 Feb 2026 18:09:02 +0800 Subject: [PATCH 2/5] =?UTF-8?q?feat(condor):=20SQLExecutor=20=E5=8A=9F?= =?UTF-8?q?=E8=83=BD=E5=AE=8C=E5=96=84?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- client/datacap-cli/pom.xml | 2 +- configure/etc/bin/install-plugin.bat | 2 +- configure/etc/bin/install-plugin.sh | 2 +- configure/etc/conf/application.properties | 2 +- configure/metadata.json | 408 +++++++++--------- convert/datacap-convert-csv/pom.xml | 2 +- convert/datacap-convert-json/pom.xml | 2 +- convert/datacap-convert-none/pom.xml | 2 +- convert/datacap-convert-spi/pom.xml | 2 +- convert/datacap-convert-txt/pom.xml | 2 +- convert/datacap-convert-xml/pom.xml | 2 +- core/datacap-captcha/pom.xml | 2 +- core/datacap-common/pom.xml | 2 +- core/datacap-condor/pom.xml | 2 +- .../datacap/condor/ComparisonOperator.java | 5 +- .../io/edurt/datacap/condor/DataType.java | 24 +- .../io/edurt/datacap/condor/SQLExecutor.java | 226 ++++++++-- .../condor/condition/AndCondition.java | 22 + .../datacap/condor/condition/OrCondition.java | 22 + .../condor/condition/SimpleCondition.java | 30 +- .../datacap/condor/manager/TableManager.java | 42 +- core/datacap-parser/pom.xml | 2 +- .../java/io/edurt/datacap/sql/SQLVisitor.java | 31 +- .../sql/statement/DeleteStatement.java | 25 ++ .../sql/statement/UpdateStatement.java | 30 ++ core/datacap-plugin/pom.xml | 2 +- core/datacap-security/pom.xml | 2 +- core/datacap-server/pom.xml | 2 +- core/datacap-service/pom.xml | 2 +- core/datacap-spi/pom.xml | 2 +- core/datacap-sql/pom.xml | 2 +- core/datacap-ui/package.json | 2 +- driver/datacap-driver-mongodb/pom.xml | 2 +- driver/datacap-driver-redis/pom.xml | 2 +- executor/datacap-executor-local/pom.xml | 2 +- executor/datacap-executor-seatunnel/pom.xml | 2 +- executor/datacap-executor-spi/pom.xml | 2 +- fs/datacap-fs-alioss/pom.xml | 2 +- fs/datacap-fs-amazon-s3/pom.xml | 2 +- 
fs/datacap-fs-local/pom.xml | 2 +- fs/datacap-fs-minio/pom.xml | 2 +- fs/datacap-fs-qiniu/pom.xml | 2 +- fs/datacap-fs-spi/pom.xml | 2 +- fs/datacap-fs-tencent-cos/pom.xml | 2 +- lib/datacap-http/pom.xml | 2 +- lib/datacap-logger/pom.xml | 2 +- lib/datacap-schedule/pom.xml | 2 +- lib/datacap-shell/pom.xml | 2 +- notify/datacap-notify-dingtalk/pom.xml | 2 +- notify/datacap-notify-spi/pom.xml | 2 +- parser/datacap-parser-mysql/pom.xml | 2 +- parser/datacap-parser-spi/pom.xml | 2 +- parser/datacap-parser-trino/pom.xml | 2 +- plugin/datacap-plugin-alioss/pom.xml | 2 +- plugin/datacap-plugin-cassandra/pom.xml | 2 +- plugin/datacap-plugin-ceresdb/pom.xml | 2 +- plugin/datacap-plugin-clickhouse-http/pom.xml | 2 +- plugin/datacap-plugin-clickhouse/pom.xml | 2 +- plugin/datacap-plugin-cratedb-http/pom.xml | 2 +- plugin/datacap-plugin-cratedb/pom.xml | 2 +- plugin/datacap-plugin-db2/pom.xml | 2 +- plugin/datacap-plugin-dm/pom.xml | 2 +- plugin/datacap-plugin-dolphindb/pom.xml | 2 +- plugin/datacap-plugin-doris/pom.xml | 2 +- plugin/datacap-plugin-dremio/pom.xml | 2 +- plugin/datacap-plugin-druid/pom.xml | 2 +- plugin/datacap-plugin-duckdb/pom.xml | 2 +- .../datacap-plugin-elasticsearch-8x/pom.xml | 2 +- plugin/datacap-plugin-greptimedb/pom.xml | 2 +- plugin/datacap-plugin-h2-tcp/pom.xml | 2 +- plugin/datacap-plugin-h2/pom.xml | 2 +- plugin/datacap-plugin-hdfs/pom.xml | 2 +- plugin/datacap-plugin-hive-2x/pom.xml | 2 +- plugin/datacap-plugin-hologres/pom.xml | 2 +- plugin/datacap-plugin-ignite/pom.xml | 2 +- plugin/datacap-plugin-impala/pom.xml | 2 +- plugin/datacap-plugin-influxdb/pom.xml | 2 +- plugin/datacap-plugin-iotdb/pom.xml | 2 +- plugin/datacap-plugin-kafka/pom.xml | 2 +- plugin/datacap-plugin-kylin/pom.xml | 2 +- plugin/datacap-plugin-kyuubi/pom.xml | 2 +- plugin/datacap-plugin-matrixone/pom.xml | 2 +- plugin/datacap-plugin-monetdb/pom.xml | 2 +- plugin/datacap-plugin-mongo-atlas/pom.xml | 2 +- plugin/datacap-plugin-mongo-community/pom.xml | 2 +- plugin/datacap-plugin-mysql/pom.xml | 2 +- plugin/datacap-plugin-neo4j/pom.xml | 2 +- plugin/datacap-plugin-oceanbase/pom.xml | 2 +- plugin/datacap-plugin-oracle/pom.xml | 2 +- plugin/datacap-plugin-paradedb/pom.xml | 2 +- plugin/datacap-plugin-phoenix/pom.xml | 2 +- plugin/datacap-plugin-pinot/pom.xml | 2 +- plugin/datacap-plugin-postgresql/pom.xml | 2 +- plugin/datacap-plugin-presto/pom.xml | 2 +- plugin/datacap-plugin-questdb/pom.xml | 2 +- plugin/datacap-plugin-redis-jdbc/pom.xml | 2 +- plugin/datacap-plugin-redis/pom.xml | 2 +- plugin/datacap-plugin-scylladb/pom.xml | 2 +- plugin/datacap-plugin-snowflake/pom.xml | 2 +- plugin/datacap-plugin-solr/pom.xml | 2 +- plugin/datacap-plugin-sqlserver/pom.xml | 2 +- plugin/datacap-plugin-starrocks/pom.xml | 2 +- plugin/datacap-plugin-tdengine/pom.xml | 2 +- plugin/datacap-plugin-timescale/pom.xml | 2 +- plugin/datacap-plugin-trino/pom.xml | 2 +- plugin/datacap-plugin-ydb/pom.xml | 2 +- plugin/datacap-plugin-zookeeper/pom.xml | 2 +- pom.xml | 2 +- scheduler/datacap-scheduler-local/pom.xml | 2 +- scheduler/datacap-scheduler-spi/pom.xml | 2 +- shaded/datacap-shaded-neo4j/pom.xml | 2 +- shaded/datacap-shaded-pinot/pom.xml | 2 +- shaded/datacap-shaded-ydb/pom.xml | 2 +- test/datacap-test-condor/pom.xml | 2 +- test/datacap-test-convert/pom.xml | 2 +- test/datacap-test-core/pom.xml | 2 +- test/datacap-test-driver/pom.xml | 2 +- .../datacap/test/redis/RedisParserTest.java | 2 + test/datacap-test-executor/pom.xml | 2 +- test/datacap-test-fs/pom.xml | 2 +- test/datacap-test-lib/pom.xml | 2 +- 
test/datacap-test-parser/pom.xml | 2 +- test/datacap-test-plugin/pom.xml | 2 +- test/datacap-test-scheduler/pom.xml | 2 +- 124 files changed, 729 insertions(+), 362 deletions(-) create mode 100644 core/datacap-condor/src/main/java/io/edurt/datacap/condor/condition/AndCondition.java create mode 100644 core/datacap-condor/src/main/java/io/edurt/datacap/condor/condition/OrCondition.java create mode 100644 core/datacap-parser/src/main/java/io/edurt/datacap/sql/statement/DeleteStatement.java create mode 100644 core/datacap-parser/src/main/java/io/edurt/datacap/sql/statement/UpdateStatement.java diff --git a/client/datacap-cli/pom.xml b/client/datacap-cli/pom.xml index 221a6efa29..f6debafde9 100644 --- a/client/datacap-cli/pom.xml +++ b/client/datacap-cli/pom.xml @@ -6,7 +6,7 @@ io.edurt.datacap datacap - 2025.1.2 + 2026.0.0 ../../pom.xml diff --git a/configure/etc/bin/install-plugin.bat b/configure/etc/bin/install-plugin.bat index d7e6e9f691..388a4c7a9f 100644 --- a/configure/etc/bin/install-plugin.bat +++ b/configure/etc/bin/install-plugin.bat @@ -1,7 +1,7 @@ @echo off setlocal EnableDelayedExpansion set "HOME=%cd%" -set "VERSION=2025.1.2 +set "VERSION=2026.0.0 set "CDN_CENTER=https://repo1.maven.org/maven2/io/edurt/datacap" :: 检查并创建临时目录 diff --git a/configure/etc/bin/install-plugin.sh b/configure/etc/bin/install-plugin.sh index 38ce124d4c..973611dc67 100644 --- a/configure/etc/bin/install-plugin.sh +++ b/configure/etc/bin/install-plugin.sh @@ -1,7 +1,7 @@ #!/bin/sh HOME=$(pwd) -VERSION=2025.1.2 +VERSION=2026.0.0 CDN_CENTER="https://repo1.maven.org/maven2/io/edurt/datacap" install_package() { diff --git a/configure/etc/conf/application.properties b/configure/etc/conf/application.properties index 1d76d243b1..942fdafc57 100644 --- a/configure/etc/conf/application.properties +++ b/configure/etc/conf/application.properties @@ -1,5 +1,5 @@ ################################### Banner configure ################################# -app.version=2025.1.2 +app.version=2026.0.0 ################################### Basic configure ################################# server.port=9096 diff --git a/configure/metadata.json b/configure/metadata.json index 1945844d10..b70fab0e91 100644 --- a/configure/metadata.json +++ b/configure/metadata.json @@ -10,15 +10,15 @@ "description": "A powerful MySQL integration plugin that provides comprehensive database connectivity. Features include table/view management, data import/export, SQL query execution, stored procedure support, and real-time data synchronization. Optimized for high-performance data operations with connection pooling and batch processing capabilities.", "i18nFormat": true, "type": "Connector", - "version": "2025.1.2", + "version": "2026.0.0", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/plugin/mysql.svg", - "released": "2025-08-17 15:17:44", + "released": "2026-01-28 16:48:33", "supportVersion": [ "8.0", "5.7" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-mysql/2025.1.2/datacap-plugin-mysql-2025.1.2-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-mysql/2026.0.0/datacap-plugin-mysql-2026.0.0-bin.tar.gz" }, { "key": "datacap-plugin-clickhouse", @@ -26,14 +26,14 @@ "description": "ClickHouse is a distributed column-oriented database. 
It is a distributed database with a focus on performance, scalability, and ease of use.", "i18nFormat": true, "type": "Connector", - "version": "2025.1.2", + "version": "2026.0.0", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/plugin/clickhouse.svg", - "released": "2025-08-17 15:17:44", + "released": "2026-01-28 16:48:33", "supportVersion": [ "ALL" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-clickhouse/2025.1.2/datacap-plugin-clickhouse-2025.1.2-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-clickhouse/2026.0.0/datacap-plugin-clickhouse-2026.0.0-bin.tar.gz" }, { "key": "datacap-plugin-influxdb", @@ -41,14 +41,14 @@ "description": "InfluxDB is a time series database that stores and retrieves data points. It is a distributed database with a focus on performance, scalability, and ease of use.", "i18nFormat": true, "type": "Connector", - "version": "2025.1.2", + "version": "2026.0.0", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/plugin/influxdb.svg", - "released": "2025-08-17 15:17:44", + "released": "2026-01-28 16:48:33", "supportVersion": [ "ALL" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-influxdb/2025.1.2/datacap-plugin-influxdb-2025.1.2-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-influxdb/2026.0.0/datacap-plugin-influxdb-2026.0.0-bin.tar.gz" }, { "key": "datacap-plugin-solr", @@ -56,14 +56,14 @@ "description": "Solr is highly reliable, scalable and fault tolerant, providing distributed indexing, replication and load-balanced querying, automated failover and recovery, centralized configuration and more. Solr powers the search and navigation features of many of the world's largest internet sites.", "i18nFormat": true, "type": "Connector", - "version": "2025.1.2", + "version": "2026.0.0", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/plugin/solr.svg", - "released": "2025-08-17 15:17:44", + "released": "2026-01-28 16:48:33", "supportVersion": [ "ALL" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-solr/2025.1.2/datacap-plugin-solr-2025.1.2-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-solr/2026.0.0/datacap-plugin-solr-2026.0.0-bin.tar.gz" }, { "key": "datacap-plugin-cratedb", @@ -71,14 +71,14 @@ "description": "CrateDB is a hyper-fast database for real-time analytics and hybrid search. 
It handles multiple types of data and combines the simplicity of SQL with the scalability of a distributed architecture.", "i18nFormat": true, "type": "Connector", - "version": "2025.1.2", + "version": "2026.0.0", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/plugin/cratedb.svg", - "released": "2025-08-17 15:17:44", + "released": "2026-01-28 16:48:33", "supportVersion": [ "ALL" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-cratedb/2025.1.2/datacap-plugin-cratedb-2025.1.2-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-cratedb/2026.0.0/datacap-plugin-cratedb-2026.0.0-bin.tar.gz" }, { "key": "datacap-plugin-cratedb-http", @@ -86,14 +86,14 @@ "description": "Use HTTP to access the remote CrateDB database", "i18nFormat": true, "type": "Connector", - "version": "2025.1.2", + "version": "2026.0.0", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/plugin/cratedbhttp.svg", - "released": "2025-08-17 15:17:44", + "released": "2026-01-28 16:48:33", "supportVersion": [ "ALL" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-cratedb-http/2025.1.2/datacap-plugin-cratedb-http-2025.1.2-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-cratedb-http/2026.0.0/datacap-plugin-cratedb-http-2026.0.0-bin.tar.gz" }, { "key": "datacap-plugin-zookeeper", @@ -101,14 +101,14 @@ "description": "Zookeeper is a distributed, distributed configuration service. It is a distributed database with a focus on performance, scalability, and ease of use.", "i18nFormat": true, "type": "Connector", - "version": "2025.1.2", + "version": "2026.0.0", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/plugin/zookeeper.svg", - "released": "2025-08-17 15:17:44", + "released": "2026-01-28 16:48:33", "supportVersion": [ "ALL" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-zookeeper/2025.1.2/datacap-plugin-zookeeper-2025.1.2-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-zookeeper/2026.0.0/datacap-plugin-zookeeper-2026.0.0-bin.tar.gz" }, { "key": "datacap-plugin-kafka", @@ -116,14 +116,14 @@ "description": "Apache Kafka is an open-source distributed event streaming platform used by thousands of companies for high-performance data pipelines, streaming analytics, data integration, and mission-critical applications.", "i18nFormat": true, "type": "Connector", - "version": "2025.1.2", + "version": "2026.0.0", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/plugin/kafka.svg", - "released": "2025-08-17 15:17:44", + "released": "2026-01-28 16:48:33", "supportVersion": [ "ALL" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-kafka/2025.1.2/datacap-plugin-kafka-2025.1.2-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-kafka/2026.0.0/datacap-plugin-kafka-2026.0.0-bin.tar.gz" }, { "key": "datacap-plugin-redis", @@ -131,14 +131,14 @@ "description": "Redis is an in-memory data store used by millions of developers as a cache, vector database, document database, streaming engine, and message broker. Redis has built-in replication and different levels of on-disk persistence. 
It supports complex data types (for example, strings, hashes, lists, sets, sorted sets, and JSON), with atomic operations defined on those data types.", "i18nFormat": true, "type": "Connector", - "version": "2025.1.2", + "version": "2026.0.0", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/plugin/redis.svg", - "released": "2025-08-17 15:17:44", + "released": "2026-01-28 16:48:33", "supportVersion": [ "ALL" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-redis/2025.1.2/datacap-plugin-redis-2025.1.2-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-redis/2026.0.0/datacap-plugin-redis-2026.0.0-bin.tar.gz" }, { "key": "datacap-plugin-h2", @@ -146,14 +146,14 @@ "description": "H2 is a small, fast, and easy-to-use in-memory database.", "i18nFormat": true, "type": "Connector", - "version": "2025.1.2", + "version": "2026.0.0", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/plugin/h2.svg", - "released": "2025-08-17 15:17:44", + "released": "2026-01-28 16:48:33", "supportVersion": [ "ALL" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-h2/2025.1.2/datacap-plugin-h2-2025.1.2-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-h2/2026.0.0/datacap-plugin-h2-2026.0.0-bin.tar.gz" }, { "key": "datacap-plugin-db2", @@ -161,14 +161,14 @@ "description": "IBM® Db2® is the cloud-native database built to power low-latency transactions, real-time analytics and AI applications at scale.", "i18nFormat": true, "type": "Connector", - "version": "2025.1.2", + "version": "2026.0.0", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/plugin/db2.svg", - "released": "2025-08-17 15:17:44", + "released": "2026-01-28 16:48:33", "supportVersion": [ "ALL" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-db2/2025.1.2/datacap-plugin-db2-2025.1.2-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-db2/2026.0.0/datacap-plugin-db2-2026.0.0-bin.tar.gz" }, { "key": "datacap-plugin-dm", @@ -176,14 +176,14 @@ "description": "A professional database software developer and service provider, we provide users with technically advanced and detailed products.", "i18nFormat": true, "type": "Connector", - "version": "2025.1.2", + "version": "2026.0.0", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/plugin/dm.svg", - "released": "2025-08-17 15:17:44", + "released": "2026-01-28 16:48:33", "supportVersion": [ "ALL" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-dm/2025.1.2/datacap-plugin-dm-2025.1.2-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-dm/2026.0.0/datacap-plugin-dm-2026.0.0-bin.tar.gz" }, { "key": "datacap-plugin-hdfs", @@ -191,14 +191,14 @@ "description": "Hadoop Distributed File System (HDFS) is a distributed file system based on the Hadoop project.", "i18nFormat": true, "type": "Connector", - "version": "2025.1.2", + "version": "2026.0.0", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/plugin/hdfs.svg", - "released": "2025-08-17 15:17:44", + "released": "2026-01-28 16:48:33", "supportVersion": [ "ALL" ], - "url": 
"https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-hdfs/2025.1.2/datacap-plugin-hdfs-2025.1.2-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-hdfs/2026.0.0/datacap-plugin-hdfs-2026.0.0-bin.tar.gz" }, { "key": "datacap-plugin-alioss", @@ -206,14 +206,14 @@ "description": "Ali OSS is a cloud storage service that provides a simple, secure and efficient way to store and manage data.", "i18nFormat": true, "type": "Connector", - "version": "2025.1.2", + "version": "2026.0.0", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/plugin/alioss.svg", - "released": "2025-08-17 15:17:44", + "released": "2026-01-28 16:48:33", "supportVersion": [ "ALL" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-alioss/2025.1.2/datacap-plugin-alioss-2025.1.2-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-alioss/2026.0.0/datacap-plugin-alioss-2026.0.0-bin.tar.gz" }, { "key": "datacap-plugin-dremio", @@ -221,14 +221,14 @@ "description": "Dremio is a free, open source, distributed data platform.", "i18nFormat": true, "type": "Connector", - "version": "2025.1.2", + "version": "2026.0.0", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/plugin/dremio.svg", - "released": "2025-08-17 15:17:44", + "released": "2026-01-28 16:48:33", "supportVersion": [ "ALL" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-dremio/2025.1.2/datacap-plugin-dremio-2025.1.2-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-dremio/2026.0.0/datacap-plugin-dremio-2026.0.0-bin.tar.gz" }, { "key": "datacap-plugin-druid", @@ -236,14 +236,14 @@ "description": "A high performance, real-time analytics database that delivers sub-second queries on streaming and batch data at scale and under load.", "i18nFormat": true, "type": "Connector", - "version": "2025.1.2", + "version": "2026.0.0", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/plugin/druid.svg", - "released": "2025-08-17 15:17:44", + "released": "2026-01-28 16:48:33", "supportVersion": [ "ALL" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-druid/2025.1.2/datacap-plugin-druid-2025.1.2-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-druid/2026.0.0/datacap-plugin-druid-2026.0.0-bin.tar.gz" }, { "key": "datacap-plugin-duckdb", @@ -251,14 +251,14 @@ "description": "To use DuckDB, you must first create a connection to a database. 
The exact syntax varies between the client APIs but it typically involves passing an argument to configure persistence.", "i18nFormat": true, "type": "Connector", - "version": "2025.1.2", + "version": "2026.0.0", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/plugin/duckdb.svg", - "released": "2025-08-17 15:17:44", + "released": "2026-01-28 16:48:33", "supportVersion": [ "ALL" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-duckdb/2025.1.2/datacap-plugin-duckdb-2025.1.2-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-duckdb/2026.0.0/datacap-plugin-duckdb-2026.0.0-bin.tar.gz" }, { "key": "datacap-plugin-pinot", @@ -266,14 +266,14 @@ "description": "Apache Pinot is a real-time distributed OLAP datastore, built to deliver scalable real-time analytics with low latency. It can ingest from batch data sources (such as Hadoop HDFS, Amazon S3, Azure ADLS, Google Cloud Storage) as well as stream data sources (such as Apache Kafka).", "i18nFormat": true, "type": "Connector", - "version": "2025.1.2", + "version": "2026.0.0", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/plugin/pinot.svg", - "released": "2025-08-17 15:17:44", + "released": "2026-01-28 16:48:33", "supportVersion": [ "ALL" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-pinot/2025.1.2/datacap-plugin-pinot-2025.1.2-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-pinot/2026.0.0/datacap-plugin-pinot-2026.0.0-bin.tar.gz" }, { "key": "datacap-plugin-cassandra", @@ -281,14 +281,14 @@ "description": "Cassandra is a distributed key-value store for the cloud.", "i18nFormat": true, "type": "Connector", - "version": "2025.1.2", + "version": "2026.0.0", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/plugin/cassandra.svg", - "released": "2025-08-17 15:17:44", + "released": "2026-01-28 16:48:33", "supportVersion": [ "ALL" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-cassandra/2025.1.2/datacap-plugin-cassandra-2025.1.2-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-cassandra/2026.0.0/datacap-plugin-cassandra-2026.0.0-bin.tar.gz" }, { "key": "datacap-plugin-matrixone", @@ -296,14 +296,14 @@ "description": "MatrixOne is a hyper-converged cloud & edge native distributed database with a structure that separates storage, computation, and transactions to form a consolidated HSTAP data engine. This engine enables a single database system to accommodate diverse business loads such as OLTP, OLAP, and stream computing. 
It also supports deployment and utilization across public, private, and edge clouds, ensuring compatibility with diverse infrastructures.", "i18nFormat": true, "type": "Connector", - "version": "2025.1.2", + "version": "2026.0.0", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/plugin/matrixone.svg", - "released": "2025-08-17 15:17:44", + "released": "2026-01-28 16:48:33", "supportVersion": [ "ALL" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-matrixone/2025.1.2/datacap-plugin-matrixone-2025.1.2-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-matrixone/2026.0.0/datacap-plugin-matrixone-2026.0.0-bin.tar.gz" }, { "key": "datacap-plugin-scylladb", @@ -311,14 +311,14 @@ "description": "ScyllaDB is a distributed database written in C++ and designed to fully exploit modern cloud infrastructure. It utilizes a shard-per-core architecture, meaning each CPU core has dedicated resources that independently handle data for maximum efficiency. ", "i18nFormat": true, "type": "Connector", - "version": "2025.1.2", + "version": "2026.0.0", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/plugin/scylladb.svg", - "released": "2025-08-17 15:17:44", + "released": "2026-01-28 16:48:33", "supportVersion": [ "ALL" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-scylladb/2025.1.2/datacap-plugin-scylladb-2025.1.2-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-scylladb/2026.0.0/datacap-plugin-scylladb-2026.0.0-bin.tar.gz" }, { "key": "datacap-plugin-paradedb", @@ -326,14 +326,14 @@ "description": "ParadeDB is a modern Elasticsearch alternative built on Postgres. 
Built for real-time, update-heavy workloads.", "i18nFormat": true, "type": "Connector", - "version": "2025.1.2", + "version": "2026.0.0", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/plugin/paradedb.svg", - "released": "2025-08-17 15:17:44", + "released": "2026-01-28 16:48:33", "supportVersion": [ "ALL" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-paradedb/2025.1.2/datacap-plugin-paradedb-2025.1.2-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-paradedb/2026.0.0/datacap-plugin-paradedb-2026.0.0-bin.tar.gz" }, { "key": "datacap-plugin-hive-2x", @@ -341,14 +341,14 @@ "description": "The Apache Hive ™ is a distributed, fault-tolerant data warehouse system that enables analytics at a massive scale and facilitates reading, writing, and managing petabytes of data residing in distributed storage using SQL.", "i18nFormat": true, "type": "Connector", - "version": "2025.1.2", + "version": "2026.0.0", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/plugin/hive2x.svg", - "released": "2025-08-17 15:17:44", + "released": "2026-01-28 16:48:33", "supportVersion": [ "2.x" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-hive-2x/2025.1.2/datacap-plugin-hive-2x-2025.1.2-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-hive-2x/2026.0.0/datacap-plugin-hive-2x-2026.0.0-bin.tar.gz" }, { "key": "datacap-plugin-kyuubi", @@ -356,14 +356,14 @@ "description": "Apache Kyuubi, a distributed and multi-tenant gateway to provide serverless SQL on lakehouses.", "i18nFormat": true, "type": "Connector", - "version": "2025.1.2", + "version": "2026.0.0", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/plugin/kyuubi.svg", - "released": "2025-08-17 15:17:44", + "released": "2026-01-28 16:48:33", "supportVersion": [ "ALL" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-kyuubi/2025.1.2/datacap-plugin-kyuubi-2025.1.2-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-kyuubi/2026.0.0/datacap-plugin-kyuubi-2026.0.0-bin.tar.gz" }, { "key": "datacap-plugin-ignite", @@ -371,14 +371,14 @@ "description": "Apache Ignite is a distributed database for high-performance computing with in-memory speed.", "i18nFormat": true, "type": "Connector", - "version": "2025.1.2", + "version": "2026.0.0", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/plugin/ignite.svg", - "released": "2025-08-17 15:17:44", + "released": "2026-01-28 16:48:33", "supportVersion": [ "ALL" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-ignite/2025.1.2/datacap-plugin-ignite-2025.1.2-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-ignite/2026.0.0/datacap-plugin-ignite-2026.0.0-bin.tar.gz" }, { "key": "datacap-plugin-impala", @@ -386,14 +386,14 @@ "description": "Apache Impala is the open source, native analytic database for open data and table formats.", "i18nFormat": true, "type": "Connector", - "version": "2025.1.2", + "version": "2026.0.0", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/plugin/impala.svg", - "released": "2025-08-17 15:17:44", + 
"released": "2026-01-28 16:48:33", "supportVersion": [ "ALL" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-impala/2025.1.2/datacap-plugin-impala-2025.1.2-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-impala/2026.0.0/datacap-plugin-impala-2026.0.0-bin.tar.gz" }, { "key": "datacap-plugin-kylin", @@ -401,14 +401,14 @@ "description": "Apache Kylin is a leading open source OLAP engine for Big Data capable for sub-second query latency on trillions of records. Since being created and open sourced by eBay in 2014, and graduated to Top Level Project of Apache Software Foundation in 2015. Kylin has quickly been adopted by thousands of organizations world widely as their critical analytics application for Big Data.", "i18nFormat": true, "type": "Connector", - "version": "2025.1.2", + "version": "2026.0.0", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/plugin/kylin.svg", - "released": "2025-08-17 15:17:44", + "released": "2026-01-28 16:48:33", "supportVersion": [ "ALL" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-kylin/2025.1.2/datacap-plugin-kylin-2025.1.2-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-kylin/2026.0.0/datacap-plugin-kylin-2026.0.0-bin.tar.gz" }, { "key": "datacap-plugin-timescale", @@ -416,14 +416,14 @@ "description": "TimescaleDB is an open-source database designed to make SQL scalable for time-series data. It is engineered up from PostgreSQL and packaged as a PostgreSQL extension, providing automatic partitioning across time and space (partitioning key), as well as full SQL support.", "i18nFormat": true, "type": "Connector", - "version": "2025.1.2", + "version": "2026.0.0", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/plugin/timescale.svg", - "released": "2025-08-17 15:17:44", + "released": "2026-01-28 16:48:33", "supportVersion": [ "ALL" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-timescale/2025.1.2/datacap-plugin-timescale-2025.1.2-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-timescale/2026.0.0/datacap-plugin-timescale-2026.0.0-bin.tar.gz" }, { "key": "datacap-plugin-iotdb", @@ -431,14 +431,14 @@ "description": "Apache IoTDB (Database for Internet of Things) is an IoT native database with high performance for data management and analysis, deployable on the edge and the cloud. 
", "i18nFormat": true, "type": "Connector", - "version": "2025.1.2", + "version": "2026.0.0", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/plugin/iotdb.svg", - "released": "2025-08-17 15:17:44", + "released": "2026-01-28 16:48:33", "supportVersion": [ "ALL" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-iotdb/2025.1.2/datacap-plugin-iotdb-2025.1.2-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-iotdb/2026.0.0/datacap-plugin-iotdb-2026.0.0-bin.tar.gz" }, { "key": "datacap-plugin-monetdb", @@ -446,14 +446,14 @@ "description": "MonetDB is a high performance relational database system for analytics.", "i18nFormat": true, "type": "Connector", - "version": "2025.1.2", + "version": "2026.0.0", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/plugin/monetdb.svg", - "released": "2025-08-17 15:17:44", + "released": "2026-01-28 16:48:33", "supportVersion": [ "ALL" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-monetdb/2025.1.2/datacap-plugin-monetdb-2025.1.2-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-monetdb/2026.0.0/datacap-plugin-monetdb-2026.0.0-bin.tar.gz" }, { "key": "datacap-plugin-ydb", @@ -461,14 +461,14 @@ "description": "YDB is a versatile open source Distributed SQL Database that combines high availability and scalability with strong consistency and ACID transactions. It accommodates transactional (OLTP), analytical (OLAP), and streaming workloads simultaneously.", "i18nFormat": true, "type": "Connector", - "version": "2025.1.2", + "version": "2026.0.0", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/plugin/ydb.svg", - "released": "2025-08-17 15:17:44", + "released": "2026-01-28 16:48:33", "supportVersion": [ "ALL" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-ydb/2025.1.2/datacap-plugin-ydb-2025.1.2-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-ydb/2026.0.0/datacap-plugin-ydb-2026.0.0-bin.tar.gz" }, { "key": "datacap-plugin-neo4j", @@ -476,14 +476,14 @@ "description": "Neo4j is a graph database for knowledge management and analysis.", "i18nFormat": true, "type": "Connector", - "version": "2025.1.2", + "version": "2026.0.0", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/plugin/neo4j.svg", - "released": "2025-08-17 15:17:44", + "released": "2026-01-28 16:48:33", "supportVersion": [ "ALL" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-neo4j/2025.1.2/datacap-plugin-neo4j-2025.1.2-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-neo4j/2026.0.0/datacap-plugin-neo4j-2026.0.0-bin.tar.gz" }, { "key": "datacap-plugin-oceanbase", @@ -491,14 +491,14 @@ "description": "OceanBase is an enterprise distributed relational database with high availability, high performance, horizontal scalability, and compatibility with SQL standards.", "i18nFormat": true, "type": "Connector", - "version": "2025.1.2", + "version": "2026.0.0", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/plugin/oceanbase.svg", - "released": "2025-08-17 15:17:44", + "released": "2026-01-28 16:48:33", 
"supportVersion": [ "ALL" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-oceanbase/2025.1.2/datacap-plugin-oceanbase-2025.1.2-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-oceanbase/2026.0.0/datacap-plugin-oceanbase-2026.0.0-bin.tar.gz" }, { "key": "datacap-plugin-oracle", @@ -506,14 +506,14 @@ "description": "Oracle is an advanced, enterprise-class relational database management system.", "i18nFormat": true, "type": "Connector", - "version": "2025.1.2", + "version": "2026.0.0", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/plugin/oracle.svg", - "released": "2025-08-17 15:17:44", + "released": "2026-01-28 16:48:33", "supportVersion": [ "ALL" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-oracle/2025.1.2/datacap-plugin-oracle-2025.1.2-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-oracle/2026.0.0/datacap-plugin-oracle-2026.0.0-bin.tar.gz" }, { "key": "datacap-plugin-phoenix", @@ -521,14 +521,14 @@ "description": "Apache Phoenix is a SQL skin over HBase delivered as a client-embedded JDBC driver targeting low latency queries over HBase data. ", "i18nFormat": true, "type": "Connector", - "version": "2025.1.2", + "version": "2026.0.0", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/plugin/phoenix.svg", - "released": "2025-08-17 15:17:44", + "released": "2026-01-28 16:48:33", "supportVersion": [ "ALL" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-phoenix/2025.1.2/datacap-plugin-phoenix-2025.1.2-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-phoenix/2026.0.0/datacap-plugin-phoenix-2026.0.0-bin.tar.gz" }, { "key": "datacap-plugin-postgresql", @@ -536,14 +536,14 @@ "description": "PostgreSQL is a powerful, open source object-relational database system.", "i18nFormat": true, "type": "Connector", - "version": "2025.1.2", + "version": "2026.0.0", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/plugin/postgresql.svg", - "released": "2025-08-17 15:17:44", + "released": "2026-01-28 16:48:33", "supportVersion": [ "ALL" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-postgresql/2025.1.2/datacap-plugin-postgresql-2025.1.2-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-postgresql/2026.0.0/datacap-plugin-postgresql-2026.0.0-bin.tar.gz" }, { "key": "datacap-plugin-presto", @@ -551,14 +551,14 @@ "description": "Presto is a distributed SQL query engine for big data.", "i18nFormat": true, "type": "Connector", - "version": "2025.1.2", + "version": "2026.0.0", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/plugin/presto.svg", - "released": "2025-08-17 15:17:44", + "released": "2026-01-28 16:48:33", "supportVersion": [ "ALL" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-presto/2025.1.2/datacap-plugin-presto-2025.1.2-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-presto/2026.0.0/datacap-plugin-presto-2026.0.0-bin.tar.gz" }, { "key": "datacap-plugin-snowflake", @@ -566,14 +566,14 @@ "description": "Snowflake is a database for analytics.", "i18nFormat": true, "type": "Connector", - 
"version": "2025.1.2", + "version": "2026.0.0", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/plugin/snowflake.svg", - "released": "2025-08-17 15:17:44", + "released": "2026-01-28 16:48:33", "supportVersion": [ "ALL" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-snowflake/2025.1.2/datacap-plugin-snowflake-2025.1.2-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-snowflake/2026.0.0/datacap-plugin-snowflake-2026.0.0-bin.tar.gz" }, { "key": "datacap-plugin-sqlserver", @@ -581,14 +581,14 @@ "description": "Microsoft SQL Server is a relational database management system.", "i18nFormat": true, "type": "Connector", - "version": "2025.1.2", + "version": "2026.0.0", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/plugin/sqlserver.svg", - "released": "2025-08-17 15:17:44", + "released": "2026-01-28 16:48:33", "supportVersion": [ "ALL" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-sqlserver/2025.1.2/datacap-plugin-sqlserver-2025.1.2-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-sqlserver/2026.0.0/datacap-plugin-sqlserver-2026.0.0-bin.tar.gz" }, { "key": "datacap-plugin-tdengine", @@ -596,14 +596,14 @@ "description": "TDengine is an open source, high-performance, cloud native time-series database optimized for Internet of Things (IoT), Connected Cars, and Industrial IoT.", "i18nFormat": true, "type": "Connector", - "version": "2025.1.2", + "version": "2026.0.0", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/plugin/tdengine.svg", - "released": "2025-08-17 15:17:44", + "released": "2026-01-28 16:48:33", "supportVersion": [ "ALL" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-tdengine/2025.1.2/datacap-plugin-tdengine-2025.1.2-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-tdengine/2026.0.0/datacap-plugin-tdengine-2026.0.0-bin.tar.gz" }, { "key": "datacap-plugin-trino", @@ -611,14 +611,14 @@ "description": "Trino is a distributed SQL query engine for big data.", "i18nFormat": true, "type": "Connector", - "version": "2025.1.2", + "version": "2026.0.0", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/plugin/trino.svg", - "released": "2025-08-17 15:17:44", + "released": "2026-01-28 16:48:33", "supportVersion": [ "ALL" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-trino/2025.1.2/datacap-plugin-trino-2025.1.2-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-trino/2026.0.0/datacap-plugin-trino-2026.0.0-bin.tar.gz" }, { "key": "datacap-plugin-doris", @@ -626,14 +626,14 @@ "description": "Apache Doris is a distributed OLAP database.", "i18nFormat": true, "type": "Connector", - "version": "2025.1.2", + "version": "2026.0.0", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/plugin/doris.svg", - "released": "2025-08-17 15:17:44", + "released": "2026-01-28 16:48:33", "supportVersion": [ "ALL" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-doris/2025.1.2/datacap-plugin-doris-2025.1.2-bin.tar.gz" + "url": 
"https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-doris/2026.0.0/datacap-plugin-doris-2026.0.0-bin.tar.gz" }, { "key": "datacap-plugin-starrocks", @@ -641,14 +641,14 @@ "description": "StarRocks is a distributed OLAP database.", "i18nFormat": true, "type": "Connector", - "version": "2025.1.2", + "version": "2026.0.0", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/plugin/starrocks.svg", - "released": "2025-08-17 15:17:44", + "released": "2026-01-28 16:48:33", "supportVersion": [ "ALL" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-starrocks/2025.1.2/datacap-plugin-starrocks-2025.1.2-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-starrocks/2026.0.0/datacap-plugin-starrocks-2026.0.0-bin.tar.gz" }, { "key": "datacap-plugin-hologres", @@ -656,14 +656,14 @@ "description": "Hologres is a unified real-time data warehousing service developed by Alibaba Cloud. You can use Hologres to write, update, process, and analyze large amounts of data in real time.", "i18nFormat": true, "type": "Connector", - "version": "2025.1.2", + "version": "2026.0.0", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/plugin/hologres.svg", - "released": "2025-08-17 15:17:44", + "released": "2026-01-28 16:48:33", "supportVersion": [ "ALL" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-hologres/2025.1.2/datacap-plugin-hologres-2025.1.2-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-hologres/2026.0.0/datacap-plugin-hologres-2026.0.0-bin.tar.gz" }, { "key": "datacap-plugin-greptimedb", @@ -671,14 +671,14 @@ "description": "GreptimeDB is an open-source unified & cost-effective time-series database for Metrics, Logs, and Events (also Traces in plan). You can gain real-time insights from Edge to Cloud at Any Scale.", "i18nFormat": true, "type": "Connector", - "version": "2025.1.2", + "version": "2026.0.0", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/plugin/greptimedb.svg", - "released": "2025-08-17 15:17:44", + "released": "2026-01-28 16:48:33", "supportVersion": [ "ALL" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-greptimedb/2025.1.2/datacap-plugin-greptimedb-2025.1.2-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-greptimedb/2026.0.0/datacap-plugin-greptimedb-2026.0.0-bin.tar.gz" }, { "key": "datacap-plugin-questdb", @@ -686,14 +686,14 @@ "description": "QuestDB is the world's fastest growing open-source time-series database. It offers massive ingestion throughput, millisecond queries, powerful time-series SQL extensions, and scales well with minimal and maximal hardware. 
Save costs with better performance and efficiency.", "i18nFormat": true, "type": "Connector", - "version": "2025.1.2", + "version": "2026.0.0", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/plugin/questdb.svg", - "released": "2025-08-17 15:17:44", + "released": "2026-01-28 16:48:33", "supportVersion": [ "ALL" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-questdb/2025.1.2/datacap-plugin-questdb-2025.1.2-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-questdb/2026.0.0/datacap-plugin-questdb-2026.0.0-bin.tar.gz" }, { "key": "datacap-plugin-h2-tcp", @@ -701,14 +701,14 @@ "description": "Use TCP to access the remote H2 database", "i18nFormat": true, "type": "Connector", - "version": "2025.1.2", + "version": "2026.0.0", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/plugin/h2tcp.svg", - "released": "2025-08-17 15:17:44", + "released": "2026-01-28 16:48:33", "supportVersion": [ "ALL" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-h2-tcp/2025.1.2/datacap-plugin-h2-tcp-2025.1.2-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-h2-tcp/2026.0.0/datacap-plugin-h2-tcp-2026.0.0-bin.tar.gz" }, { "key": "datacap-plugin-clickhouse-http", @@ -716,14 +716,14 @@ "description": "Use HTTP to access the remote ClickHouse database", "i18nFormat": true, "type": "Connector", - "version": "2025.1.2", + "version": "2026.0.0", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/plugin/clickhousehttp.svg", - "released": "2025-08-17 15:17:44", + "released": "2026-01-28 16:48:33", "supportVersion": [ "ALL" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-clickhouse-http/2025.1.2/datacap-plugin-clickhouse-http-2025.1.2-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-clickhouse-http/2026.0.0/datacap-plugin-clickhouse-http-2026.0.0-bin.tar.gz" }, { "key": "datacap-plugin-ceresdb", @@ -731,14 +731,14 @@ "description": "CeresDB is a database system designed to allow for the storage and retrieval of semi-structured data, i.e. 
data that conforms to a “top-level schema” where columns types are known, but those columns can in-turn contain dictionaries or lists.", "i18nFormat": true, "type": "Connector", - "version": "2025.1.2", + "version": "2026.0.0", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/plugin/ceresdb.svg", - "released": "2025-08-17 15:17:44", + "released": "2026-01-28 16:48:33", "supportVersion": [ "ALL" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-ceresdb/2025.1.2/datacap-plugin-ceresdb-2025.1.2-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-ceresdb/2026.0.0/datacap-plugin-ceresdb-2026.0.0-bin.tar.gz" }, { "key": "datacap-plugin-redis-jdbc", @@ -746,14 +746,14 @@ "description": "Use JDBC to access Redis", "i18nFormat": true, "type": "Connector", - "version": "2025.1.2", + "version": "2026.0.0", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/plugin/redisjdbc.svg", - "released": "2025-08-17 15:17:44", + "released": "2026-01-28 16:48:33", "supportVersion": [ "ALL" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-redis-jdbc/2025.1.2/datacap-plugin-redis-jdbc-2025.1.2-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-redis-jdbc/2026.0.0/datacap-plugin-redis-jdbc-2026.0.0-bin.tar.gz" }, { "key": "datacap-plugin-elasticsearch-8x", @@ -761,14 +761,14 @@ "description": "Use Elasticsearch 8.x", "i18nFormat": true, "type": "Connector", - "version": "2025.1.2", + "version": "2026.0.0", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/plugin/elasticsearch8x.svg", - "released": "2025-08-17 15:17:44", + "released": "2026-01-28 16:48:33", "supportVersion": [ "8.x" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-elasticsearch-8x/2025.1.2/datacap-plugin-elasticsearch-8x-2025.1.2-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-elasticsearch-8x/2026.0.0/datacap-plugin-elasticsearch-8x-2026.0.0-bin.tar.gz" }, { "key": "datacap-plugin-mongo-atlas", @@ -776,14 +776,14 @@ "description": "The MongoDB Atlas SQL JDBC Driver provides SQL connectivity to MongoDB Atlas for client applications developed in Java.", "i18nFormat": true, "type": "Connector", - "version": "2025.1.2", + "version": "2026.0.0", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/plugin/mongoatlas.svg", - "released": "2025-08-17 15:17:44", + "released": "2026-01-28 16:48:33", "supportVersion": [ "ALL" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-mongo-atlas/2025.1.2/datacap-plugin-mongo-atlas-2025.1.2-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-mongo-atlas/2026.0.0/datacap-plugin-mongo-atlas-2026.0.0-bin.tar.gz" }, { "key": "datacap-plugin-mongo-community", @@ -791,14 +791,14 @@ "description": "The MongoDB Community SQL JDBC Driver provides SQL connectivity to MongoDB Community for client applications developed in Java.", "i18nFormat": true, "type": "Connector", - "version": "2025.1.2", + "version": "2026.0.0", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/plugin/mongocommunity.svg", - "released": "2025-08-17 15:17:44", + 
"released": "2026-01-28 16:48:33", "supportVersion": [ "ALL" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-mongo-community/2025.1.2/datacap-plugin-mongo-community-2025.1.2-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-mongo-community/2026.0.0/datacap-plugin-mongo-community-2026.0.0-bin.tar.gz" }, { "key": "datacap-plugin-dolphindb", @@ -806,14 +806,14 @@ "description": "DolphinDB is a real-time platform for analytics and stream processing, powered by a high-performance time series database. It offers capabilities related to efficient writes, fast queries, complex analysis, distributed parallel computing, and low-latency stream computing. It also supports high availability.", "i18nFormat": true, "type": "Connector", - "version": "2025.1.2", + "version": "2026.0.0", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/plugin/dolphindb.svg", - "released": "2025-08-17 15:17:44", + "released": "2026-01-28 16:48:33", "supportVersion": [ "ALL" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-dolphindb/2025.1.2/datacap-plugin-dolphindb-2025.1.2-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-dolphindb/2026.0.0/datacap-plugin-dolphindb-2026.0.0-bin.tar.gz" }, { "key": "datacap-convert-csv", @@ -821,14 +821,14 @@ "description": "A CSV file conversion plugin for DataCap.", "i18nFormat": true, "type": "Converter", - "version": "2025.1.2", + "version": "2026.0.0", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/convert/csv.svg", - "released": "2025-08-17 15:17:44", + "released": "2026-01-28 16:48:33", "supportVersion": [ "ALL" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-convert-csv/2025.1.2/datacap-convert-csv-2025.1.2-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-convert-csv/2026.0.0/datacap-convert-csv-2026.0.0-bin.tar.gz" }, { "key": "datacap-convert-txt", @@ -836,14 +836,14 @@ "description": "A TXT file conversion plugin for DataCap.", "i18nFormat": true, "type": "Converter", - "version": "2025.1.2", + "version": "2026.0.0", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/convert/txt.svg", - "released": "2025-08-17 15:17:44", + "released": "2026-01-28 16:48:33", "supportVersion": [ "ALL" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-convert-txt/2025.1.2/datacap-convert-txt-2025.1.2-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-convert-txt/2026.0.0/datacap-convert-txt-2026.0.0-bin.tar.gz" }, { "key": "datacap-convert-json", @@ -851,14 +851,14 @@ "description": "A JSON file conversion plugin for DataCap.", "i18nFormat": true, "type": "Converter", - "version": "2025.1.2", + "version": "2026.0.0", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/convert/json.svg", - "released": "2025-08-17 15:17:44", + "released": "2026-01-28 16:48:33", "supportVersion": [ "ALL" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-convert-json/2025.1.2/datacap-convert-json-2025.1.2-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-convert-json/2026.0.0/datacap-convert-json-2026.0.0-bin.tar.gz" }, { "key": "datacap-convert-xml", @@ -866,14 +866,14 
@@ "description": "A XML file conversion plugin for DataCap.", "i18nFormat": true, "type": "Converter", - "version": "2025.1.2", + "version": "2026.0.0", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/convert/xml.svg", - "released": "2025-08-17 15:17:44", + "released": "2026-01-28 16:48:33", "supportVersion": [ "ALL" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-convert-xml/2025.1.2/datacap-convert-xml-2025.1.2-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-convert-xml/2026.0.0/datacap-convert-xml-2026.0.0-bin.tar.gz" }, { "key": "datacap-convert-none", @@ -881,14 +881,14 @@ "description": "A multidimensional arrays file conversion plugin for DataCap.", "i18nFormat": true, "type": "Converter", - "version": "2025.1.2", + "version": "2026.0.0", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/convert/none.svg", - "released": "2025-08-17 15:17:44", + "released": "2026-01-28 16:48:33", "supportVersion": [ "ALL" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-convert-none/2025.1.2/datacap-convert-none-2025.1.2-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-convert-none/2026.0.0/datacap-convert-none-2026.0.0-bin.tar.gz" }, { "key": "datacap-executor-local", @@ -896,14 +896,14 @@ "description": "A local execution plugin for DataCap.", "i18nFormat": true, "type": "Executor", - "version": "2025.1.2", + "version": "2026.0.0", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/executor/local.svg", - "released": "2025-08-17 15:17:44", + "released": "2026-01-28 16:48:33", "supportVersion": [ "ALL" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-executor-local/2025.1.2/datacap-executor-local-2025.1.2-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-executor-local/2026.0.0/datacap-executor-local-2026.0.0-bin.tar.gz" }, { "key": "datacap-executor-seatunnel", @@ -911,14 +911,14 @@ "description": "A Seatunnel execution plugin for DataCap.", "i18nFormat": true, "type": "Executor", - "version": "2025.1.2", + "version": "2026.0.0", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/executor/seatunnel.svg", - "released": "2025-08-17 15:17:44", + "released": "2026-01-28 16:48:33", "supportVersion": [ "ALL" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-executor-seatunnel/2025.1.2/datacap-executor-seatunnel-2025.1.2-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-executor-seatunnel/2026.0.0/datacap-executor-seatunnel-2026.0.0-bin.tar.gz" }, { "key": "datacap-scheduler-local", @@ -926,14 +926,14 @@ "description": "A local scheduling plugin for DataCap.", "i18nFormat": true, "type": "Scheduler", - "version": "2025.1.2", + "version": "2026.0.0", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/scheduler/local.svg", - "released": "2025-08-17 15:17:44", + "released": "2026-01-28 16:48:33", "supportVersion": [ "ALL" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-scheduler-local/2025.1.2/datacap-scheduler-local-2025.1.2-bin.tar.gz" + "url": 
"https://repo1.maven.org/maven2/io/edurt/datacap/datacap-scheduler-local/2026.0.0/datacap-scheduler-local-2026.0.0-bin.tar.gz" }, { "key": "datacap-fs-local", @@ -941,14 +941,14 @@ "description": "A local file system plugin for DataCap.", "i18nFormat": true, "type": "FileSystem", - "version": "2025.1.2", + "version": "2026.0.0", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/fs/local.svg", - "released": "2025-08-17 15:17:44", + "released": "2026-01-28 16:48:33", "supportVersion": [ "ALL" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-fs-local/2025.1.2/datacap-fs-local-2025.1.2-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-fs-local/2026.0.0/datacap-fs-local-2026.0.0-bin.tar.gz" }, { "key": "datacap-fs-qiniu", @@ -956,14 +956,14 @@ "description": "A Qiniu file system plugin for DataCap.", "i18nFormat": true, "type": "FileSystem", - "version": "2025.1.2", + "version": "2026.0.0", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/fs/qiniu.svg", - "released": "2025-08-17 15:17:44", + "released": "2026-01-28 16:48:33", "supportVersion": [ "ALL" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-fs-qiniu/2025.1.2/datacap-fs-qiniu-2025.1.2-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-fs-qiniu/2026.0.0/datacap-fs-qiniu-2026.0.0-bin.tar.gz" }, { "key": "datacap-fs-alioss", @@ -971,14 +971,14 @@ "description": "A Ali OSS file system plugin for DataCap.", "i18nFormat": true, "type": "FileSystem", - "version": "2025.1.2", + "version": "2026.0.0", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/fs/alioss.svg", - "released": "2025-08-17 15:17:44", + "released": "2026-01-28 16:48:33", "supportVersion": [ "ALL" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-fs-alioss/2025.1.2/datacap-fs-alioss-2025.1.2-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-fs-alioss/2026.0.0/datacap-fs-alioss-2026.0.0-bin.tar.gz" }, { "key": "datacap-fs-tencent-cos", @@ -986,14 +986,14 @@ "description": "A Tencent COS file system plugin for DataCap.", "i18nFormat": true, "type": "FileSystem", - "version": "2025.1.2", + "version": "2026.0.0", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/fs/tencent-cos.svg", - "released": "2025-08-17 15:17:44", + "released": "2026-01-28 16:48:33", "supportVersion": [ "ALL" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-fs-tencent-cos/2025.1.2/datacap-fs-tencent-cos-2025.1.2-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-fs-tencent-cos/2026.0.0/datacap-fs-tencent-cos-2026.0.0-bin.tar.gz" }, { "key": "datacap-fs-amazon-s3", @@ -1001,14 +1001,14 @@ "description": "A Amazon S3 file system plugin for DataCap.", "i18nFormat": true, "type": "FileSystem", - "version": "2025.1.2", + "version": "2026.0.0", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/fs/amazon-s3.svg", - "released": "2025-08-17 15:17:44", + "released": "2026-01-28 16:48:33", "supportVersion": [ "ALL" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-fs-amazon-s3/2025.1.2/datacap-fs-amazon-s3-2025.1.2-bin.tar.gz" + "url": 
"https://repo1.maven.org/maven2/io/edurt/datacap/datacap-fs-amazon-s3/2026.0.0/datacap-fs-amazon-s3-2026.0.0-bin.tar.gz" }, { "key": "datacap-fs-minio", @@ -1016,14 +1016,14 @@ "description": "A Minio file system plugin for DataCap.", "i18nFormat": true, "type": "FileSystem", - "version": "2025.1.2", + "version": "2026.0.0", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/fs/minio.svg", - "released": "2025-08-17 15:17:44", + "released": "2026-01-28 16:48:33", "supportVersion": [ "ALL" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-fs-minio/2025.1.2/datacap-fs-minio-2025.1.2-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-fs-minio/2026.0.0/datacap-fs-minio-2026.0.0-bin.tar.gz" } ] } \ No newline at end of file diff --git a/convert/datacap-convert-csv/pom.xml b/convert/datacap-convert-csv/pom.xml index 117333d118..974da8d400 100644 --- a/convert/datacap-convert-csv/pom.xml +++ b/convert/datacap-convert-csv/pom.xml @@ -5,7 +5,7 @@ io.edurt.datacap datacap - 2025.1.2 + 2026.0.0 ../../pom.xml 4.0.0 diff --git a/convert/datacap-convert-json/pom.xml b/convert/datacap-convert-json/pom.xml index 54d702aedf..663d5b01aa 100644 --- a/convert/datacap-convert-json/pom.xml +++ b/convert/datacap-convert-json/pom.xml @@ -5,7 +5,7 @@ io.edurt.datacap datacap - 2025.1.2 + 2026.0.0 ../../pom.xml 4.0.0 diff --git a/convert/datacap-convert-none/pom.xml b/convert/datacap-convert-none/pom.xml index 2cfedc80c3..88a9612c02 100644 --- a/convert/datacap-convert-none/pom.xml +++ b/convert/datacap-convert-none/pom.xml @@ -5,7 +5,7 @@ io.edurt.datacap datacap - 2025.1.2 + 2026.0.0 ../../pom.xml 4.0.0 diff --git a/convert/datacap-convert-spi/pom.xml b/convert/datacap-convert-spi/pom.xml index 81d9952a3b..bc1f5b48b2 100644 --- a/convert/datacap-convert-spi/pom.xml +++ b/convert/datacap-convert-spi/pom.xml @@ -5,7 +5,7 @@ io.edurt.datacap datacap - 2025.1.2 + 2026.0.0 ../../pom.xml 4.0.0 diff --git a/convert/datacap-convert-txt/pom.xml b/convert/datacap-convert-txt/pom.xml index 4b4e5cb44b..a9826ed151 100644 --- a/convert/datacap-convert-txt/pom.xml +++ b/convert/datacap-convert-txt/pom.xml @@ -5,7 +5,7 @@ io.edurt.datacap datacap - 2025.1.2 + 2026.0.0 ../../pom.xml 4.0.0 diff --git a/convert/datacap-convert-xml/pom.xml b/convert/datacap-convert-xml/pom.xml index 08a8f3d4f8..335c5a62c4 100644 --- a/convert/datacap-convert-xml/pom.xml +++ b/convert/datacap-convert-xml/pom.xml @@ -5,7 +5,7 @@ io.edurt.datacap datacap - 2025.1.2 + 2026.0.0 ../../pom.xml 4.0.0 diff --git a/core/datacap-captcha/pom.xml b/core/datacap-captcha/pom.xml index e2ca7594a2..08b94418ae 100644 --- a/core/datacap-captcha/pom.xml +++ b/core/datacap-captcha/pom.xml @@ -6,7 +6,7 @@ io.edurt.datacap datacap - 2025.1.2 + 2026.0.0 ../../pom.xml diff --git a/core/datacap-common/pom.xml b/core/datacap-common/pom.xml index 3dfd02466c..c0fba0db2c 100644 --- a/core/datacap-common/pom.xml +++ b/core/datacap-common/pom.xml @@ -6,7 +6,7 @@ io.edurt.datacap datacap - 2025.1.2 + 2026.0.0 ../../pom.xml diff --git a/core/datacap-condor/pom.xml b/core/datacap-condor/pom.xml index 0557a51b88..4a59452c27 100644 --- a/core/datacap-condor/pom.xml +++ b/core/datacap-condor/pom.xml @@ -4,7 +4,7 @@ io.edurt.datacap datacap - 2025.1.2 + 2026.0.0 ../../pom.xml diff --git a/core/datacap-condor/src/main/java/io/edurt/datacap/condor/ComparisonOperator.java b/core/datacap-condor/src/main/java/io/edurt/datacap/condor/ComparisonOperator.java index 
93c5e1d5cb..d7a8463f4e 100644
--- a/core/datacap-condor/src/main/java/io/edurt/datacap/condor/ComparisonOperator.java
+++ b/core/datacap-condor/src/main/java/io/edurt/datacap/condor/ComparisonOperator.java
@@ -3,6 +3,9 @@
 public enum ComparisonOperator
 {
     EQUALS,
+    NOT_EQUALS,
     GREATER_THAN,
-    LESS_THAN
+    GREATER_THAN_OR_EQUALS,
+    LESS_THAN,
+    LESS_THAN_OR_EQUALS
 }
diff --git a/core/datacap-condor/src/main/java/io/edurt/datacap/condor/DataType.java b/core/datacap-condor/src/main/java/io/edurt/datacap/condor/DataType.java
index 6d2d870960..982e5bdbf6 100644
--- a/core/datacap-condor/src/main/java/io/edurt/datacap/condor/DataType.java
+++ b/core/datacap-condor/src/main/java/io/edurt/datacap/condor/DataType.java
@@ -2,8 +2,28 @@
 
 public enum DataType
 {
-    INTEGER,
+    CHARACTER,
+    CHAR,
     VARCHAR,
+    BINARY,
+    VARBINARY,
+    TINYINT,
+    SMALLINT,
+    INTEGER,
+    INT,
+    BIGINT,
+    FLOAT,
+    REAL,
+    DOUBLE,
+    DECIMAL,
+    NUMERIC,
+    DATE,
+    TIME,
+    TIMESTAMP,
+    DATETIME,
     BOOLEAN,
-    DOUBLE
+    BLOB,
+    TEXT,
+    JSON,
+    XML
 }
diff --git a/core/datacap-condor/src/main/java/io/edurt/datacap/condor/SQLExecutor.java b/core/datacap-condor/src/main/java/io/edurt/datacap/condor/SQLExecutor.java
index 7a273701d6..aba79b61b2 100644
--- a/core/datacap-condor/src/main/java/io/edurt/datacap/condor/SQLExecutor.java
+++ b/core/datacap-condor/src/main/java/io/edurt/datacap/condor/SQLExecutor.java
@@ -1,5 +1,9 @@
 package io.edurt.datacap.condor;
 
+import io.edurt.datacap.condor.condition.AndCondition;
+import io.edurt.datacap.condor.condition.Condition;
+import io.edurt.datacap.condor.condition.OrCondition;
+import io.edurt.datacap.condor.condition.SimpleCondition;
 import io.edurt.datacap.condor.manager.DatabaseManager;
 import io.edurt.datacap.condor.manager.TableManager;
 import io.edurt.datacap.condor.metadata.ColumnDefinition;
@@ -9,23 +13,29 @@
 import io.edurt.datacap.sql.SQLParser;
 import io.edurt.datacap.sql.node.ColumnConstraint;
 import io.edurt.datacap.sql.node.ConstraintType;
+import io.edurt.datacap.sql.node.Expression;
 import io.edurt.datacap.sql.node.TableConstraint;
 import io.edurt.datacap.sql.node.element.ColumnElement;
 import io.edurt.datacap.sql.node.element.SelectElement;
 import io.edurt.datacap.sql.node.element.TableElement;
 import io.edurt.datacap.sql.statement.CreateDatabaseStatement;
 import io.edurt.datacap.sql.statement.CreateTableStatement;
+import io.edurt.datacap.sql.statement.DeleteStatement;
 import io.edurt.datacap.sql.statement.DropDatabaseStatement;
 import io.edurt.datacap.sql.statement.DropTableStatement;
 import io.edurt.datacap.sql.statement.InsertStatement;
 import io.edurt.datacap.sql.statement.SQLStatement;
 import io.edurt.datacap.sql.statement.SelectStatement;
+import io.edurt.datacap.sql.statement.ShowStatement;
+import io.edurt.datacap.sql.statement.UpdateStatement;
 import io.edurt.datacap.sql.statement.UseDatabaseStatement;
 
 import java.util.ArrayList;
 import java.util.Arrays;
+import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
+import java.util.Map;
 import java.util.Set;
 import java.util.stream.Collectors;
 
@@ -45,42 +55,49 @@ public SQLResult execute(String sql)
         SQLStatement statement = SQLParser.parse(sql);
 
         if (statement instanceof CreateDatabaseStatement) {
-            CreateDatabaseStatement createDatabaseStatement = (CreateDatabaseStatement) statement;
-            return (SQLResult) executeCreateDatabase(createDatabaseStatement);
+            return (SQLResult) executeCreateDatabase((CreateDatabaseStatement) statement);
         }
 
         if (statement instanceof DropDatabaseStatement) {
-            DropDatabaseStatement
dropDatabaseStatement = (DropDatabaseStatement) statement; - return (SQLResult) executeDropDatabase(dropDatabaseStatement); + return (SQLResult) executeDropDatabase((DropDatabaseStatement) statement); } if (statement instanceof UseDatabaseStatement) { - UseDatabaseStatement useDatabaseStatement = (UseDatabaseStatement) statement; - return (SQLResult) executeUseDatabase(useDatabaseStatement); + return (SQLResult) executeUseDatabase((UseDatabaseStatement) statement); + } + + if (statement instanceof ShowStatement) { + return (SQLResult) executeShow((ShowStatement) statement); } if (statement instanceof CreateTableStatement) { ensureCurrentTableManager(); - CreateTableStatement createTableStatement = (CreateTableStatement) statement; - return (SQLResult) executeCreateTable(createTableStatement); + return (SQLResult) executeCreateTable((CreateTableStatement) statement); } if (statement instanceof DropTableStatement) { ensureCurrentTableManager(); - DropTableStatement dropTableStatement = (DropTableStatement) statement; - return (SQLResult) executeDropTable(dropTableStatement); + return (SQLResult) executeDropTable((DropTableStatement) statement); } if (statement instanceof InsertStatement) { ensureCurrentTableManager(); - InsertStatement insertStatement = (InsertStatement) statement; - return (SQLResult) executeInsert(insertStatement); + return (SQLResult) executeInsert((InsertStatement) statement); } if (statement instanceof SelectStatement) { ensureCurrentTableManager(); - SelectStatement selectStatement = (SelectStatement) statement; - return (SQLResult) executeSelect(selectStatement); + return (SQLResult) executeSelect((SelectStatement) statement); + } + + if (statement instanceof UpdateStatement) { + ensureCurrentTableManager(); + return (SQLResult) executeUpdate((UpdateStatement) statement); + } + + if (statement instanceof DeleteStatement) { + ensureCurrentTableManager(); + return (SQLResult) executeDelete((DeleteStatement) statement); } return new SQLResult<>(false, String.format("Unsupported SQL statement: %s", statement)); @@ -104,14 +121,10 @@ private SQLResult executeCreateDatabase(CreateDatabaseStatement statement) try { String databaseName = statement.getDatabaseName(); - // 检查是否带有 IF NOT EXISTS - // Check if IF NOT EXISTS is present if (statement.isIfNotExists() && databaseManager.databaseExists(databaseName)) { return new SQLResult<>(true, "Database already exists"); } - // 执行创建数据库 - // Execute database creation databaseManager.createDatabase(databaseName); return new SQLResult<>(true, "Database created successfully"); } @@ -140,6 +153,7 @@ private SQLResult executeUseDatabase(UseDatabaseStatement statement) try { String databaseName = statement.getDatabaseName(); databaseManager.useDatabase(databaseName); + tableManager = null; return new SQLResult<>(true, "Database changed"); } catch (DatabaseException e) { @@ -147,9 +161,58 @@ private SQLResult executeUseDatabase(UseDatabaseStatement statement) } } + private SQLResult> executeShow(ShowStatement statement) + { + try { + List rows = new ArrayList<>(); + + switch (statement.getShowType()) { + case DATABASES: + for (String dbName : databaseManager.listDatabases()) { + RowDefinition row = new RowDefinition(); + row.setValue("Database", dbName); + rows.add(row); + } + break; + case TABLES: + ensureCurrentTableManager(); + for (String tableName : tableManager.listTables()) { + RowDefinition row = new RowDefinition(); + row.setValue("Table", tableName); + rows.add(row); + } + break; + case COLUMNS: + ensureCurrentTableManager(); + 
String tableName = statement.getTableName(); + TableDefinition metadata = tableManager.getTableMetadata(tableName); + for (ColumnDefinition col : metadata.getColumns()) { + RowDefinition row = new RowDefinition(); + row.setValue("Field", col.getName()); + row.setValue("Type", col.getType().name()); + row.setValue("Null", col.isNullable() ? "YES" : "NO"); + row.setValue("Key", col.isPrimaryKey() ? "PRI" : ""); + rows.add(row); + } + break; + default: + return new SQLResult<>(false, "Unsupported SHOW type: " + statement.getShowType()); + } + + return new SQLResult<>(true, String.format("Show %d rows", rows.size()), rows); + } + catch (Exception e) { + return new SQLResult<>(false, "Failed to execute SHOW: " + e.getMessage()); + } + } + private SQLResult executeCreateTable(CreateTableStatement statement) { try { + if (statement.isIfNotExists() && tableManager.tableExists(statement.getTableName())) { + return new SQLResult<>(true, "Table already exists"); + } + List columns = convertToColumns(statement.getColumns()); TableDefinition metadata = new TableDefinition(statement.getTableName(), columns); @@ -164,7 +227,12 @@ private SQLResult executeCreateTable(CreateTableStatement statement) private SQLResult executeDropTable(DropTableStatement statement) { try { - tableManager.dropTable(statement.getTableNames().get(0)); + String tableName = statement.getTableNames().get(0); + if (statement.isIfExists() && !tableManager.tableExists(tableName)) { + return new SQLResult<>(true, "Table does not exist"); + } + + tableManager.dropTable(tableName); return new SQLResult<>(true, "Table dropped successfully"); } catch (Exception e) { @@ -175,7 +243,6 @@ private SQLResult executeDropTable(DropTableStatement statement) private SQLResult executeInsert(InsertStatement statement) { try { - // TODO: Support check is multiple insert for InsertStatement if (statement.getSimpleValues().size() == 1) { tableManager.insert( statement.getTableName(), @@ -201,12 +268,21 @@ private SQLResult executeInsert(InsertStatement statement) private SQLResult> executeSelect(SelectStatement statement) { try { + Condition whereCondition = null; + if (statement.getWhereClause() != null) { + whereCondition = convertExpressionToCondition(statement.getWhereClause()); + } + + List columnNames = statement.getSelectElements().stream() + .map(SelectElement::getColumn) + .collect(Collectors.toList()); + + boolean isSelectAll = columnNames.stream().anyMatch("*"::equals); + List rows = tableManager.select( statement.getFromSources().get(0).getTableName(), - statement.getSelectElements().stream() - .map(SelectElement::getColumn) - .collect(Collectors.toList()), - null + isSelectAll ? 
null : columnNames, + whereCondition ); return new SQLResult<>( true, @@ -219,6 +295,106 @@ private SQLResult> executeSelect(SelectStatement statement) } } + private SQLResult executeUpdate(UpdateStatement statement) + { + try { + Condition whereCondition = null; + if (statement.getWhereClause() != null) { + whereCondition = convertExpressionToCondition(statement.getWhereClause()); + } + + Map setValues = new HashMap<>(); + for (Map.Entry entry : statement.getSetValues().entrySet()) { + setValues.put(entry.getKey(), entry.getValue().getValue()); + } + + int updatedCount = tableManager.update( + statement.getTableName(), + setValues, + whereCondition + ); + return new SQLResult<>(true, String.format("Updated %d rows", updatedCount), updatedCount); + } + catch (Exception e) { + return new SQLResult<>(false, "Failed to update rows: " + e.getMessage()); + } + } + + private SQLResult executeDelete(DeleteStatement statement) + { + try { + Condition whereCondition = null; + if (statement.getWhereClause() != null) { + whereCondition = convertExpressionToCondition(statement.getWhereClause()); + } + + int deletedCount = tableManager.delete( + statement.getTableName(), + whereCondition + ); + return new SQLResult<>(true, String.format("Deleted %d rows", deletedCount), deletedCount); + } + catch (Exception e) { + return new SQLResult<>(false, "Failed to delete rows: " + e.getMessage()); + } + } + + private Condition convertExpressionToCondition(Expression expression) + { + if (expression == null) { + return null; + } + + if (expression.getType() == Expression.ExpressionType.BINARY_OP) { + String operator = (String) expression.getValue(); + List children = expression.getChildren(); + + if ("AND".equalsIgnoreCase(operator)) { + Condition left = convertExpressionToCondition(children.get(0)); + Condition right = convertExpressionToCondition(children.get(1)); + return new AndCondition(left, right); + } + + if ("OR".equalsIgnoreCase(operator)) { + Condition left = convertExpressionToCondition(children.get(0)); + Condition right = convertExpressionToCondition(children.get(1)); + return new OrCondition(left, right); + } + + Expression leftExpr = children.get(0); + Expression rightExpr = children.get(1); + + String columnName = (String) leftExpr.getValue(); + Object value = rightExpr.getValue(); + + ComparisonOperator compOp = convertOperator(operator); + return new SimpleCondition(columnName, value, compOp); + } + + return null; + } + + private ComparisonOperator convertOperator(String operator) + { + switch (operator) { + case "=": + return ComparisonOperator.EQUALS; + case "!=": + case "<>": + return ComparisonOperator.NOT_EQUALS; + case ">": + return ComparisonOperator.GREATER_THAN; + case ">=": + return ComparisonOperator.GREATER_THAN_OR_EQUALS; + case "<": + return ComparisonOperator.LESS_THAN; + case "<=": + return ComparisonOperator.LESS_THAN_OR_EQUALS; + default: + return ComparisonOperator.EQUALS; + } + } + private List convertToColumns(List elements) { List columns = new ArrayList<>(); @@ -265,8 +441,6 @@ else if (constraint.getType() == ConstraintType.NOT_NULL) { private DataType convertDataType(io.edurt.datacap.sql.node.DataType sourceType) { - // Implement conversion logic from source DataType to target DataType - // This depends on your DataType enum definition return DataType.valueOf(sourceType.getBaseType()); } } diff --git a/core/datacap-condor/src/main/java/io/edurt/datacap/condor/condition/AndCondition.java 
b/core/datacap-condor/src/main/java/io/edurt/datacap/condor/condition/AndCondition.java new file mode 100644 index 0000000000..145c3e3c85 --- /dev/null +++ b/core/datacap-condor/src/main/java/io/edurt/datacap/condor/condition/AndCondition.java @@ -0,0 +1,22 @@ +package io.edurt.datacap.condor.condition; + +import io.edurt.datacap.condor.metadata.RowDefinition; + +public class AndCondition + implements Condition +{ + private final Condition left; + private final Condition right; + + public AndCondition(Condition left, Condition right) + { + this.left = left; + this.right = right; + } + + @Override + public boolean evaluate(RowDefinition row) + { + return left.evaluate(row) && right.evaluate(row); + } +} diff --git a/core/datacap-condor/src/main/java/io/edurt/datacap/condor/condition/OrCondition.java b/core/datacap-condor/src/main/java/io/edurt/datacap/condor/condition/OrCondition.java new file mode 100644 index 0000000000..ec307d7062 --- /dev/null +++ b/core/datacap-condor/src/main/java/io/edurt/datacap/condor/condition/OrCondition.java @@ -0,0 +1,22 @@ +package io.edurt.datacap.condor.condition; + +import io.edurt.datacap.condor.metadata.RowDefinition; + +public class OrCondition + implements Condition +{ + private final Condition left; + private final Condition right; + + public OrCondition(Condition left, Condition right) + { + this.left = left; + this.right = right; + } + + @Override + public boolean evaluate(RowDefinition row) + { + return left.evaluate(row) || right.evaluate(row); + } +} diff --git a/core/datacap-condor/src/main/java/io/edurt/datacap/condor/condition/SimpleCondition.java b/core/datacap-condor/src/main/java/io/edurt/datacap/condor/condition/SimpleCondition.java index 7d7098833b..59d90860fb 100644 --- a/core/datacap-condor/src/main/java/io/edurt/datacap/condor/condition/SimpleCondition.java +++ b/core/datacap-condor/src/main/java/io/edurt/datacap/condor/condition/SimpleCondition.java @@ -3,8 +3,6 @@ import io.edurt.datacap.condor.ComparisonOperator; import io.edurt.datacap.condor.metadata.RowDefinition; -import java.util.Comparator; - public class SimpleCondition implements Condition { @@ -19,24 +17,44 @@ public SimpleCondition(String columnName, Object value, ComparisonOperator opera this.operator = operator; } + @SuppressWarnings("unchecked") @Override public boolean evaluate(RowDefinition row) { Object rowValue = row.getValue(columnName); if (rowValue == null || value == null) { - return false; + return operator == ComparisonOperator.EQUALS && rowValue == value; } switch (operator) { case EQUALS: return value.equals(rowValue); + case NOT_EQUALS: + return !value.equals(rowValue); case GREATER_THAN: - return Comparator.comparing(Object::toString).compare(rowValue, value) > 0; + return compareValues(rowValue, value) > 0; + case GREATER_THAN_OR_EQUALS: + return compareValues(rowValue, value) >= 0; case LESS_THAN: - return Comparator.comparing(Object::toString).compare(rowValue, value) < 0; -// return ((Comparable) rowValue).compareTo(value) < 0; + return compareValues(rowValue, value) < 0; + case LESS_THAN_OR_EQUALS: + return compareValues(rowValue, value) <= 0; default: return false; } } + + @SuppressWarnings("unchecked") + private int compareValues(Object left, Object right) + { + if (left instanceof Comparable && right instanceof Comparable) { + if (left.getClass() == right.getClass()) { + return ((Comparable) left).compareTo(right); + } + if (left instanceof Number && right instanceof Number) { + return Double.compare(((Number) left).doubleValue(), ((Number) 
right).doubleValue()); + } + } + return left.toString().compareTo(right.toString()); + } } diff --git a/core/datacap-condor/src/main/java/io/edurt/datacap/condor/manager/TableManager.java b/core/datacap-condor/src/main/java/io/edurt/datacap/condor/manager/TableManager.java index 0d0384a098..b01c770a86 100644 --- a/core/datacap-condor/src/main/java/io/edurt/datacap/condor/manager/TableManager.java +++ b/core/datacap-condor/src/main/java/io/edurt/datacap/condor/manager/TableManager.java @@ -17,7 +17,6 @@ import java.io.ObjectOutputStream; import java.nio.file.Files; import java.nio.file.Path; -import java.nio.file.Paths; import java.nio.file.StandardOpenOption; import java.util.ArrayList; import java.util.Comparator; @@ -189,7 +188,9 @@ public int update(String tableName, Map setValues, Condition whe for (RowDefinition row : rows) { if (whereCondition == null || whereCondition.evaluate(row)) { -// updateRow(row, setValues); + for (Map.Entry entry : setValues.entrySet()) { + row.setValue(entry.getKey(), entry.getValue()); + } updatedCount++; } } @@ -292,7 +293,7 @@ private void appendRowToFile(String tableName, RowDefinition row) private void saveAllRows(String tableName, List rows) throws TableException { - Path dataPath = Paths.get(dataDir + tableName + ".data"); + Path dataPath = dataDir.resolve(tableName).resolve("data").resolve("table.data"); try (ObjectOutputStream oos = new ObjectOutputStream(Files.newOutputStream(dataPath, StandardOpenOption.CREATE, StandardOpenOption.TRUNCATE_EXISTING))) { for (RowDefinition row : rows) { @@ -376,16 +377,38 @@ private boolean isValueTypeValid(Object value, DataType expectedType) } switch (expectedType) { + case TINYINT: + case SMALLINT: case INTEGER: - return value instanceof Integer; + case INT: + case BIGINT: + return value instanceof Number; + case FLOAT: + case REAL: + case DOUBLE: + case DECIMAL: + case NUMERIC: + return value instanceof Number; + case CHARACTER: + case CHAR: case VARCHAR: + case TEXT: + case JSON: + case XML: return value instanceof String; case BOOLEAN: return value instanceof Boolean; - case DOUBLE: - return value instanceof Double; + case BINARY: + case VARBINARY: + case BLOB: + return value instanceof byte[] || value instanceof String; + case DATE: + case TIME: + case TIMESTAMP: + case DATETIME: + return value instanceof String || value instanceof Number; default: - return false; + return true; } } @@ -453,6 +476,11 @@ private TableDefinition loadTableMetadata(String tableName) } } + public String[] listTables() + { + return tableMetadataCache.keySet().toArray(new String[0]); + } + public boolean tableExists(String tableName) { return tableMetadataCache.containsKey(tableName); diff --git a/core/datacap-parser/pom.xml b/core/datacap-parser/pom.xml index 024f6abbbe..213249d2bd 100644 --- a/core/datacap-parser/pom.xml +++ b/core/datacap-parser/pom.xml @@ -6,7 +6,7 @@ io.edurt.datacap datacap - 2025.1.2 + 2026.0.0 ../../pom.xml diff --git a/core/datacap-parser/src/main/java/io/edurt/datacap/sql/SQLVisitor.java b/core/datacap-parser/src/main/java/io/edurt/datacap/sql/SQLVisitor.java index 6eb3b7eaca..2b84c03465 100644 --- a/core/datacap-parser/src/main/java/io/edurt/datacap/sql/SQLVisitor.java +++ b/core/datacap-parser/src/main/java/io/edurt/datacap/sql/SQLVisitor.java @@ -21,16 +21,20 @@ import io.edurt.datacap.sql.processor.ShowProcessor; import io.edurt.datacap.sql.statement.CreateDatabaseStatement; import io.edurt.datacap.sql.statement.CreateTableStatement; +import io.edurt.datacap.sql.statement.DeleteStatement; import 
io.edurt.datacap.sql.statement.DropDatabaseStatement; import io.edurt.datacap.sql.statement.DropTableStatement; import io.edurt.datacap.sql.statement.InsertStatement; import io.edurt.datacap.sql.statement.SQLStatement; import io.edurt.datacap.sql.statement.SelectStatement; +import io.edurt.datacap.sql.statement.UpdateStatement; import io.edurt.datacap.sql.statement.UseDatabaseStatement; import org.antlr.v4.runtime.RuleContext; import java.util.ArrayList; +import java.util.LinkedHashMap; import java.util.List; +import java.util.Map; import java.util.stream.Collectors; public class SQLVisitor @@ -199,15 +203,34 @@ else if (ctx.selectStatement() != null) { @Override public SQLStatement visitUpdateStatement(SqlBaseParser.UpdateStatementContext ctx) { - // TODO: Implement update statement parsing - return null; + String tableName = ctx.tableName().getText(); + + Map setValues = new LinkedHashMap<>(); + for (SqlBaseParser.UpdateElementContext elementCtx : ctx.updateElement()) { + String columnName = elementCtx.columnName().getText(); + Expression value = processExpression(elementCtx.expression()); + setValues.put(columnName, value); + } + + Expression whereClause = null; + if (ctx.whereClause() != null) { + whereClause = processExpression(ctx.whereClause().expression()); + } + + return new UpdateStatement(tableName, setValues, whereClause); } @Override public SQLStatement visitDeleteStatement(SqlBaseParser.DeleteStatementContext ctx) { - // TODO: Implement delete statement parsing - return null; + String tableName = ctx.tableName().getText(); + + Expression whereClause = null; + if (ctx.whereClause() != null) { + whereClause = processExpression(ctx.whereClause().expression()); + } + + return new DeleteStatement(tableName, whereClause); } @Override diff --git a/core/datacap-parser/src/main/java/io/edurt/datacap/sql/statement/DeleteStatement.java b/core/datacap-parser/src/main/java/io/edurt/datacap/sql/statement/DeleteStatement.java new file mode 100644 index 0000000000..f41d1430f6 --- /dev/null +++ b/core/datacap-parser/src/main/java/io/edurt/datacap/sql/statement/DeleteStatement.java @@ -0,0 +1,25 @@ +package io.edurt.datacap.sql.statement; + +import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; +import io.edurt.datacap.sql.node.Expression; +import lombok.Getter; +import lombok.Setter; +import lombok.ToString; + +@Getter +@Setter +@ToString +@SuppressFBWarnings(value = {"EI_EXPOSE_REP", "EI_EXPOSE_REP2"}) +public class DeleteStatement + extends SQLStatement +{ + private final String tableName; + private final Expression whereClause; + + public DeleteStatement(String tableName, Expression whereClause) + { + super(StatementType.DELETE); + this.tableName = tableName; + this.whereClause = whereClause; + } +} diff --git a/core/datacap-parser/src/main/java/io/edurt/datacap/sql/statement/UpdateStatement.java b/core/datacap-parser/src/main/java/io/edurt/datacap/sql/statement/UpdateStatement.java new file mode 100644 index 0000000000..2bef2f3c48 --- /dev/null +++ b/core/datacap-parser/src/main/java/io/edurt/datacap/sql/statement/UpdateStatement.java @@ -0,0 +1,30 @@ +package io.edurt.datacap.sql.statement; + +import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; +import io.edurt.datacap.sql.node.Expression; +import lombok.Getter; +import lombok.Setter; +import lombok.ToString; + +import java.util.List; +import java.util.Map; + +@Getter +@Setter +@ToString +@SuppressFBWarnings(value = {"EI_EXPOSE_REP", "EI_EXPOSE_REP2"}) +public class UpdateStatement + extends SQLStatement +{ + private final 
String tableName; + private final Map setValues; + private final Expression whereClause; + + public UpdateStatement(String tableName, Map setValues, Expression whereClause) + { + super(StatementType.UPDATE); + this.tableName = tableName; + this.setValues = setValues; + this.whereClause = whereClause; + } +} diff --git a/core/datacap-plugin/pom.xml b/core/datacap-plugin/pom.xml index 3f61de42d0..81004ab135 100644 --- a/core/datacap-plugin/pom.xml +++ b/core/datacap-plugin/pom.xml @@ -6,7 +6,7 @@ io.edurt.datacap datacap - 2025.1.2 + 2026.0.0 ../../pom.xml diff --git a/core/datacap-security/pom.xml b/core/datacap-security/pom.xml index 3d06985821..5c9200ccc3 100644 --- a/core/datacap-security/pom.xml +++ b/core/datacap-security/pom.xml @@ -6,7 +6,7 @@ io.edurt.datacap datacap - 2025.1.2 + 2026.0.0 ../../pom.xml diff --git a/core/datacap-server/pom.xml b/core/datacap-server/pom.xml index 15424e9e2f..1abd5f03d7 100644 --- a/core/datacap-server/pom.xml +++ b/core/datacap-server/pom.xml @@ -5,7 +5,7 @@ datacap io.edurt.datacap - 2025.1.2 + 2026.0.0 ../../pom.xml 4.0.0 diff --git a/core/datacap-service/pom.xml b/core/datacap-service/pom.xml index 60e43e2607..1d75c72c62 100644 --- a/core/datacap-service/pom.xml +++ b/core/datacap-service/pom.xml @@ -6,7 +6,7 @@ io.edurt.datacap datacap - 2025.1.2 + 2026.0.0 ../../pom.xml diff --git a/core/datacap-spi/pom.xml b/core/datacap-spi/pom.xml index ee49f73947..9e996f2376 100644 --- a/core/datacap-spi/pom.xml +++ b/core/datacap-spi/pom.xml @@ -5,7 +5,7 @@ datacap io.edurt.datacap - 2025.1.2 + 2026.0.0 ../../pom.xml 4.0.0 diff --git a/core/datacap-sql/pom.xml b/core/datacap-sql/pom.xml index f10b9034cd..eafe1a83eb 100644 --- a/core/datacap-sql/pom.xml +++ b/core/datacap-sql/pom.xml @@ -4,7 +4,7 @@ io.edurt.datacap datacap - 2025.1.2 + 2026.0.0 ../../pom.xml diff --git a/core/datacap-ui/package.json b/core/datacap-ui/package.json index 43b40992e7..52ff84dd88 100644 --- a/core/datacap-ui/package.json +++ b/core/datacap-ui/package.json @@ -1,7 +1,7 @@ { "name": "datacap-ui", "description": "DataCap console", - "version": "2025.1.2", + "version": "2026.0.0", "private": true, "scripts": { "dev": "vite", diff --git a/driver/datacap-driver-mongodb/pom.xml b/driver/datacap-driver-mongodb/pom.xml index d5a0e4c565..c9a2f2a083 100644 --- a/driver/datacap-driver-mongodb/pom.xml +++ b/driver/datacap-driver-mongodb/pom.xml @@ -6,7 +6,7 @@ io.edurt.datacap datacap - 2025.1.2 + 2026.0.0 ../../pom.xml diff --git a/driver/datacap-driver-redis/pom.xml b/driver/datacap-driver-redis/pom.xml index 084c2c7739..6362e235a3 100644 --- a/driver/datacap-driver-redis/pom.xml +++ b/driver/datacap-driver-redis/pom.xml @@ -5,7 +5,7 @@ io.edurt.datacap datacap - 2025.1.2 + 2026.0.0 ../../pom.xml diff --git a/executor/datacap-executor-local/pom.xml b/executor/datacap-executor-local/pom.xml index 2e2efebbfd..53d18ae770 100644 --- a/executor/datacap-executor-local/pom.xml +++ b/executor/datacap-executor-local/pom.xml @@ -4,7 +4,7 @@ io.edurt.datacap datacap - 2025.1.2 + 2026.0.0 ../../pom.xml diff --git a/executor/datacap-executor-seatunnel/pom.xml b/executor/datacap-executor-seatunnel/pom.xml index 2b8c7a1914..e07b24a172 100644 --- a/executor/datacap-executor-seatunnel/pom.xml +++ b/executor/datacap-executor-seatunnel/pom.xml @@ -6,7 +6,7 @@ io.edurt.datacap datacap - 2025.1.2 + 2026.0.0 ../../pom.xml diff --git a/executor/datacap-executor-spi/pom.xml b/executor/datacap-executor-spi/pom.xml index 5e17160672..5e4587484a 100644 --- a/executor/datacap-executor-spi/pom.xml +++ 
b/executor/datacap-executor-spi/pom.xml @@ -4,7 +4,7 @@ io.edurt.datacap datacap - 2025.1.2 + 2026.0.0 ../../pom.xml diff --git a/fs/datacap-fs-alioss/pom.xml b/fs/datacap-fs-alioss/pom.xml index a2c10c9482..3eb6b259b5 100644 --- a/fs/datacap-fs-alioss/pom.xml +++ b/fs/datacap-fs-alioss/pom.xml @@ -6,7 +6,7 @@ io.edurt.datacap datacap - 2025.1.2 + 2026.0.0 ../../pom.xml diff --git a/fs/datacap-fs-amazon-s3/pom.xml b/fs/datacap-fs-amazon-s3/pom.xml index dd96b2467a..feced81f7d 100644 --- a/fs/datacap-fs-amazon-s3/pom.xml +++ b/fs/datacap-fs-amazon-s3/pom.xml @@ -6,7 +6,7 @@ io.edurt.datacap datacap - 2025.1.2 + 2026.0.0 ../../pom.xml diff --git a/fs/datacap-fs-local/pom.xml b/fs/datacap-fs-local/pom.xml index 2eca14dd08..57553dd741 100644 --- a/fs/datacap-fs-local/pom.xml +++ b/fs/datacap-fs-local/pom.xml @@ -3,7 +3,7 @@ io.edurt.datacap datacap - 2025.1.2 + 2026.0.0 ../../pom.xml 4.0.0 diff --git a/fs/datacap-fs-minio/pom.xml b/fs/datacap-fs-minio/pom.xml index df82ddb9a8..15e5d1f97f 100644 --- a/fs/datacap-fs-minio/pom.xml +++ b/fs/datacap-fs-minio/pom.xml @@ -6,7 +6,7 @@ io.edurt.datacap datacap - 2025.1.2 + 2026.0.0 ../../pom.xml diff --git a/fs/datacap-fs-qiniu/pom.xml b/fs/datacap-fs-qiniu/pom.xml index 6a2e558c6e..af95dcf5ca 100644 --- a/fs/datacap-fs-qiniu/pom.xml +++ b/fs/datacap-fs-qiniu/pom.xml @@ -4,7 +4,7 @@ io.edurt.datacap datacap - 2025.1.2 + 2026.0.0 ../../pom.xml diff --git a/fs/datacap-fs-spi/pom.xml b/fs/datacap-fs-spi/pom.xml index c63252ac32..78f9a50cd7 100644 --- a/fs/datacap-fs-spi/pom.xml +++ b/fs/datacap-fs-spi/pom.xml @@ -3,7 +3,7 @@ io.edurt.datacap datacap - 2025.1.2 + 2026.0.0 ../../pom.xml 4.0.0 diff --git a/fs/datacap-fs-tencent-cos/pom.xml b/fs/datacap-fs-tencent-cos/pom.xml index 8f7c829614..ada24f2013 100644 --- a/fs/datacap-fs-tencent-cos/pom.xml +++ b/fs/datacap-fs-tencent-cos/pom.xml @@ -6,7 +6,7 @@ io.edurt.datacap datacap - 2025.1.2 + 2026.0.0 ../../pom.xml diff --git a/lib/datacap-http/pom.xml b/lib/datacap-http/pom.xml index d682505637..0a5a7db745 100644 --- a/lib/datacap-http/pom.xml +++ b/lib/datacap-http/pom.xml @@ -6,7 +6,7 @@ io.edurt.datacap datacap - 2025.1.2 + 2026.0.0 ../../pom.xml diff --git a/lib/datacap-logger/pom.xml b/lib/datacap-logger/pom.xml index dfdbccae01..7aadb0d7d9 100644 --- a/lib/datacap-logger/pom.xml +++ b/lib/datacap-logger/pom.xml @@ -6,7 +6,7 @@ io.edurt.datacap datacap - 2025.1.2 + 2026.0.0 ../../pom.xml diff --git a/lib/datacap-schedule/pom.xml b/lib/datacap-schedule/pom.xml index 6b9dadbbb7..00871c6736 100644 --- a/lib/datacap-schedule/pom.xml +++ b/lib/datacap-schedule/pom.xml @@ -6,7 +6,7 @@ io.edurt.datacap datacap - 2025.1.2 + 2026.0.0 ../../pom.xml diff --git a/lib/datacap-shell/pom.xml b/lib/datacap-shell/pom.xml index 24762df52d..e6f6b60b2a 100644 --- a/lib/datacap-shell/pom.xml +++ b/lib/datacap-shell/pom.xml @@ -6,7 +6,7 @@ io.edurt.datacap datacap - 2025.1.2 + 2026.0.0 ../../pom.xml diff --git a/notify/datacap-notify-dingtalk/pom.xml b/notify/datacap-notify-dingtalk/pom.xml index ba8f73f690..d619be2acf 100644 --- a/notify/datacap-notify-dingtalk/pom.xml +++ b/notify/datacap-notify-dingtalk/pom.xml @@ -5,7 +5,7 @@ io.edurt.datacap datacap - 2025.1.2 + 2026.0.0 ../../pom.xml 4.0.0 diff --git a/notify/datacap-notify-spi/pom.xml b/notify/datacap-notify-spi/pom.xml index 3ee506446a..dc4d0a1c13 100644 --- a/notify/datacap-notify-spi/pom.xml +++ b/notify/datacap-notify-spi/pom.xml @@ -5,7 +5,7 @@ io.edurt.datacap datacap - 2025.1.2 + 2026.0.0 ../../pom.xml 4.0.0 diff --git a/parser/datacap-parser-mysql/pom.xml 
b/parser/datacap-parser-mysql/pom.xml index f66669330f..a2effd3b80 100644 --- a/parser/datacap-parser-mysql/pom.xml +++ b/parser/datacap-parser-mysql/pom.xml @@ -4,7 +4,7 @@ io.edurt.datacap datacap - 2025.1.2 + 2026.0.0 ../../pom.xml diff --git a/parser/datacap-parser-spi/pom.xml b/parser/datacap-parser-spi/pom.xml index 35bf6358a8..4ad6af463d 100644 --- a/parser/datacap-parser-spi/pom.xml +++ b/parser/datacap-parser-spi/pom.xml @@ -4,7 +4,7 @@ io.edurt.datacap datacap - 2025.1.2 + 2026.0.0 ../../pom.xml diff --git a/parser/datacap-parser-trino/pom.xml b/parser/datacap-parser-trino/pom.xml index fc081f71eb..8ec6995d50 100644 --- a/parser/datacap-parser-trino/pom.xml +++ b/parser/datacap-parser-trino/pom.xml @@ -4,7 +4,7 @@ io.edurt.datacap datacap - 2025.1.2 + 2026.0.0 ../../pom.xml diff --git a/plugin/datacap-plugin-alioss/pom.xml b/plugin/datacap-plugin-alioss/pom.xml index 29bc105b61..928bb43f06 100644 --- a/plugin/datacap-plugin-alioss/pom.xml +++ b/plugin/datacap-plugin-alioss/pom.xml @@ -6,7 +6,7 @@ datacap io.edurt.datacap - 2025.1.2 + 2026.0.0 ../../pom.xml diff --git a/plugin/datacap-plugin-cassandra/pom.xml b/plugin/datacap-plugin-cassandra/pom.xml index 799ebcb552..889cdbeb8b 100644 --- a/plugin/datacap-plugin-cassandra/pom.xml +++ b/plugin/datacap-plugin-cassandra/pom.xml @@ -6,7 +6,7 @@ io.edurt.datacap datacap - 2025.1.2 + 2026.0.0 ../../pom.xml diff --git a/plugin/datacap-plugin-ceresdb/pom.xml b/plugin/datacap-plugin-ceresdb/pom.xml index 9eaa350c43..f29e195748 100644 --- a/plugin/datacap-plugin-ceresdb/pom.xml +++ b/plugin/datacap-plugin-ceresdb/pom.xml @@ -6,7 +6,7 @@ io.edurt.datacap datacap - 2025.1.2 + 2026.0.0 ../../pom.xml diff --git a/plugin/datacap-plugin-clickhouse-http/pom.xml b/plugin/datacap-plugin-clickhouse-http/pom.xml index 8420ba7543..641e6fbe50 100644 --- a/plugin/datacap-plugin-clickhouse-http/pom.xml +++ b/plugin/datacap-plugin-clickhouse-http/pom.xml @@ -5,7 +5,7 @@ datacap io.edurt.datacap - 2025.1.2 + 2026.0.0 ../../pom.xml 4.0.0 diff --git a/plugin/datacap-plugin-clickhouse/pom.xml b/plugin/datacap-plugin-clickhouse/pom.xml index 9df768a0d0..08d4b272a9 100644 --- a/plugin/datacap-plugin-clickhouse/pom.xml +++ b/plugin/datacap-plugin-clickhouse/pom.xml @@ -5,7 +5,7 @@ datacap io.edurt.datacap - 2025.1.2 + 2026.0.0 ../../pom.xml 4.0.0 diff --git a/plugin/datacap-plugin-cratedb-http/pom.xml b/plugin/datacap-plugin-cratedb-http/pom.xml index e4c0ab35f6..832bf9d1b1 100644 --- a/plugin/datacap-plugin-cratedb-http/pom.xml +++ b/plugin/datacap-plugin-cratedb-http/pom.xml @@ -5,7 +5,7 @@ datacap io.edurt.datacap - 2025.1.2 + 2026.0.0 ../../pom.xml 4.0.0 diff --git a/plugin/datacap-plugin-cratedb/pom.xml b/plugin/datacap-plugin-cratedb/pom.xml index 681dffbbd7..260fb58d6f 100644 --- a/plugin/datacap-plugin-cratedb/pom.xml +++ b/plugin/datacap-plugin-cratedb/pom.xml @@ -5,7 +5,7 @@ datacap io.edurt.datacap - 2025.1.2 + 2026.0.0 ../../pom.xml 4.0.0 diff --git a/plugin/datacap-plugin-db2/pom.xml b/plugin/datacap-plugin-db2/pom.xml index 0497cb8240..c74a875e0c 100644 --- a/plugin/datacap-plugin-db2/pom.xml +++ b/plugin/datacap-plugin-db2/pom.xml @@ -6,7 +6,7 @@ io.edurt.datacap datacap - 2025.1.2 + 2026.0.0 ../../pom.xml diff --git a/plugin/datacap-plugin-dm/pom.xml b/plugin/datacap-plugin-dm/pom.xml index a89eacd47f..4cec9935ef 100644 --- a/plugin/datacap-plugin-dm/pom.xml +++ b/plugin/datacap-plugin-dm/pom.xml @@ -5,7 +5,7 @@ datacap io.edurt.datacap - 2025.1.2 + 2026.0.0 ../../pom.xml 4.0.0 diff --git a/plugin/datacap-plugin-dolphindb/pom.xml 
b/plugin/datacap-plugin-dolphindb/pom.xml index 07ffdc1557..04c791b572 100644 --- a/plugin/datacap-plugin-dolphindb/pom.xml +++ b/plugin/datacap-plugin-dolphindb/pom.xml @@ -3,7 +3,7 @@ io.edurt.datacap datacap - 2025.1.2 + 2026.0.0 ../../pom.xml 4.0.0 diff --git a/plugin/datacap-plugin-doris/pom.xml b/plugin/datacap-plugin-doris/pom.xml index 3005fb641a..700ece07b8 100644 --- a/plugin/datacap-plugin-doris/pom.xml +++ b/plugin/datacap-plugin-doris/pom.xml @@ -6,7 +6,7 @@ io.edurt.datacap datacap - 2025.1.2 + 2026.0.0 ../../pom.xml diff --git a/plugin/datacap-plugin-dremio/pom.xml b/plugin/datacap-plugin-dremio/pom.xml index e7b22f4be2..a74ec84bef 100644 --- a/plugin/datacap-plugin-dremio/pom.xml +++ b/plugin/datacap-plugin-dremio/pom.xml @@ -5,7 +5,7 @@ datacap io.edurt.datacap - 2025.1.2 + 2026.0.0 ../../pom.xml 4.0.0 diff --git a/plugin/datacap-plugin-druid/pom.xml b/plugin/datacap-plugin-druid/pom.xml index 3f3662dce0..a9099eddfd 100644 --- a/plugin/datacap-plugin-druid/pom.xml +++ b/plugin/datacap-plugin-druid/pom.xml @@ -5,7 +5,7 @@ datacap io.edurt.datacap - 2025.1.2 + 2026.0.0 ../../pom.xml 4.0.0 diff --git a/plugin/datacap-plugin-duckdb/pom.xml b/plugin/datacap-plugin-duckdb/pom.xml index 0fcba41e24..299dc9d8ed 100644 --- a/plugin/datacap-plugin-duckdb/pom.xml +++ b/plugin/datacap-plugin-duckdb/pom.xml @@ -6,7 +6,7 @@ io.edurt.datacap datacap - 2025.1.2 + 2026.0.0 ../../pom.xml diff --git a/plugin/datacap-plugin-elasticsearch-8x/pom.xml b/plugin/datacap-plugin-elasticsearch-8x/pom.xml index faea45fda8..5652bcd2e6 100644 --- a/plugin/datacap-plugin-elasticsearch-8x/pom.xml +++ b/plugin/datacap-plugin-elasticsearch-8x/pom.xml @@ -5,7 +5,7 @@ datacap io.edurt.datacap - 2025.1.2 + 2026.0.0 ../../pom.xml 4.0.0 diff --git a/plugin/datacap-plugin-greptimedb/pom.xml b/plugin/datacap-plugin-greptimedb/pom.xml index 75768e3c8e..41bf7e8fbc 100644 --- a/plugin/datacap-plugin-greptimedb/pom.xml +++ b/plugin/datacap-plugin-greptimedb/pom.xml @@ -6,7 +6,7 @@ io.edurt.datacap datacap - 2025.1.2 + 2026.0.0 ../../pom.xml diff --git a/plugin/datacap-plugin-h2-tcp/pom.xml b/plugin/datacap-plugin-h2-tcp/pom.xml index 7a1c231e7e..c9d4f77b63 100644 --- a/plugin/datacap-plugin-h2-tcp/pom.xml +++ b/plugin/datacap-plugin-h2-tcp/pom.xml @@ -5,7 +5,7 @@ datacap io.edurt.datacap - 2025.1.2 + 2026.0.0 ../../pom.xml 4.0.0 diff --git a/plugin/datacap-plugin-h2/pom.xml b/plugin/datacap-plugin-h2/pom.xml index 881e4b0a82..e289eef146 100644 --- a/plugin/datacap-plugin-h2/pom.xml +++ b/plugin/datacap-plugin-h2/pom.xml @@ -6,7 +6,7 @@ io.edurt.datacap datacap - 2025.1.2 + 2026.0.0 ../../pom.xml diff --git a/plugin/datacap-plugin-hdfs/pom.xml b/plugin/datacap-plugin-hdfs/pom.xml index e1342b6c06..eba3157434 100644 --- a/plugin/datacap-plugin-hdfs/pom.xml +++ b/plugin/datacap-plugin-hdfs/pom.xml @@ -6,7 +6,7 @@ io.edurt.datacap datacap - 2025.1.2 + 2026.0.0 ../../pom.xml diff --git a/plugin/datacap-plugin-hive-2x/pom.xml b/plugin/datacap-plugin-hive-2x/pom.xml index b4c76806b1..0bfac552e5 100644 --- a/plugin/datacap-plugin-hive-2x/pom.xml +++ b/plugin/datacap-plugin-hive-2x/pom.xml @@ -5,7 +5,7 @@ datacap io.edurt.datacap - 2025.1.2 + 2026.0.0 ../../pom.xml 4.0.0 diff --git a/plugin/datacap-plugin-hologres/pom.xml b/plugin/datacap-plugin-hologres/pom.xml index 549332454a..89e8fc5702 100644 --- a/plugin/datacap-plugin-hologres/pom.xml +++ b/plugin/datacap-plugin-hologres/pom.xml @@ -6,7 +6,7 @@ io.edurt.datacap datacap - 2025.1.2 + 2026.0.0 ../../pom.xml diff --git a/plugin/datacap-plugin-ignite/pom.xml 
b/plugin/datacap-plugin-ignite/pom.xml index 4522f552a4..c36144a701 100644 --- a/plugin/datacap-plugin-ignite/pom.xml +++ b/plugin/datacap-plugin-ignite/pom.xml @@ -6,7 +6,7 @@ io.edurt.datacap datacap - 2025.1.2 + 2026.0.0 ../../pom.xml diff --git a/plugin/datacap-plugin-impala/pom.xml b/plugin/datacap-plugin-impala/pom.xml index 3d49f0ae63..dbdc77207d 100644 --- a/plugin/datacap-plugin-impala/pom.xml +++ b/plugin/datacap-plugin-impala/pom.xml @@ -5,7 +5,7 @@ datacap io.edurt.datacap - 2025.1.2 + 2026.0.0 ../../pom.xml 4.0.0 diff --git a/plugin/datacap-plugin-influxdb/pom.xml b/plugin/datacap-plugin-influxdb/pom.xml index fb9ca30ab4..6c3f1335b7 100644 --- a/plugin/datacap-plugin-influxdb/pom.xml +++ b/plugin/datacap-plugin-influxdb/pom.xml @@ -5,7 +5,7 @@ io.edurt.datacap datacap - 2025.1.2 + 2026.0.0 ../../pom.xml 4.0.0 diff --git a/plugin/datacap-plugin-iotdb/pom.xml b/plugin/datacap-plugin-iotdb/pom.xml index 60b75ac447..1d1b565600 100644 --- a/plugin/datacap-plugin-iotdb/pom.xml +++ b/plugin/datacap-plugin-iotdb/pom.xml @@ -6,7 +6,7 @@ io.edurt.datacap datacap - 2025.1.2 + 2026.0.0 ../../pom.xml diff --git a/plugin/datacap-plugin-kafka/pom.xml b/plugin/datacap-plugin-kafka/pom.xml index 60f526e949..3eda878984 100644 --- a/plugin/datacap-plugin-kafka/pom.xml +++ b/plugin/datacap-plugin-kafka/pom.xml @@ -6,7 +6,7 @@ io.edurt.datacap datacap - 2025.1.2 + 2026.0.0 ../../pom.xml diff --git a/plugin/datacap-plugin-kylin/pom.xml b/plugin/datacap-plugin-kylin/pom.xml index 9d9c83ab0f..d58952e884 100644 --- a/plugin/datacap-plugin-kylin/pom.xml +++ b/plugin/datacap-plugin-kylin/pom.xml @@ -5,7 +5,7 @@ datacap io.edurt.datacap - 2025.1.2 + 2026.0.0 ../../pom.xml 4.0.0 diff --git a/plugin/datacap-plugin-kyuubi/pom.xml b/plugin/datacap-plugin-kyuubi/pom.xml index 7d7fd72c44..dff875831d 100644 --- a/plugin/datacap-plugin-kyuubi/pom.xml +++ b/plugin/datacap-plugin-kyuubi/pom.xml @@ -5,7 +5,7 @@ datacap io.edurt.datacap - 2025.1.2 + 2026.0.0 ../../pom.xml 4.0.0 diff --git a/plugin/datacap-plugin-matrixone/pom.xml b/plugin/datacap-plugin-matrixone/pom.xml index fb1735bdb1..52ca514e6d 100644 --- a/plugin/datacap-plugin-matrixone/pom.xml +++ b/plugin/datacap-plugin-matrixone/pom.xml @@ -4,7 +4,7 @@ io.edurt.datacap datacap - 2025.1.2 + 2026.0.0 ../../pom.xml diff --git a/plugin/datacap-plugin-monetdb/pom.xml b/plugin/datacap-plugin-monetdb/pom.xml index 7815379e0b..8cba3c67f3 100644 --- a/plugin/datacap-plugin-monetdb/pom.xml +++ b/plugin/datacap-plugin-monetdb/pom.xml @@ -5,7 +5,7 @@ datacap io.edurt.datacap - 2025.1.2 + 2026.0.0 ../../pom.xml 4.0.0 diff --git a/plugin/datacap-plugin-mongo-atlas/pom.xml b/plugin/datacap-plugin-mongo-atlas/pom.xml index c8ca88cace..2be5df9b3f 100644 --- a/plugin/datacap-plugin-mongo-atlas/pom.xml +++ b/plugin/datacap-plugin-mongo-atlas/pom.xml @@ -5,7 +5,7 @@ datacap io.edurt.datacap - 2025.1.2 + 2026.0.0 ../../pom.xml 4.0.0 diff --git a/plugin/datacap-plugin-mongo-community/pom.xml b/plugin/datacap-plugin-mongo-community/pom.xml index e016263a6d..05d3539799 100644 --- a/plugin/datacap-plugin-mongo-community/pom.xml +++ b/plugin/datacap-plugin-mongo-community/pom.xml @@ -6,7 +6,7 @@ io.edurt.datacap datacap - 2025.1.2 + 2026.0.0 ../../pom.xml diff --git a/plugin/datacap-plugin-mysql/pom.xml b/plugin/datacap-plugin-mysql/pom.xml index 4c1ec6a2d8..e6374e0e3e 100644 --- a/plugin/datacap-plugin-mysql/pom.xml +++ b/plugin/datacap-plugin-mysql/pom.xml @@ -6,7 +6,7 @@ io.edurt.datacap datacap - 2025.1.2 + 2026.0.0 ../../pom.xml diff --git a/plugin/datacap-plugin-neo4j/pom.xml 
b/plugin/datacap-plugin-neo4j/pom.xml index 3f9a22f8a5..770665b91a 100644 --- a/plugin/datacap-plugin-neo4j/pom.xml +++ b/plugin/datacap-plugin-neo4j/pom.xml @@ -6,7 +6,7 @@ io.edurt.datacap datacap - 2025.1.2 + 2026.0.0 ../../pom.xml diff --git a/plugin/datacap-plugin-oceanbase/pom.xml b/plugin/datacap-plugin-oceanbase/pom.xml index 48ed9a99df..1fad819f4f 100644 --- a/plugin/datacap-plugin-oceanbase/pom.xml +++ b/plugin/datacap-plugin-oceanbase/pom.xml @@ -5,7 +5,7 @@ datacap io.edurt.datacap - 2025.1.2 + 2026.0.0 ../../pom.xml 4.0.0 diff --git a/plugin/datacap-plugin-oracle/pom.xml b/plugin/datacap-plugin-oracle/pom.xml index cc4931f83f..1d5fb6b02b 100644 --- a/plugin/datacap-plugin-oracle/pom.xml +++ b/plugin/datacap-plugin-oracle/pom.xml @@ -5,7 +5,7 @@ datacap io.edurt.datacap - 2025.1.2 + 2026.0.0 ../../pom.xml 4.0.0 diff --git a/plugin/datacap-plugin-paradedb/pom.xml b/plugin/datacap-plugin-paradedb/pom.xml index f52f14d32a..9299c00f1f 100644 --- a/plugin/datacap-plugin-paradedb/pom.xml +++ b/plugin/datacap-plugin-paradedb/pom.xml @@ -4,7 +4,7 @@ io.edurt.datacap datacap - 2025.1.2 + 2026.0.0 ../../pom.xml diff --git a/plugin/datacap-plugin-phoenix/pom.xml b/plugin/datacap-plugin-phoenix/pom.xml index 9fcc397916..da420c5b60 100644 --- a/plugin/datacap-plugin-phoenix/pom.xml +++ b/plugin/datacap-plugin-phoenix/pom.xml @@ -5,7 +5,7 @@ datacap io.edurt.datacap - 2025.1.2 + 2026.0.0 ../../pom.xml 4.0.0 diff --git a/plugin/datacap-plugin-pinot/pom.xml b/plugin/datacap-plugin-pinot/pom.xml index 22ac768e28..3054424650 100644 --- a/plugin/datacap-plugin-pinot/pom.xml +++ b/plugin/datacap-plugin-pinot/pom.xml @@ -6,7 +6,7 @@ io.edurt.datacap datacap - 2025.1.2 + 2026.0.0 ../../pom.xml diff --git a/plugin/datacap-plugin-postgresql/pom.xml b/plugin/datacap-plugin-postgresql/pom.xml index 06dde76838..650e302c05 100644 --- a/plugin/datacap-plugin-postgresql/pom.xml +++ b/plugin/datacap-plugin-postgresql/pom.xml @@ -5,7 +5,7 @@ datacap io.edurt.datacap - 2025.1.2 + 2026.0.0 ../../pom.xml 4.0.0 diff --git a/plugin/datacap-plugin-presto/pom.xml b/plugin/datacap-plugin-presto/pom.xml index 56f1a04a85..f1483f0a6c 100644 --- a/plugin/datacap-plugin-presto/pom.xml +++ b/plugin/datacap-plugin-presto/pom.xml @@ -5,7 +5,7 @@ datacap io.edurt.datacap - 2025.1.2 + 2026.0.0 ../../pom.xml 4.0.0 diff --git a/plugin/datacap-plugin-questdb/pom.xml b/plugin/datacap-plugin-questdb/pom.xml index 539aac0857..7511766aaf 100644 --- a/plugin/datacap-plugin-questdb/pom.xml +++ b/plugin/datacap-plugin-questdb/pom.xml @@ -6,7 +6,7 @@ io.edurt.datacap datacap - 2025.1.2 + 2026.0.0 ../../pom.xml diff --git a/plugin/datacap-plugin-redis-jdbc/pom.xml b/plugin/datacap-plugin-redis-jdbc/pom.xml index a5098d6e97..7e4a411af1 100644 --- a/plugin/datacap-plugin-redis-jdbc/pom.xml +++ b/plugin/datacap-plugin-redis-jdbc/pom.xml @@ -5,7 +5,7 @@ datacap io.edurt.datacap - 2025.1.2 + 2026.0.0 ../../pom.xml 4.0.0 diff --git a/plugin/datacap-plugin-redis/pom.xml b/plugin/datacap-plugin-redis/pom.xml index 0a48da618d..9c96787aca 100644 --- a/plugin/datacap-plugin-redis/pom.xml +++ b/plugin/datacap-plugin-redis/pom.xml @@ -5,7 +5,7 @@ datacap io.edurt.datacap - 2025.1.2 + 2026.0.0 ../../pom.xml 4.0.0 diff --git a/plugin/datacap-plugin-scylladb/pom.xml b/plugin/datacap-plugin-scylladb/pom.xml index 74841689b8..00f62e4f5d 100644 --- a/plugin/datacap-plugin-scylladb/pom.xml +++ b/plugin/datacap-plugin-scylladb/pom.xml @@ -4,7 +4,7 @@ io.edurt.datacap datacap - 2025.1.2 + 2026.0.0 ../../pom.xml diff --git 
a/plugin/datacap-plugin-snowflake/pom.xml b/plugin/datacap-plugin-snowflake/pom.xml index 711aebde68..31f4d148e5 100644 --- a/plugin/datacap-plugin-snowflake/pom.xml +++ b/plugin/datacap-plugin-snowflake/pom.xml @@ -6,7 +6,7 @@ io.edurt.datacap datacap - 2025.1.2 + 2026.0.0 ../../pom.xml diff --git a/plugin/datacap-plugin-solr/pom.xml b/plugin/datacap-plugin-solr/pom.xml index ead22ae988..cf2d9454db 100644 --- a/plugin/datacap-plugin-solr/pom.xml +++ b/plugin/datacap-plugin-solr/pom.xml @@ -5,7 +5,7 @@ io.edurt.datacap datacap - 2025.1.2 + 2026.0.0 ../../pom.xml 4.0.0 diff --git a/plugin/datacap-plugin-sqlserver/pom.xml b/plugin/datacap-plugin-sqlserver/pom.xml index 5816016f08..0cde483481 100644 --- a/plugin/datacap-plugin-sqlserver/pom.xml +++ b/plugin/datacap-plugin-sqlserver/pom.xml @@ -5,7 +5,7 @@ datacap io.edurt.datacap - 2025.1.2 + 2026.0.0 ../../pom.xml 4.0.0 diff --git a/plugin/datacap-plugin-starrocks/pom.xml b/plugin/datacap-plugin-starrocks/pom.xml index 1ba21e5b50..5fc596afca 100644 --- a/plugin/datacap-plugin-starrocks/pom.xml +++ b/plugin/datacap-plugin-starrocks/pom.xml @@ -6,7 +6,7 @@ io.edurt.datacap datacap - 2025.1.2 + 2026.0.0 ../../pom.xml diff --git a/plugin/datacap-plugin-tdengine/pom.xml b/plugin/datacap-plugin-tdengine/pom.xml index d8eff4c5ef..3e06d29f70 100644 --- a/plugin/datacap-plugin-tdengine/pom.xml +++ b/plugin/datacap-plugin-tdengine/pom.xml @@ -5,7 +5,7 @@ datacap io.edurt.datacap - 2025.1.2 + 2026.0.0 ../../pom.xml 4.0.0 diff --git a/plugin/datacap-plugin-timescale/pom.xml b/plugin/datacap-plugin-timescale/pom.xml index 085029046c..4a0dadb907 100644 --- a/plugin/datacap-plugin-timescale/pom.xml +++ b/plugin/datacap-plugin-timescale/pom.xml @@ -4,7 +4,7 @@ io.edurt.datacap datacap - 2025.1.2 + 2026.0.0 ../../pom.xml diff --git a/plugin/datacap-plugin-trino/pom.xml b/plugin/datacap-plugin-trino/pom.xml index 6b54bcaada..93730ffc12 100644 --- a/plugin/datacap-plugin-trino/pom.xml +++ b/plugin/datacap-plugin-trino/pom.xml @@ -5,7 +5,7 @@ datacap io.edurt.datacap - 2025.1.2 + 2026.0.0 ../../pom.xml 4.0.0 diff --git a/plugin/datacap-plugin-ydb/pom.xml b/plugin/datacap-plugin-ydb/pom.xml index 1093c280b4..71cd21562f 100644 --- a/plugin/datacap-plugin-ydb/pom.xml +++ b/plugin/datacap-plugin-ydb/pom.xml @@ -6,7 +6,7 @@ io.edurt.datacap datacap - 2025.1.2 + 2026.0.0 ../../pom.xml diff --git a/plugin/datacap-plugin-zookeeper/pom.xml b/plugin/datacap-plugin-zookeeper/pom.xml index b464186b44..6e1903e45e 100644 --- a/plugin/datacap-plugin-zookeeper/pom.xml +++ b/plugin/datacap-plugin-zookeeper/pom.xml @@ -5,7 +5,7 @@ datacap io.edurt.datacap - 2025.1.2 + 2026.0.0 ../../pom.xml 4.0.0 diff --git a/pom.xml b/pom.xml index c40999a2d9..5f3937a689 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ io.edurt.datacap datacap pom - 2025.1.2 + 2026.0.0 client/datacap-cli diff --git a/scheduler/datacap-scheduler-local/pom.xml b/scheduler/datacap-scheduler-local/pom.xml index 0ba98fea48..db1e37b99d 100644 --- a/scheduler/datacap-scheduler-local/pom.xml +++ b/scheduler/datacap-scheduler-local/pom.xml @@ -4,7 +4,7 @@ io.edurt.datacap datacap - 2025.1.2 + 2026.0.0 ../../pom.xml diff --git a/scheduler/datacap-scheduler-spi/pom.xml b/scheduler/datacap-scheduler-spi/pom.xml index e620fb35ad..471d8cdff0 100644 --- a/scheduler/datacap-scheduler-spi/pom.xml +++ b/scheduler/datacap-scheduler-spi/pom.xml @@ -4,7 +4,7 @@ io.edurt.datacap datacap - 2025.1.2 + 2026.0.0 ../../pom.xml diff --git a/shaded/datacap-shaded-neo4j/pom.xml b/shaded/datacap-shaded-neo4j/pom.xml index 
4bd21a6dcf..de0cabe0cf 100644 --- a/shaded/datacap-shaded-neo4j/pom.xml +++ b/shaded/datacap-shaded-neo4j/pom.xml @@ -6,7 +6,7 @@ io.edurt.datacap datacap - 2025.1.2 + 2026.0.0 ../../pom.xml diff --git a/shaded/datacap-shaded-pinot/pom.xml b/shaded/datacap-shaded-pinot/pom.xml index 1d8029bff7..b7adabbc32 100644 --- a/shaded/datacap-shaded-pinot/pom.xml +++ b/shaded/datacap-shaded-pinot/pom.xml @@ -6,7 +6,7 @@ io.edurt.datacap datacap - 2025.1.2 + 2026.0.0 ../../pom.xml diff --git a/shaded/datacap-shaded-ydb/pom.xml b/shaded/datacap-shaded-ydb/pom.xml index a89125b04e..76fa5159cc 100644 --- a/shaded/datacap-shaded-ydb/pom.xml +++ b/shaded/datacap-shaded-ydb/pom.xml @@ -6,7 +6,7 @@ io.edurt.datacap datacap - 2025.1.2 + 2026.0.0 ../../pom.xml diff --git a/test/datacap-test-condor/pom.xml b/test/datacap-test-condor/pom.xml index fa1fc2989f..ba845646a4 100644 --- a/test/datacap-test-condor/pom.xml +++ b/test/datacap-test-condor/pom.xml @@ -4,7 +4,7 @@ io.edurt.datacap datacap - 2025.1.2 + 2026.0.0 ../../pom.xml diff --git a/test/datacap-test-convert/pom.xml b/test/datacap-test-convert/pom.xml index 7cf61f0dbc..3bf8835bb7 100644 --- a/test/datacap-test-convert/pom.xml +++ b/test/datacap-test-convert/pom.xml @@ -6,7 +6,7 @@ io.edurt.datacap datacap - 2025.1.2 + 2026.0.0 ../../pom.xml diff --git a/test/datacap-test-core/pom.xml b/test/datacap-test-core/pom.xml index 926185f3c1..2b6e08679e 100644 --- a/test/datacap-test-core/pom.xml +++ b/test/datacap-test-core/pom.xml @@ -6,7 +6,7 @@ io.edurt.datacap datacap - 2025.1.2 + 2026.0.0 ../../pom.xml diff --git a/test/datacap-test-driver/pom.xml b/test/datacap-test-driver/pom.xml index b1e1887132..c58cbc26c2 100644 --- a/test/datacap-test-driver/pom.xml +++ b/test/datacap-test-driver/pom.xml @@ -6,7 +6,7 @@ io.edurt.datacap datacap - 2025.1.2 + 2026.0.0 ../../pom.xml diff --git a/test/datacap-test-driver/src/test/java/io/edurt/datacap/test/redis/RedisParserTest.java b/test/datacap-test-driver/src/test/java/io/edurt/datacap/test/redis/RedisParserTest.java index c1713211fd..a1a14252ee 100644 --- a/test/datacap-test-driver/src/test/java/io/edurt/datacap/test/redis/RedisParserTest.java +++ b/test/datacap-test-driver/src/test/java/io/edurt/datacap/test/redis/RedisParserTest.java @@ -1,5 +1,6 @@ package io.edurt.datacap.test.redis; +import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; import io.edurt.datacap.driver.parser.RedisParser; import io.edurt.datacap.driver.parser.RedisSelectParser; import io.edurt.datacap.driver.parser.RedisShowParser; @@ -12,6 +13,7 @@ import static org.junit.Assert.assertTrue; @Slf4j +@SuppressFBWarnings(value = {"JUA_DONT_ASSERT_INSTANCEOF_IN_TESTS"}) public class RedisParserTest { @Test diff --git a/test/datacap-test-executor/pom.xml b/test/datacap-test-executor/pom.xml index c6d2d70f4c..90cd43b5b4 100644 --- a/test/datacap-test-executor/pom.xml +++ b/test/datacap-test-executor/pom.xml @@ -6,7 +6,7 @@ io.edurt.datacap datacap - 2025.1.2 + 2026.0.0 ../../pom.xml diff --git a/test/datacap-test-fs/pom.xml b/test/datacap-test-fs/pom.xml index e9985dafea..b0b088c8d0 100644 --- a/test/datacap-test-fs/pom.xml +++ b/test/datacap-test-fs/pom.xml @@ -6,7 +6,7 @@ io.edurt.datacap datacap - 2025.1.2 + 2026.0.0 ../../pom.xml diff --git a/test/datacap-test-lib/pom.xml b/test/datacap-test-lib/pom.xml index 695f268c5b..3622437ffe 100644 --- a/test/datacap-test-lib/pom.xml +++ b/test/datacap-test-lib/pom.xml @@ -6,7 +6,7 @@ io.edurt.datacap datacap - 2025.1.2 + 2026.0.0 ../../pom.xml diff --git a/test/datacap-test-parser/pom.xml 
b/test/datacap-test-parser/pom.xml index 5b1eefd2f5..3e659952de 100644 --- a/test/datacap-test-parser/pom.xml +++ b/test/datacap-test-parser/pom.xml @@ -6,7 +6,7 @@ io.edurt.datacap datacap - 2025.1.2 + 2026.0.0 ../../pom.xml diff --git a/test/datacap-test-plugin/pom.xml b/test/datacap-test-plugin/pom.xml index 192875eaa8..f789314fb6 100644 --- a/test/datacap-test-plugin/pom.xml +++ b/test/datacap-test-plugin/pom.xml @@ -6,7 +6,7 @@ io.edurt.datacap datacap - 2025.1.2 + 2026.0.0 ../../pom.xml diff --git a/test/datacap-test-scheduler/pom.xml b/test/datacap-test-scheduler/pom.xml index 08b49b9bd9..d363ffbafa 100644 --- a/test/datacap-test-scheduler/pom.xml +++ b/test/datacap-test-scheduler/pom.xml @@ -6,7 +6,7 @@ io.edurt.datacap datacap - 2025.1.2 + 2026.0.0 ../../pom.xml From 113c9f5ea143b3acb3d70bd67d673e72d43df787 Mon Sep 17 00:00:00 2001 From: qianmoQ Date: Fri, 6 Feb 2026 18:30:15 +0800 Subject: [PATCH 3/5] =?UTF-8?q?feat(condor):=20=E4=BD=BF=E7=94=A8=E8=87=AA?= =?UTF-8?q?=E5=AE=9A=E4=B9=89=E8=A1=8C=E5=BC=8F=E4=BA=8C=E8=BF=9B=E5=88=B6?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../io/AppendableObjectInputStream.java | 30 -- .../io/AppendableObjectOutputStream.java | 27 -- .../condor/io/BinaryMetadataCodec.java | 105 ++++++ .../datacap/condor/io/BinaryRowCodec.java | 201 ++++++++++++ .../datacap/condor/manager/TableManager.java | 101 ++---- .../condor/metadata/ColumnDefinition.java | 3 - .../condor/metadata/RowDefinition.java | 4 +- .../condor/metadata/TableDefinition.java | 2 - .../edurt/datacap/condor/SQLExecutorTest.java | 308 ++++++++++++++++++ .../condor/condition/ConditionTest.java | 164 ++++++++++ .../datacap/condor/io/BinaryCodecTest.java | 283 ++++++++++++++++ .../condor/manager/DatabaseManagerTest.java | 142 ++++++++ .../condor/manager/TableManagerTest.java | 285 ++++++++++++++++ 13 files changed, 1517 insertions(+), 138 deletions(-) delete mode 100644 core/datacap-condor/src/main/java/io/edurt/datacap/condor/io/AppendableObjectInputStream.java delete mode 100644 core/datacap-condor/src/main/java/io/edurt/datacap/condor/io/AppendableObjectOutputStream.java create mode 100644 core/datacap-condor/src/main/java/io/edurt/datacap/condor/io/BinaryMetadataCodec.java create mode 100644 core/datacap-condor/src/main/java/io/edurt/datacap/condor/io/BinaryRowCodec.java create mode 100644 test/datacap-test-condor/src/test/java/io/edurt/datacap/condor/SQLExecutorTest.java create mode 100644 test/datacap-test-condor/src/test/java/io/edurt/datacap/condor/condition/ConditionTest.java create mode 100644 test/datacap-test-condor/src/test/java/io/edurt/datacap/condor/io/BinaryCodecTest.java create mode 100644 test/datacap-test-condor/src/test/java/io/edurt/datacap/condor/manager/DatabaseManagerTest.java create mode 100644 test/datacap-test-condor/src/test/java/io/edurt/datacap/condor/manager/TableManagerTest.java diff --git a/core/datacap-condor/src/main/java/io/edurt/datacap/condor/io/AppendableObjectInputStream.java b/core/datacap-condor/src/main/java/io/edurt/datacap/condor/io/AppendableObjectInputStream.java deleted file mode 100644 index ca8c508466..0000000000 --- a/core/datacap-condor/src/main/java/io/edurt/datacap/condor/io/AppendableObjectInputStream.java +++ /dev/null @@ -1,30 +0,0 @@ -package io.edurt.datacap.condor.io; - -import lombok.extern.slf4j.Slf4j; - -import java.io.IOException; -import java.io.InputStream; -import java.io.ObjectInputStream; - -@Slf4j -public class AppendableObjectInputStream - extends ObjectInputStream -{ - 
private boolean firstObject = true; - - public AppendableObjectInputStream(InputStream in) - throws IOException - { - super(in); - } - - @Override - protected void readStreamHeader() - throws IOException - { - if (firstObject) { - super.readStreamHeader(); - firstObject = false; - } - } -} diff --git a/core/datacap-condor/src/main/java/io/edurt/datacap/condor/io/AppendableObjectOutputStream.java b/core/datacap-condor/src/main/java/io/edurt/datacap/condor/io/AppendableObjectOutputStream.java deleted file mode 100644 index d2983ad6d7..0000000000 --- a/core/datacap-condor/src/main/java/io/edurt/datacap/condor/io/AppendableObjectOutputStream.java +++ /dev/null @@ -1,27 +0,0 @@ -package io.edurt.datacap.condor.io; - -import java.io.IOException; -import java.io.ObjectOutputStream; -import java.io.OutputStream; - -public class AppendableObjectOutputStream - extends ObjectOutputStream -{ - private boolean firstObject = true; - - public AppendableObjectOutputStream(OutputStream out) - throws IOException - { - super(out); - } - - @Override - protected void writeStreamHeader() - throws IOException - { - if (firstObject) { - super.writeStreamHeader(); - firstObject = false; - } - } -} diff --git a/core/datacap-condor/src/main/java/io/edurt/datacap/condor/io/BinaryMetadataCodec.java b/core/datacap-condor/src/main/java/io/edurt/datacap/condor/io/BinaryMetadataCodec.java new file mode 100644 index 0000000000..2e71e1276a --- /dev/null +++ b/core/datacap-condor/src/main/java/io/edurt/datacap/condor/io/BinaryMetadataCodec.java @@ -0,0 +1,105 @@ +package io.edurt.datacap.condor.io; + +import io.edurt.datacap.condor.DataType; +import io.edurt.datacap.condor.metadata.ColumnDefinition; +import io.edurt.datacap.condor.metadata.TableDefinition; +import lombok.extern.slf4j.Slf4j; + +import java.io.DataInputStream; +import java.io.DataOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.StandardOpenOption; +import java.util.ArrayList; +import java.util.List; + +@Slf4j +public class BinaryMetadataCodec +{ + private static final byte[] MAGIC = {'C', 'M', 'T', 'A'}; + private static final short VERSION = 1; + + public static void write(Path metaPath, TableDefinition metadata) + throws IOException + { + if (!Files.exists(metaPath.getParent())) { + Files.createDirectories(metaPath.getParent()); + } + + try (OutputStream os = Files.newOutputStream(metaPath, + StandardOpenOption.CREATE, StandardOpenOption.TRUNCATE_EXISTING); + DataOutputStream dos = new DataOutputStream(os)) { + dos.write(MAGIC); + dos.writeShort(VERSION); + + writeString(dos, metadata.getTableName()); + + List columns = metadata.getColumns(); + dos.writeShort(columns.size()); + + for (ColumnDefinition col : columns) { + writeString(dos, col.getName()); + writeString(dos, col.getType().name()); + dos.writeBoolean(col.isPrimaryKey()); + dos.writeBoolean(col.isNullable()); + } + } + } + + public static TableDefinition read(Path metaPath) + throws IOException + { + try (InputStream is = Files.newInputStream(metaPath); + DataInputStream dis = new DataInputStream(is)) { + byte[] magic = new byte[4]; + dis.readFully(magic); + if (magic[0] != MAGIC[0] || magic[1] != MAGIC[1] + || magic[2] != MAGIC[2] || magic[3] != MAGIC[3]) { + throw new IOException("Invalid metadata file format"); + } + + short version = dis.readShort(); + if (version > VERSION) { + throw new IOException("Unsupported metadata 
file version: " + version); + } + + String tableName = readString(dis); + + int colCount = dis.readShort(); + List columns = new ArrayList<>(); + + for (int i = 0; i < colCount; i++) { + String name = readString(dis); + String typeName = readString(dis); + boolean isPrimaryKey = dis.readBoolean(); + boolean isNullable = dis.readBoolean(); + + DataType type = DataType.valueOf(typeName); + columns.add(new ColumnDefinition(name, type, isPrimaryKey, isNullable)); + } + + return new TableDefinition(tableName, columns); + } + } + + private static void writeString(DataOutputStream dos, String str) + throws IOException + { + byte[] bytes = str.getBytes(StandardCharsets.UTF_8); + dos.writeShort(bytes.length); + dos.write(bytes); + } + + private static String readString(DataInputStream dis) + throws IOException + { + int len = dis.readShort(); + byte[] bytes = new byte[len]; + dis.readFully(bytes); + return new String(bytes, StandardCharsets.UTF_8); + } +} diff --git a/core/datacap-condor/src/main/java/io/edurt/datacap/condor/io/BinaryRowCodec.java b/core/datacap-condor/src/main/java/io/edurt/datacap/condor/io/BinaryRowCodec.java new file mode 100644 index 0000000000..aaf3653066 --- /dev/null +++ b/core/datacap-condor/src/main/java/io/edurt/datacap/condor/io/BinaryRowCodec.java @@ -0,0 +1,201 @@ +package io.edurt.datacap.condor.io; + +import io.edurt.datacap.condor.metadata.RowDefinition; +import lombok.extern.slf4j.Slf4j; + +import java.io.DataInputStream; +import java.io.DataOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.StandardOpenOption; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; + +@Slf4j +public class BinaryRowCodec +{ + private static final byte[] MAGIC = {'C', 'D', 'O', 'R'}; + private static final short VERSION = 1; + + private static final byte TYPE_NULL = 0; + private static final byte TYPE_INTEGER = 1; + private static final byte TYPE_LONG = 2; + private static final byte TYPE_DOUBLE = 3; + private static final byte TYPE_BOOLEAN = 4; + private static final byte TYPE_STRING = 5; + private static final byte TYPE_FLOAT = 6; + + public static void writeAll(Path dataPath, List rows) + throws IOException + { + try (OutputStream os = Files.newOutputStream(dataPath, + StandardOpenOption.CREATE, StandardOpenOption.TRUNCATE_EXISTING); + DataOutputStream dos = new DataOutputStream(os)) { + dos.write(MAGIC); + dos.writeShort(VERSION); + dos.writeInt(rows.size()); + + for (RowDefinition row : rows) { + writeRow(dos, row); + } + } + } + + public static void appendRows(Path dataPath, List rows) + throws IOException + { + if (!Files.exists(dataPath) || Files.size(dataPath) == 0) { + writeAll(dataPath, rows); + return; + } + + List existing = readAll(dataPath); + existing.addAll(rows); + writeAll(dataPath, existing); + } + + public static void appendRow(Path dataPath, RowDefinition row) + throws IOException + { + List rows = new ArrayList<>(); + rows.add(row); + appendRows(dataPath, rows); + } + + public static List readAll(Path dataPath) + throws IOException + { + List rows = new ArrayList<>(); + + if (!Files.exists(dataPath) || Files.size(dataPath) == 0) { + return rows; + } + + try (InputStream is = Files.newInputStream(dataPath); + DataInputStream dis = new DataInputStream(is)) { + byte[] magic = new byte[4]; + dis.readFully(magic); + if (magic[0] != MAGIC[0] || magic[1] != MAGIC[1] + || 
magic[2] != MAGIC[2] || magic[3] != MAGIC[3]) { + throw new IOException("Invalid data file format"); + } + + short version = dis.readShort(); + if (version > VERSION) { + throw new IOException("Unsupported data file version: " + version); + } + + int rowCount = dis.readInt(); + for (int i = 0; i < rowCount; i++) { + rows.add(readRow(dis)); + } + } + + return rows; + } + + private static void writeRow(DataOutputStream dos, RowDefinition row) + throws IOException + { + Map values = row.getValues(); + dos.writeShort(values.size()); + + for (Map.Entry entry : values.entrySet()) { + writeString(dos, entry.getKey()); + writeValue(dos, entry.getValue()); + } + } + + private static RowDefinition readRow(DataInputStream dis) + throws IOException + { + RowDefinition row = new RowDefinition(); + int colCount = dis.readShort(); + + for (int i = 0; i < colCount; i++) { + String name = readString(dis); + Object value = readValue(dis); + row.setValue(name, value); + } + + return row; + } + + private static void writeValue(DataOutputStream dos, Object value) + throws IOException + { + if (value == null) { + dos.writeByte(TYPE_NULL); + } + else if (value instanceof Integer) { + dos.writeByte(TYPE_INTEGER); + dos.writeInt((Integer) value); + } + else if (value instanceof Long) { + dos.writeByte(TYPE_LONG); + dos.writeLong((Long) value); + } + else if (value instanceof Double) { + dos.writeByte(TYPE_DOUBLE); + dos.writeDouble((Double) value); + } + else if (value instanceof Float) { + dos.writeByte(TYPE_FLOAT); + dos.writeFloat((Float) value); + } + else if (value instanceof Boolean) { + dos.writeByte(TYPE_BOOLEAN); + dos.writeBoolean((Boolean) value); + } + else { + dos.writeByte(TYPE_STRING); + writeString(dos, value.toString()); + } + } + + private static Object readValue(DataInputStream dis) + throws IOException + { + byte type = dis.readByte(); + switch (type) { + case TYPE_NULL: + return null; + case TYPE_INTEGER: + return dis.readInt(); + case TYPE_LONG: + return dis.readLong(); + case TYPE_DOUBLE: + return dis.readDouble(); + case TYPE_FLOAT: + return dis.readFloat(); + case TYPE_BOOLEAN: + return dis.readBoolean(); + case TYPE_STRING: + return readString(dis); + default: + throw new IOException("Unknown value type: " + type); + } + } + + private static void writeString(DataOutputStream dos, String str) + throws IOException + { + byte[] bytes = str.getBytes(StandardCharsets.UTF_8); + dos.writeShort(bytes.length); + dos.write(bytes); + } + + private static String readString(DataInputStream dis) + throws IOException + { + int len = dis.readShort(); + byte[] bytes = new byte[len]; + dis.readFully(bytes); + return new String(bytes, StandardCharsets.UTF_8); + } +} diff --git a/core/datacap-condor/src/main/java/io/edurt/datacap/condor/manager/TableManager.java b/core/datacap-condor/src/main/java/io/edurt/datacap/condor/manager/TableManager.java index b01c770a86..9d16562505 100644 --- a/core/datacap-condor/src/main/java/io/edurt/datacap/condor/manager/TableManager.java +++ b/core/datacap-condor/src/main/java/io/edurt/datacap/condor/manager/TableManager.java @@ -4,20 +4,16 @@ import io.edurt.datacap.condor.DataType; import io.edurt.datacap.condor.TableException; import io.edurt.datacap.condor.condition.Condition; -import io.edurt.datacap.condor.io.AppendableObjectInputStream; -import io.edurt.datacap.condor.io.AppendableObjectOutputStream; +import io.edurt.datacap.condor.io.BinaryMetadataCodec; +import io.edurt.datacap.condor.io.BinaryRowCodec; import io.edurt.datacap.condor.metadata.ColumnDefinition; import 
io.edurt.datacap.condor.metadata.RowDefinition; import io.edurt.datacap.condor.metadata.TableDefinition; import lombok.extern.slf4j.Slf4j; -import java.io.EOFException; import java.io.IOException; -import java.io.ObjectInputStream; -import java.io.ObjectOutputStream; import java.nio.file.Files; import java.nio.file.Path; -import java.nio.file.StandardOpenOption; import java.util.ArrayList; import java.util.Comparator; import java.util.HashMap; @@ -87,7 +83,6 @@ public void createTable(TableDefinition metadata) try { saveTableMetadata(metadata); - createTableDataFile(metadata.getTableName()); tableMetadataCache.put(metadata.getTableName(), metadata); @@ -145,26 +140,18 @@ public void batchInsert(String tableName, List columnNames, List values : valuesList) { validateInsertData(metadata, columnNames, values); } - // Create all rows List rows = new ArrayList<>(); for (List values : valuesList) { rows.add(createRow(metadata, columnNames, values)); } - // Batch write to file - Path dataPath = dataDir.resolve(tableName) - .resolve("data") - .resolve("table.data"); - try (ObjectOutputStream oos = new AppendableObjectOutputStream( - Files.newOutputStream(dataPath, StandardOpenOption.APPEND))) { - for (RowDefinition row : rows) { - oos.writeObject(row); - } + Path dataPath = getDataPath(tableName); + try { + BinaryRowCodec.appendRows(dataPath, rows); } catch (IOException e) { throw new TableException("Failed to batch insert rows: " + e.getMessage()); @@ -178,7 +165,7 @@ public void batchInsert(String tableName, List columnNames, List setValues, Condition whereCondition) throws TableException { - TableDefinition metadata = getTableMetadata(tableName); + getTableMetadata(tableName); ReadWriteLock lock = tableLocks.get(tableName); lock.writeLock().lock(); @@ -275,14 +262,17 @@ public List select(String tableName, List columnNames, Co } } + private Path getDataPath(String tableName) + { + return dataDir.resolve(tableName).resolve("data").resolve("table.data"); + } + private void appendRowToFile(String tableName, RowDefinition row) throws TableException { - Path dataPath = dataDir.resolve(tableName) - .resolve("data") - .resolve("table.data"); - try (ObjectOutputStream oos = new AppendableObjectOutputStream(Files.newOutputStream(dataPath, StandardOpenOption.APPEND))) { - oos.writeObject(row); + Path dataPath = getDataPath(tableName); + try { + BinaryRowCodec.appendRow(dataPath, row); } catch (IOException e) { log.error("Failed to append row to file", e); @@ -293,12 +283,9 @@ private void appendRowToFile(String tableName, RowDefinition row) private void saveAllRows(String tableName, List rows) throws TableException { - Path dataPath = dataDir.resolve(tableName).resolve("data").resolve("table.data"); - try (ObjectOutputStream oos = new ObjectOutputStream(Files.newOutputStream(dataPath, StandardOpenOption.CREATE, - StandardOpenOption.TRUNCATE_EXISTING))) { - for (RowDefinition row : rows) { - oos.writeObject(row); - } + Path dataPath = getDataPath(tableName); + try { + BinaryRowCodec.writeAll(dataPath, rows); } catch (IOException e) { throw new TableException("Failed to save rows to file: " + e.getMessage()); @@ -308,30 +295,14 @@ private void saveAllRows(String tableName, List rows) private List readAllRows(String tableName) throws TableException { - List rows = new ArrayList<>(); - Path dataPath = dataDir.resolve(tableName).resolve("data").resolve("table.data"); - - if (!Files.exists(dataPath)) { - return rows; - } - - try (AppendableObjectInputStream ois = new 
AppendableObjectInputStream(Files.newInputStream(dataPath))) { - while (true) { - try { - RowDefinition row = (RowDefinition) ois.readObject(); - rows.add(row); - } - catch (EOFException e) { - break; - } - } + Path dataPath = getDataPath(tableName); + try { + return BinaryRowCodec.readAll(dataPath); } - catch (IOException | ClassNotFoundException e) { + catch (IOException e) { log.error("Failed to read rows", e); throw new TableException("Failed to read rows: " + e.getMessage()); } - - return rows; } private void validateTableName(String tableName) @@ -436,28 +407,16 @@ private void saveTableMetadata(TableDefinition metadata) Path metaPath = dataDir.resolve(metadata.getTableName()) .resolve("metadata") .resolve("table.meta"); - if (!Files.exists(metaPath)) { - Files.createDirectories(metaPath.getParent()); - } - - try (ObjectOutputStream oos = new ObjectOutputStream(Files.newOutputStream(metaPath))) { - oos.writeObject(metadata); - } - catch (IOException e) { - log.error("Failed to save table metadata", e); - throw new IOException("Failed to save table metadata", e); - } + BinaryMetadataCodec.write(metaPath, metadata); } private void createTableDataFile(String tableName) throws IOException { - Path metaPath = dataDir.resolve(tableName) - .resolve("data") - .resolve("table.data"); - if (!Files.exists(metaPath)) { - Files.createDirectories(metaPath.getParent()); - Files.createFile(metaPath); + Path dataPath = getDataPath(tableName); + if (!Files.exists(dataPath)) { + Files.createDirectories(dataPath.getParent()); + Files.createFile(dataPath); } } @@ -467,13 +426,7 @@ private TableDefinition loadTableMetadata(String tableName) Path metaPath = dataDir.resolve(tableName) .resolve("metadata") .resolve("table.meta"); - try (ObjectInputStream ois = new ObjectInputStream(Files.newInputStream(metaPath))) { - return (TableDefinition) ois.readObject(); - } - catch (IOException | ClassNotFoundException e) { - log.error("Failed to load table metadata", e); - throw new IOException("Failed to load table metadata", e); - } + return BinaryMetadataCodec.read(metaPath); } public String[] listTables() diff --git a/core/datacap-condor/src/main/java/io/edurt/datacap/condor/metadata/ColumnDefinition.java b/core/datacap-condor/src/main/java/io/edurt/datacap/condor/metadata/ColumnDefinition.java index d59bc5c4b5..1891872ed3 100644 --- a/core/datacap-condor/src/main/java/io/edurt/datacap/condor/metadata/ColumnDefinition.java +++ b/core/datacap-condor/src/main/java/io/edurt/datacap/condor/metadata/ColumnDefinition.java @@ -3,11 +3,8 @@ import io.edurt.datacap.condor.DataType; import lombok.Getter; -import java.io.Serializable; - @Getter public class ColumnDefinition - implements Serializable { private String name; private DataType type; diff --git a/core/datacap-condor/src/main/java/io/edurt/datacap/condor/metadata/RowDefinition.java b/core/datacap-condor/src/main/java/io/edurt/datacap/condor/metadata/RowDefinition.java index 4a4b25d38d..130a3d24f6 100644 --- a/core/datacap-condor/src/main/java/io/edurt/datacap/condor/metadata/RowDefinition.java +++ b/core/datacap-condor/src/main/java/io/edurt/datacap/condor/metadata/RowDefinition.java @@ -1,16 +1,16 @@ package io.edurt.datacap.condor.metadata; import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; +import lombok.Getter; import lombok.Setter; -import java.io.Serializable; import java.util.HashMap; import java.util.Map; @SuppressFBWarnings(value = {"EI_EXPOSE_REP", "EI_EXPOSE_REP2"}) public class RowDefinition - implements Serializable { + @Getter @Setter private 
Map values; diff --git a/core/datacap-condor/src/main/java/io/edurt/datacap/condor/metadata/TableDefinition.java b/core/datacap-condor/src/main/java/io/edurt/datacap/condor/metadata/TableDefinition.java index cc5ea37285..e29b5017e8 100644 --- a/core/datacap-condor/src/main/java/io/edurt/datacap/condor/metadata/TableDefinition.java +++ b/core/datacap-condor/src/main/java/io/edurt/datacap/condor/metadata/TableDefinition.java @@ -4,14 +4,12 @@ import lombok.Getter; import lombok.ToString; -import java.io.Serializable; import java.util.List; @Getter @ToString @SuppressFBWarnings(value = {"EI_EXPOSE_REP", "EI_EXPOSE_REP2"}) public class TableDefinition - implements Serializable { private String tableName; private List columns; diff --git a/test/datacap-test-condor/src/test/java/io/edurt/datacap/condor/SQLExecutorTest.java b/test/datacap-test-condor/src/test/java/io/edurt/datacap/condor/SQLExecutorTest.java new file mode 100644 index 0000000000..f61f8405b6 --- /dev/null +++ b/test/datacap-test-condor/src/test/java/io/edurt/datacap/condor/SQLExecutorTest.java @@ -0,0 +1,308 @@ +package io.edurt.datacap.condor; + +import io.edurt.datacap.condor.manager.DatabaseManager; +import io.edurt.datacap.condor.metadata.RowDefinition; +import lombok.extern.slf4j.Slf4j; +import org.junit.After; +import org.junit.Before; +import org.junit.FixMethodOrder; +import org.junit.Test; +import org.junit.runners.MethodSorters; + +import java.util.List; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertTrue; + +@Slf4j +@FixMethodOrder(MethodSorters.NAME_ASCENDING) +public class SQLExecutorTest +{ + private DatabaseManager databaseManager; + private SQLExecutor executor; + + @Before + public void setUp() + { + databaseManager = DatabaseManager.createManager(); + executor = new SQLExecutor(databaseManager); + + executor.execute("CREATE DATABASE IF NOT EXISTS executor_test"); + executor.execute("USE executor_test"); + executor.execute("DROP TABLE IF EXISTS users"); + executor.execute("CREATE TABLE IF NOT EXISTS users (id INT, name VARCHAR(255), age INT)"); + } + + @After + public void tearDown() + { + executor.execute("DROP DATABASE IF EXISTS executor_test"); + } + + @Test + public void testCreateDatabase() + { + SQLResult result = executor.execute("CREATE DATABASE IF NOT EXISTS newdb"); + assertTrue(result.isSuccess()); + executor.execute("DROP DATABASE IF EXISTS newdb"); + } + + @Test + public void testDropDatabase() + { + executor.execute("CREATE DATABASE IF NOT EXISTS dropdb"); + SQLResult result = executor.execute("DROP DATABASE IF EXISTS dropdb"); + assertTrue(result.isSuccess()); + } + + @Test + public void testUseDatabase() + { + SQLResult result = executor.execute("USE executor_test"); + assertTrue(result.isSuccess()); + } + + @Test + public void testCreateTable() + { + executor.execute("DROP TABLE IF EXISTS new_table"); + SQLResult result = executor.execute( + "CREATE TABLE IF NOT EXISTS new_table (id INT, value VARCHAR(100))"); + assertTrue(result.isSuccess()); + } + + @Test + public void testCreateTableIfNotExists() + { + SQLResult result = executor.execute( + "CREATE TABLE IF NOT EXISTS users (id INT, name VARCHAR(255), age INT)"); + assertTrue(result.isSuccess()); + } + + @Test + public void testDropTable() + { + executor.execute("CREATE TABLE IF NOT EXISTS droptable (id INT)"); + SQLResult result = executor.execute("DROP TABLE IF EXISTS droptable"); + assertTrue(result.isSuccess()); 
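+ // The drop is expected to succeed because droptable was created in the statement just above.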
+ } + + @Test + public void testDropTableIfExists() + { + SQLResult result = executor.execute("DROP TABLE IF EXISTS nonexistent_table"); + assertTrue(result.isSuccess()); + } + + @Test + public void testInsertSingle() + { + SQLResult result = executor.execute( + "INSERT INTO users (id, name, age) VALUES (1, 'Alice', 25)"); + assertTrue(result.isSuccess()); + } + + @Test + public void testInsertBatch() + { + SQLResult result = executor.execute( + "INSERT INTO users (id, name, age) VALUES (1, 'Alice', 25), (2, 'Bob', 30)"); + assertTrue(result.isSuccess()); + } + + @Test + public void testSelectAll() + { + executor.execute("INSERT INTO users (id, name, age) VALUES (1, 'Alice', 25)"); + executor.execute("INSERT INTO users (id, name, age) VALUES (2, 'Bob', 30)"); + + SQLResult> result = executor.execute( + "SELECT id, name, age FROM users"); + assertTrue(result.isSuccess()); + assertNotNull(result.getData()); + assertEquals(2, result.getData().size()); + } + + @Test + public void testSelectWithWhere() + { + executor.execute("INSERT INTO users (id, name, age) VALUES (1, 'Alice', 25)"); + executor.execute("INSERT INTO users (id, name, age) VALUES (2, 'Bob', 30)"); + executor.execute("INSERT INTO users (id, name, age) VALUES (3, 'Charlie', 35)"); + + SQLResult> result = executor.execute( + "SELECT id, name, age FROM users WHERE age > 28"); + assertTrue(result.isSuccess()); + assertEquals(2, result.getData().size()); + } + + @Test + public void testSelectWithWhereEquals() + { + executor.execute("INSERT INTO users (id, name, age) VALUES (1, 'Alice', 25)"); + executor.execute("INSERT INTO users (id, name, age) VALUES (2, 'Bob', 30)"); + + SQLResult> result = executor.execute( + "SELECT id, name FROM users WHERE name = 'Bob'"); + assertTrue(result.isSuccess()); + assertEquals(1, result.getData().size()); + assertEquals("Bob", result.getData().get(0).getValue("name")); + } + + @Test + public void testSelectWithWhereAnd() + { + executor.execute("INSERT INTO users (id, name, age) VALUES (1, 'Alice', 25)"); + executor.execute("INSERT INTO users (id, name, age) VALUES (2, 'Bob', 30)"); + executor.execute("INSERT INTO users (id, name, age) VALUES (3, 'Charlie', 35)"); + + SQLResult> result = executor.execute( + "SELECT id, name FROM users WHERE age > 20 AND age < 32"); + assertTrue(result.isSuccess()); + assertEquals(2, result.getData().size()); + } + + @Test + public void testSelectWithWhereOr() + { + executor.execute("INSERT INTO users (id, name, age) VALUES (1, 'Alice', 25)"); + executor.execute("INSERT INTO users (id, name, age) VALUES (2, 'Bob', 30)"); + executor.execute("INSERT INTO users (id, name, age) VALUES (3, 'Charlie', 35)"); + + SQLResult> result = executor.execute( + "SELECT id, name FROM users WHERE name = 'Alice' OR name = 'Charlie'"); + assertTrue(result.isSuccess()); + assertEquals(2, result.getData().size()); + } + + @Test + public void testUpdate() + { + executor.execute("INSERT INTO users (id, name, age) VALUES (1, 'Alice', 25)"); + executor.execute("INSERT INTO users (id, name, age) VALUES (2, 'Bob', 30)"); + + SQLResult result = executor.execute( + "UPDATE users SET age = 26 WHERE name = 'Alice'"); + assertTrue(result.isSuccess()); + + SQLResult> selectResult = executor.execute( + "SELECT id, name, age FROM users WHERE name = 'Alice'"); + assertEquals(26, selectResult.getData().get(0).getValue("age")); + } + + @Test + public void testUpdateMultipleColumns() + { + executor.execute("INSERT INTO users (id, name, age) VALUES (1, 'Alice', 25)"); + + SQLResult result = executor.execute( + 
"UPDATE users SET name = 'Alicia', age = 26 WHERE id = 1"); + assertTrue(result.isSuccess()); + + SQLResult> selectResult = executor.execute( + "SELECT id, name, age FROM users WHERE id = 1"); + assertEquals("Alicia", selectResult.getData().get(0).getValue("name")); + assertEquals(26, selectResult.getData().get(0).getValue("age")); + } + + @Test + public void testDelete() + { + executor.execute("INSERT INTO users (id, name, age) VALUES (1, 'Alice', 25)"); + executor.execute("INSERT INTO users (id, name, age) VALUES (2, 'Bob', 30)"); + executor.execute("INSERT INTO users (id, name, age) VALUES (3, 'Charlie', 35)"); + + SQLResult result = executor.execute( + "DELETE FROM users WHERE id = 2"); + assertTrue(result.isSuccess()); + + SQLResult> selectResult = executor.execute( + "SELECT id, name FROM users"); + assertEquals(2, selectResult.getData().size()); + } + + @Test + public void testDeleteWithCondition() + { + executor.execute("INSERT INTO users (id, name, age) VALUES (1, 'Alice', 25)"); + executor.execute("INSERT INTO users (id, name, age) VALUES (2, 'Bob', 30)"); + executor.execute("INSERT INTO users (id, name, age) VALUES (3, 'Charlie', 35)"); + + SQLResult result = executor.execute( + "DELETE FROM users WHERE age >= 30"); + assertTrue(result.isSuccess()); + + SQLResult> selectResult = executor.execute( + "SELECT id, name FROM users"); + assertEquals(1, selectResult.getData().size()); + assertEquals("Alice", selectResult.getData().get(0).getValue("name")); + } + + @Test + public void testShowDatabases() + { + SQLResult> result = executor.execute("SHOW DATABASES"); + assertTrue(result.isSuccess()); + assertNotNull(result.getData()); + assertFalse(result.getData().isEmpty()); + } + + @Test + public void testShowTables() + { + SQLResult> result = executor.execute("SHOW TABLES"); + assertTrue(result.isSuccess()); + assertNotNull(result.getData()); + } + + @Test + public void testShowColumns() + { + SQLResult> result = executor.execute( + "SHOW COLUMNS FROM users"); + assertTrue(result.isSuccess()); + assertNotNull(result.getData()); + assertEquals(3, result.getData().size()); + + RowDefinition firstCol = result.getData().get(0); + assertNotNull(firstCol.getValue("Field")); + assertNotNull(firstCol.getValue("Type")); + } + + @Test + public void testFullLifecycle() + { + // Insert + executor.execute("INSERT INTO users (id, name, age) VALUES (1, 'Alice', 25)"); + executor.execute("INSERT INTO users (id, name, age) VALUES (2, 'Bob', 30)"); + executor.execute("INSERT INTO users (id, name, age) VALUES (3, 'Charlie', 35)"); + + // Select all + SQLResult> all = executor.execute("SELECT id, name, age FROM users"); + assertEquals(3, all.getData().size()); + + // Update + executor.execute("UPDATE users SET age = 31 WHERE name = 'Bob'"); + + // Verify update + SQLResult> updated = executor.execute( + "SELECT id, name, age FROM users WHERE name = 'Bob'"); + assertEquals(31, updated.getData().get(0).getValue("age")); + + // Delete + executor.execute("DELETE FROM users WHERE age > 32"); + + // Verify delete + SQLResult> afterDelete = executor.execute("SELECT id, name, age FROM users"); + assertEquals(2, afterDelete.getData().size()); + } + + @Test + public void testUnsupportedSQL() + { + SQLResult result = executor.execute("THIS IS NOT SQL"); + assertFalse(result.isSuccess()); + } +} diff --git a/test/datacap-test-condor/src/test/java/io/edurt/datacap/condor/condition/ConditionTest.java b/test/datacap-test-condor/src/test/java/io/edurt/datacap/condor/condition/ConditionTest.java new file mode 100644 
index 0000000000..d614e6d94e --- /dev/null +++ b/test/datacap-test-condor/src/test/java/io/edurt/datacap/condor/condition/ConditionTest.java @@ -0,0 +1,164 @@ +package io.edurt.datacap.condor.condition; + +import io.edurt.datacap.condor.ComparisonOperator; +import io.edurt.datacap.condor.metadata.RowDefinition; +import lombok.extern.slf4j.Slf4j; +import org.junit.Test; + +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; + +@Slf4j +public class ConditionTest +{ + private RowDefinition createRow(String name, int age, double score) + { + RowDefinition row = new RowDefinition(); + row.setValue("name", name); + row.setValue("age", age); + row.setValue("score", score); + return row; + } + + @Test + public void testEquals() + { + SimpleCondition condition = new SimpleCondition("name", "Alice", ComparisonOperator.EQUALS); + assertTrue(condition.evaluate(createRow("Alice", 25, 90.0))); + assertFalse(condition.evaluate(createRow("Bob", 30, 85.0))); + } + + @Test + public void testNotEquals() + { + SimpleCondition condition = new SimpleCondition("name", "Alice", ComparisonOperator.NOT_EQUALS); + assertFalse(condition.evaluate(createRow("Alice", 25, 90.0))); + assertTrue(condition.evaluate(createRow("Bob", 30, 85.0))); + } + + @Test + public void testGreaterThan() + { + SimpleCondition condition = new SimpleCondition("age", 25, ComparisonOperator.GREATER_THAN); + assertFalse(condition.evaluate(createRow("Alice", 25, 90.0))); + assertTrue(condition.evaluate(createRow("Bob", 30, 85.0))); + assertFalse(condition.evaluate(createRow("Charlie", 20, 95.0))); + } + + @Test + public void testGreaterThanOrEquals() + { + SimpleCondition condition = new SimpleCondition("age", 25, ComparisonOperator.GREATER_THAN_OR_EQUALS); + assertTrue(condition.evaluate(createRow("Alice", 25, 90.0))); + assertTrue(condition.evaluate(createRow("Bob", 30, 85.0))); + assertFalse(condition.evaluate(createRow("Charlie", 20, 95.0))); + } + + @Test + public void testLessThan() + { + SimpleCondition condition = new SimpleCondition("age", 30, ComparisonOperator.LESS_THAN); + assertTrue(condition.evaluate(createRow("Alice", 25, 90.0))); + assertFalse(condition.evaluate(createRow("Bob", 30, 85.0))); + assertFalse(condition.evaluate(createRow("Charlie", 35, 95.0))); + } + + @Test + public void testLessThanOrEquals() + { + SimpleCondition condition = new SimpleCondition("age", 30, ComparisonOperator.LESS_THAN_OR_EQUALS); + assertTrue(condition.evaluate(createRow("Alice", 25, 90.0))); + assertTrue(condition.evaluate(createRow("Bob", 30, 85.0))); + assertFalse(condition.evaluate(createRow("Charlie", 35, 95.0))); + } + + @Test + public void testDoubleComparison() + { + SimpleCondition condition = new SimpleCondition("score", 90.0, ComparisonOperator.GREATER_THAN); + assertFalse(condition.evaluate(createRow("Alice", 25, 90.0))); + assertTrue(condition.evaluate(createRow("Charlie", 20, 95.0))); + assertFalse(condition.evaluate(createRow("Bob", 30, 85.0))); + } + + @Test + public void testNullValue() + { + RowDefinition row = new RowDefinition(); + row.setValue("name", null); + row.setValue("age", 25); + + SimpleCondition condition = new SimpleCondition("name", "Alice", ComparisonOperator.EQUALS); + assertFalse(condition.evaluate(row)); + } + + @Test + public void testBothNull() + { + RowDefinition row = new RowDefinition(); + row.setValue("name", null); + + SimpleCondition condition = new SimpleCondition("name", null, ComparisonOperator.EQUALS); + assertTrue(condition.evaluate(row)); + } + + @Test + public 
void testAndCondition() + { + SimpleCondition ageCondition = new SimpleCondition("age", 20, ComparisonOperator.GREATER_THAN); + SimpleCondition scoreCondition = new SimpleCondition("score", 90.0, ComparisonOperator.GREATER_THAN_OR_EQUALS); + AndCondition andCondition = new AndCondition(ageCondition, scoreCondition); + + assertTrue(andCondition.evaluate(createRow("Alice", 25, 90.0))); + assertFalse(andCondition.evaluate(createRow("Bob", 30, 85.0))); + assertFalse(andCondition.evaluate(createRow("Charlie", 18, 95.0))); + } + + @Test + public void testOrCondition() + { + SimpleCondition nameCondition = new SimpleCondition("name", "Alice", ComparisonOperator.EQUALS); + SimpleCondition ageCondition = new SimpleCondition("age", 35, ComparisonOperator.EQUALS); + OrCondition orCondition = new OrCondition(nameCondition, ageCondition); + + assertTrue(orCondition.evaluate(createRow("Alice", 25, 90.0))); + assertTrue(orCondition.evaluate(createRow("Charlie", 35, 95.0))); + assertFalse(orCondition.evaluate(createRow("Bob", 30, 85.0))); + } + + @Test + public void testNestedConditions() + { + // (name = 'Alice' AND age > 20) OR score >= 95 + SimpleCondition nameCondition = new SimpleCondition("name", "Alice", ComparisonOperator.EQUALS); + SimpleCondition ageCondition = new SimpleCondition("age", 20, ComparisonOperator.GREATER_THAN); + AndCondition andCondition = new AndCondition(nameCondition, ageCondition); + + SimpleCondition scoreCondition = new SimpleCondition("score", 95.0, ComparisonOperator.GREATER_THAN_OR_EQUALS); + OrCondition orCondition = new OrCondition(andCondition, scoreCondition); + + assertTrue(orCondition.evaluate(createRow("Alice", 25, 80.0))); + assertTrue(orCondition.evaluate(createRow("Bob", 30, 95.0))); + assertFalse(orCondition.evaluate(createRow("Bob", 30, 85.0))); + assertFalse(orCondition.evaluate(createRow("Alice", 18, 80.0))); + } + + @Test + public void testCrossTypeNumericComparison() + { + RowDefinition row = new RowDefinition(); + row.setValue("value", 10); + + // Integer vs Long comparison + SimpleCondition condition = new SimpleCondition("value", 5L, ComparisonOperator.GREATER_THAN); + assertTrue(condition.evaluate(row)); + } + + @Test + public void testStringComparison() + { + SimpleCondition condition = new SimpleCondition("name", "Bob", ComparisonOperator.LESS_THAN); + assertTrue(condition.evaluate(createRow("Alice", 25, 90.0))); + assertFalse(condition.evaluate(createRow("Charlie", 30, 85.0))); + } +} diff --git a/test/datacap-test-condor/src/test/java/io/edurt/datacap/condor/io/BinaryCodecTest.java b/test/datacap-test-condor/src/test/java/io/edurt/datacap/condor/io/BinaryCodecTest.java new file mode 100644 index 0000000000..de66662b18 --- /dev/null +++ b/test/datacap-test-condor/src/test/java/io/edurt/datacap/condor/io/BinaryCodecTest.java @@ -0,0 +1,283 @@ +package io.edurt.datacap.condor.io; + +import io.edurt.datacap.condor.DataType; +import io.edurt.datacap.condor.metadata.ColumnDefinition; +import io.edurt.datacap.condor.metadata.RowDefinition; +import io.edurt.datacap.condor.metadata.TableDefinition; +import lombok.extern.slf4j.Slf4j; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; + +@Slf4j 
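+// Round-trip tests for the binary codecs introduced in this change. A typical call sequence,
+// using only the APIs added above, is:
+//   BinaryRowCodec.writeAll(dataPath, rows);
+//   BinaryRowCodec.appendRow(dataPath, row);   // appendRow/appendRows re-read and rewrite the whole file
+//   List<RowDefinition> loaded = BinaryRowCodec.readAll(dataPath);
+//   BinaryMetadataCodec.write(metaPath, tableDefinition);
+//   TableDefinition metadata = BinaryMetadataCodec.read(metaPath);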
+public class BinaryCodecTest +{ + private Path tempDir; + + @Before + public void setUp() + throws IOException + { + tempDir = Files.createTempDirectory("condor_test_"); + } + + @After + public void tearDown() + throws IOException + { + if (tempDir != null && Files.exists(tempDir)) { + Files.walk(tempDir) + .sorted((a, b) -> b.compareTo(a)) + .forEach(path -> { + try { + Files.deleteIfExists(path); + } + catch (IOException ignored) { + } + }); + } + } + + @Test + public void testRowCodecRoundTrip() + throws IOException + { + Path dataPath = tempDir.resolve("test.data"); + + List rows = new ArrayList<>(); + + RowDefinition row1 = new RowDefinition(); + row1.setValue("id", 1); + row1.setValue("name", "Alice"); + row1.setValue("score", 95.5); + row1.setValue("active", true); + rows.add(row1); + + RowDefinition row2 = new RowDefinition(); + row2.setValue("id", 2); + row2.setValue("name", "Bob"); + row2.setValue("score", 87.3); + row2.setValue("active", false); + rows.add(row2); + + BinaryRowCodec.writeAll(dataPath, rows); + + List loaded = BinaryRowCodec.readAll(dataPath); + assertEquals(2, loaded.size()); + + assertEquals(1, loaded.get(0).getValue("id")); + assertEquals("Alice", loaded.get(0).getValue("name")); + assertEquals(95.5, (double) loaded.get(0).getValue("score"), 0.01); + assertEquals(true, loaded.get(0).getValue("active")); + + assertEquals(2, loaded.get(1).getValue("id")); + assertEquals("Bob", loaded.get(1).getValue("name")); + } + + @Test + public void testRowCodecNullValues() + throws IOException + { + Path dataPath = tempDir.resolve("null.data"); + + List rows = new ArrayList<>(); + RowDefinition row = new RowDefinition(); + row.setValue("id", 1); + row.setValue("name", null); + rows.add(row); + + BinaryRowCodec.writeAll(dataPath, rows); + + List loaded = BinaryRowCodec.readAll(dataPath); + assertEquals(1, loaded.size()); + assertEquals(1, loaded.get(0).getValue("id")); + assertNull(loaded.get(0).getValue("name")); + } + + @Test + public void testRowCodecAppend() + throws IOException + { + Path dataPath = tempDir.resolve("append.data"); + + RowDefinition row1 = new RowDefinition(); + row1.setValue("id", 1); + BinaryRowCodec.appendRow(dataPath, row1); + + RowDefinition row2 = new RowDefinition(); + row2.setValue("id", 2); + BinaryRowCodec.appendRow(dataPath, row2); + + List loaded = BinaryRowCodec.readAll(dataPath); + assertEquals(2, loaded.size()); + assertEquals(1, loaded.get(0).getValue("id")); + assertEquals(2, loaded.get(1).getValue("id")); + } + + @Test + public void testRowCodecAppendBatch() + throws IOException + { + Path dataPath = tempDir.resolve("batch.data"); + + RowDefinition row1 = new RowDefinition(); + row1.setValue("id", 1); + BinaryRowCodec.appendRow(dataPath, row1); + + List batch = new ArrayList<>(); + RowDefinition row2 = new RowDefinition(); + row2.setValue("id", 2); + batch.add(row2); + RowDefinition row3 = new RowDefinition(); + row3.setValue("id", 3); + batch.add(row3); + BinaryRowCodec.appendRows(dataPath, batch); + + List loaded = BinaryRowCodec.readAll(dataPath); + assertEquals(3, loaded.size()); + } + + @Test + public void testRowCodecEmptyFile() + throws IOException + { + Path dataPath = tempDir.resolve("empty.data"); + Files.createFile(dataPath); + + List loaded = BinaryRowCodec.readAll(dataPath); + assertTrue(loaded.isEmpty()); + } + + @Test + public void testRowCodecNonExistentFile() + throws IOException + { + Path dataPath = tempDir.resolve("nonexistent.data"); + + List loaded = BinaryRowCodec.readAll(dataPath); + assertTrue(loaded.isEmpty()); 
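+ // readAll returns an empty list rather than throwing when the data file does not exist.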
+ } + + @Test + public void testRowCodecAllTypes() + throws IOException + { + Path dataPath = tempDir.resolve("types.data"); + + List rows = new ArrayList<>(); + RowDefinition row = new RowDefinition(); + row.setValue("int_val", 42); + row.setValue("long_val", 123456789L); + row.setValue("double_val", 3.14); + row.setValue("float_val", 2.71f); + row.setValue("bool_val", true); + row.setValue("string_val", "hello world"); + row.setValue("null_val", null); + rows.add(row); + + BinaryRowCodec.writeAll(dataPath, rows); + + List loaded = BinaryRowCodec.readAll(dataPath); + assertEquals(1, loaded.size()); + RowDefinition loadedRow = loaded.get(0); + + assertEquals(42, loadedRow.getValue("int_val")); + assertEquals(123456789L, loadedRow.getValue("long_val")); + assertEquals(3.14, (double) loadedRow.getValue("double_val"), 0.001); + assertEquals(2.71f, (float) loadedRow.getValue("float_val"), 0.001); + assertEquals(true, loadedRow.getValue("bool_val")); + assertEquals("hello world", loadedRow.getValue("string_val")); + assertNull(loadedRow.getValue("null_val")); + } + + @Test + public void testMetadataCodecRoundTrip() + throws IOException + { + Path metaPath = tempDir.resolve("table.meta"); + + List columns = Arrays.asList( + new ColumnDefinition("id", DataType.INTEGER, true, false), + new ColumnDefinition("name", DataType.VARCHAR, false, true), + new ColumnDefinition("score", DataType.DOUBLE, false, true), + new ColumnDefinition("active", DataType.BOOLEAN, false, true) + ); + TableDefinition original = new TableDefinition("test_table", columns); + + BinaryMetadataCodec.write(metaPath, original); + + TableDefinition loaded = BinaryMetadataCodec.read(metaPath); + assertEquals("test_table", loaded.getTableName()); + assertEquals(4, loaded.getColumns().size()); + + ColumnDefinition idCol = loaded.getColumn("id"); + assertEquals("id", idCol.getName()); + assertEquals(DataType.INTEGER, idCol.getType()); + assertTrue(idCol.isPrimaryKey()); + assertFalse(idCol.isNullable()); + + ColumnDefinition nameCol = loaded.getColumn("name"); + assertEquals("name", nameCol.getName()); + assertEquals(DataType.VARCHAR, nameCol.getType()); + assertFalse(nameCol.isPrimaryKey()); + assertTrue(nameCol.isNullable()); + } + + @Test + public void testMetadataCodecAllDataTypes() + throws IOException + { + Path metaPath = tempDir.resolve("alltypes.meta"); + + List columns = Arrays.asList( + new ColumnDefinition("c1", DataType.INTEGER, false, true), + new ColumnDefinition("c2", DataType.BIGINT, false, true), + new ColumnDefinition("c3", DataType.VARCHAR, false, true), + new ColumnDefinition("c4", DataType.TEXT, false, true), + new ColumnDefinition("c5", DataType.DOUBLE, false, true), + new ColumnDefinition("c6", DataType.FLOAT, false, true), + new ColumnDefinition("c7", DataType.BOOLEAN, false, true), + new ColumnDefinition("c8", DataType.TIMESTAMP, false, true) + ); + TableDefinition original = new TableDefinition("type_table", columns); + + BinaryMetadataCodec.write(metaPath, original); + TableDefinition loaded = BinaryMetadataCodec.read(metaPath); + + assertEquals(8, loaded.getColumns().size()); + assertEquals(DataType.BIGINT, loaded.getColumn("c2").getType()); + assertEquals(DataType.TEXT, loaded.getColumn("c4").getType()); + assertEquals(DataType.FLOAT, loaded.getColumn("c6").getType()); + assertEquals(DataType.TIMESTAMP, loaded.getColumn("c8").getType()); + } + + @Test + public void testMetadataCodecCreateDirectory() + throws IOException + { + Path metaPath = tempDir.resolve("sub").resolve("dir").resolve("table.meta"); + 
+ List columns = Arrays.asList( + new ColumnDefinition("id", DataType.INTEGER, true, false) + ); + TableDefinition original = new TableDefinition("t", columns); + + BinaryMetadataCodec.write(metaPath, original); + assertTrue(Files.exists(metaPath)); + + TableDefinition loaded = BinaryMetadataCodec.read(metaPath); + assertEquals("t", loaded.getTableName()); + } +} diff --git a/test/datacap-test-condor/src/test/java/io/edurt/datacap/condor/manager/DatabaseManagerTest.java b/test/datacap-test-condor/src/test/java/io/edurt/datacap/condor/manager/DatabaseManagerTest.java new file mode 100644 index 0000000000..edd4ad2e74 --- /dev/null +++ b/test/datacap-test-condor/src/test/java/io/edurt/datacap/condor/manager/DatabaseManagerTest.java @@ -0,0 +1,142 @@ +package io.edurt.datacap.condor.manager; + +import io.edurt.datacap.condor.DatabaseException; +import io.edurt.datacap.condor.metadata.DatabaseDefinition; +import lombok.extern.slf4j.Slf4j; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +import static org.junit.Assert.assertArrayEquals; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; + +@Slf4j +public class DatabaseManagerTest +{ + private DatabaseManager manager; + + @Before + public void setUp() + { + manager = DatabaseManager.createManager(); + } + + @After + public void tearDown() + { + for (String db : manager.listDatabases()) { + try { + manager.dropDatabase(db); + } + catch (DatabaseException ignored) { + } + } + } + + @Test + public void testCreateDatabase() + throws DatabaseException + { + manager.createDatabase("testdb"); + assertTrue(manager.databaseExists("testdb")); + } + + @Test(expected = DatabaseException.class) + public void testCreateDuplicateDatabase() + throws DatabaseException + { + manager.createDatabase("dupdb"); + manager.createDatabase("dupdb"); + } + + @Test(expected = DatabaseException.class) + public void testCreateDatabaseInvalidName() + throws DatabaseException + { + manager.createDatabase("123invalid"); + } + + @Test(expected = DatabaseException.class) + public void testCreateDatabaseEmptyName() + throws DatabaseException + { + manager.createDatabase(""); + } + + @Test + public void testDropDatabase() + throws DatabaseException + { + manager.createDatabase("dropme"); + assertTrue(manager.databaseExists("dropme")); + + manager.dropDatabase("dropme"); + assertFalse(manager.databaseExists("dropme")); + } + + @Test(expected = DatabaseException.class) + public void testDropNonExistentDatabase() + throws DatabaseException + { + manager.dropDatabase("nonexistent"); + } + + @Test + public void testUseDatabase() + throws DatabaseException + { + manager.createDatabase("usedb"); + manager.useDatabase("usedb"); + + DatabaseDefinition current = manager.getCurrentDatabase(); + assertNotNull(current); + assertEquals("usedb", current.getName()); + } + + @Test(expected = DatabaseException.class) + public void testUseNonExistentDatabase() + throws DatabaseException + { + manager.useDatabase("nonexistent"); + } + + @Test(expected = DatabaseException.class) + public void testGetCurrentDatabaseWithoutSelection() + throws DatabaseException + { + DatabaseManager freshManager = DatabaseManager.createManager(); + freshManager.getCurrentDatabase(); + } + + @Test + public void testDropCurrentDatabaseResetsSelection() + throws DatabaseException + { + manager.createDatabase("currentdb"); + 
manager.useDatabase("currentdb"); + manager.dropDatabase("currentdb"); + + try { + manager.getCurrentDatabase(); + fail("Should have thrown DatabaseException"); + } + catch (DatabaseException e) { + // expected + } + } + + @Test + public void testListDatabases() + throws DatabaseException + { + manager.createDatabase("db1"); + manager.createDatabase("db2"); + + String[] databases = manager.listDatabases(); + assertEquals(2, databases.length); + } +} diff --git a/test/datacap-test-condor/src/test/java/io/edurt/datacap/condor/manager/TableManagerTest.java b/test/datacap-test-condor/src/test/java/io/edurt/datacap/condor/manager/TableManagerTest.java new file mode 100644 index 0000000000..bbe312557c --- /dev/null +++ b/test/datacap-test-condor/src/test/java/io/edurt/datacap/condor/manager/TableManagerTest.java @@ -0,0 +1,285 @@ +package io.edurt.datacap.condor.manager; + +import io.edurt.datacap.condor.ComparisonOperator; +import io.edurt.datacap.condor.DataType; +import io.edurt.datacap.condor.DatabaseException; +import io.edurt.datacap.condor.TableException; +import io.edurt.datacap.condor.condition.SimpleCondition; +import io.edurt.datacap.condor.metadata.ColumnDefinition; +import io.edurt.datacap.condor.metadata.RowDefinition; +import io.edurt.datacap.condor.metadata.TableDefinition; +import lombok.extern.slf4j.Slf4j; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +import java.util.Arrays; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; + +@Slf4j +public class TableManagerTest +{ + private DatabaseManager databaseManager; + private TableManager tableManager; + + @Before + public void setUp() + throws DatabaseException + { + databaseManager = DatabaseManager.createManager(); + if (!databaseManager.databaseExists("table_test_db")) { + databaseManager.createDatabase("table_test_db"); + } + databaseManager.useDatabase("table_test_db"); + tableManager = databaseManager.getCurrentDatabase().getTableManager(); + } + + @After + public void tearDown() + { + try { + databaseManager.dropDatabase("table_test_db"); + } + catch (DatabaseException ignored) { + } + } + + private TableDefinition createUserTable() + { + List columns = Arrays.asList( + new ColumnDefinition("id", DataType.INTEGER, true, false), + new ColumnDefinition("name", DataType.VARCHAR, false, true), + new ColumnDefinition("age", DataType.INTEGER, false, true) + ); + return new TableDefinition("users", columns); + } + + @Test + public void testCreateTable() + throws TableException + { + tableManager.createTable(createUserTable()); + assertTrue(tableManager.tableExists("users")); + } + + @Test(expected = TableException.class) + public void testCreateDuplicateTable() + throws TableException + { + tableManager.createTable(createUserTable()); + tableManager.createTable(createUserTable()); + } + + @Test + public void testDropTable() + throws TableException + { + tableManager.createTable(createUserTable()); + assertTrue(tableManager.tableExists("users")); + + tableManager.dropTable("users"); + assertFalse(tableManager.tableExists("users")); + } + + @Test(expected = TableException.class) + public void testDropNonExistentTable() + throws TableException + { + tableManager.dropTable("nonexistent"); + } + + @Test + public void testInsertAndSelect() + throws TableException + { + tableManager.createTable(createUserTable()); + + tableManager.insert("users", + 
Arrays.asList("id", "name", "age"), + Arrays.asList(1, "Alice", 25)); + + List rows = tableManager.select("users", null, null); + assertEquals(1, rows.size()); + assertEquals(1, rows.get(0).getValue("id")); + assertEquals("Alice", rows.get(0).getValue("name")); + assertEquals(25, rows.get(0).getValue("age")); + } + + @Test + public void testBatchInsert() + throws TableException + { + tableManager.createTable(createUserTable()); + + List> valuesList = Arrays.asList( + Arrays.asList(1, "Alice", 25), + Arrays.asList(2, "Bob", 30), + Arrays.asList(3, "Charlie", 35) + ); + + tableManager.batchInsert("users", + Arrays.asList("id", "name", "age"), + valuesList); + + List rows = tableManager.select("users", null, null); + assertEquals(3, rows.size()); + } + + @Test + public void testSelectWithColumnProjection() + throws TableException + { + tableManager.createTable(createUserTable()); + + tableManager.insert("users", + Arrays.asList("id", "name", "age"), + Arrays.asList(1, "Alice", 25)); + + List rows = tableManager.select("users", + Arrays.asList("name"), null); + assertEquals(1, rows.size()); + assertEquals("Alice", rows.get(0).getValue("name")); + } + + @Test + public void testSelectWithWhereCondition() + throws TableException + { + tableManager.createTable(createUserTable()); + + tableManager.insert("users", Arrays.asList("id", "name", "age"), Arrays.asList(1, "Alice", 25)); + tableManager.insert("users", Arrays.asList("id", "name", "age"), Arrays.asList(2, "Bob", 30)); + tableManager.insert("users", Arrays.asList("id", "name", "age"), Arrays.asList(3, "Charlie", 35)); + + SimpleCondition condition = new SimpleCondition("age", 30, ComparisonOperator.GREATER_THAN); + List rows = tableManager.select("users", null, condition); + assertEquals(1, rows.size()); + assertEquals("Charlie", rows.get(0).getValue("name")); + } + + @Test + public void testUpdate() + throws TableException + { + tableManager.createTable(createUserTable()); + + tableManager.insert("users", Arrays.asList("id", "name", "age"), Arrays.asList(1, "Alice", 25)); + tableManager.insert("users", Arrays.asList("id", "name", "age"), Arrays.asList(2, "Bob", 30)); + + Map setValues = new HashMap<>(); + setValues.put("age", 26); + + SimpleCondition condition = new SimpleCondition("name", "Alice", ComparisonOperator.EQUALS); + int updated = tableManager.update("users", setValues, condition); + assertEquals(1, updated); + + List rows = tableManager.select("users", null, + new SimpleCondition("name", "Alice", ComparisonOperator.EQUALS)); + assertEquals(26, rows.get(0).getValue("age")); + } + + @Test + public void testUpdateAll() + throws TableException + { + tableManager.createTable(createUserTable()); + + tableManager.insert("users", Arrays.asList("id", "name", "age"), Arrays.asList(1, "Alice", 25)); + tableManager.insert("users", Arrays.asList("id", "name", "age"), Arrays.asList(2, "Bob", 30)); + + Map setValues = new HashMap<>(); + setValues.put("age", 0); + + int updated = tableManager.update("users", setValues, null); + assertEquals(2, updated); + + List rows = tableManager.select("users", null, null); + for (RowDefinition row : rows) { + assertEquals(0, row.getValue("age")); + } + } + + @Test + public void testDelete() + throws TableException + { + tableManager.createTable(createUserTable()); + + tableManager.insert("users", Arrays.asList("id", "name", "age"), Arrays.asList(1, "Alice", 25)); + tableManager.insert("users", Arrays.asList("id", "name", "age"), Arrays.asList(2, "Bob", 30)); + tableManager.insert("users", Arrays.asList("id", 
"name", "age"), Arrays.asList(3, "Charlie", 35)); + + SimpleCondition condition = new SimpleCondition("id", 2, ComparisonOperator.EQUALS); + int deleted = tableManager.delete("users", condition); + assertEquals(1, deleted); + + List rows = tableManager.select("users", null, null); + assertEquals(2, rows.size()); + } + + @Test + public void testDeleteAll() + throws TableException + { + tableManager.createTable(createUserTable()); + + tableManager.insert("users", Arrays.asList("id", "name", "age"), Arrays.asList(1, "Alice", 25)); + tableManager.insert("users", Arrays.asList("id", "name", "age"), Arrays.asList(2, "Bob", 30)); + + int deleted = tableManager.delete("users", null); + assertEquals(2, deleted); + + List rows = tableManager.select("users", null, null); + assertEquals(0, rows.size()); + } + + @Test + public void testListTables() + throws TableException + { + tableManager.createTable(createUserTable()); + + List columns = Arrays.asList( + new ColumnDefinition("oid", DataType.INTEGER, true, false), + new ColumnDefinition("total", DataType.DOUBLE, false, true) + ); + tableManager.createTable(new TableDefinition("orders", columns)); + + String[] tables = tableManager.listTables(); + assertEquals(2, tables.length); + } + + @Test(expected = TableException.class) + public void testInsertInvalidColumn() + throws TableException + { + tableManager.createTable(createUserTable()); + tableManager.insert("users", + Arrays.asList("id", "nonexistent"), + Arrays.asList(1, "value")); + } + + @Test(expected = TableException.class) + public void testInsertColumnCountMismatch() + throws TableException + { + tableManager.createTable(createUserTable()); + tableManager.insert("users", + Arrays.asList("id", "name"), + Arrays.asList(1)); + } + + @Test(expected = TableException.class) + public void testSelectNonExistentColumn() + throws TableException + { + tableManager.createTable(createUserTable()); + tableManager.select("users", Arrays.asList("nonexistent"), null); + } +} From a127a45bcf0fd502ad62872560f09646635f7028 Mon Sep 17 00:00:00 2001 From: qianmoQ Date: Sat, 7 Feb 2026 11:32:22 +0800 Subject: [PATCH 4/5] =?UTF-8?q?feat(condor):=20=E4=BF=AE=E5=A4=8D=E4=BB=A3?= =?UTF-8?q?=E7=A0=81=E6=A0=BC=E5=BC=8F=E9=97=AE=E9=A2=98?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../src/main/java/io/edurt/datacap/condor/SQLExecutor.java | 2 ++ .../io/edurt/datacap/condor/io/BinaryMetadataCodec.java | 6 ++++-- .../io/edurt/datacap/driver/RedisResultSetMetaData.java | 2 +- .../java/io/edurt/datacap/condor/io/BinaryCodecTest.java | 2 ++ 4 files changed, 9 insertions(+), 3 deletions(-) diff --git a/core/datacap-condor/src/main/java/io/edurt/datacap/condor/SQLExecutor.java b/core/datacap-condor/src/main/java/io/edurt/datacap/condor/SQLExecutor.java index aba79b61b2..2b18e8625f 100644 --- a/core/datacap-condor/src/main/java/io/edurt/datacap/condor/SQLExecutor.java +++ b/core/datacap-condor/src/main/java/io/edurt/datacap/condor/SQLExecutor.java @@ -1,5 +1,6 @@ package io.edurt.datacap.condor; +import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; import io.edurt.datacap.condor.condition.AndCondition; import io.edurt.datacap.condor.condition.Condition; import io.edurt.datacap.condor.condition.OrCondition; @@ -39,6 +40,7 @@ import java.util.Set; import java.util.stream.Collectors; +@SuppressFBWarnings(value = {"REC_CATCH_EXCEPTION"}) public class SQLExecutor { private final DatabaseManager databaseManager; diff --git 
a/core/datacap-condor/src/main/java/io/edurt/datacap/condor/io/BinaryMetadataCodec.java b/core/datacap-condor/src/main/java/io/edurt/datacap/condor/io/BinaryMetadataCodec.java index 2e71e1276a..b1f5117ad2 100644 --- a/core/datacap-condor/src/main/java/io/edurt/datacap/condor/io/BinaryMetadataCodec.java +++ b/core/datacap-condor/src/main/java/io/edurt/datacap/condor/io/BinaryMetadataCodec.java @@ -1,5 +1,6 @@ package io.edurt.datacap.condor.io; +import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; import io.edurt.datacap.condor.DataType; import io.edurt.datacap.condor.metadata.ColumnDefinition; import io.edurt.datacap.condor.metadata.TableDefinition; @@ -18,6 +19,7 @@ import java.util.List; @Slf4j +@SuppressFBWarnings(value = {"NP_NULL_ON_SOME_PATH_FROM_RETURN_VALUE"}) public class BinaryMetadataCodec { private static final byte[] MAGIC = {'C', 'M', 'T', 'A'}; @@ -32,7 +34,7 @@ public static void write(Path metaPath, TableDefinition metadata) try (OutputStream os = Files.newOutputStream(metaPath, StandardOpenOption.CREATE, StandardOpenOption.TRUNCATE_EXISTING); - DataOutputStream dos = new DataOutputStream(os)) { + DataOutputStream dos = new DataOutputStream(os)) { dos.write(MAGIC); dos.writeShort(VERSION); @@ -54,7 +56,7 @@ public static TableDefinition read(Path metaPath) throws IOException { try (InputStream is = Files.newInputStream(metaPath); - DataInputStream dis = new DataInputStream(is)) { + DataInputStream dis = new DataInputStream(is)) { byte[] magic = new byte[4]; dis.readFully(magic); if (magic[0] != MAGIC[0] || magic[1] != MAGIC[1] diff --git a/driver/datacap-driver-redis/src/main/java/io/edurt/datacap/driver/RedisResultSetMetaData.java b/driver/datacap-driver-redis/src/main/java/io/edurt/datacap/driver/RedisResultSetMetaData.java index 241cdb0567..9408895485 100644 --- a/driver/datacap-driver-redis/src/main/java/io/edurt/datacap/driver/RedisResultSetMetaData.java +++ b/driver/datacap-driver-redis/src/main/java/io/edurt/datacap/driver/RedisResultSetMetaData.java @@ -8,7 +8,7 @@ import java.util.List; import java.util.Map; -@SuppressFBWarnings(value = {"NP_NONNULL_RETURN_VIOLATION", "EI_EXPOSE_REP2"}) +@SuppressFBWarnings(value = {"NP_NONNULL_RETURN_VIOLATION", "EI_EXPOSE_REP2", "REC_CATCH_EXCEPTION"}) public class RedisResultSetMetaData implements ResultSetMetaData { diff --git a/test/datacap-test-condor/src/test/java/io/edurt/datacap/condor/io/BinaryCodecTest.java b/test/datacap-test-condor/src/test/java/io/edurt/datacap/condor/io/BinaryCodecTest.java index de66662b18..2edb563180 100644 --- a/test/datacap-test-condor/src/test/java/io/edurt/datacap/condor/io/BinaryCodecTest.java +++ b/test/datacap-test-condor/src/test/java/io/edurt/datacap/condor/io/BinaryCodecTest.java @@ -1,5 +1,6 @@ package io.edurt.datacap.condor.io; +import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; import io.edurt.datacap.condor.DataType; import io.edurt.datacap.condor.metadata.ColumnDefinition; import io.edurt.datacap.condor.metadata.RowDefinition; @@ -22,6 +23,7 @@ import static org.junit.Assert.assertTrue; @Slf4j +@SuppressFBWarnings(value = {"CNT_ROUGH_CONSTANT_VALUE"}) public class BinaryCodecTest { private Path tempDir; From 4f7755f8976b17ef38c92543f36a9e536c5b56a3 Mon Sep 17 00:00:00 2001 From: qianmoQ Date: Sat, 7 Feb 2026 11:59:44 +0800 Subject: [PATCH 5/5] =?UTF-8?q?feat(condor):=20=E4=BF=AE=E5=A4=8D=E4=BB=A3?= =?UTF-8?q?=E7=A0=81=E6=A0=BC=E5=BC=8F=E9=97=AE=E9=A2=98?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- 
.../main/java/io/edurt/datacap/condor/io/BinaryRowCodec.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/core/datacap-condor/src/main/java/io/edurt/datacap/condor/io/BinaryRowCodec.java b/core/datacap-condor/src/main/java/io/edurt/datacap/condor/io/BinaryRowCodec.java index aaf3653066..edccebcc4a 100644 --- a/core/datacap-condor/src/main/java/io/edurt/datacap/condor/io/BinaryRowCodec.java +++ b/core/datacap-condor/src/main/java/io/edurt/datacap/condor/io/BinaryRowCodec.java @@ -35,7 +35,7 @@ public static void writeAll(Path dataPath, List rows) { try (OutputStream os = Files.newOutputStream(dataPath, StandardOpenOption.CREATE, StandardOpenOption.TRUNCATE_EXISTING); - DataOutputStream dos = new DataOutputStream(os)) { + DataOutputStream dos = new DataOutputStream(os)) { dos.write(MAGIC); dos.writeShort(VERSION); dos.writeInt(rows.size()); @@ -77,7 +77,7 @@ public static List readAll(Path dataPath) } try (InputStream is = Files.newInputStream(dataPath); - DataInputStream dis = new DataInputStream(is)) { + DataInputStream dis = new DataInputStream(is)) { byte[] magic = new byte[4]; dis.readFully(magic); if (magic[0] != MAGIC[0] || magic[1] != MAGIC[1]