diff --git a/oracle-plugin/src/e2e-test/features/sink/OracleRunTime.feature b/oracle-plugin/src/e2e-test/features/sink/OracleRunTime.feature
index c2b56e8b7..67293700b 100644
--- a/oracle-plugin/src/e2e-test/features/sink/OracleRunTime.feature
+++ b/oracle-plugin/src/e2e-test/features/sink/OracleRunTime.feature
@@ -117,3 +117,53 @@ Feature: Oracle - Verify data transfer from BigQuery source to Oracle sink
     Then Verify the pipeline status is "Succeeded"
     Then Validate records transferred to target table with record counts of BigQuery table
     Then Validate the values of records transferred to target Oracle table is equal to the values from source BigQuery table
+
+  @BQ_SOURCE_TEST_SMALL_CASE @ORACLE_TEST_TABLE
+  Scenario: To verify data is getting transferred from BigQuery source to Oracle sink successfully when schema is coming in small case
+    Given Open Datafusion Project to configure pipeline
+    When Expand Plugin group in the LHS plugins list: "Source"
+    When Select plugin: "BigQuery" from the plugins list as: "Source"
+    When Expand Plugin group in the LHS plugins list: "Sink"
+    When Select plugin: "Oracle" from the plugins list as: "Sink"
+    Then Connect plugins: "BigQuery" and "Oracle" to establish connection
+    Then Navigate to the properties page of plugin: "BigQuery"
+    Then Replace input plugin property: "project" with value: "projectId"
+    Then Enter input plugin property: "datasetProject" with value: "projectId"
+    Then Enter input plugin property: "referenceName" with value: "BQReferenceName"
+    Then Enter input plugin property: "dataset" with value: "dataset"
+    Then Enter input plugin property: "table" with value: "bqSourceTable"
+    Then Click on the Get Schema button
+    Then Verify the Output Schema matches the Expected Schema: "bqOutputDatatypesSchemaSmallCase"
+    Then Validate "BigQuery" plugin properties
+    Then Close the Plugin Properties page
+    Then Navigate to the properties page of plugin: "Oracle"
+    Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName"
+    Then Replace input plugin property: "host" with value: "host" for Credentials and Authorization related fields
+    Then Replace input plugin property: "port" with value: "port" for Credentials and Authorization related fields
+    Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
+    Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
+    Then Select radio button plugin property: "connectionType" with value: "service"
+    Then Select radio button plugin property: "role" with value: "normal"
+    Then Enter input plugin property: "referenceName" with value: "sourceRef"
+    Then Replace input plugin property: "database" with value: "databaseName"
+    Then Replace input plugin property: "tableName" with value: "targetTable"
+    Then Replace input plugin property: "dbSchemaName" with value: "schema"
+    Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
+    Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
+    Then Enter input plugin property: "referenceName" with value: "targetRef"
+    Then Select radio button plugin property: "connectionType" with value: "service"
+    Then Select radio button plugin property: "role" with value: "normal"
+    Then Validate "Oracle" plugin properties
+    Then Close the Plugin Properties page
+    Then Save the pipeline
+    Then Preview and run the pipeline
+    Then Verify the preview of pipeline is "success"
+    Then Click on preview data for Oracle sink
+    Then Close the preview data
+    Then Deploy the pipeline
+    Then Run the Pipeline in Runtime
+    Then Wait till pipeline is in running state
+    Then Open and capture logs
+    Then Verify the pipeline status is "Succeeded"
+    Then Validate records transferred to target table with record counts of BigQuery table
+    Then Validate the values of records transferred to target Oracle table is equal to the values from source BigQuery table with case
diff --git a/oracle-plugin/src/e2e-test/java/io.cdap.plugin/BQValidation.java b/oracle-plugin/src/e2e-test/java/io.cdap.plugin/BQValidation.java
index 6edfcc8fd..b7d93c80a 100644
--- a/oracle-plugin/src/e2e-test/java/io.cdap.plugin/BQValidation.java
+++ b/oracle-plugin/src/e2e-test/java/io.cdap.plugin/BQValidation.java
@@ -68,11 +68,12 @@ public static boolean validateDBToBQRecordValues(String schema, String sourceTab
                                                        ResultSet.HOLD_CURSORS_OVER_COMMIT);
       ResultSet rsSource = statement1.executeQuery(getSourceQuery);
-      return compareResultSetAndJsonData(rsSource, jsonResponse);
+      return compareResultSetAndJsonData(rsSource, jsonResponse, false);
     }
   }
 
-  public static boolean validateBQToDBRecordValues(String schema, String sourceTable, String targetTable)
+  public static boolean validateBQToDBRecordValues(String schema, String sourceTable, String targetTable,
+                                                   boolean isSchemaSmallCase)
     throws SQLException, ClassNotFoundException, ParseException, IOException, InterruptedException {
     List<JsonObject> jsonResponse = new ArrayList<>();
     List<Object> bigQueryRows = new ArrayList<>();
@@ -88,7 +89,7 @@ public static boolean validateBQToDBRecordValues(String schema, String sourceTab
                                                        ResultSet.HOLD_CURSORS_OVER_COMMIT);
       ResultSet rsTarget = statement1.executeQuery(getTargetQuery);
-      return compareResultSetAndJsonData(rsTarget, jsonResponse);
+      return compareResultSetAndJsonData(rsTarget, jsonResponse, isSchemaSmallCase);
     }
   }
@@ -119,7 +120,8 @@ private static void getBigQueryTableData(String table, List<Object> bigQueryRows
    * @throws ParseException If an error occurs while parsing the data.
    */
-  public static boolean compareResultSetAndJsonData(ResultSet rsSource, List<JsonObject> bigQueryData)
+  public static boolean compareResultSetAndJsonData(ResultSet rsSource, List<JsonObject> bigQueryData,
+                                                    boolean isSchemaSmallCase)
     throws SQLException, ParseException {
     ResultSetMetaData mdSource = rsSource.getMetaData();
     boolean result = false;
@@ -146,7 +148,8 @@ public static boolean compareResultSetAndJsonData(ResultSet rsSource, List<JsonObject> bigQueryData
diff --git a/oracle-plugin/src/main/java/io/cdap/plugin/oracle/OracleSinkDBRecord.java b/oracle-plugin/src/main/java/io/cdap/plugin/oracle/OracleSinkDBRecord.java
--- a/oracle-plugin/src/main/java/io/cdap/plugin/oracle/OracleSinkDBRecord.java
+++ b/oracle-plugin/src/main/java/io/cdap/plugin/oracle/OracleSinkDBRecord.java
@@ ... @@ public OracleSinkDBRecord(StructuredRecord record, List<ColumnType> columnTypes)
   protected SchemaReader getSchemaReader() {
     return new OracleSinkSchemaReader();
   }
+
+  @Override
+  protected void insertOperation(PreparedStatement stmt) throws SQLException {
+    for (int fieldIndex = 0; fieldIndex < columnTypes.size(); fieldIndex++) {
+      ColumnType columnType = columnTypes.get(fieldIndex);
+      // Get the field from the schema using the column name, ignoring case.
+      Schema.Field field = record.getSchema().getField(columnType.getName(), true);
+      writeToDB(stmt, field, fieldIndex);
+    }
+  }
 }
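
Note on the OracleSinkDBRecord change: the new insertOperation override maps each Oracle column back to a record field via Schema.getField(name, true), CDAP's case-insensitive lookup, which is what lets a BigQuery output schema that arrives in lower case line up with Oracle's typically upper-case column names. A minimal sketch of that lookup behavior, using an illustrative field "id" that is not taken from this diff:

    import io.cdap.cdap.api.data.schema.Schema;

    public class CaseInsensitiveLookupSketch {
      public static void main(String[] args) {
        // A BigQuery-style schema whose field name arrived in lower case.
        Schema schema = Schema.recordOf("record",
          Schema.Field.of("id", Schema.of(Schema.Type.INT)));

        // An exact lookup with the upper-case Oracle column name misses the field...
        Schema.Field exact = schema.getField("ID");         // null
        // ...while the ignoreCase overload used by insertOperation resolves it.
        Schema.Field relaxed = schema.getField("ID", true); // field "id"
        System.out.println(exact + " vs " + relaxed.getName());
      }
    }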
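
Correspondingly, a step definition exercising the widened validation entry point might look like the sketch below; the class name and property keys are hypothetical placeholders in the style of this repo's e2e utilities, not lines from the PR:

    import io.cdap.e2e.utils.PluginPropertyUtils;
    import io.cdap.plugin.BQValidation;
    import org.junit.Assert;

    public class SinkValidationSketch {
      // Requires live BigQuery and Oracle connections configured through the
      // suite's plugin properties, as in the existing step definitions.
      public static void validateWithSmallCaseSchema() throws Exception {
        boolean recordsMatched = BQValidation.validateBQToDBRecordValues(
          PluginPropertyUtils.pluginProp("schema"),
          PluginPropertyUtils.pluginProp("bqSourceTable"),
          PluginPropertyUtils.pluginProp("targetTable"),
          true); // isSchemaSmallCase: compare column names ignoring case
        Assert.assertTrue("Values of records transferred should match", recordsMatched);
      }
    }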