Skip to content

Commit

Permalink
CDAP-21027: Upgrade from Hadoop 2 to Hadoop 3.3.6
Browse files Browse the repository at this point in the history
  • Loading branch information
sahusanket committed Jul 25, 2024
1 parent 8b639ef commit 6c6d489
Show file tree
Hide file tree
Showing 3 changed files with 47 additions and 11 deletions.
50 changes: 42 additions & 8 deletions pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -69,14 +69,14 @@
<properties>
<jee.version>7</jee.version>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<avro.version>1.8.2</avro.version>
<bigquery.connector.hadoop2.version>hadoop2-1.2.0</bigquery.connector.hadoop2.version>
<avro.version>1.11.0</avro.version>
<bigquery.connector.hadoop3.version>hadoop3-1.2.0</bigquery.connector.hadoop3.version>
<commons.codec.version>1.4</commons.codec.version>
<cdap.version>6.9.1</cdap.version>
<cdap.plugin.version>2.11.1</cdap.plugin.version>
<cdap.version>6.11.0-SNAPSHOT</cdap.version>
<cdap.plugin.version>2.13.0-SNAPSHOT</cdap.plugin.version>
<dropwizard.metrics-core.version>3.2.6</dropwizard.metrics-core.version>
<flogger.system.backend.version>0.7.1</flogger.system.backend.version>
<gcs.connector.version>hadoop2-2.2.9</gcs.connector.version>
<gcs.connector.version>hadoop3-2.2.21</gcs.connector.version>
<google.cloud.bigtable.version>1.17.1</google.cloud.bigtable.version>
<google.cloud.bigquery.version>1.137.1</google.cloud.bigquery.version>
<google.cloud.kms.version>2.0.2</google.cloud.kms.version>
Expand All @@ -88,7 +88,7 @@
<google.protobuf.java.version>3.19.4</google.protobuf.java.version>
<google.tink.version>1.3.0-rc3</google.tink.version>
<guava.version>27.0.1-jre</guava.version>
<hadoop.version>2.9.2</hadoop.version>
<hadoop.version>3.3.6</hadoop.version>
<hbase-shaded-client.version>1.4.13</hbase-shaded-client.version>
<hbase-shaded-server.version>1.4.13</hbase-shaded-server.version>
<httpclient.version>4.5.13</httpclient.version>
Expand Down Expand Up @@ -259,6 +259,16 @@
<groupId>org.apache.avro</groupId>
<artifactId>avro</artifactId>
<version>${avro.version}</version>
<exclusions>
<exclusion>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-core</artifactId>
</exclusion>
<exclusion>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>io.cdap.cdap</groupId>
Expand Down Expand Up @@ -323,7 +333,7 @@
<dependency>
<groupId>com.google.cloud.bigdataoss</groupId>
<artifactId>bigquery-connector</artifactId>
<version>${bigquery.connector.hadoop2.version}</version>
<version>${bigquery.connector.hadoop3.version}</version>
<exclusions>
<!-- These dependencies are excluded because they are missing from the central Maven repository -->
<exclusion>
Expand Down Expand Up @@ -638,8 +648,18 @@
<dependency>
<groupId>org.apache.avro</groupId>
<artifactId>avro-mapred</artifactId>
<classifier>hadoop2</classifier>
<!-- NOTE(review): no classifier used — presumably avro-mapred 1.11.x has no hadoop3 classifier and its default artifact is Hadoop 3 compatible; confirm before release. <classifier>hadoop3</classifier> -->
<version>${avro.version}</version>
<exclusions>
<exclusion>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-core</artifactId>
</exclusion>
<exclusion>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
Expand Down Expand Up @@ -681,6 +701,14 @@
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
</exclusion>
<exclusion>
<groupId>org.codehaus.jackson</groupId>
<artifactId>jackson-mapper-asl</artifactId>
</exclusion>
<exclusion>
<groupId>org.codehaus.jackson</groupId>
<artifactId>jackson-core-asl</artifactId>
</exclusion>
</exclusions>
</dependency>
<!-- Start: Testing dependencies -->
Expand Down Expand Up @@ -783,6 +811,12 @@
<groupId>org.apache.bahir</groupId>
<artifactId>spark-streaming-pubsub_2.12</artifactId>
<version>2.4.0</version>
<exclusions>
<exclusion>
<groupId>org.codehaus.jackson</groupId>
<artifactId>jackson-core-asl</artifactId>
</exclusion>
</exclusions>
</dependency>
<!-- End: dependency for Google PubSub Streaming Source -->

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,6 @@
import io.cdap.cdap.etl.api.relational.Expression;
import io.cdap.plugin.gcp.bigquery.relational.SQLExpression;
import io.cdap.plugin.gcp.bigquery.sqlengine.util.BigQuerySQLEngineUtils;
import org.mortbay.log.Log;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -24,13 +24,16 @@
import org.junit.Assert;
import org.junit.Test;
import org.mockito.ArgumentMatchers;
import org.mortbay.log.Log;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import static org.powermock.api.mockito.PowerMockito.mock;
import static org.powermock.api.mockito.PowerMockito.when;

public class BigQueryExecuteConfigTest {

private static final Logger LOG = LoggerFactory.getLogger(BigQueryExecuteConfigTest.class);

@Test
public void testBigQueryExecuteValidSQL() throws Exception {
BigQueryExecute.Config config = getConfig("select * from dataset.table where id=1");
Expand Down Expand Up @@ -63,7 +66,7 @@ public void testBigQueryExecuteSQLWithNonExistentResource() throws Exception {
when(bigQuery.create(ArgumentMatchers.any(JobInfo.class))).thenThrow(new BigQueryException(404, ""));

config.validateSQLSyntax(failureCollector, bigQuery);
Log.warn("size : {}", failureCollector.getValidationFailures().size());
LOG.warn("size : {}", failureCollector.getValidationFailures().size());
Assert.assertEquals(1, failureCollector.getValidationFailures().size());
Assert.assertEquals(String.format("%s.", errorMessage),
failureCollector.getValidationFailures().get(0).getMessage());
Expand Down

0 comments on commit 6c6d489

Please sign in to comment.