cnauroth commented on code in PR #7869:
URL: https://github.com/apache/hadoop/pull/7869#discussion_r2277700002
##########
hadoop-tools/hadoop-gcp/pom.xml:
##########
@@ -0,0 +1,514 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  Licensed under the Apache License, Version 2.0 (the "License");
+  you may not use this file except in compliance with the License.
+  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License. See accompanying LICENSE file.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <parent>
+    <groupId>org.apache.hadoop</groupId>
+    <artifactId>hadoop-project</artifactId>
+    <version>3.5.0-SNAPSHOT</version>
+    <relativePath>../../hadoop-project</relativePath>
+  </parent>
+  <artifactId>hadoop-gcp</artifactId>
+  <version>3.5.0-SNAPSHOT</version>
+  <name>Apache Hadoop Google Cloud Platform support</name>
+  <description>
+    This module contains code to support integration with Google Cloud Platform.
+    It also declares the dependencies needed to work with Google Cloud Storage.
+  </description>
+  <packaging>jar</packaging>
+
+  <properties>
+    <file.encoding>UTF-8</file.encoding>
+    <downloadSources>true</downloadSources>
+    <hadoop.tmp.dir>${project.build.directory}/test</hadoop.tmp.dir>
+  </properties>
+
+  <profiles>
+    <profile>
+      <id>tests-off</id>
+      <activation>
+        <file>
+          <missing>src/test/resources/auth-keys.xml</missing>
+        </file>
+      </activation>
+      <properties>
+        <skipITs>true</skipITs>
+      </properties>
+    </profile>
+    <profile>
+      <id>tests-on</id>
+      <activation>
+        <file>
+          <exists>src/test/resources/auth-keys.xml</exists>
+        </file>
+      </activation>
+      <properties>
+        <skipITs>false</skipITs>
+      </properties>
+    </profile>
+    <profile>
+      <id>parallel-tests</id>
+      <activation>
+        <property>
+          <name>parallel-tests</name>
+        </property>
+      </activation>
+      <build>
+        <plugins>
+          <plugin>
+            <groupId>org.apache.hadoop</groupId>
+            <artifactId>hadoop-maven-plugins</artifactId>
+            <executions>
+              <execution>
+                <id>parallel-tests-createdir</id>
+                <goals>
+                  <goal>parallel-tests-createdir</goal>
+                </goals>
+              </execution>
+            </executions>
+          </plugin>
+          <plugin>
+            <groupId>org.apache.maven.plugins</groupId>
+            <artifactId>maven-surefire-plugin</artifactId>
+            <configuration>
+              <forkCount>${testsThreadCount}</forkCount>
+              <reuseForks>false</reuseForks>
+              <trimStackTrace>false</trimStackTrace>
+              <argLine>${maven-surefire-plugin.argLine} -DminiClusterDedicatedDirs=true</argLine>
+              <systemPropertyVariables>
+                <testsThreadCount>${testsThreadCount}</testsThreadCount>
+                <test.build.data>${test.build.data}/${surefire.forkNumber}</test.build.data>
+                <test.build.dir>${test.build.dir}/${surefire.forkNumber}</test.build.dir>
+                <hadoop.tmp.dir>${hadoop.tmp.dir}/${surefire.forkNumber}</hadoop.tmp.dir>
+                <test.unique.fork.id>job-${job.id}-fork-000${surefire.forkNumber}</test.unique.fork.id>
+              </systemPropertyVariables>
+            </configuration>
+          </plugin>
+          <plugin>
+            <groupId>org.apache.maven.plugins</groupId>
+            <artifactId>maven-failsafe-plugin</artifactId>
+            <executions>
+              <execution>
+                <id>default-integration-test</id>
+                <goals>
+                  <goal>integration-test</goal>
+                  <goal>verify</goal>
+                </goals>
+                <configuration>
+                  <forkCount>${testsThreadCount}</forkCount>
+                  <reuseForks>false</reuseForks>
+                  <argLine>${maven-surefire-plugin.argLine} -DminiClusterDedicatedDirs=true</argLine>
+                  <forkedProcessTimeoutInSeconds>${fs.gs.scale.test.timeout}</forkedProcessTimeoutInSeconds>
+                  <trimStackTrace>false</trimStackTrace>
+                  <systemPropertyVariables>
+                    <!-- Tell tests that they are being executed in parallel -->
+                    <test.parallel.execution>true</test.parallel.execution>
+                    <test.build.data>${test.build.data}/${surefire.forkNumber}</test.build.data>
+                    <test.build.dir>${test.build.dir}/${surefire.forkNumber}</test.build.dir>
+                    <hadoop.tmp.dir>${hadoop.tmp.dir}/${surefire.forkNumber}</hadoop.tmp.dir>
+
+                    <!-- Due to a Maven quirk, setting this to just -->
+                    <!-- surefire.forkNumber won't do the parameter -->
+                    <!-- substitution. Putting a prefix in front of it like -->
+                    <!-- "fork-" makes it work. -->
+                    <test.unique.fork.id>job-${job.id}-fork-000${surefire.forkNumber}</test.unique.fork.id>
+                    <test.default.timeout>${test.integration.timeout}</test.default.timeout>
+                  </systemPropertyVariables>
+                  <includes>
+                    <include>**/ITest*.java</include>
+                  </includes>
+                </configuration>
+              </execution>
+              <execution>
+                <id>sequential-integration-tests</id>
+                <goals>
+                  <goal>integration-test</goal>
+                  <goal>verify</goal>
+                </goals>
+                <configuration>
+                  <forkedProcessTimeoutInSeconds>${fs.gs.scale.test.timeout}</forkedProcessTimeoutInSeconds>
+                  <trimStackTrace>false</trimStackTrace>
+                  <systemPropertyVariables>
+                    <!-- Tell tests that they are being executed sequentially -->
+                    <test.parallel.execution>false</test.parallel.execution>
+                    <test.unique.fork.id>job-${job.id}</test.unique.fork.id>
+                  </systemPropertyVariables>
+                  <!-- Do a sequential run for tests that cannot handle -->
+                  <!-- parallel execution. -->
+                  <includes>
+                    <include>**/ITest*.java</include>
+                  </includes>
+                </configuration>
+              </execution>
+            </executions>
+          </plugin>
+        </plugins>
+      </build>
+    </profile>
+    <profile>
+      <id>sequential-tests</id>
+      <activation>
+        <property>
+          <name>!parallel-tests</name>
+        </property>
+      </activation>
+      <build>
+        <plugins>
+          <plugin>
+            <groupId>org.apache.maven.plugins</groupId>
+            <artifactId>maven-failsafe-plugin</artifactId>
+            <executions>
+              <execution>
+                <goals>
+                  <goal>integration-test</goal>
+                  <goal>verify</goal>
+                </goals>
+                <configuration>
+                  <trimStackTrace>false</trimStackTrace>
+                  <systemPropertyVariables>
+                    <test.unique.fork.id>job-${job.id}</test.unique.fork.id>
+                  </systemPropertyVariables>
+                </configuration>
+              </execution>
+            </executions>
+          </plugin>
+        </plugins>
+      </build>
+    </profile>
+  </profiles>
+
+  <build>
+    <plugins>
+      <plugin>
+        <artifactId>maven-shade-plugin</artifactId>
+        <executions>
+          <execution>
+            <phase>package</phase>
+            <goals>
+              <goal>shade</goal>
+            </goals>
+            <configuration>
+              <transformers>
+                <transformer
+                    implementation="org.apache.maven.plugins.shade.resource.ApacheLicenseResourceTransformer"/>
+                <transformer
+                    implementation="org.apache.maven.plugins.shade.resource.ApacheNoticeResourceTransformer"/>
+                <transformer
+                    implementation="org.apache.maven.plugins.shade.resource.ServicesResourceTransformer"/>
+              </transformers>
+              <filters>
+
+                <filter>
+                  <artifact>com.google.auth:*</artifact>
+                  <includes>
+                    <include>**</include>
+                  </includes>
+                </filter>
+                <filter>
+                  <artifact>io.grpc:*</artifact>
+                  <includes>
+                    <include>**</include>
+                  </includes>
+                </filter>
+                <filter>
+                  <artifact>io.opencensus:*</artifact>
+                  <includes>
+                    <include>**</include>
+                  </includes>
+                </filter>
+                <filter>
+                  <artifact>*:*</artifact>
+                  <excludes>
+                    <exclude>*.json</exclude>
+                    <exclude>google/**</exclude>
+                    <exclude>grpc/**</exclude>
+                  </excludes>
+                </filter>
+              </filters>
+              <artifactSet>
+                <includes>
+                  <include>com.google.api</include>
+                  <include>com.google.api-client</include>
+                  <include>com.google.api.grpc</include>
+                  <include>com.google.apis</include>
+                  <include>com.google.auth</include>
+                  <include>com.google.cloud</include>
+                  <include>com.google.cloud.bigdataoss</include>
+                  <include>com.google.cloud.grpc</include>
+                  <include>com.google.cloud.http</include>
+                  <include>com.google.flogger</include>
+                  <include>com.google.code.gson</include>
+                  <include>com.google.guava</include>
+                  <include>com.google.http-client</include>
+                  <include>com.google.oauth-client</include>
+                  <include>com.google.protobuf</include>
+                  <include>com.google.re2j</include>
+                  <include>com.google.storage.v2</include>
+                  <include>com.lmax</include>
+                  <include>io.grpc</include>
+                  <include>io.opencensus</include>
+                  <include>io.opentelemetry</include>
+                  <include>io.opentelemetry.api</include>
+                  <include>io.opentelemetry.contrib</include>
+                  <include>io.opentelemetry.semconv</include>
+                  <include>io.perfmark</include>
+                  <include>org.apache.httpcomponents</include>
+                  <include>org.threeten:threetenbp</include>
+                </includes>
+              </artifactSet>
+              <minimizeJar>true</minimizeJar>
+              <relocations>
+                <relocation>
+                  <pattern>com</pattern>
+                  <shadedPattern>com.google.cloud.hadoop.repackaged.ossgcs.com</shadedPattern>
+                  <includes>
+                    <include>com.google.api.**</include>
+                    <include>com.google.api.gax.**</include>
+                    <include>com.google.auth.**</include>
+                    <include>com.google.cloud.*</include>
+                    <include>com.google.cloud.audit.**</include>
+                    <include>com.google.cloud.grpc.**</include>
+                    <include>com.google.cloud.hadoop.gcsio.**</include>
+                    <include>com.google.cloud.hadoop.util.**</include>
+                    <include>com.google.cloud.http.**</include>
+                    <include>com.google.cloud.monitoring.**</include>
+                    <include>com.google.cloud.opentelemetry.**</include>
+                    <include>com.google.cloud.spi.**</include>
+                    <include>com.google.cloud.storage.**</include>
+                    <include>com.google.common.**</include>
+                    <include>com.google.geo.**</include>
+                    <include>com.google.gson.**</include>
+                    <include>com.google.google.storage.**</include>
+                    <include>com.google.iam.**</include>
+                    <include>com.google.logging.**</include>
+                    <include>com.google.longrunning.**</include>
+                    <include>com.google.monitoring.**</include>
+                    <include>com.google.protobuf.**</include>
+                    <include>com.google.re2j.**</include>
+                    <include>com.google.rpc.**</include>
+                    <include>com.google.storage.**</include>
+                    <include>com.google.thirdparty.**</include>
+                    <include>com.google.type.**</include>
+                    <include>com.lmax.disruptor.**</include>
+                  </includes>
+                  <excludes>
+                    <exclude>com.google.cloud.hadoop.util.AccessTokenProvider</exclude>
+                    <exclude>com.google.cloud.hadoop.util.AccessTokenProvider$AccessToken</exclude>
+                    <exclude>com.google.cloud.hadoop.util.AccessTokenProvider$AccessTokenType</exclude>
+                    <exclude>com.google.cloud.hadoop.util.AccessBoundary</exclude>
+                    <exclude>com.google.cloud.hadoop.util.AccessBoundary$Action</exclude>
+                    <exclude>com.google.cloud.hadoop.util.AutoValue_AccessBoundary</exclude>
+                  </excludes>
+                </relocation>
+                <relocation>
+                  <pattern>org</pattern>
+                  <shadedPattern>com.google.cloud.hadoop.repackaged.ossgcs.org</shadedPattern>
+                  <includes>
+                    <include>org.apache.http.**</include>
+                    <include>org.threeten.**</include>
+                  </includes>
+                </relocation>
+
+                <relocation>
+                  <pattern>io.grpc.netty.shaded</pattern>
+                  <shadedPattern>
+                    com.google.cloud.hadoop.repackaged.ossgcs.io.grpc.netty.shaded
+                  </shadedPattern>
+                </relocation>
+                <relocation>
+                  <pattern>io</pattern>
+                  <shadedPattern>com.google.cloud.hadoop.repackaged.ossgcs.io</shadedPattern>
+                  <includes>
+                    <include>io.grpc.**</include>
+                    <include>io.opencensus.**</include>
+                    <include>io.perfmark.**</include>
+                  </includes>
+                </relocation>
+                <relocation>
+                  <pattern>META-INF/native/io_grpc_netty_shaded_</pattern>
+                  <shadedPattern>
+                    META-INF/native/com_google_cloud_hadoop_repackaged_gcs_io_grpc_netty_shaded_
+                  </shadedPattern>
+                </relocation>
+                <relocation>
+                  <pattern>META-INF/native/libio_grpc_netty_shaded_</pattern>
+                  <shadedPattern>
+                    META-INF/native/libcom_google_cloud_hadoop_repackaged_gcs_io_grpc_netty_shaded_
+                  </shadedPattern>
+                </relocation>
+              </relocations>
+              <shadedArtifactAttached>true</shadedArtifactAttached>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+
+      <plugin>
+        <groupId>com.github.spotbugs</groupId>
+        <artifactId>spotbugs-maven-plugin</artifactId>
+        <configuration>
+          <xmlOutput>true</xmlOutput>
+          <excludeFilterFile>${basedir}/dev-support/findbugs-exclude.xml
+          </excludeFilterFile>
+          <effort>Max</effort>
+        </configuration>
+      </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-checkstyle-plugin</artifactId>
+        <configuration>
+          <suppressionsLocation>src/config/checkstyle-suppressions.xml</suppressionsLocation>
+        </configuration>
+      </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-surefire-plugin</artifactId>
+        <configuration>
+          <forkedProcessTimeoutInSeconds>3600</forkedProcessTimeoutInSeconds>
+          <systemPropertyVariables>
+            <test.default.timeout>${test.integration.timeout}</test.default.timeout>
+          </systemPropertyVariables>
+        </configuration>
+      </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-enforcer-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>banned-illegal-imports</id>
+            <phase>process-sources</phase>
+            <goals>
+              <goal>enforce</goal>
+            </goals>
+            <configuration>
+              <rules>
+                <restrictImports>
+                  <includeTestCode>false</includeTestCode>
+                  <reason>Restrict mapreduce imports to committer code</reason>
+                  <exclusions>
+
+                  </exclusions>
+                  <bannedImports>
+                    <bannedImport>org.apache.hadoop.mapreduce.**</bannedImport>
+                    <bannedImport>org.apache.hadoop.mapred.**</bannedImport>
+                  </bannedImports>
+                </restrictImports>
+                <restrictImports>
+                  <includeTestCode>false</includeTestCode>
+                  <reason>Restrict encryption client imports to encryption client factory</reason>
+                  <exclusions>
+
+                  </exclusions>
+                  <bannedImports>
+
+                  </bannedImports>
+                </restrictImports>
+              </rules>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+    </plugins>
+  </build>
+
+  <dependencyManagement>
+    <dependencies>
+      <dependency>
+        <!--
+          We're using a specific Protobuf version to ensure compatibility with the
+          Google Cloud Storage (GCS) client. The GCS client often relies on
+          particular Long-Term Support (LTS) versions of Protobuf. When we upgrade
+          the GCS client, we'll likely need to update Protobuf too. To prevent
+          dependency conflicts, Protobuf will be shaded within the GCS connector's
+          fat JAR.
+        -->
+        <groupId>com.google.protobuf</groupId>
+        <artifactId>protobuf-java</artifactId>
+        <version>3.25.5</version>
+      </dependency>
+    </dependencies>
+  </dependencyManagement>
+
+  <dependencies>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-common</artifactId>
+      <scope>provided</scope>
+      <exclusions>
+        <exclusion>
+          <groupId>javax.servlet</groupId>
+          <artifactId>servlet-api</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>javax.enterprise</groupId>
+          <artifactId>cdi-api</artifactId>
+        </exclusion>
+        <!-- Exclude protobuf-java 2.5.0 -->
+        <exclusion>
+          <groupId>com.google.protobuf</groupId>
+          <artifactId>protobuf-java</artifactId>
+        </exclusion>
+      </exclusions>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-common</artifactId>
+      <scope>test</scope>
+      <type>test-jar</type>
+    </dependency>
+    <dependency>
+      <groupId>org.assertj</groupId>
+      <artifactId>assertj-core</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>junit</groupId>

Review Comment:
   Great point! I sent up #7872 for this.


--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: common-issues-unsubscr...@hadoop.apache.org

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org

---------------------------------------------------------------------
To unsubscribe, e-mail: common-issues-unsubscr...@hadoop.apache.org
For additional commands, e-mail: common-issues-h...@hadoop.apache.org