<?xml version="1.0" encoding="UTF-8"?>
<!--
  ~ Licensed to the Apache Software Foundation (ASF) under one or more
  ~ contributor license agreements. See the NOTICE file distributed with
  ~ this work for additional information regarding copyright ownership.
  ~ The ASF licenses this file to You under the Apache License, Version 2.0
  ~ (the "License"); you may not use this file except in compliance with
  ~ the License. You may obtain a copy of the License at
  ~
  ~    http://www.apache.org/licenses/LICENSE-2.0
  ~
  ~ Unless required by applicable law or agreed to in writing, software
  ~ distributed under the License is distributed on an "AS IS" BASIS,
  ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  ~ See the License for the specific language governing permissions and
  ~ limitations under the License.
  -->

<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
  <modelVersion>4.0.0</modelVersion>
  <parent>
    <groupId>org.apache.spark</groupId>
    <artifactId>spark-parent</artifactId>
    <version>1.2.0-SNAPSHOT</version>
    <relativePath>../pom.xml</relativePath>
  </parent>

  <groupId>org.apache.spark</groupId>
  <artifactId>spark-assembly_2.10</artifactId>
  <name>Spark Project Assembly</name>
  <url>http://spark.apache.org/</url>
  <packaging>pom</packaging>

  <properties>
    <sbt.project.name>assembly</sbt.project.name>
    <spark.jar.dir>scala-${scala.binary.version}</spark.jar.dir>
    <spark.jar.basename>spark-assembly-${project.version}-hadoop${hadoop.version}.jar</spark.jar.basename>
    <spark.jar>${project.build.directory}/${spark.jar.dir}/${spark.jar.basename}</spark.jar>
    <deb.pkg.name>spark</deb.pkg.name>
    <deb.install.path>/usr/share/spark</deb.install.path>
    <deb.user>root</deb.user>
    <deb.bin.filemode>744</deb.bin.filemode>
  </properties>
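  <!-- For reference: spark.jar expands to
       ${project.build.directory}/scala-${scala.binary.version}/spark-assembly-${project.version}-hadoop${hadoop.version}.jar,
       e.g. target/scala-2.10/spark-assembly-1.2.0-SNAPSHOT-hadoop2.4.0.jar for a build run with
       -Dhadoop.version=2.4.0 (that version is illustrative; the default comes from the parent POM). -->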
  <dependencies>
    <!-- Promote Guava to compile scope in this module so it's included while shading. -->
    <dependency>
      <groupId>com.google.guava</groupId>
      <artifactId>guava</artifactId>
      <scope>compile</scope>
    </dependency>
    <dependency>
      <groupId>org.apache.spark</groupId>
      <artifactId>spark-core_${scala.binary.version}</artifactId>
      <version>${project.version}</version>
    </dependency>
    <dependency>
      <groupId>org.apache.spark</groupId>
      <artifactId>spark-bagel_${scala.binary.version}</artifactId>
      <version>${project.version}</version>
    </dependency>
    <dependency>
      <groupId>org.apache.spark</groupId>
      <artifactId>spark-mllib_${scala.binary.version}</artifactId>
      <version>${project.version}</version>
    </dependency>
    <dependency>
      <groupId>org.apache.spark</groupId>
      <artifactId>spark-repl_${scala.binary.version}</artifactId>
      <version>${project.version}</version>
    </dependency>
    <dependency>
      <groupId>org.apache.spark</groupId>
      <artifactId>spark-streaming_${scala.binary.version}</artifactId>
      <version>${project.version}</version>
    </dependency>
    <dependency>
      <groupId>org.apache.spark</groupId>
      <artifactId>spark-graphx_${scala.binary.version}</artifactId>
      <version>${project.version}</version>
    </dependency>
    <dependency>
      <groupId>org.apache.spark</groupId>
      <artifactId>spark-sql_${scala.binary.version}</artifactId>
      <version>${project.version}</version>
    </dependency>
  </dependencies>

  <build>
    <plugins>
      <plugin>
        <groupId>org.apache.maven.plugins</groupId>
        <artifactId>maven-deploy-plugin</artifactId>
        <configuration>
          <skip>true</skip>
        </configuration>
      </plugin>
      <plugin>
        <groupId>org.apache.maven.plugins</groupId>
        <artifactId>maven-install-plugin</artifactId>
        <configuration>
          <skip>true</skip>
        </configuration>
      </plugin>
      <!-- Use the shade plugin to create a big JAR with all the dependencies -->
      <plugin>
        <groupId>org.apache.maven.plugins</groupId>
        <artifactId>maven-shade-plugin</artifactId>
        <configuration>
          <shadedArtifactAttached>false</shadedArtifactAttached>
          <outputFile>${spark.jar}</outputFile>
          <artifactSet>
            <includes>
              <include>*:*</include>
            </includes>
          </artifactSet>
          <filters>
            <filter>
              <artifact>*:*</artifact>
              <excludes>
                <exclude>org/datanucleus/**</exclude>
                <exclude>META-INF/*.SF</exclude>
                <exclude>META-INF/*.DSA</exclude>
                <exclude>META-INF/*.RSA</exclude>
              </excludes>
            </filter>
          </filters>
        </configuration>
        <executions>
          <execution>
            <phase>package</phase>
            <goals>
              <goal>shade</goal>
            </goals>
            <configuration>
              <relocations>
                <relocation>
                  <pattern>com.google</pattern>
                  <shadedPattern>org.spark-project.guava</shadedPattern>
                  <includes>
                    <include>com.google.common.**</include>
                  </includes>
                  <excludes>
                    <exclude>com.google.common.base.Optional**</exclude>
                  </excludes>
                </relocation>
              </relocations>
              <transformers>
                <transformer implementation="org.apache.maven.plugins.shade.resource.ServicesResourceTransformer"/>
                <transformer implementation="org.apache.maven.plugins.shade.resource.AppendingTransformer">
                  <resource>META-INF/services/org.apache.hadoop.fs.FileSystem</resource>
                </transformer>
                <transformer implementation="org.apache.maven.plugins.shade.resource.AppendingTransformer">
                  <resource>reference.conf</resource>
                </transformer>
                <transformer implementation="org.apache.maven.plugins.shade.resource.DontIncludeResourceTransformer">
                  <resource>log4j.properties</resource>
                </transformer>
                <transformer implementation="org.apache.maven.plugins.shade.resource.ApacheLicenseResourceTransformer"/>
                <transformer implementation="org.apache.maven.plugins.shade.resource.ApacheNoticeResourceTransformer"/>
              </transformers>
            </configuration>
          </execution>
        </executions>
      </plugin>
    </plugins>
  </build>
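  <!-- Notes on the shade configuration above: Guava classes are relocated to
       org.spark-project.guava so applications can depend on their own Guava version without
       conflicting with Spark's; com.google.common.base.Optional is left unrelocated because
       it is exposed in Spark's public Java API. The profiles below pull optional modules into
       the assembly; enable them explicitly on the command line, for example:

         mvn -Pyarn -Phive -DskipTests package
  -->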
  <profiles>
    <profile>
      <id>yarn-alpha</id>
      <dependencies>
        <dependency>
          <groupId>org.apache.spark</groupId>
          <artifactId>spark-yarn-alpha_${scala.binary.version}</artifactId>
          <version>${project.version}</version>
        </dependency>
      </dependencies>
    </profile>
    <profile>
      <id>yarn</id>
      <dependencies>
        <dependency>
          <groupId>org.apache.spark</groupId>
          <artifactId>spark-yarn_${scala.binary.version}</artifactId>
          <version>${project.version}</version>
        </dependency>
      </dependencies>
    </profile>
    <profile>
      <id>hive</id>
      <dependencies>
        <dependency>
          <groupId>org.apache.spark</groupId>
          <artifactId>spark-hive_${scala.binary.version}</artifactId>
          <version>${project.version}</version>
        </dependency>
        <dependency>
          <groupId>org.apache.spark</groupId>
          <artifactId>spark-hive-thriftserver_${scala.binary.version}</artifactId>
          <version>${project.version}</version>
        </dependency>
      </dependencies>
    </profile>
    <profile>
      <id>spark-ganglia-lgpl</id>
      <dependencies>
        <dependency>
          <groupId>org.apache.spark</groupId>
          <artifactId>spark-ganglia-lgpl_${scala.binary.version}</artifactId>
          <version>${project.version}</version>
        </dependency>
      </dependencies>
    </profile>
    <profile>
      <id>bigtop-dist</id>
      <!-- This profile uses the assembly plugin to create a special "dist" package for BigTop
           that contains Spark but not the Hadoop JARs it depends on. -->
      <build>
        <plugins>
          <plugin>
            <groupId>org.apache.maven.plugins</groupId>
            <artifactId>maven-assembly-plugin</artifactId>
            <version>2.4</version>
            <executions>
              <execution>
                <id>dist</id>
                <phase>package</phase>
                <goals>
                  <goal>single</goal>
                </goals>
                <configuration>
                  <descriptors>
                    <descriptor>src/main/assembly/assembly.xml</descriptor>
                  </descriptors>
                </configuration>
              </execution>
            </executions>
          </plugin>
        </plugins>
      </build>
    </profile>
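    <!-- The deb profile below packages the assembly jar plus the conf, bin, sbin, and python
         directories as a Debian package. A minimal sketch of an invocation, assuming a git
         checkout (buildnumber-maven-plugin derives ${buildNumber} from the SCM revision,
         shortened to 8 characters here):

           mvn -Pdeb -DskipTests package

         which should produce target/spark_1.2.0-SNAPSHOT-<revision>_all.deb. -->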
    <profile>
      <id>deb</id>
      <build>
        <plugins>
          <plugin>
            <groupId>org.codehaus.mojo</groupId>
            <artifactId>buildnumber-maven-plugin</artifactId>
            <version>1.2</version>
            <executions>
              <execution>
                <phase>validate</phase>
                <goals>
                  <goal>create</goal>
                </goals>
                <configuration>
                  <shortRevisionLength>8</shortRevisionLength>
                </configuration>
              </execution>
            </executions>
          </plugin>
          <plugin>
            <groupId>org.vafer</groupId>
            <artifactId>jdeb</artifactId>
            <version>0.11</version>
            <executions>
              <execution>
                <phase>package</phase>
                <goals>
                  <goal>jdeb</goal>
                </goals>
                <configuration>
                  <deb>${project.build.directory}/${deb.pkg.name}_${project.version}-${buildNumber}_all.deb</deb>
                  <attach>false</attach>
                  <compression>gzip</compression>
                  <dataSet>
                    <data>
                      <src>${spark.jar}</src>
                      <type>file</type>
                      <mapper>
                        <type>perm</type>
                        <user>${deb.user}</user>
                        <group>${deb.user}</group>
                        <prefix>${deb.install.path}/jars</prefix>
                      </mapper>
                    </data>
                    <data>
                      <src>${basedir}/src/deb/RELEASE</src>
                      <type>file</type>
                      <mapper>
                        <type>perm</type>
                        <user>${deb.user}</user>
                        <group>${deb.user}</group>
                        <prefix>${deb.install.path}</prefix>
                      </mapper>
                    </data>
                    <data>
                      <src>${basedir}/../conf</src>
                      <type>directory</type>
                      <mapper>
                        <type>perm</type>
                        <user>${deb.user}</user>
                        <group>${deb.user}</group>
                        <prefix>${deb.install.path}/conf</prefix>
                        <filemode>744</filemode>
                      </mapper>
                    </data>
                    <data>
                      <src>${basedir}/../bin</src>
                      <type>directory</type>
                      <mapper>
                        <type>perm</type>
                        <user>${deb.user}</user>
                        <group>${deb.user}</group>
                        <prefix>${deb.install.path}/bin</prefix>
                        <filemode>${deb.bin.filemode}</filemode>
                      </mapper>
                    </data>
                    <data>
                      <src>${basedir}/../sbin</src>
                      <type>directory</type>
                      <mapper>
                        <type>perm</type>
                        <user>${deb.user}</user>
                        <group>${deb.user}</group>
                        <prefix>${deb.install.path}/sbin</prefix>
                        <filemode>744</filemode>
                      </mapper>
                    </data>
                    <data>
                      <src>${basedir}/../python</src>
                      <type>directory</type>
                      <mapper>
                        <type>perm</type>
                        <user>${deb.user}</user>
                        <group>${deb.user}</group>
                        <prefix>${deb.install.path}/python</prefix>
                        <filemode>744</filemode>
                      </mapper>
                    </data>
                  </dataSet>
                </configuration>
              </execution>
            </executions>
          </plugin>
        </plugins>
      </build>
    </profile>
  </profiles>
</project>