开发spark使用的pom文件
来源:互联网 发布:wpf 实时显示数据 编辑:程序博客网 时间:2024/06/08 13:38
有朋友反映开发 Spark 时缺少可用的 pom 文件,下面提供一份完整的 pom.xml 模板。
<?xml version="1.0" encoding="UTF-8"?>
<!--
  POM for a Spark application (Scala 2.11 / Spark 2.1.0 / Hadoop 2.6.0).
  Fixes applied vs. the pasted original:
    - removed the stray "-" tree-view collapse markers that made the file non-well-formed XML
    - removed duplicate hadoop-client and spark-streaming-flume dependencies
    - aligned spark-streaming-kafka with the Scala 2.11 binary version used everywhere else
    - compiler source/target now follow ${java.version} (was hard-coded 1.7, contradicting 1.8)
    - Spark/Hadoop artifact versions centralized in properties
    - assembly plugin: <descriptorRef> element name corrected; invalid <classifier> parameter removed
-->
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
  <modelVersion>4.0.0</modelVersion>
  <groupId>SparkApps</groupId>
  <artifactId>SparkApps</artifactId>
  <version>1.0-SNAPSHOT</version>

  <properties>
    <scala.version>2.11.8</scala.version>
    <!-- Scala binary version used as the artifactId suffix of every Scala dependency. -->
    <scala.binary.version>2.11</scala.binary.version>
    <spark.version>2.1.0</spark.version>
    <hadoop.version>2.6.0</hadoop.version>
    <jedis.version>2.8.2</jedis.version>
    <fastjson.version>1.2.14</fastjson.version>
    <jetty.version>9.2.5.v20141112</jetty.version>
    <container.version>2.17</container.version>
    <java.version>1.8</java.version>
  </properties>

  <!-- NOTE(review): scala-tools.org has been offline for years; these entries are kept
       from the original but Maven Central already hosts all artifacts below — confirm
       whether they can be dropped. -->
  <repositories>
    <repository>
      <id>scala-tools.org</id>
      <name>Scala-Tools Maven2 Repository</name>
      <url>http://scala-tools.org/repo-releases</url>
    </repository>
  </repositories>
  <pluginRepositories>
    <pluginRepository>
      <id>scala-tools.org</id>
      <name>Scala-Tools Maven2 Repository</name>
      <url>http://scala-tools.org/repo-releases</url>
    </pluginRepository>
  </pluginRepositories>

  <dependencies>
    <!-- put javax.ws.rs as the first dependency, it is important!!!
         (it must win classpath ordering over the version shaded into other jars) -->
    <dependency>
      <groupId>javax.ws.rs</groupId>
      <artifactId>javax.ws.rs-api</artifactId>
      <version>2.0</version>
    </dependency>

    <!-- Scala toolchain -->
    <dependency>
      <groupId>org.scala-lang</groupId>
      <artifactId>scala-library</artifactId>
      <version>${scala.version}</version>
    </dependency>
    <dependency>
      <groupId>org.scala-lang</groupId>
      <artifactId>scala-compiler</artifactId>
      <version>${scala.version}</version>
    </dependency>
    <dependency>
      <groupId>org.scala-lang</groupId>
      <artifactId>scala-reflect</artifactId>
      <version>${scala.version}</version>
    </dependency>
    <dependency>
      <groupId>org.scala-lang</groupId>
      <artifactId>scalap</artifactId>
      <version>${scala.version}</version>
    </dependency>

    <!-- test-only dependencies -->
    <dependency>
      <groupId>junit</groupId>
      <artifactId>junit</artifactId>
      <version>4.4</version>
      <scope>test</scope>
    </dependency>
    <dependency>
      <groupId>org.specs</groupId>
      <artifactId>specs</artifactId>
      <version>1.2.5</version>
      <scope>test</scope>
    </dependency>

    <!-- Spark modules: all share ${spark.version} and the 2.11 binary suffix -->
    <dependency>
      <groupId>org.apache.spark</groupId>
      <artifactId>spark-core_${scala.binary.version}</artifactId>
      <version>${spark.version}</version>
    </dependency>
    <dependency>
      <groupId>org.apache.spark</groupId>
      <artifactId>spark-launcher_${scala.binary.version}</artifactId>
      <version>${spark.version}</version>
    </dependency>
    <dependency>
      <groupId>org.apache.spark</groupId>
      <artifactId>spark-network-shuffle_${scala.binary.version}</artifactId>
      <version>${spark.version}</version>
    </dependency>
    <dependency>
      <groupId>org.apache.spark</groupId>
      <artifactId>spark-sql_${scala.binary.version}</artifactId>
      <version>${spark.version}</version>
    </dependency>
    <dependency>
      <groupId>org.apache.spark</groupId>
      <artifactId>spark-hive_${scala.binary.version}</artifactId>
      <version>${spark.version}</version>
    </dependency>
    <dependency>
      <groupId>org.apache.spark</groupId>
      <artifactId>spark-catalyst_${scala.binary.version}</artifactId>
      <version>${spark.version}</version>
    </dependency>
    <dependency>
      <groupId>org.apache.spark</groupId>
      <artifactId>spark-streaming-flume-assembly_${scala.binary.version}</artifactId>
      <version>${spark.version}</version>
    </dependency>
    <!-- declared once (was duplicated in the original) -->
    <dependency>
      <groupId>org.apache.spark</groupId>
      <artifactId>spark-streaming-flume_${scala.binary.version}</artifactId>
      <version>${spark.version}</version>
    </dependency>
    <dependency>
      <groupId>org.apache.spark</groupId>
      <artifactId>spark-streaming_${scala.binary.version}</artifactId>
      <version>${spark.version}</version>
    </dependency>
    <dependency>
      <groupId>org.apache.spark</groupId>
      <artifactId>spark-graphx_${scala.binary.version}</artifactId>
      <version>${spark.version}</version>
    </dependency>

    <!-- numerics for MLlib -->
    <dependency>
      <groupId>org.scalanlp</groupId>
      <artifactId>breeze_${scala.binary.version}</artifactId>
      <version>0.11.2</version>
      <scope>compile</scope>
      <exclusions>
        <!-- junit/commons-math3 are provided explicitly below with the versions we want -->
        <exclusion>
          <artifactId>junit</artifactId>
          <groupId>junit</groupId>
        </exclusion>
        <exclusion>
          <artifactId>commons-math3</artifactId>
          <groupId>org.apache.commons</groupId>
        </exclusion>
      </exclusions>
    </dependency>
    <dependency>
      <groupId>org.apache.commons</groupId>
      <artifactId>commons-math3</artifactId>
      <version>3.4.1</version>
      <scope>compile</scope>
    </dependency>
    <dependency>
      <groupId>org.apache.spark</groupId>
      <artifactId>spark-mllib_${scala.binary.version}</artifactId>
      <version>${spark.version}</version>
    </dependency>
    <dependency>
      <groupId>org.apache.spark</groupId>
      <artifactId>spark-mllib-local_${scala.binary.version}</artifactId>
      <version>${spark.version}</version>
      <scope>compile</scope>
    </dependency>
    <dependency>
      <groupId>org.apache.spark</groupId>
      <artifactId>spark-mllib-local_${scala.binary.version}</artifactId>
      <version>${spark.version}</version>
      <type>test-jar</type>
      <scope>test</scope>
    </dependency>
    <dependency>
      <groupId>org.apache.spark</groupId>
      <artifactId>spark-repl_${scala.binary.version}</artifactId>
      <version>${spark.version}</version>
    </dependency>
    <!-- was _2.10 in the original, which mixes Scala binary versions and fails at
         runtime against the 2.11 Spark artifacts above -->
    <dependency>
      <groupId>org.apache.spark</groupId>
      <artifactId>spark-streaming-kafka-0-8_${scala.binary.version}</artifactId>
      <version>${spark.version}</version>
    </dependency>

    <!-- Hadoop (each artifact declared once; hadoop-client was duplicated in the original) -->
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-client</artifactId>
      <version>${hadoop.version}</version>
    </dependency>
    <!-- https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-common -->
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-common</artifactId>
      <version>${hadoop.version}</version>
    </dependency>
    <!-- https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-hdfs -->
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-hdfs</artifactId>
      <version>${hadoop.version}</version>
    </dependency>

    <!-- external stores / clients -->
    <dependency>
      <groupId>mysql</groupId>
      <artifactId>mysql-connector-java</artifactId>
      <version>5.1.6</version>
    </dependency>
    <dependency>
      <groupId>org.apache.hive</groupId>
      <artifactId>hive-jdbc</artifactId>
      <version>1.2.1</version>
    </dependency>
    <dependency>
      <groupId>org.apache.httpcomponents</groupId>
      <artifactId>httpclient</artifactId>
      <version>4.4.1</version>
    </dependency>
    <dependency>
      <groupId>org.apache.httpcomponents</groupId>
      <artifactId>httpcore</artifactId>
      <version>4.4.1</version>
    </dependency>
    <dependency>
      <groupId>redis.clients</groupId>
      <artifactId>jedis</artifactId>
      <version>${jedis.version}</version>
    </dependency>

    <!-- JSON handling -->
    <dependency>
      <groupId>org.json</groupId>
      <artifactId>json</artifactId>
      <version>20090211</version>
    </dependency>
    <dependency>
      <groupId>com.fasterxml.jackson.core</groupId>
      <artifactId>jackson-core</artifactId>
      <version>2.4.3</version>
    </dependency>
    <dependency>
      <groupId>com.fasterxml.jackson.core</groupId>
      <artifactId>jackson-databind</artifactId>
      <version>2.4.3</version>
    </dependency>
    <dependency>
      <groupId>com.fasterxml.jackson.core</groupId>
      <artifactId>jackson-annotations</artifactId>
      <version>2.4.3</version>
    </dependency>
    <!-- now actually uses the declared ${fastjson.version} property
         (the original pinned 1.1.41 and left the property unused) -->
    <dependency>
      <groupId>com.alibaba</groupId>
      <artifactId>fastjson</artifactId>
      <version>${fastjson.version}</version>
    </dependency>
    <dependency>
      <groupId>fastutil</groupId>
      <artifactId>fastutil</artifactId>
      <version>5.0.9</version>
    </dependency>

    <!-- embedded HTTP stack -->
    <dependency>
      <groupId>org.eclipse.jetty</groupId>
      <artifactId>jetty-server</artifactId>
      <version>${jetty.version}</version>
    </dependency>
    <dependency>
      <groupId>org.eclipse.jetty</groupId>
      <artifactId>jetty-servlet</artifactId>
      <version>${jetty.version}</version>
    </dependency>
    <dependency>
      <groupId>org.eclipse.jetty</groupId>
      <artifactId>jetty-util</artifactId>
      <version>${jetty.version}</version>
    </dependency>
    <dependency>
      <groupId>org.glassfish.jersey.core</groupId>
      <artifactId>jersey-server</artifactId>
      <version>${container.version}</version>
    </dependency>
    <dependency>
      <groupId>org.glassfish.jersey.containers</groupId>
      <artifactId>jersey-container-servlet-core</artifactId>
      <version>${container.version}</version>
    </dependency>
    <dependency>
      <groupId>org.glassfish.jersey.containers</groupId>
      <artifactId>jersey-container-jetty-http</artifactId>
      <version>${container.version}</version>
    </dependency>
  </dependencies>

  <build>
    <plugins>
      <!-- fat jar for spark-submit: produces <artifact>-jar-with-dependencies.jar -->
      <plugin>
        <artifactId>maven-assembly-plugin</artifactId>
        <configuration>
          <!-- appendAssemblyId=true makes "jar-with-dependencies" the classifier;
               the original's <classifier> element is not a valid parameter and was removed -->
          <appendAssemblyId>true</appendAssemblyId>
          <descriptorRefs>
            <!-- element name corrected from <descriptor> to <descriptorRef> -->
            <descriptorRef>jar-with-dependencies</descriptorRef>
          </descriptorRefs>
        </configuration>
        <executions>
          <execution>
            <id>make-assembly</id>
            <phase>package</phase>
            <goals>
              <goal>single</goal>
            </goals>
          </execution>
        </executions>
      </plugin>

      <!-- Java sources: level follows ${java.version} (was hard-coded 1.7,
           contradicting the 1.8 javacArgs passed to the scala plugin below) -->
      <plugin>
        <artifactId>maven-compiler-plugin</artifactId>
        <configuration>
          <source>${java.version}</source>
          <target>${java.version}</target>
        </configuration>
      </plugin>

      <!-- Scala sources, compiled before Java in process-resources -->
      <plugin>
        <groupId>net.alchim31.maven</groupId>
        <artifactId>scala-maven-plugin</artifactId>
        <version>3.2.2</version>
        <executions>
          <execution>
            <id>scala-compile-first</id>
            <phase>process-resources</phase>
            <goals>
              <goal>compile</goal>
            </goals>
          </execution>
        </executions>
        <configuration>
          <scalaVersion>${scala.version}</scalaVersion>
          <recompileMode>incremental</recompileMode>
          <useZincServer>true</useZincServer>
          <args>
            <arg>-unchecked</arg>
            <arg>-deprecation</arg>
            <arg>-feature</arg>
          </args>
          <jvmArgs>
            <jvmArg>-Xms1024m</jvmArg>
            <jvmArg>-Xmx1024m</jvmArg>
          </jvmArgs>
          <javacArgs>
            <javacArg>-source</javacArg>
            <javacArg>${java.version}</javacArg>
            <javacArg>-target</javacArg>
            <javacArg>${java.version}</javacArg>
            <javacArg>-Xlint:all,-serial,-path</javacArg>
          </javacArgs>
        </configuration>
      </plugin>
    </plugins>
  </build>
</project>
0 0
- 开发spark使用的pom文件
- Spark开发环境搭建之使用Scala和maven的pom文件
- maven中pom.xml文件的使用
- 【spark】创建一个基于maven的spark项目所需要的pom.xml文件模板
- ideal+maven+scala配置spark的pom.xml文件内容,直接写spark代码
- maven 的pom文件
- pom文件的说明
- springmvc_hibernate的pom文件
- Maven的pom文件
- Maven的Pom文件
- springboot 的pom文件
- hdfs的pom文件
- maven基础学习-父pom文件的使用(七)
- 使用maven的pom文件停在building workplace
- 使用maven构建ssm项目的pom文件
- hibernate的pom文件配置
- maven hibernate的pom文件
- Maven的POM文件简介
- linux 查看进程,停止进程
- FreeMarker模板
- 算法库的设计之成员变量的类型
- JPA问题解决(1)--org.hibernate.jpa.internal.QueryImpl cannot be cast to javax.management.Query报错问题
- JDBC访问SparkSQL
- 开发spark使用的pom文件
- Linux access()函数使用
- 项目中封装的自定义Toast
- SDWebImage 源码阅读(一)
- Spring注解之@ModelAttribute理解
- Apache shiro集群实现 (六)分布式集群系统下的高可用session解决方案---Session共享
- nginx 限速设置
- 关于单边账的解释及解决(收单行业)
- BZOJ 2460 [BeiJing2011] 元素