Spark transform operator: cartesian

import org.apache.spark.{SparkConf, SparkContext}

/**
  * Created by lp on 2017/6/16.
  */
object T_cartesian {
  // Windows-only workaround: point Hadoop at a local installation
  System.setProperty("hadoop.home.dir", "F:\\hadoop-2.6.5")

  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("cartesian_test").setMaster("local")
    val sc = new SparkContext(conf)

    val rdd  = sc.parallelize(List(1, 2, 3))
    val rdd1 = sc.parallelize(List("A", "B"))

    // cartesian computes the Cartesian product of two RDDs: every pair
    // (a, b) with a from rdd and b from rdd1, as an RDD[(Int, String)]
    rdd.cartesian(rdd1)
      .foreach(println)

    sc.stop()
  }
}
Output:
(1,A)
(1,B)
(2,A)
(2,B)
(3,A)
(3,B)
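
Note that cartesian materializes every pairing, so the result has |rdd| × |rdd1| elements (3 × 2 = 6 above) and numPartitions(rdd) × numPartitions(rdd1) partitions, which gets expensive quickly on large inputs. Also, foreach runs println on the executors, so on a real cluster the pairs would appear in the executor logs rather than on the driver. Below is a minimal sketch of a driver-side variant; the object and variable names are illustrative, not from the original post:

import org.apache.spark.{SparkConf, SparkContext}

object CartesianCollect {
  def main(args: Array[String]): Unit = {
    val sc = new SparkContext(
      new SparkConf().setAppName("cartesian_collect").setMaster("local"))

    val nums    = sc.parallelize(List(1, 2, 3))
    val letters = sc.parallelize(List("A", "B"))

    // The product has nums.count * letters.count = 6 elements;
    // collect() is safe here only because the result is tiny.
    val product = nums.cartesian(letters)

    // Collecting brings the pairs back to the driver, so println runs
    // locally and sorting first makes the output order stable.
    product.collect().sorted.foreach(println)

    sc.stop()
  }
}

This prints the same six pairs as above, now on the driver and in a guaranteed order.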