Spark Programming in Java: Basic RDD Operations


Basic operations on an RDD containing {1, 2, 3, 3}:

Function (Scala form) | Purpose | Example | Result
collect() | Return all elements of the RDD | rdd.collect() | {1, 2, 3, 3}
count() | Number of elements in the RDD | rdd.count() | 4
countByValue() | Number of times each element occurs in the RDD | rdd.countByValue() | {(1, 1), (2, 1), (3, 2)}
take(num) | Return num elements from the RDD | rdd.take(2) | {1, 2}
top(num) | Return the num largest elements of the RDD | rdd.top(2) | {3, 3}
takeOrdered(num)(ordering) | Return num elements, ordered by the given ordering | rdd.takeOrdered(2)(myOrdering) | {3, 3}
takeSample(withReplacement, num, [seed]) | Return num elements at random | rdd.takeSample(false, 1) | Nondeterministic
reduce(func) | Combine the elements of the RDD in a single pass (e.g., sum) | rdd.reduce((x, y) => x + y) | 9
fold(zero)(func) | Same as reduce(), but with the given zero value | rdd.fold(0)((x, y) => x + y) | 9
aggregate(zeroValue)(seqOp, combOp) | Similar to reduce(), but used to return a result of a different type | rdd.aggregate((0, 0))((x, y) => (x._1 + y, x._2 + 1), (x, y) => (x._1 + y._1, x._2 + y._2)) | (9, 4)
foreach(func) | Apply the given function to each element of the RDD | rdd.foreach(func) | Nothing
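The aggregate example in the table is written in Scala. As a companion, here is a minimal Java sketch of the same computation, summing and counting the elements of {1, 2, 3, 3} to get (9, 4) and then the average; the class name AggregateSumCount and the variable names are only illustrative, not part of the original article.

import java.util.Arrays;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;

import scala.Tuple2;

public class AggregateSumCount {
    public static void main(String[] args) {
        SparkConf conf = new SparkConf().setMaster("local").setAppName("AggregateSumCount");
        JavaSparkContext sc = new JavaSparkContext(conf);
        JavaRDD<Integer> rdd = sc.parallelize(Arrays.asList(1, 2, 3, 3));

        // zero value (0, 0) is the starting (sum, count) accumulator
        Tuple2<Integer, Integer> zero = new Tuple2<>(0, 0);
        Tuple2<Integer, Integer> sumCount = rdd.aggregate(
                zero,
                // seqOp: fold one element into the per-partition (sum, count)
                (acc, x) -> new Tuple2<>(acc._1() + x, acc._2() + 1),
                // combOp: merge the (sum, count) pairs of different partitions
                (a, b) -> new Tuple2<>(a._1() + b._1(), a._2() + b._2()));

        System.out.println(sumCount);                                // (9,4)
        System.out.println(sumCount._1() / (double) sumCount._2());  // average: 2.25

        sc.close();
    }
}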

Java implementation of the basic operations

import java.util.Arrays;
import java.util.List;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.Function2;
import org.apache.spark.api.java.function.VoidFunction;

public class SimpleRDD {
    public static void main(String[] args) {
        SparkConf conf = new SparkConf().setMaster("local").setAppName("SimpleRDD");
        JavaSparkContext sc = new JavaSparkContext(conf);

        // RDD containing {1, 2, 3, 3}, split across 2 partitions
        JavaRDD<Integer> rdd = sc.parallelize(Arrays.asList(1, 2, 3, 3), 2);
        System.out.println("rdd collect: " + rdd.collect());
        System.out.println("rdd count: " + rdd.count());
        System.out.println("rdd countByValue: " + rdd.countByValue());
        System.out.println("rdd take: " + rdd.take(2));
        System.out.println("rdd top: " + rdd.top(2));
        System.out.println("rdd takeOrdered: " + rdd.takeOrdered(2));
        System.out.println("rdd reduce: " + rdd.reduce((x, y) -> x + y));
        System.out.println("rdd fold: " + rdd.fold(0, (x, y) -> x + y));

        System.out.println("rdd aggregate test");
        List<Integer> data = Arrays.asList(5, 1, 1, 4, 4, 2, 2);
        JavaRDD<Integer> javaRDD = sc.parallelize(data, 2);
        // aggregate with zero value 3: seqOp keeps the running maximum inside each
        // partition, combOp sums the per-partition results on the driver
        Integer aggregateValue = javaRDD.aggregate(3, new Function2<Integer, Integer, Integer>() {
            @Override
            public Integer call(Integer v1, Integer v2) throws Exception {
                System.out.println("seq: v1=" + v1 + ", v2=" + v2);
                return Math.max(v1, v2);
            }
        }, new Function2<Integer, Integer, Integer>() {
            int combCalls = 0; // combOp is invoked once per partition result
            @Override
            public Integer call(Integer v1, Integer v2) throws Exception {
                System.out.println("comb call #" + combCalls++ + ": v1=" + v1 + ", v2=" + v2);
                return v1 + v2;
            }
        });
        System.out.println("aggregate result: " + aggregateValue);

        System.out.println("foreach");
        rdd.foreach(new VoidFunction<Integer>() {
            @Override
            public void call(Integer t) throws Exception {
                System.out.println(t);
            }
        });

        sc.close();
    }
}
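One detail worth calling out in the aggregate call above: Spark uses the zero value (3 here) to seed the seqOp in every partition and then again to seed the combOp on the driver, so with the two partitions in this example the printed result is typically 3 + 5 + 4 = 12 rather than the plain maximum 5. The same call can also be written with Java 8 lambdas, as the reduce and fold calls already are. Below is a minimal sketch under that assumption, using the same data and partitioning; the class name LambdaAggregate is illustrative only.

import java.util.Arrays;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;

public class LambdaAggregate {
    public static void main(String[] args) {
        SparkConf conf = new SparkConf().setMaster("local").setAppName("LambdaAggregate");
        JavaSparkContext sc = new JavaSparkContext(conf);
        JavaRDD<Integer> javaRDD = sc.parallelize(Arrays.asList(5, 1, 1, 4, 4, 2, 2), 2);

        // seqOp keeps the running maximum inside each partition (seeded with 3);
        // combOp adds the per-partition maxima together, again starting from 3
        Integer aggregateValue = javaRDD.aggregate(
                3,
                (acc, v) -> Math.max(acc, v),
                (a, b) -> a + b);
        System.out.println("aggregate (lambda form): " + aggregateValue); // typically 12

        sc.close();
    }
}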