【云星数据 -- Apache Flink in Practice Series (Premium Edition)】: Apache Flink Batch Processing API in Detail and Programming Practice 024 -- DataSet Practical API in Detail 024


I. Flink DataSet API in Detail (Java Edition) - 001

Map

Operates at element granularity: each element is transformed 1:1.

Program:

package code.book.batch.dataset.advance.api;

import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.java.DataSet;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.tuple.Tuple2;

public class MapFunction001java {
    public static void main(String[] args) throws Exception {
        // 1. Set up the execution environment and prepare the input data
        final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
        DataSet<String> text = env.fromElements("flink vs spark", "buffer vs  shuffle");

        // 2. Map each element: convert it to upper case and append the suffix "--##bigdata##"
        DataSet<String> text2 = text.map(new MapFunction<String, String>() {
            @Override
            public String map(String s) throws Exception {
                return s.toUpperCase() + "--##bigdata##";
            }
        });
        text2.print();

        // 3. Map each element: convert it to upper case and compute the length of the line
        DataSet<Tuple2<String, Integer>> text3 = text.map(
            new MapFunction<String, Tuple2<String, Integer>>() {
                @Override
                public Tuple2<String, Integer> map(String s) throws Exception {
                    // upper case the line and pair it with its length
                    return new Tuple2<String, Integer>(s.toUpperCase(), s.length());
                }
            });
        text3.print();

        // 4. Map each element: convert it to upper case, compute the line length,
        //    and wrap the result in a class
        // 4.1 Define the class
        class Wc {
            private String line;
            private int lineLength;

            public Wc(String line, int lineLength) {
                this.line = line;
                this.lineLength = lineLength;
            }

            @Override
            public String toString() {
                return "Wc{" + "line='" + line + '\'' + ", lineLength='" + lineLength + '\'' + '}';
            }
        }

        // 4.2 Convert to the class type
        DataSet<Wc> text4 = text.map(new MapFunction<String, Wc>() {
            @Override
            public Wc map(String s) throws Exception {
                return new Wc(s.toUpperCase(), s.length());
            }
        });
        text4.print();
    }
}

Output:

text2.print();
FLINK VS SPARK--##bigdata##
BUFFER VS  SHUFFLE--##bigdata##

text3.print();
(FLINK VS SPARK,14)
(BUFFER VS  SHUFFLE,18)

text4.print();
Wc{line='FLINK VS SPARK', lineLength='14'}
Wc{line='BUFFER VS  SHUFFLE', lineLength='18'}
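The same 1:1 transformations can also be written with Java 8 lambdas. The following is a minimal sketch, not part of the original example: the class name MapFunction001LambdaJava is invented for illustration, and it assumes a Flink version (1.3+) in which org.apache.flink.api.common.typeinfo.Types is available. Because Java erases the type arguments of a lambda that returns a generic type such as Tuple2, the result type has to be declared explicitly with returns(...).

package code.book.batch.dataset.advance.api;

import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.api.java.DataSet;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.tuple.Tuple2;

public class MapFunction001LambdaJava {
    public static void main(String[] args) throws Exception {
        // 1. Set up the execution environment and prepare the input data
        final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
        DataSet<String> text = env.fromElements("flink vs spark", "buffer vs  shuffle");

        // 2. The 1:1 transformation written as a lambda instead of an anonymous MapFunction
        DataSet<String> upper = text.map(s -> s.toUpperCase() + "--##bigdata##");
        upper.print();

        // 3. A lambda returning a generic type (Tuple2) loses its type arguments to
        //    erasure, so the result type is declared with returns(...)
        DataSet<Tuple2<String, Integer>> withLength = text
                .map(s -> new Tuple2<>(s.toUpperCase(), s.length()))
                .returns(Types.TUPLE(Types.STRING, Types.INT));
        withLength.print();
    }
}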

MapPartition

Operates at partition granularity: the elements of a whole partition are transformed 1:n, i.e. each partition may emit zero or more output records.

Program:

package code.book.batch.dataset.advance.api;

import org.apache.flink.api.common.functions.MapPartitionFunction;
import org.apache.flink.api.java.DataSet;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.operators.MapPartitionOperator;
import org.apache.flink.util.Collector;

import java.util.Iterator;

public class MapPartitionFunction001java {
    public static void main(String[] args) throws Exception {
        // 1. Set up the execution environment and prepare the input data
        final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
        DataSet<String> text = env.fromElements("flink vs spark", "buffer vs  shuffer");

        // 2. Map at partition granularity: count the elements in each partition
        final MapPartitionOperator<String, Long> text2 = text.mapPartition(
            new MapPartitionFunction<String, Long>() {
                @Override
                public void mapPartition(Iterable<String> iterable, Collector<Long> collector)
                        throws Exception {
                    long c = 0;
                    for (String s : iterable) {
                        c++;
                    }
                    collector.collect(c);
                }
            });
        text2.print();

        // 3. Map at partition granularity: transform the content of each element
        final MapPartitionOperator<String, String> text3 = text.mapPartition(
            new MapPartitionFunction<String, String>() {
                @Override
                public void mapPartition(Iterable<String> iterable, Collector<String> collector)
                        throws Exception {
                    for (String s : iterable) {
                        s = s.toUpperCase() + "--##bigdata##";
                        collector.collect(s);
                    }
                }
            });
        text3.print();

        // 4. Map at partition granularity: convert to upper case, compute the line length,
        //    and wrap the result in a class
        // 4.1 Define the class
        class Wc {
            private String line;
            private int lineLength;

            public Wc(String line, int lineLength) {
                this.line = line;
                this.lineLength = lineLength;
            }

            @Override
            public String toString() {
                return "Wc{" + "line='" + line + '\'' + ", lineLength='" + lineLength + '\'' + '}';
            }
        }

        // 4.2 Convert to the class type
        final MapPartitionOperator<String, Wc> text4 = text.mapPartition(
            new MapPartitionFunction<String, Wc>() {
                @Override
                public void mapPartition(Iterable<String> iterable, Collector<Wc> collector)
                        throws Exception {
                    Iterator<String> itor = iterable.iterator();
                    while (itor.hasNext()) {
                        String s = itor.next();
                        collector.collect(new Wc(s.toUpperCase(), s.length()));
                    }
                }
            });
        text4.print();
    }
}

Output:

text2.print();
2

text3.print();
FLINK VS SPARK--##bigdata##
BUFFER VS  SHUFFER--##bigdata##

text4.print();
Wc{line='FLINK VS SPARK', lineLength='14'}
Wc{line='BUFFER VS  SHUFFER', lineLength='18'}
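A common reason to prefer mapPartition over map is to pay a per-partition setup cost (opening a database connection, loading a dictionary, compiling a pattern) once per partition instead of once per element. The sketch below illustrates that pattern under assumed names: ExpensiveResource and MapPartitionSetupCostJava are hypothetical and stand in for whatever setup your job actually needs.

package code.book.batch.dataset.advance.api;

import org.apache.flink.api.common.functions.MapPartitionFunction;
import org.apache.flink.api.java.DataSet;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.util.Collector;

public class MapPartitionSetupCostJava {

    // Hypothetical stand-in for any per-partition setup cost
    static class ExpensiveResource {
        String enrich(String s) {
            return s.toUpperCase() + "--##bigdata##";
        }
    }

    public static void main(String[] args) throws Exception {
        final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
        DataSet<String> text = env.fromElements("flink vs spark", "buffer vs  shuffer");

        // The resource is created once per partition and reused for every element in it,
        // instead of once per element as it would be inside a plain MapFunction.
        DataSet<String> enriched = text.mapPartition(
            new MapPartitionFunction<String, String>() {
                @Override
                public void mapPartition(Iterable<String> values, Collector<String> out)
                        throws Exception {
                    ExpensiveResource resource = new ExpensiveResource(); // once per partition
                    for (String s : values) {
                        out.collect(resource.enrich(s));
                    }
                }
            });
        enriched.print();
    }
}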