2. Flink DataStream API Overview (Part 2)

2024-05-01


Code Examples


Maven Dependencies

        
    <dependencies>
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-streaming-java</artifactId>
            <version>1.19.0</version>
        </dependency>
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-clients</artifactId>
            <version>1.19.0</version>
        </dependency>
        <dependency>
            <groupId>org.slf4j</groupId>
            <artifactId>slf4j-api</artifactId>
            <version>1.7.25</version>
        </dependency>
        <dependency>
            <groupId>org.slf4j</groupId>
            <artifactId>slf4j-log4j12</artifactId>
            <version>1.7.25</version>
        </dependency>
    </dependencies>

log4j.properties

log4j.rootLogger=INFO, console
log4j.appender.console=org.apache.log4j.ConsoleAppender
log4j.appender.console.layout=org.apache.log4j.PatternLayout
log4j.appender.console.layout.ConversionPattern=%d{HH:mm:ss,SSS} %-5p %-60c %x - %m%n

1. _01_QuickStart

import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.core.execution.JobClient;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.windowing.assigners.TumblingProcessingTimeWindows;
import org.apache.flink.util.Collector;
import java.time.Duration;
public class _01_QuickStart {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        DataStream<Tuple2<String, Integer>> dataStream = env
                .socketTextStream("localhost", 8888)
                .flatMap(new Splitter())
                .keyBy(value -> value.f0)
                .window(TumblingProcessingTimeWindows.of(Duration.ofSeconds(5)))
                .sum(1);
        dataStream.print();
//        JobExecutionResult jobExecutionResult = env.execute("Window WordCount");
        // Printed when the program finishes:
        //JobExecutionResult=>Program execution finished
        //Job with JobID c01e1255752cbb34469a9a10177e637c has finished.
        //Job Runtime: 25596 ms
//        System.out.println("JobExecutionResult=>"+jobExecutionResult.getJobExecutionResult());
        JobClient jobClient = env.executeAsync("Window WordCount");
        // A Java program can interact with the running Flink job through the JobClient
        // jobID=>32e976f03ac7243c09a5cf07c0739921
        // jobStatus=>RUNNING
        System.out.println("jobID=>"+jobClient.getJobID());
        System.out.println("jobStatus=>"+jobClient.getJobStatus().get());
    }
    public static class Splitter implements FlatMapFunction<String, Tuple2<String, Integer>> {
        @Override
        public void flatMap(String sentence, Collector<Tuple2<String, Integer>> out) throws Exception {
            for (String word : sentence.split(",")) {
                out.collect(new Tuple2<>(word, 1));
            }
        }
    }
}
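
The commented-out lines above show the synchronous alternative: env.execute() blocks until the job finishes and returns a JobExecutionResult. A minimal sketch of that variant (the class name and the printed fields are illustrative, not from the original post):

import org.apache.flink.api.common.JobExecutionResult;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
public class _01_QuickStartSync {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.fromElements("a,b", "a,c").print();
        // execute() blocks until the (bounded) job finishes and returns a JobExecutionResult
        JobExecutionResult result = env.execute("Window WordCount (sync)");
        System.out.println("jobID=>" + result.getJobID());
        System.out.println("netRuntime(ms)=>" + result.getNetRuntime());
    }
}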

2. _02_ReadFileSource

import org.apache.flink.api.common.ExecutionConfig;
import org.apache.flink.api.java.io.TextInputFormat;
import org.apache.flink.core.fs.Path;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.source.FileProcessingMode;
import java.io.File;
public class _02_ReadFileSource {
    public static void main(String[] args) throws Exception {
        ExecutionConfig executionConfig = new ExecutionConfig();
        executionConfig.setAutoWatermarkInterval(1000);
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(executionConfig.toConfiguration());
        env.setBufferTimeout(1000);
        System.out.println("自动生成水位线间隔=>"+env.getConfig().getAutoWatermarkInterval());
        //第一次打印
        //1> a
        //5> h
        //6> i
        //4> f
        //2> c
        //3> e
        //1> b
        //2> d
        //4> g
        // After appending three "insert" lines to the local file
        // Second print output:
        //5> insert
        //2> i
        //2> insert
        //3> insert
        //1> g
        //1> h
        //8> d
        //7> a
        //8> e
        //7> b
        //8> f
        //7> c
        DataStreamSource<String> source = env.readFile(new TextInputFormat(Path.fromLocalFile(new File("word.txt")))
                , "/Users/***/Desktop/word.txt"
                , FileProcessingMode.PROCESS_CONTINUOUSLY
                , 10000);
        source.print();
        env.execute();
    }
}
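
env.readFile is marked deprecated in recent Flink releases in favor of the FileSource connector. Below is a minimal sketch of the same continuous read using FileSource, assuming the flink-connector-files dependency has been added; this is a substitute API, not part of the original example:

import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.connector.file.src.FileSource;
import org.apache.flink.connector.file.src.reader.TextLineInputFormat;
import org.apache.flink.core.fs.Path;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import java.time.Duration;
public class _02_FileSourceSketch {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        FileSource<String> fileSource = FileSource
                .forRecordStreamFormat(new TextLineInputFormat(), new Path("word.txt"))
                // re-scan the path every 10 seconds, like PROCESS_CONTINUOUSLY above
                .monitorContinuously(Duration.ofSeconds(10))
                .build();
        env.fromSource(fileSource, WatermarkStrategy.noWatermarks(), "file-source").print();
        env.execute();
    }
}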

3. _03_CollectAsync

import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.util.CloseableIterator;
public class _03_CollectAsync {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Create a DataStream from a list of elements
        DataStream<Integer> myInts = env.fromElements(1, 2, 3, 4, 5);
        CloseableIterator<Integer> iterator = myInts.collectAsync();
        env.execute();
        while (iterator.hasNext()){
            System.out.println("iterator=>"+iterator.next());
        }
    }
}
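
For a quick test on a bounded stream, DataStream#executeAndCollect() submits the job and collects the results in one call, instead of pairing collectAsync() with a separate execute(). A minimal sketch (the class name is illustrative):

import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import java.util.List;
public class _03_ExecuteAndCollect {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        DataStream<Integer> myInts = env.fromElements(1, 2, 3, 4, 5);
        // executeAndCollect(limit) runs the job and collects at most `limit` elements into a List
        List<Integer> result = myInts.executeAndCollect(5);
        result.forEach(value -> System.out.println("collected=>" + value));
    }
}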

4. _04_JobClientStopWithSavepoint

import org.apache.flink.api.java.io.TextInputFormat;
import org.apache.flink.core.execution.JobClient;
import org.apache.flink.core.fs.Path;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.source.FileProcessingMode;
import org.apache.flink.util.CloseableIterator;
import java.io.File;
// Results are only emitted when the program finishes or when a checkpoint is triggered
public class _04_JobClientStopWithSavepoint {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.enableCheckpointing(2000);
        DataStreamSource<String> source = env.readFile(new TextInputFormat(Path.fromLocalFile(new File("word.txt")))
                , "/Users/***/Desktop/word.txt"
                , FileProcessingMode.PROCESS_CONTINUOUSLY
                , 10000);
        CloseableIterator<String> iterator = source.collectAsync();
        JobClient jobClient = env.executeAsync();
//        TimeUnit.SECONDS.sleep(5);
//        jobClient.stopWithSavepoint(false,"/Users/hhx/Desktop/", SavepointFormatType.DEFAULT);
        while (iterator.hasNext()){
            System.out.println("iterator=>"+iterator.next());
        }
    }
}
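
The commented-out lines hint at stopping the job with a savepoint. JobClient#stopWithSavepoint returns a CompletableFuture that completes with the savepoint path; here is a minimal sketch of waiting for it (the class name, helper method, and savepoint directory are illustrative):

import org.apache.flink.core.execution.JobClient;
import org.apache.flink.core.execution.SavepointFormatType;
import java.util.concurrent.TimeUnit;
public class _04_StopWithSavepointSketch {
    // assumes `jobClient` was obtained from env.executeAsync() as in the example above
    public static void stopJobWithSavepoint(JobClient jobClient) throws Exception {
        TimeUnit.SECONDS.sleep(5);
        // advanceToEndOfEventTime = false: stop without advancing event time to the end;
        // the returned future completes with the path of the written savepoint
        String savepointPath = jobClient
                .stopWithSavepoint(false, "/tmp/savepoints/", SavepointFormatType.DEFAULT)
                .get();
        System.out.println("savepointPath=>" + savepointPath);
    }
}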

