【1】Add the Flink-Redis connector dependency provided by the third-party Bahir project
<!-- https://mvnrepository.com/artifact/org.apache.bahir/flink-connector-redis -->
<dependency>
    <groupId>org.apache.bahir</groupId>
    <artifactId>flink-connector-redis_2.11</artifactId>
    <version>1.0</version>
</dependency>
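If the project is built with sbt rather than Maven, the same artifact can be declared roughly as follows (a sketch; the `_2.11` suffix is the Scala binary version and should match your project):

// sbt equivalent of the Maven dependency above (assumes Scala 2.11 binaries)
libraryDependencies += "org.apache.bahir" % "flink-connector-redis_2.11" % "1.0"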
【2】Connect Flink to Redis and write the processed result through a RedisSink
package com.zzx.flink

import org.apache.flink.streaming.api.scala._
import org.apache.flink.streaming.connectors.redis.RedisSink
import org.apache.flink.streaming.connectors.redis.common.config.FlinkJedisPoolConfig
import org.apache.flink.streaming.connectors.redis.common.mapper.{RedisCommand, RedisCommandDescription, RedisMapper}

object RedisSinkTest {
  def main(args: Array[String]): Unit = {
    // Create a streaming execution environment
    val env = StreamExecutionEnvironment.getExecutionEnvironment

    // Read the raw data from a file
    val inputStreamFromFile: DataStream[String] = env.readTextFile("E:\\Project\\flink\\src\\main\\resources\\wordcount.txt")

    // Convert each line read from the file into the user-defined SensorReading class
    val dataStream: DataStream[SensorReading] = inputStreamFromFile.map(data => {
      val dataArray = data.split(",")
      SensorReading(dataArray(0), dataArray(1).toLong, dataArray(2).toDouble)
    })

    // Define the Redis connection configuration; FlinkJedisPoolConfig extends FlinkJedisConfigBase,
    // which is exactly the argument the RedisSink needs (under the hood some of this data is kept as state)
    val conf = new FlinkJedisPoolConfig.Builder()
      .setHost("192.168.52.131")
      .setPort(6379)
      .setPassword("zzx")
      .build()

    // Define a RedisMapper that describes how the data is written to Redis
    val myMapper = new RedisMapper[SensorReading] {
      // The Redis command used to store the data: hset table key value
      override def getCommandDescription: RedisCommandDescription = {
        // hset table name
        new RedisCommandDescription(RedisCommand.HSET, "sensor_temp")
      }
      // The hash field (key)
      override def getKeyFromData(data: SensorReading): String = data.id
      // The hash value
      override def getValueFromData(data: SensorReading): String = data.temperature.toString
    }

    dataStream.addSink(new RedisSink[SensorReading](conf, myMapper))
    env.execute("Redis Sink test")
  }
}
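The example assumes a SensorReading case class and a comma-separated input file defined earlier in this series; they are not shown above, but inferred from the map function they would look roughly like this:

// Assumed case class matching the fields parsed above: id, timestamp, temperature
case class SensorReading(id: String, timestamp: Long, temperature: Double)

// Example content of wordcount.txt (one reading per line, purely illustrative):
// sensor_1,1547718199,35.8
// sensor_2,1547718201,15.4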
Looking at the source code, you can see that RedisSink extends the RichSinkFunction<IN> class:
public class RedisSink<IN> extends RichSinkFunction<IN> {
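Because RedisSink is a RichSinkFunction, it gets the rich-function lifecycle: the connection can be created once per parallel task in open(), each record is written in invoke(), and resources are released in close(). A hand-rolled sink following the same pattern (purely a sketch, not the Bahir implementation; it talks to Redis directly through the Jedis client) might look like this:

import org.apache.flink.configuration.Configuration
import org.apache.flink.streaming.api.functions.sink.RichSinkFunction
import redis.clients.jedis.Jedis

// A minimal hand-written Redis sink with the same lifecycle as Bahir's RedisSink (sketch only)
class MyRedisSink extends RichSinkFunction[SensorReading] {
  private var jedis: Jedis = _

  // Called once per parallel task before any records arrive: open the connection here
  override def open(parameters: Configuration): Unit = {
    jedis = new Jedis("192.168.52.131", 6379)
    jedis.auth("zzx")
  }

  // Called for every record: write it as a hash field, i.e. HSET sensor_temp <id> <temperature>
  override def invoke(value: SensorReading): Unit = {
    jedis.hset("sensor_temp", value.id, value.temperature.toString)
  }

  // Called when the task shuts down: release the connection
  override def close(): Unit = {
    if (jedis != null) jedis.close()
  }
}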
【3】Check the output written to Redis
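One way to verify the result is to read the hash back, either from redis-cli (HGETALL sensor_temp) or, staying in Scala, with a small Jedis snippet like the one below (a sketch; host, port and password come from the configuration above):

import redis.clients.jedis.Jedis
import scala.collection.JavaConverters._

// Read back the "sensor_temp" hash that the RedisSink populated (verification sketch)
object CheckRedisOutput extends App {
  val jedis = new Jedis("192.168.52.131", 6379)
  jedis.auth("zzx")
  // hgetAll returns a java.util.Map[String, String] of sensor id -> latest temperature
  jedis.hgetAll("sensor_temp").asScala.foreach { case (id, temp) =>
    println(s"$id -> $temp")
  }
  jedis.close()
}

Because the sink uses HSET with the sensor id as the field, each sensor keeps only its most recently written temperature in the hash.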