Upgrade the Flink version to 1.17.1 & resolve the Kafka dependency conflict & support Redis Sentinel mode connections

pull/43/head
sky.huang 1 year ago
parent d09e3b78bc
commit 4587c97511

@@ -23,13 +23,13 @@
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-connector-hive_2.12</artifactId>
-           <version>1.16.0</version>
+           <version>${flink.version}</version>
            <scope>provided</scope>
        </dependency>
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-table-api-java-bridge</artifactId>
-           <version>1.16.0</version>
+           <version>${flink.version}</version>
            <scope>provided</scope>
        </dependency>

@@ -15,26 +15,27 @@
    <dependencies>
        <dependency>
            <groupId>org.apache.flink</groupId>
-           <artifactId>flink-walkthrough-common_${scala.binary.version}</artifactId>
+           <artifactId>flink-walkthrough-common</artifactId>
            <version>${flink.version}</version>
        </dependency>
        <!-- This dependency is provided, because it should not be packaged into the JAR file. -->
        <dependency>
            <groupId>org.apache.flink</groupId>
-           <artifactId>flink-streaming-java_${scala.binary.version}</artifactId>
+           <artifactId>flink-streaming-java</artifactId>
            <version>${flink.version}</version>
            <scope>provided</scope>
        </dependency>
        <dependency>
            <groupId>org.apache.flink</groupId>
-           <artifactId>flink-clients_${scala.binary.version}</artifactId>
+           <artifactId>flink-clients</artifactId>
            <version>${flink.version}</version>
            <scope>provided</scope>
        </dependency>
        <!-- Add connector dependencies here. They must be in the default scope (compile). -->
        <dependency>
            <groupId>org.apache.flink</groupId>
-           <artifactId>flink-connector-kafka_${scala.binary.version}</artifactId>
+           <artifactId>flink-connector-kafka</artifactId>
            <version>${flink.version}</version>
        </dependency>
@@ -85,7 +86,7 @@
                                <resource>META-INF/spring.schemas</resource>
                            </transformer>
                            <transformer
-                               implementation="org.apache.maven.plugins.shade.resource.ServicesResourceTransformer" />
+                               implementation="org.apache.maven.plugins.shade.resource.ServicesResourceTransformer"/>
                            <transformer
                                implementation="org.apache.maven.plugins.shade.resource.ManifestResourceTransformer">
                                <mainClass>com.java3y.austin.stream.AustinBootStrap</mainClass>

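Context for the renamed connector artifacts above: since Flink 1.15 the Java API modules no longer carry a Scala-version suffix, and Kafka input is consumed through the KafkaSource builder API. Below is a minimal sketch (not part of this commit) of how a job entry point such as AustinBootStrap would typically wire a Kafka source under Flink 1.17.1; the bootstrap servers, topic, and group id are illustrative placeholders, not values from the austin project.

import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.connector.kafka.source.KafkaSource;
import org.apache.flink.connector.kafka.source.enumerator.initializer.OffsetsInitializer;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

public class KafkaSourceSketch {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // KafkaSource is the builder-style replacement for the legacy FlinkKafkaConsumer;
        // all connection parameters here are placeholders.
        KafkaSource<String> source = KafkaSource.<String>builder()
                .setBootstrapServers("localhost:9092")
                .setTopics("example-topic")
                .setGroupId("example-group")
                .setStartingOffsets(OffsetsInitializer.latest())
                .setValueOnlyDeserializer(new SimpleStringSchema())
                .build();

        // Attach the source to the job graph and print records for demonstration.
        env.fromSource(source, WatermarkStrategy.noWatermarks(), "kafka-source")
                .print();

        env.execute("kafka-source-sketch");
    }
}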
@@ -22,10 +22,21 @@ public class AustinFlinkConstant {
     * TODO configure the Redis ip:port before use
+    * (in sentinel mode, the ip field holds the sentinel host)
     */
+   public static final String REDIS_MODE_SENTINEL = "SENTINEL";
+   public static final String REDIS_MODE_SINGLE = "SINGLE";
+   public static final String REDIS_MODE = REDIS_MODE_SENTINEL;
    public static final String REDIS_IP = "austin-redis";
-   public static final String REDIS_PORT = "6379";
+   public static final Integer REDIS_PORT = 6379;
    public static final String REDIS_PASSWORD = "austin";
+   public static final String MASTER_ID = "mymaster";
    /**
     * Flink

@@ -1,5 +1,6 @@
    package com.java3y.austin.stream.utils;
+   import cn.hutool.core.util.StrUtil;
    import com.java3y.austin.stream.callback.RedisPipelineCallBack;
    import com.java3y.austin.stream.constants.AustinFlinkConstant;
    import io.lettuce.core.LettuceFutures;
@@ -26,10 +27,18 @@ public class LettuceRedisUtils {
    private static RedisClient redisClient;
    static {
-       RedisURI redisUri = RedisURI.Builder.redis(AustinFlinkConstant.REDIS_IP)
-               .withPort(Integer.valueOf(AustinFlinkConstant.REDIS_PORT))
-               .withPassword(AustinFlinkConstant.REDIS_PASSWORD.toCharArray())
-               .build();
+       RedisURI redisUri = null;
+       if (StrUtil.equals(AustinFlinkConstant.REDIS_MODE_SENTINEL, AustinFlinkConstant.REDIS_MODE)) {
+           redisUri = RedisURI.Builder.sentinel(AustinFlinkConstant.REDIS_IP, AustinFlinkConstant.MASTER_ID)
+                   .withPassword(AustinFlinkConstant.REDIS_PASSWORD.toCharArray())
+                   .build();
+       } else if (StrUtil.equals(AustinFlinkConstant.REDIS_MODE_SINGLE, AustinFlinkConstant.REDIS_MODE)) {
+           redisUri = RedisURI.Builder.redis(AustinFlinkConstant.REDIS_IP)
+                   .withPort(Integer.valueOf(AustinFlinkConstant.REDIS_PORT))
+                   .withPassword(AustinFlinkConstant.REDIS_PASSWORD.toCharArray())
+                   .build();
+       }
        redisClient = RedisClient.create(redisUri);
    }

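The static initializer above registers a single sentinel host taken from REDIS_IP. As a minimal, hypothetical sketch (not part of this commit), a sentinel RedisURI can also list several sentinel nodes and be verified with a ping through Lettuce; the host names, ports, password, and class name below are placeholders.

import io.lettuce.core.RedisClient;
import io.lettuce.core.RedisURI;
import io.lettuce.core.api.StatefulRedisConnection;

public class SentinelConnectionSketch {
    public static void main(String[] args) {
        // Placeholder sentinel hosts and master name; Lettuce asks the sentinels
        // for the current master and reconnects after a failover.
        RedisURI sentinelUri = RedisURI.Builder
                .sentinel("sentinel-node-1", 26379, "mymaster")
                .withSentinel("sentinel-node-2", 26379)
                .withSentinel("sentinel-node-3", 26379)
                .withPassword("placeholder-password".toCharArray())
                .build();

        RedisClient client = RedisClient.create(sentinelUri);
        try (StatefulRedisConnection<String, String> connection = client.connect()) {
            // A ping against the resolved master confirms the sentinel setup works.
            System.out.println(connection.sync().ping());
        } finally {
            client.shutdown();
        }
    }
}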
@@ -32,9 +32,8 @@
    <properties>
        <java.version>1.8</java.version>
        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
-       <flink.version>1.14.3</flink.version>
+       <flink.version>1.17.1</flink.version>
        <target.java.version>1.8</target.java.version>
-       <scala.binary.version>2.11</scala.binary.version>
        <maven.compiler.source>${target.java.version}</maven.compiler.source>
        <maven.compiler.target>${target.java.version}</maven.compiler.target>
        <log4j.version>2.17.1</log4j.version>
@@ -54,7 +53,7 @@
        <dependency>
            <groupId>cn.hutool</groupId>
            <artifactId>hutool-all</artifactId>
-           <version>5.7.15</version>
+           <version>5.8.16</version>
        </dependency>
        <!--Guava utilities-->
@@ -135,21 +134,28 @@
            <version>2.3.0</version>
        </dependency>
        <!--Kafka message queue-->
+       <dependency>
+           <groupId>org.apache.kafka</groupId>
+           <artifactId>kafka-clients</artifactId>
+           <version>3.2.3</version>
+       </dependency>
        <!--Flink dependencies-->
        <dependency>
            <groupId>org.apache.flink</groupId>
-           <artifactId>flink-walkthrough-common_${scala.binary.version}</artifactId>
+           <artifactId>flink-walkthrough-common</artifactId>
            <version>${flink.version}</version>
        </dependency>
        <dependency>
            <groupId>org.apache.flink</groupId>
-           <artifactId>flink-streaming-java_${scala.binary.version}</artifactId>
+           <artifactId>flink-streaming-java</artifactId>
            <version>${flink.version}</version>
            <scope>provided</scope>
        </dependency>
        <dependency>
            <groupId>org.apache.flink</groupId>
-           <artifactId>flink-clients_${scala.binary.version}</artifactId>
+           <artifactId>flink-clients</artifactId>
            <version>${flink.version}</version>
            <scope>provided</scope>
        </dependency>
@@ -157,7 +163,7 @@
        <!--Kafka connector for Flink-->
        <dependency>
            <groupId>org.apache.flink</groupId>
-           <artifactId>flink-connector-kafka_${scala.binary.version}</artifactId>
+           <artifactId>flink-connector-kafka</artifactId>
            <version>${flink.version}</version>
        </dependency>

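One way to check that the explicitly pinned kafka-clients 3.2.3 (presumably chosen to match what the Flink 1.17.1 Kafka connector expects) actually wins on the runtime classpath is to read the version the Kafka client library reports about itself. A small sketch, not part of this commit; the class name is a placeholder.

import org.apache.kafka.common.utils.AppInfoParser;

public class KafkaClientVersionCheck {
    public static void main(String[] args) {
        // AppInfoParser reports the kafka-clients version loaded at runtime;
        // it should print 3.2.3 if the pinned dependency takes effect.
        System.out.println("kafka-clients version: " + AppInfoParser.getVersion());

        // The physical jar location helps spot a duplicate or shaded copy on the classpath.
        System.out.println("loaded from: "
                + AppInfoParser.class.getProtectionDomain().getCodeSource().getLocation());
    }
}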