diff --git a/austin-client/pom.xml b/austin-client/pom.xml
deleted file mode 100644
index e0766b9..0000000
--- a/austin-client/pom.xml
+++ /dev/null
@@ -1,31 +0,0 @@
-
-
-
- austin
- com.java3y.austin
- 0.0.1-SNAPSHOT
-
- 4.0.0
-
- austin-client
-
-
- 8
- 8
-
-
-
-
- com.java3y.austin
- austin-service-api
- 0.0.1-SNAPSHOT
-
-
- cn.hutool
- hutool-all
-
-
-
-
\ No newline at end of file
diff --git a/austin-client/src/main/java/com/java3y/austin/client/service/AustinService.java b/austin-client/src/main/java/com/java3y/austin/client/service/AustinService.java
deleted file mode 100644
index 847fe8b..0000000
--- a/austin-client/src/main/java/com/java3y/austin/client/service/AustinService.java
+++ /dev/null
@@ -1,63 +0,0 @@
-package com.java3y.austin.client.service;
-
-
-import cn.hutool.core.util.StrUtil;
-import cn.hutool.http.HttpUtil;
-import cn.hutool.json.JSON;
-import cn.hutool.json.JSONUtil;
-import com.java3y.austin.service.api.domain.MessageParam;
-import com.java3y.austin.service.api.domain.SendRequest;
-import com.java3y.austin.service.api.domain.SendResponse;
-import com.java3y.austin.service.api.enums.BusinessCode;
-
-/**
- * 对外提供的接口
- *
- * @author 3y
- */
-public class AustinService {
-
- private static final String SEND_PATH = "/send";
- private static final String RECALL_PATH = "/messageTemplate/recall";
-
- /**
- * 发送消息
- *
- * @param host 调用的接口host
- * @param sendRequest 发送消息入参
- * @return
- * @throws Exception
- */
- public static SendResponse send(String host, SendRequest sendRequest) throws Exception {
- String url = host + SEND_PATH;
- String result = HttpUtil.post(url, JSONUtil.toJsonStr(sendRequest));
- return JSONUtil.toBean(result, SendResponse.class);
- }
-
- /**
- * 根据模板ID撤回消息
- *
- * @param host 调用的接口host
- * @param id 撤回消息的模板ID
- * @return
- * @throws Exception
- */
- public static SendResponse recall(String host, String id) throws Exception {
- String url = host + RECALL_PATH + StrUtil.SLASH + id;
- String result = HttpUtil.post(url, id);
- return JSONUtil.toBean(result, SendResponse.class);
- }
-
- public static void main(String[] args) {
- SendRequest request = SendRequest.builder().code(BusinessCode.COMMON_SEND.getCode())
- .messageTemplateId(68L)
- .messageParam(MessageParam.builder().receiver("phone").build()).build();
-
- try {
- AustinService.send("url", request);
- } catch (Exception e) {
- System.out.println(e);
-
- }
- }
-}
diff --git a/austin-data-house/pom.xml b/austin-data-house/pom.xml
new file mode 100644
index 0000000..2bd15f7
--- /dev/null
+++ b/austin-data-house/pom.xml
@@ -0,0 +1,127 @@
+
+
+
+ austin
+ com.java3y.austin
+ 0.0.1-SNAPSHOT
+
+ 4.0.0
+
+ austin-data-house
+
+
+ 8
+ 8
+
+
+
+
+
+
+ org.apache.flink
+ flink-connector-hive_2.12
+ 1.14.3
+
+
+
+
+ org.apache.flink
+ flink-table-api-java-bridge_2.12
+ 1.14.3
+
+
+
+
+
+ org.apache.hive
+ hive-exec
+ 3.1.2
+
+
+
+
+ org.apache.hadoop
+ hadoop-mapreduce-client-core
+ 3.3.1
+
+
+ org.apache.hadoop
+ hadoop-common
+ 3.3.1
+
+
+ org.apache.hadoop
+ hadoop-mapreduce-client-common
+ 3.3.1
+
+
+ org.apache.hadoop
+ hadoop-mapreduce-client-jobclient
+ 3.3.1
+
+
+
+
+
+
+
+ org.apache.maven.plugins
+ maven-shade-plugin
+
+ false
+
+
+
+ package
+
+ shade
+
+
+
+
+
+ *:*
+
+ META-INF/*.SF
+ META-INF/*.DSA
+ META-INF/*.RSA
+
+
+
+
+
+ META-INF/spring.handlers
+
+
+ META-INF/spring.factories
+
+
+ META-INF/spring.schemas
+
+
+
+                                    com.java3y.austin.datahouse.AustinBootStrap
+
+
+
+
+
+
+
+ org.springframework.boot
+ spring-boot-maven-plugin
+ 2.2.2.RELEASE
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/austin-data-house/src/main/java/com/java3y/austin/datahouse/AustinBootStrap.java b/austin-data-house/src/main/java/com/java3y/austin/datahouse/AustinBootStrap.java
new file mode 100644
index 0000000..428b76e
--- /dev/null
+++ b/austin-data-house/src/main/java/com/java3y/austin/datahouse/AustinBootStrap.java
@@ -0,0 +1,82 @@
+package com.java3y.austin.datahouse;
+
+import org.apache.flink.streaming.api.CheckpointingMode;
+import org.apache.flink.streaming.api.environment.ExecutionCheckpointingOptions;
+import org.apache.flink.table.api.EnvironmentSettings;
+import org.apache.flink.table.api.SqlDialect;
+import org.apache.flink.table.api.TableEnvironment;
+import org.apache.flink.table.catalog.hive.HiveCatalog;
+
+import java.time.Duration;
+
+/**
+ * flink启动类
+ *
+ * @author 3y
+ */
+public class AustinBootStrap {
+
+ public static void main(String[] args) throws Exception {
+
+ TableEnvironment tableEnv = TableEnvironment.create(EnvironmentSettings.inStreamingMode());
+ tableEnv.getConfig().getConfiguration().set(ExecutionCheckpointingOptions.CHECKPOINTING_MODE, CheckpointingMode.EXACTLY_ONCE);
+ tableEnv.getConfig().getConfiguration().set(ExecutionCheckpointingOptions.CHECKPOINTING_INTERVAL, Duration.ofSeconds(20));
+ tableEnv.getConfig().setSqlDialect(SqlDialect.HIVE);
+
+ String catalogName = "my_hive";
+
+ HiveCatalog catalog = new HiveCatalog(catalogName, "austin_hive", null);
+
+ tableEnv.registerCatalog(catalogName, catalog);
+ tableEnv.useCatalog(catalogName);
+
+ tableEnv.executeSql("CREATE DATABASE IF NOT EXISTS austin");
+ tableEnv.executeSql("DROP TABLE IF EXISTS austin.message_anchor_info");
+
+ tableEnv.executeSql("create table austin.message_anchor_info(" +
+ "ids String,\n" +
+ "state String,\n" +
+ "businessId String,\n" +
+ "log_ts Timestamp(3),\n" +
+                "WATERMARK FOR log_ts AS log_ts - INTERVAL '5' SECOND" +
+ ")WITH(" +
+ " 'connector' = 'kafka',\n" +
+ "'topic' = 'austinTraceLog',\n" +
+ " 'properties.bootstrap.servers' = 'kafka_ip:9092',\n" +
+ "'properties.group.id' = 'flink1',\n" +
+ "'scan.startup.mode' = 'earliest-offset',\n" +
+ "'format' = 'json',\n" +
+ "'json.fail-on-missing-field' = 'false',\n" +
+ "'json.ignore-parse-errors' = 'true'" +
+ ")");
+
+
+// tableEnv.executeSql("CREATE DATABASE IF NOT EXISTS hive_tmp");
+// tableEnv.executeSql("DROP TABLE IF EXISTS hive_tmp.log_hive");
+//
+// tableEnv.executeSql(" CREATE TABLE hive_tmp.log_hive (\n" +
+// " user_id STRING,\n" +
+// " order_amount DOUBLE\n" +
+// " ) PARTITIONED BY (\n" +
+// " dt STRING,\n" +
+// " hr STRING\n" +
+// " ) STORED AS PARQUET\n" +
+// " TBLPROPERTIES (\n" +
+// " 'sink.partition-commit.trigger' = 'partition-time',\n" +
+// " 'sink.partition-commit.delay' = '1 min',\n" +
+// " 'format' = 'json',\n" +
+// " 'sink.partition-commit.policy.kind' = 'metastore,success-file',\n" +
+// " 'partition.time-extractor.timestamp-pattern'='$dt $hr:00:00'" +
+// " )");
+// tableEnv.getConfig().setSqlDialect(SqlDialect.DEFAULT);
+//
+// tableEnv.executeSql("" +
+// " INSERT INTO hive_tmp.log_hive\n" +
+// " SELECT\n" +
+// " user_id,\n" +
+// " order_amount,\n" +
+// " DATE_FORMAT(log_ts, 'yyyy-MM-dd'), DATE_FORMAT(log_ts, 'HH')\n" +
+// " FROM austin.message_anchor_info");
+
+ }
+}
diff --git a/austin-data-house/src/main/java/com/java3y/austin/datahouse/constants/DataHouseConstant.java b/austin-data-house/src/main/java/com/java3y/austin/datahouse/constants/DataHouseConstant.java
new file mode 100644
index 0000000..7ae66dd
--- /dev/null
+++ b/austin-data-house/src/main/java/com/java3y/austin/datahouse/constants/DataHouseConstant.java
@@ -0,0 +1,12 @@
+package com.java3y.austin.datahouse.constants;
+
+/**
+ * 数据仓库常量
+ *
+ * @author 3y
+ */
+public class DataHouseConstant {
+
+    public static final String KAFKA_GROUP_ID = "austinLogGroup";
+
+}
diff --git a/austin-data-house/src/main/resources/hive-site.xml b/austin-data-house/src/main/resources/hive-site.xml
new file mode 100644
index 0000000..ef0ff43
--- /dev/null
+++ b/austin-data-house/src/main/resources/hive-site.xml
@@ -0,0 +1,71 @@
+
+
+
+
+
+
+
+ javax.jdo.option.ConnectionURL
+ jdbc:postgresql://3y_ip:5432/metastore?createDatabaseIfNotExist=true
+ JDBC connect string for a JDBC metastore
+
+
+
+ javax.jdo.option.ConnectionDriverName
+ org.postgresql.Driver
+ Driver class name for a JDBC metastore
+
+
+
+ javax.jdo.option.ConnectionUserName
+ hive
+ username to use against metastore database
+
+
+
+ javax.jdo.option.ConnectionPassword
+ hive
+ password to use against metastore database
+
+
+
+ hive.metastore.uris
+ thrift://3y_ip:9083
+ Thrift URI for the remote metastore. Used by metastore client to connect to remote metastore.
+
+
+
+
+ datanucleus.schema.autoCreateAll
+ true
+
+
+
+ hive.server2.logging.operation.log.location
+ /root/sd/apache-hive-2.3.4-bin/tmp/operation_logs
+ Top level directory where operation logs are stored if logging functionality is enabled
+
+
+
+
+ hive.exec.scratchdir
+ /root/sd/apache-hive-2.3.4-bin/tmp/hive
+ HDFS root scratch dir for Hive jobs which gets created with write all (733) permission. For each
+ connecting
+ user, an HDFS scratch dir: ${hive.exec.scratchdir}/<username> is created, with
+ ${hive.scratch.dir.permission}.
+
+
+
+
+ hive.exec.local.scratchdir
+ /root/sd/apache-hive-2.3.4-bin/tmp/hive/local
+ Local scratch space for Hive jobs
+
+
+
+ hive.downloaded.resources.dir
+ /root/sd/apache-hive-2.3.4-bin/tmp/hive/resources
+ Temporary local directory for added resources in the remote file system.
+
+
\ No newline at end of file
diff --git a/austin-data-house/src/main/resources/sql-client-defaults.yaml b/austin-data-house/src/main/resources/sql-client-defaults.yaml
new file mode 100644
index 0000000..f24ef2d
--- /dev/null
+++ b/austin-data-house/src/main/resources/sql-client-defaults.yaml
@@ -0,0 +1,5 @@
+catalogs:
+ - name: my_hive
+ type: hive
+ default-database: austin_hive
+ hive-conf-dir: /opt/flink/conf
\ No newline at end of file
diff --git a/docker/flink/docker-compose.yml b/docker/flink/docker-compose.yml
index 7f669a5..276fd31 100644
--- a/docker/flink/docker-compose.yml
+++ b/docker/flink/docker-compose.yml
@@ -1,27 +1,40 @@
version: "2.2"
services:
jobmanager:
- image: flink:latest
+ image: flink:1.14.3
ports:
- "8081:8081"
command: jobmanager
+ volumes:
+ - ./sql-client-defaults.yaml:/opt/flink/conf/sql-client-defaults.yaml:rw
+ - ./hive-site.xml:/opt/flink/conf/hive-site.xml:rw
environment:
- |
FLINK_PROPERTIES=
jobmanager.rpc.address: jobmanager
- - SET_CONTAINER_TIMEZONE=true
- - CONTAINER_TIMEZONE=Asia/Shanghai
- - TZ=Asia/Shanghai
taskmanager:
- image: flink:latest
+ image: flink:1.14.3
depends_on:
- jobmanager
command: taskmanager
+ volumes:
+ - ./sql-client-defaults.yaml:/opt/flink/conf/sql-client-defaults.yaml:rw
+ - ./hive-site.xml:/opt/flink/conf/hive-site.xml:rw
environment:
- |
FLINK_PROPERTIES=
jobmanager.rpc.address: jobmanager
taskmanager.numberOfTaskSlots: 2
- - SET_CONTAINER_TIMEZONE=true
- - CONTAINER_TIMEZONE=Asia/Shanghai
- - TZ=Asia/Shanghai
\ No newline at end of file
+ sql-client:
+ image: flink:1.14.3
+ volumes:
+ - ./sql-client-defaults.yaml:/opt/flink/conf/sql-client-defaults.yaml:rw
+ - ./hive-site.xml:/opt/flink/conf/hive-site.xml:rw
+    command: ./bin/sql-client.sh embedded -d ../conf/sql-client-defaults.yaml
+ depends_on:
+ - jobmanager
+ environment:
+ - |
+ FLINK_PROPERTIES=
+ jobmanager.rpc.address: jobmanager
+ rest.address: jobmanager
\ No newline at end of file
diff --git a/docker/flink/hive-site.xml b/docker/flink/hive-site.xml
new file mode 100644
index 0000000..ef0ff43
--- /dev/null
+++ b/docker/flink/hive-site.xml
@@ -0,0 +1,71 @@
+
+
+
+
+
+
+
+ javax.jdo.option.ConnectionURL
+ jdbc:postgresql://3y_ip:5432/metastore?createDatabaseIfNotExist=true
+ JDBC connect string for a JDBC metastore
+
+
+
+ javax.jdo.option.ConnectionDriverName
+ org.postgresql.Driver
+ Driver class name for a JDBC metastore
+
+
+
+ javax.jdo.option.ConnectionUserName
+ hive
+ username to use against metastore database
+
+
+
+ javax.jdo.option.ConnectionPassword
+ hive
+ password to use against metastore database
+
+
+
+ hive.metastore.uris
+ thrift://3y_ip:9083
+ Thrift URI for the remote metastore. Used by metastore client to connect to remote metastore.
+
+
+
+
+ datanucleus.schema.autoCreateAll
+ true
+
+
+
+ hive.server2.logging.operation.log.location
+ /root/sd/apache-hive-2.3.4-bin/tmp/operation_logs
+ Top level directory where operation logs are stored if logging functionality is enabled
+
+
+
+
+ hive.exec.scratchdir
+ /root/sd/apache-hive-2.3.4-bin/tmp/hive
+ HDFS root scratch dir for Hive jobs which gets created with write all (733) permission. For each
+ connecting
+ user, an HDFS scratch dir: ${hive.exec.scratchdir}/<username> is created, with
+ ${hive.scratch.dir.permission}.
+
+
+
+
+ hive.exec.local.scratchdir
+ /root/sd/apache-hive-2.3.4-bin/tmp/hive/local
+ Local scratch space for Hive jobs
+
+
+
+ hive.downloaded.resources.dir
+ /root/sd/apache-hive-2.3.4-bin/tmp/hive/resources
+ Temporary local directory for added resources in the remote file system.
+
+
\ No newline at end of file
diff --git a/docker/flink/sql-client-defaults.yaml b/docker/flink/sql-client-defaults.yaml
new file mode 100644
index 0000000..a7e9f9e
--- /dev/null
+++ b/docker/flink/sql-client-defaults.yaml
@@ -0,0 +1,5 @@
+catalogs:
+  - name: my_hive
+ type: hive
+ default-database: austin_hive
+ hive-conf-dir: /opt/flink/conf
\ No newline at end of file
diff --git a/pom.xml b/pom.xml
index d6100f8..027e5d6 100644
--- a/pom.xml
+++ b/pom.xml
@@ -13,7 +13,7 @@
austin-handler
austin-cron
austin-stream
- austin-client
+ austin-data-house