commit
30aac270e8
139 changed files with 7552 additions and 0 deletions
@ -0,0 +1,34 @@ |
|||||
|
HELP.md |
||||
|
target/ |
||||
|
!.mvn/wrapper/maven-wrapper.jar |
||||
|
!**/src/main/**/target/ |
||||
|
!**/src/test/**/target/ |
||||
|
|
||||
|
### STS ### |
||||
|
.apt_generated |
||||
|
.classpath |
||||
|
.factorypath |
||||
|
.project |
||||
|
.settings |
||||
|
.springBeans |
||||
|
.sts4-cache |
||||
|
|
||||
|
### IntelliJ IDEA ### |
||||
|
.idea |
||||
|
*.iws |
||||
|
*.iml |
||||
|
*.ipr |
||||
|
|
||||
|
### NetBeans ### |
||||
|
/nbproject/private/ |
||||
|
/nbbuild/ |
||||
|
/dist/ |
||||
|
/nbdist/ |
||||
|
/.nb-gradle/ |
||||
|
build/ |
||||
|
!**/src/main/**/build/ |
||||
|
!**/src/test/**/build/ |
||||
|
|
||||
|
### VS Code ### |
||||
|
.vscode/ |
||||
|
logs/ |
||||
@ -0,0 +1,118 @@ |
|||||
|
/* |
||||
|
* Copyright 2007-present the original author or authors. |
||||
|
* |
||||
|
* Licensed under the Apache License, Version 2.0 (the "License"); |
||||
|
* you may not use this file except in compliance with the License. |
||||
|
* You may obtain a copy of the License at |
||||
|
* |
||||
|
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
|
* |
||||
|
* Unless required by applicable law or agreed to in writing, software |
||||
|
* distributed under the License is distributed on an "AS IS" BASIS, |
||||
|
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
|
* See the License for the specific language governing permissions and |
||||
|
* limitations under the License. |
||||
|
*/ |
||||
|
|
||||
|
import java.net.*; |
||||
|
import java.io.*; |
||||
|
import java.nio.channels.*; |
||||
|
import java.util.Properties; |
||||
|
|
||||
|
public class MavenWrapperDownloader { |
||||
|
|
||||
|
private static final String WRAPPER_VERSION = "0.5.6"; |
||||
|
/** |
||||
|
* Default URL to download the maven-wrapper.jar from, if no 'downloadUrl' is provided. |
||||
|
*/ |
||||
|
private static final String DEFAULT_DOWNLOAD_URL = "https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/" |
||||
|
+ WRAPPER_VERSION + "/maven-wrapper-" + WRAPPER_VERSION + ".jar"; |
||||
|
|
||||
|
/** |
||||
|
* Path to the maven-wrapper.properties file, which might contain a downloadUrl property to |
||||
|
* use instead of the default one. |
||||
|
*/ |
||||
|
private static final String MAVEN_WRAPPER_PROPERTIES_PATH = |
||||
|
".mvn/wrapper/maven-wrapper.properties"; |
||||
|
|
||||
|
/** |
||||
|
* Path where the maven-wrapper.jar will be saved to. |
||||
|
*/ |
||||
|
private static final String MAVEN_WRAPPER_JAR_PATH = |
||||
|
".mvn/wrapper/maven-wrapper.jar"; |
||||
|
|
||||
|
/** |
||||
|
* Name of the property which should be used to override the default download url for the wrapper. |
||||
|
*/ |
||||
|
private static final String PROPERTY_NAME_WRAPPER_URL = "wrapperUrl"; |
||||
|
|
||||
|
public static void main(String args[]) { |
||||
|
System.out.println("- Downloader started"); |
||||
|
File baseDirectory = new File(args[0]); |
||||
|
System.out.println("- Using base directory: " + baseDirectory.getAbsolutePath()); |
||||
|
|
||||
|
// If the maven-wrapper.properties exists, read it and check if it contains a custom
|
||||
|
// wrapperUrl parameter.
|
||||
|
File mavenWrapperPropertyFile = new File(baseDirectory, MAVEN_WRAPPER_PROPERTIES_PATH); |
||||
|
String url = DEFAULT_DOWNLOAD_URL; |
||||
|
if (mavenWrapperPropertyFile.exists()) { |
||||
|
FileInputStream mavenWrapperPropertyFileInputStream = null; |
||||
|
try { |
||||
|
mavenWrapperPropertyFileInputStream = new FileInputStream(mavenWrapperPropertyFile); |
||||
|
Properties mavenWrapperProperties = new Properties(); |
||||
|
mavenWrapperProperties.load(mavenWrapperPropertyFileInputStream); |
||||
|
url = mavenWrapperProperties.getProperty(PROPERTY_NAME_WRAPPER_URL, url); |
||||
|
} catch (IOException e) { |
||||
|
System.out.println("- ERROR loading '" + MAVEN_WRAPPER_PROPERTIES_PATH + "'"); |
||||
|
} finally { |
||||
|
try { |
||||
|
if (mavenWrapperPropertyFileInputStream != null) { |
||||
|
mavenWrapperPropertyFileInputStream.close(); |
||||
|
} |
||||
|
} catch (IOException e) { |
||||
|
// Ignore ...
|
||||
|
} |
||||
|
} |
||||
|
} |
||||
|
System.out.println("- Downloading from: " + url); |
||||
|
|
||||
|
File outputFile = new File(baseDirectory.getAbsolutePath(), MAVEN_WRAPPER_JAR_PATH); |
||||
|
if (!outputFile.getParentFile().exists()) { |
||||
|
if (!outputFile.getParentFile().mkdirs()) { |
||||
|
System.out.println( |
||||
|
"- ERROR creating output directory '" + outputFile.getParentFile().getAbsolutePath() + "'"); |
||||
|
} |
||||
|
} |
||||
|
System.out.println("- Downloading to: " + outputFile.getAbsolutePath()); |
||||
|
try { |
||||
|
downloadFileFromURL(url, outputFile); |
||||
|
System.out.println("Done"); |
||||
|
System.exit(0); |
||||
|
} catch (Throwable e) { |
||||
|
System.out.println("- Error downloading"); |
||||
|
e.printStackTrace(); |
||||
|
System.exit(1); |
||||
|
} |
||||
|
} |
||||
|
|
||||
|
private static void downloadFileFromURL(String urlString, File destination) throws Exception { |
||||
|
if (System.getenv("MVNW_USERNAME") != null && System.getenv("MVNW_PASSWORD") != null) { |
||||
|
String username = System.getenv("MVNW_USERNAME"); |
||||
|
char[] password = System.getenv("MVNW_PASSWORD").toCharArray(); |
||||
|
Authenticator.setDefault(new Authenticator() { |
||||
|
@Override |
||||
|
protected PasswordAuthentication getPasswordAuthentication() { |
||||
|
return new PasswordAuthentication(username, password); |
||||
|
} |
||||
|
}); |
||||
|
} |
||||
|
URL website = new URL(urlString); |
||||
|
ReadableByteChannel rbc; |
||||
|
rbc = Channels.newChannel(website.openStream()); |
||||
|
FileOutputStream fos = new FileOutputStream(destination); |
||||
|
fos.getChannel().transferFrom(rbc, 0, Long.MAX_VALUE); |
||||
|
fos.close(); |
||||
|
rbc.close(); |
||||
|
} |
||||
|
|
||||
|
} |
||||
Binary file not shown.
@ -0,0 +1,2 @@ |
|||||
|
distributionUrl=https://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.8.1/apache-maven-3.8.1-bin.zip |
||||
|
wrapperUrl=https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/0.5.6/maven-wrapper-0.5.6.jar |
||||
@ -0,0 +1,53 @@ |
|||||
|
version: '1.0' |
||||
|
name: branch-pipeline |
||||
|
displayName: BranchPipeline |
||||
|
stages: |
||||
|
- stage: |
||||
|
name: compile |
||||
|
displayName: 编译 |
||||
|
steps: |
||||
|
- step: build@maven |
||||
|
name: build_maven |
||||
|
displayName: Maven 构建 |
||||
|
# 支持6、7、8、9、10、11六个版本 |
||||
|
jdkVersion: 8 |
||||
|
# 支持2.2.1、3.2.5、3.3.9、3.5.2、3.5.3、3.5.4、3.6.1、3.6.3八个版本 |
||||
|
mavenVersion: 3.3.9 |
||||
|
# 构建命令 |
||||
|
commands: |
||||
|
- mvn -B clean package -Dmaven.test.skip=true |
||||
|
# 非必填字段,开启后表示将构建产物暂存,但不会上传到制品库中,7天后自动清除 |
||||
|
artifacts: |
||||
|
# 构建产物名字,作为产物的唯一标识可向下传递,支持自定义,默认为BUILD_ARTIFACT。在下游可以通过${BUILD_ARTIFACT}方式引用来获取构建物地址 |
||||
|
- name: BUILD_ARTIFACT |
||||
|
# 构建产物获取路径,是指代码编译完毕之后构建物的所在路径,如通常jar包在target目录下。当前目录为代码库根目录 |
||||
|
path: |
||||
|
- ./target |
||||
|
- step: publish@general_artifacts |
||||
|
name: publish_general_artifacts |
||||
|
displayName: 上传制品 |
||||
|
# 上游构建任务定义的产物名,默认BUILD_ARTIFACT |
||||
|
dependArtifact: BUILD_ARTIFACT |
||||
|
# 上传到制品库时的制品命名,默认output |
||||
|
artifactName: output |
||||
|
dependsOn: build_maven |
||||
|
- stage: |
||||
|
name: release |
||||
|
displayName: 发布 |
||||
|
steps: |
||||
|
- step: publish@release_artifacts |
||||
|
name: publish_release_artifacts |
||||
|
displayName: '发布' |
||||
|
# 上游上传制品任务的产出 |
||||
|
dependArtifact: output |
||||
|
# 发布制品版本号 |
||||
|
version: '1.0.0.0' |
||||
|
# 是否开启版本号自增,默认开启 |
||||
|
autoIncrement: true |
||||
|
triggers: |
||||
|
push: |
||||
|
branches: |
||||
|
exclude: |
||||
|
- master |
||||
|
include: |
||||
|
- .* |
||||
@ -0,0 +1,51 @@ |
|||||
|
version: '1.0' |
||||
|
name: master-pipeline |
||||
|
displayName: MasterPipeline |
||||
|
stages: |
||||
|
- stage: |
||||
|
name: compile |
||||
|
displayName: 编译 |
||||
|
steps: |
||||
|
- step: build@maven |
||||
|
name: build_maven |
||||
|
displayName: Maven 构建 |
||||
|
# 支持6、7、8、9、10、11六个版本 |
||||
|
jdkVersion: 8 |
||||
|
# 支持2.2.1、3.2.5、3.3.9、3.5.2、3.5.3、3.5.4、3.6.1、3.6.3八个版本 |
||||
|
mavenVersion: 3.3.9 |
||||
|
# 构建命令 |
||||
|
commands: |
||||
|
- mvn -B clean package -Dmaven.test.skip=true |
||||
|
# 非必填字段,开启后表示将构建产物暂存,但不会上传到制品库中,7天后自动清除 |
||||
|
artifacts: |
||||
|
# 构建产物名字,作为产物的唯一标识可向下传递,支持自定义,默认为BUILD_ARTIFACT。在下游可以通过${BUILD_ARTIFACT}方式引用来获取构建物地址 |
||||
|
- name: BUILD_ARTIFACT |
||||
|
# 构建产物获取路径,是指代码编译完毕之后构建物的所在路径,如通常jar包在target目录下。当前目录为代码库根目录 |
||||
|
path: |
||||
|
- ./target |
||||
|
- step: publish@general_artifacts |
||||
|
name: publish_general_artifacts |
||||
|
displayName: 上传制品 |
||||
|
# 上游构建任务定义的产物名,默认BUILD_ARTIFACT |
||||
|
dependArtifact: BUILD_ARTIFACT |
||||
|
# 上传到制品库时的制品命名,默认output |
||||
|
artifactName: output |
||||
|
dependsOn: build_maven |
||||
|
- stage: |
||||
|
name: release |
||||
|
displayName: 发布 |
||||
|
steps: |
||||
|
- step: publish@release_artifacts |
||||
|
name: publish_release_artifacts |
||||
|
displayName: '发布' |
||||
|
# 上游上传制品任务的产出 |
||||
|
dependArtifact: output |
||||
|
# 发布制品版本号 |
||||
|
version: '1.0.0.0' |
||||
|
# 是否开启版本号自增,默认开启 |
||||
|
autoIncrement: true |
||||
|
triggers: |
||||
|
push: |
||||
|
branches: |
||||
|
include: |
||||
|
- master |
||||
@ -0,0 +1,40 @@ |
|||||
|
version: '1.0' |
||||
|
name: pr-pipeline |
||||
|
displayName: PRPipeline |
||||
|
stages: |
||||
|
- stage: |
||||
|
name: compile |
||||
|
displayName: 编译 |
||||
|
steps: |
||||
|
- step: build@maven |
||||
|
name: build_maven |
||||
|
displayName: Maven 构建 |
||||
|
# 支持6、7、8、9、10、11六个版本 |
||||
|
jdkVersion: 8 |
||||
|
# 支持2.2.1、3.2.5、3.3.9、3.5.2、3.5.3、3.5.4、3.6.1、3.6.3八个版本 |
||||
|
mavenVersion: 3.3.9 |
||||
|
# 构建命令 |
||||
|
commands: |
||||
|
- mvn -B clean package -Dmaven.test.skip=true |
||||
|
# 非必填字段,开启后表示将构建产物暂存,但不会上传到制品库中,7天后自动清除 |
||||
|
artifacts: |
||||
|
# 构建产物名字,作为产物的唯一标识可向下传递,支持自定义,默认为BUILD_ARTIFACT。在下游可以通过${BUILD_ARTIFACT}方式引用来获取构建物地址 |
||||
|
- name: BUILD_ARTIFACT |
||||
|
# 构建产物获取路径,是指代码编译完毕之后构建物的所在路径,如通常jar包在target目录下。当前目录为代码库根目录 |
||||
|
path: |
||||
|
- ./target |
||||
|
- step: publish@general_artifacts |
||||
|
name: publish_general_artifacts |
||||
|
displayName: 上传制品 |
||||
|
# 上游构建任务定义的产物名,默认BUILD_ARTIFACT |
||||
|
dependArtifact: BUILD_ARTIFACT |
||||
|
# 构建产物制品库,默认default,系统默认创建 |
||||
|
artifactRepository: default |
||||
|
# 上传到制品库时的制品命名,默认output |
||||
|
artifactName: output |
||||
|
dependsOn: build_maven |
||||
|
triggers: |
||||
|
pr: |
||||
|
branches: |
||||
|
include: |
||||
|
- master |
||||
@ -0,0 +1,15 @@ |
|||||
|
FROM registry.ap-northeast-1.aliyuncs.com/southwave/jdk17-template:latest |
||||
|
WORKDIR /app |
||||
|
COPY target/data-center-receiver.jar app.jar |
||||
|
EXPOSE 8200 |
||||
|
|
||||
|
# 使用UseCGroupMemoryLimitForHeap |
||||
|
|
||||
|
# ENV JAVA_OPTS="-Xms5g -Xmx5g -XX:+UseParallelGC -XX:ParallelGCThreads=4 -XX:MaxGCPauseMillis=200 -XX:GCTimeRatio=19 -XX:NewRatio=3 -XX:+AlwaysPreTouch -XX:+PrintGCDetails -XX:+PrintGCDateStamps -Xloggc:/app/gc.log" |
||||
|
|
||||
|
# 使用shell方式的ENTRYPOINT来确保环境变量被展开 |
||||
|
ENTRYPOINT java $JAVA_OPTS -jar app.jar -Djavax.net.debug=ssl --spring-profiles=$env |
||||
|
|
||||
|
|
||||
|
|
||||
|
|
||||
@ -0,0 +1,68 @@ |
|||||
|
# 介绍 |
||||
|
组件用于订阅客户端数据。 |
||||
|
组件运行在8201端口 |
||||
|
数据库为data_center |
||||
|
|
||||
|
# 功能 |
||||
|
1. 订阅MQTT数据,并转发至本地8200端口的TECHSOR_dataCenter_sender服务处理 |
||||
|
|
||||
|
# 初始化 |
||||
|
启动组件后,调用以下接口开启所有mqtt订阅: |
||||
|
GET localhost:8200/v1/mqtt/startAll |
||||
|
|
||||
|
|
||||
|
|
||||
|
# 开发测试环境信息 |
||||
|
|
||||
|
Redis 信息 |
||||
|
公网地址: r-uf63x4g5p6ir5xao87pd.redis.rds.aliyuncs.com |
||||
|
内网地址: r-uf63x4g5p6ir5xao87.redis.rds.aliyuncs.com |
||||
|
端口号:6379 |
||||
|
密码:B2BGn4gK4htgkEwP |
||||
|
|
||||
|
|
||||
|
mysql信息 |
||||
|
地址:rm-bp11k2zm2fr7864428o.mysql.rds.aliyuncs.com |
||||
|
端口号:3306 |
||||
|
用户名:zhc |
||||
|
密码:Youqu48bnb1 |
||||
|
|
||||
|
Docker镜像仓库 |
||||
|
sender推送地址及方法: |
||||
|
$ docker login --username=4099*****@qq.com registry.cn-shanghai.aliyuncs.com$ docker tag [ImageId] registry.cn-shanghai.aliyuncs.com/southwave/mini_data_center_sender:[镜像版本号]$ docker push registry.cn-shanghai.aliyuncs.com/southwave/mini_data_center_sender:[镜像版本号] |
||||
|
|
||||
|
receiver推送地址及方法: |
||||
|
$ docker login --username=4099*****@qq.com registry.cn-shanghai.aliyuncs.com$ docker tag [ImageId] registry.cn-shanghai.aliyuncs.com/southwave/mini_data_center_receiver:[镜像版本号]$ docker push registry.cn-shanghai.aliyuncs.com/southwave/mini_data_center_receiver:[镜像版本号] |
||||
|
|
||||
|
|
||||
|
|
||||
|
|
||||
|
|
||||
|
# 服务器1 |
||||
|
47.122.21.64 |
||||
|
root |
||||
|
证书:southwave_datacenter_test.pem |
||||
|
|
||||
|
# 服务器2 |
||||
|
47.122.2.42 |
||||
|
root |
||||
|
证书:southwave_datacenter_test.pem |
||||
|
|
||||
|
|
||||
|
### docker 环境配置 |
||||
|
#### 环境启动问题 |
||||
|
aws上应该可以设置环境变量 `env` |
||||
|
|
||||
|
`env`=dev |
||||
|
或者 |
||||
|
`env`=stg |
||||
|
或者 |
||||
|
`env`=prd |
||||
|
|
||||
|
#### sender地址 |
||||
|
|
||||
|
aws 上设置环境变量 'sendUrl' |
||||
|
|
||||
|
`sendUrl`=http://localhost:8021 |
||||
|
|
||||
|
|
||||
@ -0,0 +1,27 @@ |
|||||
|
-----BEGIN RSA PRIVATE KEY----- |
||||
|
MIIEpAIBAAKCAQEAoXT+WI0CFw06NjKINFhz5y9xs9CeQYHN0+G24fIePPEBPRhW |
||||
|
Faxg36GuFuYKxk59W+Ey8w5Stz8R1wckbU//uaio/P7QhNbl+OpVIKbU93SVW8x6 |
||||
|
VH8roTSf+uYh6k6qO5ejAjqiicuEj8dZ8fQWyYSB4X/mLrXg5k5dxKyjhv5nM8RO |
||||
|
vvFHy/uvcdFOAeBbYrq9pHOkSP3BLU+wjutBDmxEdM9YLJk9qOM9bulxr1+QMXNY |
||||
|
4pjpWDlqfM1BpZZDhKzvUqLVjO+QwQ2E8mUgoK2PVY1umC5X8Jlcfy83A77+pKzl |
||||
|
ZKYpLgfLHj7rV5I12uoBx4mvLO1jw6XrNlIb9QIDAQABAoIBAGe+pDxEBwbG4hO3 |
||||
|
LpvsBjWT38y6DSZsgNRX4cqXZ+Siu7gFLjNo+ypXWmSuVlgMUTK7pqBVIMNMjGsN |
||||
|
1NNEpz4l6Mf/9/6Tk1v5Ps/nQ0rqJ5q/7g7jVCaWiQGP5FUJTQtTqVOiV5SRKFG2 |
||||
|
t83nmMjOEyLRqxdymNuDmW7pu33ebtDZxwm+QeN3nz6TwAbV7Geas5NC1UJLR0+C |
||||
|
nLQh0M9QELv7fKUPiznr6wE9CxB+1fx9es6HJxYK6Q4W+4mjd8CVXXMPDiQhTVEt |
||||
|
jnibk3UfxBQB7fkYP8PUukGq+Z4BgCszoAW0J8gFwiC8YMuzzQw3k83vPCiZZ+JQ |
||||
|
u0hgUPUCgYEA1lQi4mhKNtYMfbVtKEURajvUy5pZw4rzIyo+6XFjE25c3Q/nt3ot |
||||
|
d+vgh6dNCfRll95CB2RKOUoelj3nDeX+aM+QEwwQghLAtAdcRcIhG3DfIYBOJmiw |
||||
|
ugjbk5AW1bc38eQsjtRD7aZMyJjjr2bI7D2D5/MrjqdMZ6sxYdFRmpMCgYEAwNlB |
||||
|
h/Da5kY/z9qcXSnysr78ffU5BLGcxN2At+aXYGKyoR2YQqSME0y1jC9h0w+a7nAx |
||||
|
jCtkz+ozhBkP7o0vRZ8eGbP/65CbPrsY8P9Kpvk7dr2IRMTUjMFihhptGQFrLP3a |
||||
|
g+T2T+gQWS0v5HqiSot6znHZL+jND9elwx8nnFcCgYEAuemhmOL9/TMPArwtQ5El |
||||
|
2hCsNTBeTNBqt0Yd7ED+wAwrYVY6mVzRtARXb1Qf71KgDWwtulu0Rp2Uip6Hnfaz |
||||
|
CBeD0gHVD/9USNVZpOkP7s2pv1WcdJS7N6QXU5jZNekIDjruq7ZUdgCa+iYk2jE+ |
||||
|
eC2kDb9RORzFmedVnpQDRSECgYBH26xTXyfxzhNQ/ABvpoXMnOWweYN5gEUOBgtE |
||||
|
eyPEwoIVDtYBXxbiyh6L0cv9vT7Zwex0cmbqIjZ37m7FUM5gft3UbgHaYNO4GDc+ |
||||
|
9aF3fj7uC8mO9ljM6fIwTgCA5MpuxVh69QHi3HHbCL9jv15hsH9eFYX8GB7w3EXj |
||||
|
4uP7mQKBgQCFG7l/s1VDsLn9VNpkoUBjZMMdrLCyCWVrTEdeYtZ5LIx3etZxgbah |
||||
|
/rvryIDgc/j7riQgEDnqYk19Ee/HVxK1duJO6d/ywDcSlnNMaChrS8khsMrbK6yI |
||||
|
geqH+9jaaPUVacfeVe0MCIGLxnMiUucIUIyp3VV2OuJ2xx68xqw1wA== |
||||
|
-----END RSA PRIVATE KEY----- |
||||
@ -0,0 +1,2 @@ |
|||||
|
scp -i ./TECHSOR_OFFICIAL.pem ./target/TECHSOR_dataCenter_receiver-0.0.1-SNAPSHOT.jar root@8.209.255.206:~ |
||||
|
ssh -i ./TECHSOR_OFFICIAL.pem root@8.209.255.206 |
||||
@ -0,0 +1,20 @@ |
|||||
|
version: '3.8' |
||||
|
# docker pull adoptopenjdk/openjdk8:ppc64le-debian-jdk8u212-b03 |
||||
|
services: |
||||
|
app: |
||||
|
build: . |
||||
|
|
||||
|
ports: |
||||
|
- "8200:8200" |
||||
|
environment: |
||||
|
JAVA_OPTS: "-Xms5g -Xmx5g -XX:+UseParallelGC -XX:ParallelGCThreads=4 -XX:MaxGCPauseMillis=200 -XX:GCTimeRatio=19 -XX:NewRatio=3 -XX:+AlwaysPreTouch -XX:+PrintGCDetails -XX:+PrintGCDateStamps -Xloggc:/app/gc.log" |
||||
|
AGENT_PATH: "-agentpath:/app/liberror-detector-agent.so=packageName=org/springframework/integration,=filePath=./gcclogs/errorlog.log" # Replace this with your actual agent options if necessary |
||||
|
env: "dev" # Replace this with your actual spring profile if necessary |
||||
|
volumes: |
||||
|
- /Users/zhukovasky/IdeaProjects/Datacenter/TECHSOR_dataCenter_receiver/target/data-center-receiver.jar:/app/app.jar |
||||
|
- /Users/zhukovasky/IdeaProjects/Datacenter/TECHSOR_dataCenter_receiver/target/liberror-detector-agent.so:/app/liberror-detector-agent.so |
||||
|
- /Users/zhukovasky/IdeaProjects/Datacenter/TECHSOR_dataCenter_receiver/target/app/gc.log:/app/gc.log # Make sure this path is correct for your gc logs |
||||
|
- /Users/zhukovasky/IdeaProjects/Datacenter/TECHSOR_dataCenter_receiver/target/:/app/ |
||||
|
entrypoint: java $AGENT_PATH $JAVA_OPTS -jar /app/app.jar -Djavax.net.debug=ssl --spring-profiles=$env |
||||
|
|
||||
|
|
||||
@ -0,0 +1,310 @@ |
|||||
|
#!/bin/sh |
||||
|
# ---------------------------------------------------------------------------- |
||||
|
# Licensed to the Apache Software Foundation (ASF) under one |
||||
|
# or more contributor license agreements. See the NOTICE file |
||||
|
# distributed with this work for additional information |
||||
|
# regarding copyright ownership. The ASF licenses this file |
||||
|
# to you under the Apache License, Version 2.0 (the |
||||
|
# "License"); you may not use this file except in compliance |
||||
|
# with the License. You may obtain a copy of the License at |
||||
|
# |
||||
|
# https://www.apache.org/licenses/LICENSE-2.0 |
||||
|
# |
||||
|
# Unless required by applicable law or agreed to in writing, |
||||
|
# software distributed under the License is distributed on an |
||||
|
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY |
||||
|
# KIND, either express or implied. See the License for the |
||||
|
# specific language governing permissions and limitations |
||||
|
# under the License. |
||||
|
# ---------------------------------------------------------------------------- |
||||
|
|
||||
|
# ---------------------------------------------------------------------------- |
||||
|
# Maven Start Up Batch script |
||||
|
# |
||||
|
# Required ENV vars: |
||||
|
# ------------------ |
||||
|
# JAVA_HOME - location of a JDK home dir |
||||
|
# |
||||
|
# Optional ENV vars |
||||
|
# ----------------- |
||||
|
# M2_HOME - location of maven2's installed home dir |
||||
|
# MAVEN_OPTS - parameters passed to the Java VM when running Maven |
||||
|
# e.g. to debug Maven itself, use |
||||
|
# set MAVEN_OPTS=-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=8000 |
||||
|
# MAVEN_SKIP_RC - flag to disable loading of mavenrc files |
||||
|
# ---------------------------------------------------------------------------- |
||||
|
|
||||
|
if [ -z "$MAVEN_SKIP_RC" ] ; then |
||||
|
|
||||
|
if [ -f /etc/mavenrc ] ; then |
||||
|
. /etc/mavenrc |
||||
|
fi |
||||
|
|
||||
|
if [ -f "$HOME/.mavenrc" ] ; then |
||||
|
. "$HOME/.mavenrc" |
||||
|
fi |
||||
|
|
||||
|
fi |
||||
|
|
||||
|
# OS specific support. $var _must_ be set to either true or false. |
||||
|
cygwin=false; |
||||
|
darwin=false; |
||||
|
mingw=false |
||||
|
case "`uname`" in |
||||
|
CYGWIN*) cygwin=true ;; |
||||
|
MINGW*) mingw=true;; |
||||
|
Darwin*) darwin=true |
||||
|
# Use /usr/libexec/java_home if available, otherwise fall back to /Library/Java/Home |
||||
|
# See https://developer.apple.com/library/mac/qa/qa1170/_index.html |
||||
|
if [ -z "$JAVA_HOME" ]; then |
||||
|
if [ -x "/usr/libexec/java_home" ]; then |
||||
|
export JAVA_HOME="`/usr/libexec/java_home`" |
||||
|
else |
||||
|
export JAVA_HOME="/Library/Java/Home" |
||||
|
fi |
||||
|
fi |
||||
|
;; |
||||
|
esac |
||||
|
|
||||
|
if [ -z "$JAVA_HOME" ] ; then |
||||
|
if [ -r /etc/gentoo-release ] ; then |
||||
|
JAVA_HOME=`java-config --jre-home` |
||||
|
fi |
||||
|
fi |
||||
|
|
||||
|
if [ -z "$M2_HOME" ] ; then |
||||
|
## resolve links - $0 may be a link to maven's home |
||||
|
PRG="$0" |
||||
|
|
||||
|
# need this for relative symlinks |
||||
|
while [ -h "$PRG" ] ; do |
||||
|
ls=`ls -ld "$PRG"` |
||||
|
link=`expr "$ls" : '.*-> \(.*\)$'` |
||||
|
if expr "$link" : '/.*' > /dev/null; then |
||||
|
PRG="$link" |
||||
|
else |
||||
|
PRG="`dirname "$PRG"`/$link" |
||||
|
fi |
||||
|
done |
||||
|
|
||||
|
saveddir=`pwd` |
||||
|
|
||||
|
M2_HOME=`dirname "$PRG"`/.. |
||||
|
|
||||
|
# make it fully qualified |
||||
|
M2_HOME=`cd "$M2_HOME" && pwd` |
||||
|
|
||||
|
cd "$saveddir" |
||||
|
# echo Using m2 at $M2_HOME |
||||
|
fi |
||||
|
|
||||
|
# For Cygwin, ensure paths are in UNIX format before anything is touched |
||||
|
if $cygwin ; then |
||||
|
[ -n "$M2_HOME" ] && |
||||
|
M2_HOME=`cygpath --unix "$M2_HOME"` |
||||
|
[ -n "$JAVA_HOME" ] && |
||||
|
JAVA_HOME=`cygpath --unix "$JAVA_HOME"` |
||||
|
[ -n "$CLASSPATH" ] && |
||||
|
CLASSPATH=`cygpath --path --unix "$CLASSPATH"` |
||||
|
fi |
||||
|
|
||||
|
# For Mingw, ensure paths are in UNIX format before anything is touched |
||||
|
if $mingw ; then |
||||
|
[ -n "$M2_HOME" ] && |
||||
|
M2_HOME="`(cd "$M2_HOME"; pwd)`" |
||||
|
[ -n "$JAVA_HOME" ] && |
||||
|
JAVA_HOME="`(cd "$JAVA_HOME"; pwd)`" |
||||
|
fi |
||||
|
|
||||
|
if [ -z "$JAVA_HOME" ]; then |
||||
|
javaExecutable="`which javac`" |
||||
|
if [ -n "$javaExecutable" ] && ! [ "`expr \"$javaExecutable\" : '\([^ ]*\)'`" = "no" ]; then |
||||
|
# readlink(1) is not available as standard on Solaris 10. |
||||
|
readLink=`which readlink` |
||||
|
if [ ! `expr "$readLink" : '\([^ ]*\)'` = "no" ]; then |
||||
|
if $darwin ; then |
||||
|
javaHome="`dirname \"$javaExecutable\"`" |
||||
|
javaExecutable="`cd \"$javaHome\" && pwd -P`/javac" |
||||
|
else |
||||
|
javaExecutable="`readlink -f \"$javaExecutable\"`" |
||||
|
fi |
||||
|
javaHome="`dirname \"$javaExecutable\"`" |
||||
|
javaHome=`expr "$javaHome" : '\(.*\)/bin'` |
||||
|
JAVA_HOME="$javaHome" |
||||
|
export JAVA_HOME |
||||
|
fi |
||||
|
fi |
||||
|
fi |
||||
|
|
||||
|
if [ -z "$JAVACMD" ] ; then |
||||
|
if [ -n "$JAVA_HOME" ] ; then |
||||
|
if [ -x "$JAVA_HOME/jre/sh/java" ] ; then |
||||
|
# IBM's JDK on AIX uses strange locations for the executables |
||||
|
JAVACMD="$JAVA_HOME/jre/sh/java" |
||||
|
else |
||||
|
JAVACMD="$JAVA_HOME/bin/java" |
||||
|
fi |
||||
|
else |
||||
|
JAVACMD="`which java`" |
||||
|
fi |
||||
|
fi |
||||
|
|
||||
|
if [ ! -x "$JAVACMD" ] ; then |
||||
|
echo "Error: JAVA_HOME is not defined correctly." >&2 |
||||
|
echo " We cannot execute $JAVACMD" >&2 |
||||
|
exit 1 |
||||
|
fi |
||||
|
|
||||
|
if [ -z "$JAVA_HOME" ] ; then |
||||
|
echo "Warning: JAVA_HOME environment variable is not set." |
||||
|
fi |
||||
|
|
||||
|
CLASSWORLDS_LAUNCHER=org.codehaus.plexus.classworlds.launcher.Launcher |
||||
|
|
||||
|
# traverses directory structure from process work directory to filesystem root |
||||
|
# first directory with .mvn subdirectory is considered project base directory |
||||
|
find_maven_basedir() { |
||||
|
|
||||
|
if [ -z "$1" ] |
||||
|
then |
||||
|
echo "Path not specified to find_maven_basedir" |
||||
|
return 1 |
||||
|
fi |
||||
|
|
||||
|
basedir="$1" |
||||
|
wdir="$1" |
||||
|
while [ "$wdir" != '/' ] ; do |
||||
|
if [ -d "$wdir"/.mvn ] ; then |
||||
|
basedir=$wdir |
||||
|
break |
||||
|
fi |
||||
|
# workaround for JBEAP-8937 (on Solaris 10/Sparc) |
||||
|
if [ -d "${wdir}" ]; then |
||||
|
wdir=`cd "$wdir/.."; pwd` |
||||
|
fi |
||||
|
# end of workaround |
||||
|
done |
||||
|
echo "${basedir}" |
||||
|
} |
||||
|
|
||||
|
# concatenates all lines of a file |
||||
|
concat_lines() { |
||||
|
if [ -f "$1" ]; then |
||||
|
echo "$(tr -s '\n' ' ' < "$1")" |
||||
|
fi |
||||
|
} |
||||
|
|
||||
|
BASE_DIR=`find_maven_basedir "$(pwd)"` |
||||
|
if [ -z "$BASE_DIR" ]; then |
||||
|
exit 1; |
||||
|
fi |
||||
|
|
||||
|
########################################################################################## |
||||
|
# Extension to allow automatically downloading the maven-wrapper.jar from Maven-central |
||||
|
# This allows using the maven wrapper in projects that prohibit checking in binary data. |
||||
|
########################################################################################## |
||||
|
if [ -r "$BASE_DIR/.mvn/wrapper/maven-wrapper.jar" ]; then |
||||
|
if [ "$MVNW_VERBOSE" = true ]; then |
||||
|
echo "Found .mvn/wrapper/maven-wrapper.jar" |
||||
|
fi |
||||
|
else |
||||
|
if [ "$MVNW_VERBOSE" = true ]; then |
||||
|
echo "Couldn't find .mvn/wrapper/maven-wrapper.jar, downloading it ..." |
||||
|
fi |
||||
|
if [ -n "$MVNW_REPOURL" ]; then |
||||
|
jarUrl="$MVNW_REPOURL/io/takari/maven-wrapper/0.5.6/maven-wrapper-0.5.6.jar" |
||||
|
else |
||||
|
jarUrl="https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/0.5.6/maven-wrapper-0.5.6.jar" |
||||
|
fi |
||||
|
while IFS="=" read key value; do |
||||
|
case "$key" in (wrapperUrl) jarUrl="$value"; break ;; |
||||
|
esac |
||||
|
done < "$BASE_DIR/.mvn/wrapper/maven-wrapper.properties" |
||||
|
if [ "$MVNW_VERBOSE" = true ]; then |
||||
|
echo "Downloading from: $jarUrl" |
||||
|
fi |
||||
|
wrapperJarPath="$BASE_DIR/.mvn/wrapper/maven-wrapper.jar" |
||||
|
if $cygwin; then |
||||
|
wrapperJarPath=`cygpath --path --windows "$wrapperJarPath"` |
||||
|
fi |
||||
|
|
||||
|
if command -v wget > /dev/null; then |
||||
|
if [ "$MVNW_VERBOSE" = true ]; then |
||||
|
echo "Found wget ... using wget" |
||||
|
fi |
||||
|
if [ -z "$MVNW_USERNAME" ] || [ -z "$MVNW_PASSWORD" ]; then |
||||
|
wget "$jarUrl" -O "$wrapperJarPath" |
||||
|
else |
||||
|
wget --http-user=$MVNW_USERNAME --http-password=$MVNW_PASSWORD "$jarUrl" -O "$wrapperJarPath" |
||||
|
fi |
||||
|
elif command -v curl > /dev/null; then |
||||
|
if [ "$MVNW_VERBOSE" = true ]; then |
||||
|
echo "Found curl ... using curl" |
||||
|
fi |
||||
|
if [ -z "$MVNW_USERNAME" ] || [ -z "$MVNW_PASSWORD" ]; then |
||||
|
curl -o "$wrapperJarPath" "$jarUrl" -f |
||||
|
else |
||||
|
curl --user $MVNW_USERNAME:$MVNW_PASSWORD -o "$wrapperJarPath" "$jarUrl" -f |
||||
|
fi |
||||
|
|
||||
|
else |
||||
|
if [ "$MVNW_VERBOSE" = true ]; then |
||||
|
echo "Falling back to using Java to download" |
||||
|
fi |
||||
|
javaClass="$BASE_DIR/.mvn/wrapper/MavenWrapperDownloader.java" |
||||
|
# For Cygwin, switch paths to Windows format before running javac |
||||
|
if $cygwin; then |
||||
|
javaClass=`cygpath --path --windows "$javaClass"` |
||||
|
fi |
||||
|
if [ -e "$javaClass" ]; then |
||||
|
if [ ! -e "$BASE_DIR/.mvn/wrapper/MavenWrapperDownloader.class" ]; then |
||||
|
if [ "$MVNW_VERBOSE" = true ]; then |
||||
|
echo " - Compiling MavenWrapperDownloader.java ..." |
||||
|
fi |
||||
|
# Compiling the Java class |
||||
|
("$JAVA_HOME/bin/javac" "$javaClass") |
||||
|
fi |
||||
|
if [ -e "$BASE_DIR/.mvn/wrapper/MavenWrapperDownloader.class" ]; then |
||||
|
# Running the downloader |
||||
|
if [ "$MVNW_VERBOSE" = true ]; then |
||||
|
echo " - Running MavenWrapperDownloader.java ..." |
||||
|
fi |
||||
|
("$JAVA_HOME/bin/java" -cp .mvn/wrapper MavenWrapperDownloader "$MAVEN_PROJECTBASEDIR") |
||||
|
fi |
||||
|
fi |
||||
|
fi |
||||
|
fi |
||||
|
########################################################################################## |
||||
|
# End of extension |
||||
|
########################################################################################## |
||||
|
|
||||
|
export MAVEN_PROJECTBASEDIR=${MAVEN_BASEDIR:-"$BASE_DIR"} |
||||
|
if [ "$MVNW_VERBOSE" = true ]; then |
||||
|
echo $MAVEN_PROJECTBASEDIR |
||||
|
fi |
||||
|
MAVEN_OPTS="$(concat_lines "$MAVEN_PROJECTBASEDIR/.mvn/jvm.config") $MAVEN_OPTS" |
||||
|
|
||||
|
# For Cygwin, switch paths to Windows format before running java |
||||
|
if $cygwin; then |
||||
|
[ -n "$M2_HOME" ] && |
||||
|
M2_HOME=`cygpath --path --windows "$M2_HOME"` |
||||
|
[ -n "$JAVA_HOME" ] && |
||||
|
JAVA_HOME=`cygpath --path --windows "$JAVA_HOME"` |
||||
|
[ -n "$CLASSPATH" ] && |
||||
|
CLASSPATH=`cygpath --path --windows "$CLASSPATH"` |
||||
|
[ -n "$MAVEN_PROJECTBASEDIR" ] && |
||||
|
MAVEN_PROJECTBASEDIR=`cygpath --path --windows "$MAVEN_PROJECTBASEDIR"` |
||||
|
fi |
||||
|
|
||||
|
# Provide a "standardized" way to retrieve the CLI args that will |
||||
|
# work with both Windows and non-Windows executions. |
||||
|
MAVEN_CMD_LINE_ARGS="$MAVEN_CONFIG $@" |
||||
|
export MAVEN_CMD_LINE_ARGS |
||||
|
|
||||
|
WRAPPER_LAUNCHER=org.apache.maven.wrapper.MavenWrapperMain |
||||
|
|
||||
|
exec "$JAVACMD" \ |
||||
|
$MAVEN_OPTS \ |
||||
|
-classpath "$MAVEN_PROJECTBASEDIR/.mvn/wrapper/maven-wrapper.jar" \ |
||||
|
"-Dmaven.home=${M2_HOME}" "-Dmaven.multiModuleProjectDirectory=${MAVEN_PROJECTBASEDIR}" \ |
||||
|
${WRAPPER_LAUNCHER} $MAVEN_CONFIG "$@" |
||||
@ -0,0 +1,182 @@ |
|||||
|
@REM ---------------------------------------------------------------------------- |
||||
|
@REM Licensed to the Apache Software Foundation (ASF) under one |
||||
|
@REM or more contributor license agreements. See the NOTICE file |
||||
|
@REM distributed with this work for additional information |
||||
|
@REM regarding copyright ownership. The ASF licenses this file |
||||
|
@REM to you under the Apache License, Version 2.0 (the |
||||
|
@REM "License"); you may not use this file except in compliance |
||||
|
@REM with the License. You may obtain a copy of the License at |
||||
|
@REM |
||||
|
@REM https://www.apache.org/licenses/LICENSE-2.0 |
||||
|
@REM |
||||
|
@REM Unless required by applicable law or agreed to in writing, |
||||
|
@REM software distributed under the License is distributed on an |
||||
|
@REM "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY |
||||
|
@REM KIND, either express or implied. See the License for the |
||||
|
@REM specific language governing permissions and limitations |
||||
|
@REM under the License. |
||||
|
@REM ---------------------------------------------------------------------------- |
||||
|
|
||||
|
@REM ---------------------------------------------------------------------------- |
||||
|
@REM Maven Start Up Batch script |
||||
|
@REM |
||||
|
@REM Required ENV vars: |
||||
|
@REM JAVA_HOME - location of a JDK home dir |
||||
|
@REM |
||||
|
@REM Optional ENV vars |
||||
|
@REM M2_HOME - location of maven2's installed home dir |
||||
|
@REM MAVEN_BATCH_ECHO - set to 'on' to enable the echoing of the batch commands |
||||
|
@REM MAVEN_BATCH_PAUSE - set to 'on' to wait for a keystroke before ending |
||||
|
@REM MAVEN_OPTS - parameters passed to the Java VM when running Maven |
||||
|
@REM e.g. to debug Maven itself, use |
||||
|
@REM set MAVEN_OPTS=-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=8000 |
||||
|
@REM MAVEN_SKIP_RC - flag to disable loading of mavenrc files |
||||
|
@REM ---------------------------------------------------------------------------- |
||||
|
|
||||
|
@REM Begin all REM lines with '@' in case MAVEN_BATCH_ECHO is 'on' |
||||
|
@echo off |
||||
|
@REM set title of command window |
||||
|
title %0 |
||||
|
@REM enable echoing by setting MAVEN_BATCH_ECHO to 'on' |
||||
|
@if "%MAVEN_BATCH_ECHO%" == "on" echo %MAVEN_BATCH_ECHO% |
||||
|
|
||||
|
@REM set %HOME% to equivalent of $HOME |
||||
|
if "%HOME%" == "" (set "HOME=%HOMEDRIVE%%HOMEPATH%") |
||||
|
|
||||
|
@REM Execute a user defined script before this one |
||||
|
if not "%MAVEN_SKIP_RC%" == "" goto skipRcPre |
||||
|
@REM check for pre script, once with legacy .bat ending and once with .cmd ending |
||||
|
if exist "%HOME%\mavenrc_pre.bat" call "%HOME%\mavenrc_pre.bat" |
||||
|
if exist "%HOME%\mavenrc_pre.cmd" call "%HOME%\mavenrc_pre.cmd" |
||||
|
:skipRcPre |
||||
|
|
||||
|
@setlocal |
||||
|
|
||||
|
set ERROR_CODE=0 |
||||
|
|
||||
|
@REM To isolate internal variables from possible post scripts, we use another setlocal |
||||
|
@setlocal |
||||
|
|
||||
|
@REM ==== START VALIDATION ==== |
||||
|
if not "%JAVA_HOME%" == "" goto OkJHome |
||||
|
|
||||
|
echo. |
||||
|
echo Error: JAVA_HOME not found in your environment. >&2 |
||||
|
echo Please set the JAVA_HOME variable in your environment to match the >&2 |
||||
|
echo location of your Java installation. >&2 |
||||
|
echo. |
||||
|
goto error |
||||
|
|
||||
|
:OkJHome |
||||
|
if exist "%JAVA_HOME%\bin\java.exe" goto init |
||||
|
|
||||
|
echo. |
||||
|
echo Error: JAVA_HOME is set to an invalid directory. >&2 |
||||
|
echo JAVA_HOME = "%JAVA_HOME%" >&2 |
||||
|
echo Please set the JAVA_HOME variable in your environment to match the >&2 |
||||
|
echo location of your Java installation. >&2 |
||||
|
echo. |
||||
|
goto error |
||||
|
|
||||
|
@REM ==== END VALIDATION ==== |
||||
|
|
||||
|
:init |
||||
|
|
||||
|
@REM Find the project base dir, i.e. the directory that contains the folder ".mvn". |
||||
|
@REM Fallback to current working directory if not found. |
||||
|
|
||||
|
set MAVEN_PROJECTBASEDIR=%MAVEN_BASEDIR% |
||||
|
IF NOT "%MAVEN_PROJECTBASEDIR%"=="" goto endDetectBaseDir |
||||
|
|
||||
|
set EXEC_DIR=%CD% |
||||
|
set WDIR=%EXEC_DIR% |
||||
|
:findBaseDir |
||||
|
IF EXIST "%WDIR%"\.mvn goto baseDirFound |
||||
|
cd .. |
||||
|
IF "%WDIR%"=="%CD%" goto baseDirNotFound |
||||
|
set WDIR=%CD% |
||||
|
goto findBaseDir |
||||
|
|
||||
|
:baseDirFound |
||||
|
set MAVEN_PROJECTBASEDIR=%WDIR% |
||||
|
cd "%EXEC_DIR%" |
||||
|
goto endDetectBaseDir |
||||
|
|
||||
|
:baseDirNotFound |
||||
|
set MAVEN_PROJECTBASEDIR=%EXEC_DIR% |
||||
|
cd "%EXEC_DIR%" |
||||
|
|
||||
|
:endDetectBaseDir |
||||
|
|
||||
|
IF NOT EXIST "%MAVEN_PROJECTBASEDIR%\.mvn\jvm.config" goto endReadAdditionalConfig |
||||
|
|
||||
|
@setlocal EnableExtensions EnableDelayedExpansion |
||||
|
for /F "usebackq delims=" %%a in ("%MAVEN_PROJECTBASEDIR%\.mvn\jvm.config") do set JVM_CONFIG_MAVEN_PROPS=!JVM_CONFIG_MAVEN_PROPS! %%a |
||||
|
@endlocal & set JVM_CONFIG_MAVEN_PROPS=%JVM_CONFIG_MAVEN_PROPS% |
||||
|
|
||||
|
:endReadAdditionalConfig |
||||
|
|
||||
|
SET MAVEN_JAVA_EXE="%JAVA_HOME%\bin\java.exe" |
||||
|
set WRAPPER_JAR="%MAVEN_PROJECTBASEDIR%\.mvn\wrapper\maven-wrapper.jar" |
||||
|
set WRAPPER_LAUNCHER=org.apache.maven.wrapper.MavenWrapperMain |
||||
|
|
||||
|
set DOWNLOAD_URL="https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/0.5.6/maven-wrapper-0.5.6.jar" |
||||
|
|
||||
|
FOR /F "tokens=1,2 delims==" %%A IN ("%MAVEN_PROJECTBASEDIR%\.mvn\wrapper\maven-wrapper.properties") DO ( |
||||
|
IF "%%A"=="wrapperUrl" SET DOWNLOAD_URL=%%B |
||||
|
) |
||||
|
|
||||
|
@REM Extension to allow automatically downloading the maven-wrapper.jar from Maven-central |
||||
|
@REM This allows using the maven wrapper in projects that prohibit checking in binary data. |
||||
|
if exist %WRAPPER_JAR% ( |
||||
|
if "%MVNW_VERBOSE%" == "true" ( |
||||
|
echo Found %WRAPPER_JAR% |
||||
|
) |
||||
|
) else ( |
||||
|
if not "%MVNW_REPOURL%" == "" ( |
||||
|
SET DOWNLOAD_URL="%MVNW_REPOURL%/io/takari/maven-wrapper/0.5.6/maven-wrapper-0.5.6.jar" |
||||
|
) |
||||
|
if "%MVNW_VERBOSE%" == "true" ( |
||||
|
echo Couldn't find %WRAPPER_JAR%, downloading it ... |
||||
|
echo Downloading from: %DOWNLOAD_URL% |
||||
|
) |
||||
|
|
||||
|
powershell -Command "&{"^ |
||||
|
"$webclient = new-object System.Net.WebClient;"^ |
||||
|
"if (-not ([string]::IsNullOrEmpty('%MVNW_USERNAME%') -and [string]::IsNullOrEmpty('%MVNW_PASSWORD%'))) {"^ |
||||
|
"$webclient.Credentials = new-object System.Net.NetworkCredential('%MVNW_USERNAME%', '%MVNW_PASSWORD%');"^ |
||||
|
"}"^ |
||||
|
"[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12; $webclient.DownloadFile('%DOWNLOAD_URL%', '%WRAPPER_JAR%')"^ |
||||
|
"}" |
||||
|
if "%MVNW_VERBOSE%" == "true" ( |
||||
|
echo Finished downloading %WRAPPER_JAR% |
||||
|
) |
||||
|
) |
||||
|
@REM End of extension |
||||
|
|
||||
|
@REM Provide a "standardized" way to retrieve the CLI args that will |
||||
|
@REM work with both Windows and non-Windows executions. |
||||
|
set MAVEN_CMD_LINE_ARGS=%* |
||||
|
|
||||
|
%MAVEN_JAVA_EXE% %JVM_CONFIG_MAVEN_PROPS% %MAVEN_OPTS% %MAVEN_DEBUG_OPTS% -classpath %WRAPPER_JAR% "-Dmaven.multiModuleProjectDirectory=%MAVEN_PROJECTBASEDIR%" %WRAPPER_LAUNCHER% %MAVEN_CONFIG% %* |
||||
|
if ERRORLEVEL 1 goto error |
||||
|
goto end |
||||
|
|
||||
|
:error |
||||
|
set ERROR_CODE=1 |
||||
|
|
||||
|
:end |
||||
|
@endlocal & set ERROR_CODE=%ERROR_CODE% |
||||
|
|
||||
|
if not "%MAVEN_SKIP_RC%" == "" goto skipRcPost |
||||
|
@REM check for post script, once with legacy .bat ending and once with .cmd ending |
||||
|
if exist "%HOME%\mavenrc_post.bat" call "%HOME%\mavenrc_post.bat" |
||||
|
if exist "%HOME%\mavenrc_post.cmd" call "%HOME%\mavenrc_post.cmd" |
||||
|
:skipRcPost |
||||
|
|
||||
|
@REM pause the script if MAVEN_BATCH_PAUSE is set to 'on' |
||||
|
if "%MAVEN_BATCH_PAUSE%" == "on" pause |
||||
|
|
||||
|
if "%MAVEN_TERMINATE_CMD%" == "on" exit %ERROR_CODE% |
||||
|
|
||||
|
exit /B %ERROR_CODE% |
||||
@ -0,0 +1,648 @@ |
|||||
|
<?xml version="1.0" encoding="UTF-8"?> |
||||
|
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" |
||||
|
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd"> |
||||
|
<modelVersion>4.0.0</modelVersion> |
||||
|
<parent> |
||||
|
<groupId>org.springframework.boot</groupId> |
||||
|
<artifactId>spring-boot-starter-parent</artifactId> |
||||
|
<version>3.2.12</version> |
||||
|
<relativePath/> <!-- lookup parent from repository --> |
||||
|
</parent> |
||||
|
<groupId>com.techsor</groupId> |
||||
|
<artifactId>data.center.receiver</artifactId> |
||||
|
<version>0.0.1-SNAPSHOT</version> |
||||
|
<name>TECHSOR_dataCenter_receiver</name> |
||||
|
<properties> |
||||
|
<java.version>17</java.version> |
||||
|
<aws.ecr.registry>381659385655.dkr.ecr.ap-northeast-1.amazonaws.com</aws.ecr.registry> |
||||
|
<aws.ecr.registryTest>923770123186.dkr.ecr.ap-northeast-1.amazonaws.com</aws.ecr.registryTest> |
||||
|
<aws.ecr.repository>tokyo-build-receiver</aws.ecr.repository> |
||||
|
<netty.version>4.2.2.Final</netty.version> |
||||
|
</properties> |
||||
|
<dependencyManagement> |
||||
|
<dependencies> |
||||
|
<dependency> |
||||
|
<groupId>software.amazon.awssdk</groupId> |
||||
|
<artifactId>bom</artifactId> |
||||
|
<version>2.20.113</version> |
||||
|
<type>pom</type> |
||||
|
<scope>import</scope> |
||||
|
</dependency> |
||||
|
<dependency> |
||||
|
<groupId>io.netty</groupId> |
||||
|
<artifactId>netty-buffer</artifactId> |
||||
|
<version>${netty.version}</version> |
||||
|
</dependency> |
||||
|
<dependency> |
||||
|
<groupId>io.netty</groupId> |
||||
|
<artifactId>netty-codec</artifactId> |
||||
|
<version>${netty.version}</version> |
||||
|
</dependency> |
||||
|
<dependency> |
||||
|
<groupId>io.netty</groupId> |
||||
|
<artifactId>netty-codec-base</artifactId> |
||||
|
<version>${netty.version}</version> |
||||
|
</dependency> |
||||
|
<dependency> |
||||
|
<groupId>io.netty</groupId> |
||||
|
<artifactId>netty-codec-compression</artifactId> |
||||
|
<version>${netty.version}</version> |
||||
|
</dependency> |
||||
|
<dependency> |
||||
|
<groupId>io.netty</groupId> |
||||
|
<artifactId>netty-codec-dns</artifactId> |
||||
|
<version>${netty.version}</version> |
||||
|
</dependency> |
||||
|
<dependency> |
||||
|
<groupId>io.netty</groupId> |
||||
|
<artifactId>netty-codec-haproxy</artifactId> |
||||
|
<version>${netty.version}</version> |
||||
|
</dependency> |
||||
|
<dependency> |
||||
|
<groupId>io.netty</groupId> |
||||
|
<artifactId>netty-codec-http</artifactId> |
||||
|
<version>${netty.version}</version> |
||||
|
</dependency> |
||||
|
<dependency> |
||||
|
<groupId>io.netty</groupId> |
||||
|
<artifactId>netty-codec-http2</artifactId> |
||||
|
<version>${netty.version}</version> |
||||
|
</dependency> |
||||
|
<dependency> |
||||
|
<groupId>io.netty</groupId> |
||||
|
<artifactId>netty-codec-http3</artifactId> |
||||
|
<version>${netty.version}</version> |
||||
|
</dependency> |
||||
|
<dependency> |
||||
|
<groupId>io.netty</groupId> |
||||
|
<artifactId>netty-codec-memcache</artifactId> |
||||
|
<version>${netty.version}</version> |
||||
|
</dependency> |
||||
|
<dependency> |
||||
|
<groupId>io.netty</groupId> |
||||
|
<artifactId>netty-codec-mqtt</artifactId> |
||||
|
<version>${netty.version}</version> |
||||
|
</dependency> |
||||
|
<dependency> |
||||
|
<groupId>io.netty</groupId> |
||||
|
<artifactId>netty-codec-redis</artifactId> |
||||
|
<version>${netty.version}</version> |
||||
|
</dependency> |
||||
|
<dependency> |
||||
|
<groupId>io.netty</groupId> |
||||
|
<artifactId>netty-codec-smtp</artifactId> |
||||
|
<version>${netty.version}</version> |
||||
|
</dependency> |
||||
|
<dependency> |
||||
|
<groupId>io.netty</groupId> |
||||
|
<artifactId>netty-codec-socks</artifactId> |
||||
|
<version>${netty.version}</version> |
||||
|
</dependency> |
||||
|
<dependency> |
||||
|
<groupId>io.netty</groupId> |
||||
|
<artifactId>netty-codec-stomp</artifactId> |
||||
|
<version>${netty.version}</version> |
||||
|
</dependency> |
||||
|
<dependency> |
||||
|
<groupId>io.netty</groupId> |
||||
|
<artifactId>netty-codec-xml</artifactId> |
||||
|
<version>${netty.version}</version> |
||||
|
</dependency> |
||||
|
<dependency> |
||||
|
<groupId>io.netty</groupId> |
||||
|
<artifactId>netty-codec-protobuf</artifactId> |
||||
|
<version>${netty.version}</version> |
||||
|
</dependency> |
||||
|
<dependency> |
||||
|
<groupId>io.netty</groupId> |
||||
|
<artifactId>netty-codec-marshalling</artifactId> |
||||
|
<version>${netty.version}</version> |
||||
|
</dependency> |
||||
|
|
||||
|
<!-- 其他核心模块 --> |
||||
|
<dependency> |
||||
|
<groupId>io.netty</groupId> |
||||
|
<artifactId>netty-common</artifactId> |
||||
|
<version>${netty.version}</version> |
||||
|
</dependency> |
||||
|
<dependency> |
||||
|
<groupId>io.netty</groupId> |
||||
|
<artifactId>netty-handler</artifactId> |
||||
|
<version>${netty.version}</version> |
||||
|
</dependency> |
||||
|
<dependency> |
||||
|
<groupId>io.netty</groupId> |
||||
|
<artifactId>netty-handler-proxy</artifactId> |
||||
|
<version>${netty.version}</version> |
||||
|
</dependency> |
||||
|
<dependency> |
||||
|
<groupId>io.netty</groupId> |
||||
|
<artifactId>netty-handler-ssl-ocsp</artifactId> |
||||
|
<version>${netty.version}</version> |
||||
|
</dependency> |
||||
|
|
||||
|
<!-- resolver & transport --> |
||||
|
<dependency> |
||||
|
<groupId>io.netty</groupId> |
||||
|
<artifactId>netty-resolver</artifactId> |
||||
|
<version>${netty.version}</version> |
||||
|
</dependency> |
||||
|
<dependency> |
||||
|
<groupId>io.netty</groupId> |
||||
|
<artifactId>netty-resolver-dns</artifactId> |
||||
|
<version>${netty.version}</version> |
||||
|
</dependency> |
||||
|
<dependency> |
||||
|
<groupId>io.netty</groupId> |
||||
|
<artifactId>netty-resolver-dns-classes-macos</artifactId> |
||||
|
<version>${netty.version}</version> |
||||
|
</dependency> |
||||
|
|
||||
|
<dependency> |
||||
|
<groupId>io.netty</groupId> |
||||
|
<artifactId>netty-transport</artifactId> |
||||
|
<version>${netty.version}</version> |
||||
|
</dependency> |
||||
|
<dependency> |
||||
|
<groupId>io.netty</groupId> |
||||
|
<artifactId>netty-transport-classes-epoll</artifactId> |
||||
|
<version>${netty.version}</version> |
||||
|
</dependency> |
||||
|
<dependency> |
||||
|
<groupId>io.netty</groupId> |
||||
|
<artifactId>netty-transport-classes-kqueue</artifactId> |
||||
|
<version>${netty.version}</version> |
||||
|
</dependency> |
||||
|
<dependency> |
||||
|
<groupId>io.netty</groupId> |
||||
|
<artifactId>netty-transport-classes-io_uring</artifactId> |
||||
|
<version>${netty.version}</version> |
||||
|
</dependency> |
||||
|
<dependency> |
||||
|
<groupId>io.netty</groupId> |
||||
|
<artifactId>netty-transport-native-unix-common</artifactId> |
||||
|
<version>${netty.version}</version> |
||||
|
</dependency> |
||||
|
|
||||
|
<!-- platform-specific native modules --> |
||||
|
<dependency> |
||||
|
<groupId>io.netty</groupId> |
||||
|
<artifactId>netty-transport-native-epoll</artifactId> |
||||
|
<classifier>linux-x86_64</classifier> |
||||
|
<version>${netty.version}</version> |
||||
|
</dependency> |
||||
|
<dependency> |
||||
|
<groupId>io.netty</groupId> |
||||
|
<artifactId>netty-transport-native-epoll</artifactId> |
||||
|
<classifier>linux-aarch_64</classifier> |
||||
|
<version>${netty.version}</version> |
||||
|
</dependency> |
||||
|
<dependency> |
||||
|
<groupId>io.netty</groupId> |
||||
|
<artifactId>netty-transport-native-epoll</artifactId> |
||||
|
<classifier>linux-riscv64</classifier> |
||||
|
<version>${netty.version}</version> |
||||
|
</dependency> |
||||
|
|
||||
|
<dependency> |
||||
|
<groupId>io.netty</groupId> |
||||
|
<artifactId>netty-transport-native-io_uring</artifactId> |
||||
|
<classifier>linux-x86_64</classifier> |
||||
|
<version>${netty.version}</version> |
||||
|
</dependency> |
||||
|
<dependency> |
||||
|
<groupId>io.netty</groupId> |
||||
|
<artifactId>netty-transport-native-io_uring</artifactId> |
||||
|
<classifier>linux-aarch_64</classifier> |
||||
|
<version>${netty.version}</version> |
||||
|
</dependency> |
||||
|
<dependency> |
||||
|
<groupId>io.netty</groupId> |
||||
|
<artifactId>netty-transport-native-io_uring</artifactId> |
||||
|
<classifier>linux-riscv64</classifier> |
||||
|
<version>${netty.version}</version> |
||||
|
</dependency> |
||||
|
|
||||
|
<dependency> |
||||
|
<groupId>io.netty</groupId> |
||||
|
<artifactId>netty-transport-native-kqueue</artifactId> |
||||
|
<classifier>osx-x86_64</classifier> |
||||
|
<version>${netty.version}</version> |
||||
|
</dependency> |
||||
|
<dependency> |
||||
|
<groupId>io.netty</groupId> |
||||
|
<artifactId>netty-transport-native-kqueue</artifactId> |
||||
|
<classifier>osx-aarch_64</classifier> |
||||
|
<version>${netty.version}</version> |
||||
|
</dependency> |
||||
|
|
||||
|
<dependency> |
||||
|
<groupId>io.netty</groupId> |
||||
|
<artifactId>netty-resolver-dns-native-macos</artifactId> |
||||
|
<classifier>osx-x86_64</classifier> |
||||
|
<version>${netty.version}</version> |
||||
|
</dependency> |
||||
|
<dependency> |
||||
|
<groupId>io.netty</groupId> |
||||
|
<artifactId>netty-resolver-dns-native-macos</artifactId> |
||||
|
<classifier>osx-aarch_64</classifier> |
||||
|
<version>${netty.version}</version> |
||||
|
</dependency> |
||||
|
|
||||
|
<dependency> |
||||
|
<groupId>io.netty</groupId> |
||||
|
<artifactId>netty-codec-native-quic</artifactId> |
||||
|
<classifier>linux-x86_64</classifier> |
||||
|
<version>${netty.version}</version> |
||||
|
</dependency> |
||||
|
<dependency> |
||||
|
<groupId>io.netty</groupId> |
||||
|
<artifactId>netty-codec-native-quic</artifactId> |
||||
|
<classifier>linux-aarch_64</classifier> |
||||
|
<version>${netty.version}</version> |
||||
|
</dependency> |
||||
|
<dependency> |
||||
|
<groupId>io.netty</groupId> |
||||
|
<artifactId>netty-codec-native-quic</artifactId> |
||||
|
<classifier>osx-x86_64</classifier> |
||||
|
<version>${netty.version}</version> |
||||
|
</dependency> |
||||
|
<dependency> |
||||
|
<groupId>io.netty</groupId> |
||||
|
<artifactId>netty-codec-native-quic</artifactId> |
||||
|
<classifier>osx-aarch_64</classifier> |
||||
|
<version>${netty.version}</version> |
||||
|
</dependency> |
||||
|
<dependency> |
||||
|
<groupId>io.netty</groupId> |
||||
|
<artifactId>netty-codec-native-quic</artifactId> |
||||
|
<classifier>windows-x86_64</classifier> |
||||
|
<version>${netty.version}</version> |
||||
|
</dependency> |
||||
|
|
||||
|
<!-- 特殊模块 --> |
||||
|
<dependency> |
||||
|
<groupId>io.netty</groupId> |
||||
|
<artifactId>netty-transport-rxtx</artifactId> |
||||
|
<version>${netty.version}</version> |
||||
|
</dependency> |
||||
|
<dependency> |
||||
|
<groupId>io.netty</groupId> |
||||
|
<artifactId>netty-transport-sctp</artifactId> |
||||
|
<version>${netty.version}</version> |
||||
|
</dependency> |
||||
|
<dependency> |
||||
|
<groupId>io.netty</groupId> |
||||
|
<artifactId>netty-transport-udt</artifactId> |
||||
|
<version>${netty.version}</version> |
||||
|
</dependency> |
||||
|
<dependency> |
||||
|
<groupId>io.netty</groupId> |
||||
|
<artifactId>netty-codec-classes-quic</artifactId> |
||||
|
<version>${netty.version}</version> |
||||
|
</dependency> |
||||
|
</dependencies> |
||||
|
</dependencyManagement> |
||||
|
<dependencies> |
||||
|
<dependency> |
||||
|
<groupId>org.springframework.boot</groupId> |
||||
|
<artifactId>spring-boot-starter-jdbc</artifactId> |
||||
|
</dependency> |
||||
|
<dependency> |
||||
|
<groupId>org.springframework.boot</groupId> |
||||
|
<artifactId>spring-boot-starter-web</artifactId> |
||||
|
<exclusions> |
||||
|
<exclusion> |
||||
|
<groupId>org.springframework.boot</groupId> |
||||
|
<artifactId>spring-boot-starter-logging</artifactId> |
||||
|
</exclusion> |
||||
|
<exclusion> |
||||
|
<groupId>org.apache.tomcat.embed</groupId> |
||||
|
<artifactId>tomcat-embed-core</artifactId> |
||||
|
</exclusion> |
||||
|
</exclusions> |
||||
|
</dependency> |
||||
|
<!-- https://mvnrepository.com/artifact/org.apache.tomcat.embed/tomcat-embed-core --> |
||||
|
<dependency> |
||||
|
<groupId>org.apache.tomcat.embed</groupId> |
||||
|
<artifactId>tomcat-embed-core</artifactId> |
||||
|
<version>10.1.42</version> |
||||
|
</dependency> |
||||
|
<dependency> |
||||
|
<groupId>org.springframework.boot</groupId> |
||||
|
<artifactId>spring-boot-configuration-processor</artifactId> |
||||
|
<optional>true</optional> |
||||
|
</dependency> |
||||
|
<dependency> |
||||
|
<groupId>org.springframework.boot</groupId> |
||||
|
<artifactId>spring-boot-starter-integration</artifactId> |
||||
|
</dependency> |
||||
|
<!-- https://mvnrepository.com/artifact/org.springframework.integration/spring-integration-mqtt --> |
||||
|
<!-- https://mvnrepository.com/artifact/org.springframework.integration/spring-integration-mqtt --> |
||||
|
<dependency> |
||||
|
<groupId>org.springframework.integration</groupId> |
||||
|
<artifactId>spring-integration-mqtt</artifactId> |
||||
|
<version>5.5.1</version> |
||||
|
</dependency> |
||||
|
|
||||
|
|
||||
|
<dependency> |
||||
|
<groupId>org.springframework.boot</groupId> |
||||
|
<artifactId>spring-boot-starter-data-redis</artifactId> |
||||
|
</dependency> |
||||
|
<dependency> |
||||
|
<groupId>org.springframework.boot</groupId> |
||||
|
<artifactId>spring-boot-starter-integration</artifactId> |
||||
|
</dependency> |
||||
|
|
||||
|
<dependency> |
||||
|
<groupId>org.springframework.integration</groupId> |
||||
|
<artifactId>spring-integration-redis</artifactId> |
||||
|
</dependency> |
||||
|
<dependency> |
||||
|
<groupId>org.springframework.boot</groupId> |
||||
|
<artifactId>spring-boot-starter-actuator</artifactId> |
||||
|
</dependency> |
||||
|
|
||||
|
<dependency> |
||||
|
<groupId>org.springframework.boot</groupId> |
||||
|
<artifactId>spring-boot-starter-test</artifactId> |
||||
|
<scope>test</scope> |
||||
|
</dependency> |
||||
|
<dependency> |
||||
|
<groupId>software.amazon.awssdk</groupId> |
||||
|
<artifactId>s3</artifactId> |
||||
|
</dependency> |
||||
|
<dependency> |
||||
|
<groupId>commons-io</groupId> |
||||
|
<artifactId>commons-io</artifactId> |
||||
|
<version>2.19.0</version> |
||||
|
</dependency> |
||||
|
<!-- MYSQL --> |
||||
|
<dependency> |
||||
|
<groupId>com.mysql</groupId> |
||||
|
<artifactId>mysql-connector-j</artifactId> |
||||
|
<version>9.3.0</version> |
||||
|
</dependency> |
||||
|
<dependency> |
||||
|
<groupId>org.projectlombok</groupId> |
||||
|
<artifactId>lombok</artifactId> |
||||
|
</dependency> |
||||
|
|
||||
|
<!-- https://mvnrepository.com/artifact/io.joynr.java.messaging.mqtt/paho-mqtt-client --> |
||||
|
<dependency> |
||||
|
<groupId>io.joynr.java.messaging.mqtt</groupId> |
||||
|
<artifactId>paho-mqtt-client</artifactId> |
||||
|
<version>1.14.2</version> |
||||
|
</dependency> |
||||
|
<!-- JSON等格式的转义和反转义 --> |
||||
|
<dependency> |
||||
|
<groupId>org.apache.commons</groupId> |
||||
|
<artifactId>commons-text</artifactId> |
||||
|
<version>1.13.1</version> |
||||
|
</dependency> |
||||
|
|
||||
|
<!-- <dependency> |
||||
|
<groupId>net.sf.json-lib</groupId> |
||||
|
<artifactId>json-lib</artifactId> |
||||
|
<version>2.4</version> |
||||
|
<classifier>jdk15</classifier> |
||||
|
</dependency>--> |
||||
|
<!-- https://mvnrepository.com/artifact/com.google.code.gson/gson --> |
||||
|
<dependency> |
||||
|
<groupId>com.google.code.gson</groupId> |
||||
|
<artifactId>gson</artifactId> |
||||
|
<version>2.13.1</version> |
||||
|
</dependency> |
||||
|
<dependency> |
||||
|
<groupId>io.netty</groupId> |
||||
|
<artifactId>netty-all</artifactId> |
||||
|
<version>${netty.version}</version> |
||||
|
</dependency> |
||||
|
<!-- https://mvnrepository.com/artifact/org.bouncycastle/bcpkix-jdk18on --> |
||||
|
<dependency> |
||||
|
<groupId>org.bouncycastle</groupId> |
||||
|
<artifactId>bcpkix-jdk18on</artifactId> |
||||
|
<version>1.81</version> |
||||
|
</dependency> |
||||
|
<!-- https://mvnrepository.com/artifact/com.squareup.okhttp3/okhttp --> |
||||
|
<dependency> |
||||
|
<groupId>com.squareup.okhttp3</groupId> |
||||
|
<artifactId>okhttp</artifactId> |
||||
|
<version>4.12.0</version> |
||||
|
</dependency> |
||||
|
<!-- https://mvnrepository.com/artifact/junit/junit --> |
||||
|
<dependency> |
||||
|
<groupId>junit</groupId> |
||||
|
<artifactId>junit</artifactId> |
||||
|
<version>4.13.2</version> |
||||
|
<scope>test</scope> |
||||
|
</dependency> |
||||
|
<dependency> |
||||
|
<groupId>com.github.houbb</groupId> |
||||
|
<artifactId>data-factory-core</artifactId> |
||||
|
<version>1.2.0</version> |
||||
|
</dependency> |
||||
|
<!-- https://mvnrepository.com/artifact/com.github.noconnor/junitperf --> |
||||
|
<dependency> |
||||
|
<groupId>com.github.noconnor</groupId> |
||||
|
<artifactId>junitperf</artifactId> |
||||
|
<version>1.22.1</version> |
||||
|
</dependency> |
||||
|
|
||||
|
<!-- https://mvnrepository.com/artifact/org.apache.commons/commons-pool2 --> |
||||
|
<dependency> |
||||
|
<groupId>org.apache.commons</groupId> |
||||
|
<artifactId>commons-pool2</artifactId> |
||||
|
<version>2.11.1</version> |
||||
|
</dependency> |
||||
|
<dependency> |
||||
|
<groupId>cn.hutool</groupId> |
||||
|
<artifactId>hutool-all</artifactId> |
||||
|
<version>5.8.38</version> |
||||
|
</dependency> |
||||
|
<dependency> |
||||
|
<groupId>com.github.derjust</groupId> |
||||
|
<artifactId>spring-data-dynamodb</artifactId> |
||||
|
<version>5.1.0</version> |
||||
|
<exclusions> |
||||
|
<exclusion> |
||||
|
<groupId>com.amazonaws</groupId> |
||||
|
<artifactId>aws-java-sdk-dynamodb</artifactId> |
||||
|
</exclusion> |
||||
|
</exclusions> |
||||
|
</dependency> |
||||
|
<!-- https://mvnrepository.com/artifact/com.amazonaws/aws-java-sdk-dynamodb --> |
||||
|
<dependency> |
||||
|
<groupId>com.amazonaws</groupId> |
||||
|
<artifactId>aws-java-sdk-dynamodb</artifactId> |
||||
|
<version>1.12.786</version> |
||||
|
</dependency> |
||||
|
<dependency> |
||||
|
<groupId>org.springframework.boot</groupId> |
||||
|
<artifactId>spring-boot-starter-data-jpa</artifactId> |
||||
|
</dependency> |
||||
|
<dependency> |
||||
|
<groupId>com.jayway.jsonpath</groupId> |
||||
|
<artifactId>json-path</artifactId> |
||||
|
<version>2.9.0</version> |
||||
|
</dependency> |
||||
|
<dependency> |
||||
|
<groupId>org.slf4j</groupId> |
||||
|
<artifactId>jul-to-slf4j</artifactId> |
||||
|
</dependency> |
||||
|
<!-- https://mvnrepository.com/artifact/com.alibaba.fastjson2/fastjson2 --> |
||||
|
<dependency> |
||||
|
<groupId>com.alibaba.fastjson2</groupId> |
||||
|
<artifactId>fastjson2</artifactId> |
||||
|
<version>2.0.46</version> |
||||
|
</dependency> |
||||
|
|
||||
|
<dependency> |
||||
|
<groupId>org.apache.kafka</groupId> |
||||
|
<artifactId>kafka-clients</artifactId> |
||||
|
<version>4.0.0</version> |
||||
|
</dependency> |
||||
|
|
||||
|
<!-- https://mvnrepository.com/artifact/org.yaml/snakeyaml --> |
||||
|
<dependency> |
||||
|
<groupId>org.yaml</groupId> |
||||
|
<artifactId>snakeyaml</artifactId> |
||||
|
<version>2.4</version> |
||||
|
</dependency> |
||||
|
|
||||
|
<!-- https://mvnrepository.com/artifact/net.minidev/json-smart --> |
||||
|
<dependency> |
||||
|
<groupId>net.minidev</groupId> |
||||
|
<artifactId>json-smart</artifactId> |
||||
|
<version>2.5.2</version> |
||||
|
</dependency> |
||||
|
|
||||
|
<!-- https://mvnrepository.com/artifact/org.springframework/spring-context --> |
||||
|
<dependency> |
||||
|
<groupId>org.springframework</groupId> |
||||
|
<artifactId>spring-context</artifactId> |
||||
|
<version>6.1.21</version> |
||||
|
</dependency> |
||||
|
|
||||
|
<!-- https://mvnrepository.com/artifact/org.springframework/spring-context-support --> |
||||
|
<dependency> |
||||
|
<groupId>org.springframework</groupId> |
||||
|
<artifactId>spring-context-support</artifactId> |
||||
|
<version>6.1.21</version> |
||||
|
</dependency> |
||||
|
|
||||
|
<!-- https://mvnrepository.com/artifact/ch.qos.logback/logback-classic --> |
||||
|
<dependency> |
||||
|
<groupId>ch.qos.logback</groupId> |
||||
|
<artifactId>logback-classic</artifactId> |
||||
|
<version>1.5.18</version> |
||||
|
<scope>compile</scope> |
||||
|
</dependency> |
||||
|
<dependency> |
||||
|
<groupId>ch.qos.logback</groupId> |
||||
|
<artifactId>logback-core</artifactId> |
||||
|
<version>1.5.18</version> |
||||
|
<scope>compile</scope> |
||||
|
</dependency> |
||||
|
|
||||
|
<dependency> |
||||
|
<groupId>com.google.guava</groupId> |
||||
|
<artifactId>guava</artifactId> |
||||
|
<version>33.4.8-jre</version> |
||||
|
</dependency> |
||||
|
|
||||
|
</dependencies> |
||||
|
|
||||
|
<build> |
||||
|
<finalName>data-center-receiver</finalName> |
||||
|
<plugins> |
||||
|
<plugin> |
||||
|
<groupId>org.springframework.boot</groupId> |
||||
|
<artifactId>spring-boot-maven-plugin</artifactId> |
||||
|
</plugin> |
||||
|
<!-- https://mvnrepository.com/artifact/pl.project13.maven/git-commit-id-plugin --> |
||||
|
<plugin> |
||||
|
<groupId>pl.project13.maven</groupId> |
||||
|
<artifactId>git-commit-id-plugin</artifactId> |
||||
|
<version>4.9.10</version> |
||||
|
<executions> |
||||
|
<execution> |
||||
|
<goals> |
||||
|
<goal>revision</goal> |
||||
|
</goals> |
||||
|
</execution> |
||||
|
</executions> |
||||
|
<configuration> |
||||
|
<verbose>true</verbose> |
||||
|
<dateFormat>yyyy-MM-dd'T'HH:mm:ssZ</dateFormat> |
||||
|
<generateGitPropertiesFile>true</generateGitPropertiesFile> |
||||
|
<generateGitPropertiesFilename>${project.build.outputDirectory}/git.properties</generateGitPropertiesFilename> |
||||
|
</configuration> |
||||
|
</plugin> |
||||
|
|
||||
|
<!-- Production environment -->
||||
|
<!-- <plugin>--> |
||||
|
<!-- <groupId>io.fabric8</groupId>--> |
||||
|
<!-- <artifactId>docker-maven-plugin</artifactId>--> |
||||
|
<!-- <version>0.38.1</version>--> |
||||
|
<!-- <configuration>--> |
||||
|
<!-- <authConfig>--> |
||||
|
<!-- <username>AKIAVRXFMB43XVQ3GXAL</username>--> |
||||
|
<!-- <password>G0FaGcizm8FlgLxZsL+8xBwfPSzQF71294nrtE2y</password>--> |
||||
|
<!-- </authConfig>--> |
||||
|
<!-- <images>--> |
||||
|
<!-- <image>--> |
||||
|
<!-- <name>${aws.ecr.registry}/${aws.ecr.repository}:latest</name>--> |
||||
|
<!-- <registry>${aws.ecr.registry}</registry>--> |
||||
|
<!-- <build>--> |
||||
|
<!-- <dockerFile>${project.basedir}/Dockerfile</dockerFile>--> |
||||
|
<!-- </build>--> |
||||
|
<!-- </image>--> |
||||
|
<!-- </images>--> |
||||
|
<!-- </configuration>--> |
||||
|
<!-- </plugin>--> |
||||
|
|
||||
|
<!-- Test environment -->
||||
|
<plugin> |
||||
|
<groupId>io.fabric8</groupId> |
||||
|
<artifactId>docker-maven-plugin</artifactId> |
||||
|
<version>0.38.1</version> |
||||
|
<configuration> |
||||
|
<authConfig> |
||||
|
<username>AKIA5OFH5OOZPCXZIRUQ</username> |
||||
|
<password>TMIN27+OxamT1FmBQSVKfUIWpOVldhxQx2Stxwix</password> |
||||
|
</authConfig> |
||||
|
<images> |
||||
|
<image> |
||||
|
<name>${aws.ecr.registryTest}/${aws.ecr.repository}:latest</name> |
||||
|
<registry>${aws.ecr.registry}</registry> |
||||
|
<build> |
||||
|
<dockerFile>${project.basedir}/Dockerfile</dockerFile> |
||||
|
</build> |
||||
|
</image> |
||||
|
</images> |
||||
|
</configuration> |
||||
|
</plugin> |
||||
|
|
||||
|
</plugins> |
||||
|
|
||||
|
<resources> |
||||
|
<resource> |
||||
|
<directory>src/main/java</directory> |
||||
|
<includes> |
||||
|
<include>**/*.xml</include> |
||||
|
</includes> |
||||
|
</resource> |
||||
|
|
||||
|
<resource> |
||||
|
<directory>src/main/resources</directory> |
||||
|
<includes> |
||||
|
<include>**/*</include> |
||||
|
</includes> |
||||
|
</resource> |
||||
|
</resources> |
||||
|
</build> |
||||
|
|
||||
|
|
||||
|
</project> |
||||
@ -0,0 +1,15 @@ |
|||||
|
# Phase 2 Notes

## Activating or freezing data sources from the configuration table

An AWS Lambda function, via a script, calls the receiver endpoint

`/notification/receive`

passing a `state` parameter and a `List<Long>` of ids, which determine whether the corresponding data sources are activated or frozen.
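For orientation only, here is a minimal sketch of how the receiving side of this call might look. The `/notification/receive` path comes from this note; the controller class, the field names `state` and `ids`, and the payload shape are assumptions for illustration and are not the actual implementation in this repository.

```java
// Hypothetical sketch only: this controller is NOT part of this commit.
// Field names and the request shape are assumptions based on the note above.
import com.techsor.datacenter.receiver.entity.common.JsonResponse;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RestController;

import java.util.List;

@RestController
public class NotificationReceiveSketch {

    // Assumed request body, e.g. {"state": 1, "ids": [101, 102, 103]}
    public static class NotificationRequest {
        public Integer state;   // desired state: activate or freeze
        public List<Long> ids;  // ids of the data source configuration rows
    }

    @PostMapping("/notification/receive")
    public JsonResponse receive(@RequestBody NotificationRequest request) {
        // A real handler would update the activation state of the listed
        // data sources so that the configuration table takes effect.
        return JsonResponse.buildSuccess("");
    }
}
```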
||||
|
|
||||
|
## Applying the data source configuration table

Building on the above, the configuration determines whether each data source ingests data over MQTT or receives it through a RESTful endpoint.
||||
|
|
||||
|
|
||||
@ -0,0 +1,3 @@ |
|||||
|
git pull |
||||
|
mvn clean |
||||
|
mvn package -DskipTests=true docker:build |
||||
@ -0,0 +1,15 @@ |
|||||
|
# Spring Framework 6 / jakarta.* requires Java 17+
FROM eclipse-temurin:17-jre
||||
|
WORKDIR /app |
||||
|
# The docker build context is the project base directory, so the jar is copied from target/
COPY target/data-center-receiver.jar app.jar
||||
|
EXPOSE 8200 |
||||
|
|
||||
|
# Container memory limits are honored automatically on JDK 10+ (UseContainerSupport), so UseCGroupMemoryLimitForHeap is not needed
||||
|
|
||||
|
ENV JAVA_OPTS="-Xms5g -Xmx5g -XX:+UseParallelGC -XX:ParallelGCThreads=4 -XX:MaxGCPauseMillis=200 -XX:GCTimeRatio=19 -XX:NewRatio=3 -XX:+AlwaysPreTouch -Xlog:gc*:file=/app/gc.log:time,uptime,level,tags"
||||
|
|
||||
|
# Use the shell form of ENTRYPOINT so that environment variables are expanded
||||
|
ENTRYPOINT java $JAVA_OPTS -Djavax.net.debug=ssl -jar app.jar --spring.profiles.active=$env
||||
|
|
||||
|
|
||||
|
|
||||
|
|
||||
@ -0,0 +1,17 @@ |
|||||
|
version: '3.8' |
||||
|
|
||||
|
services: |
||||
|
app: |
||||
|
build: . |
||||
|
|
||||
|
ports: |
||||
|
- "8200:8200" |
||||
|
environment: |
||||
|
JAVA_OPTS: "-Xms5g -Xmx5g -XX:+UseParallelGC -XX:ParallelGCThreads=4 -XX:MaxGCPauseMillis=200 -XX:GCTimeRatio=19 -XX:NewRatio=3 -XX:+AlwaysPreTouch -Xlog:gc*:file=/app/gc.log:time,uptime,level,tags"
||||
|
AGENT_PATH: "-agentpath:/app/liberror-detector-agent.so=packageName=org.eclipse.paho.client.mqttv3,=filePath=./gclogs/errorlog.log" # Replace this with your actual agent options if necessary |
||||
|
env: "dev" # Replace this with your actual spring profile if necessary |
||||
|
volumes: |
||||
|
- /Users/zhukovasky/IdeaProjects/Datacenter/TECHSOR_dataCenter_receiver/target/data-center-receiver.jar:/app/app.jar |
||||
|
- /Users/zhukovasky/IdeaProjects/Datacenter/TECHSOR_dataCenter_receiver/target/liberror-detector-agent.so:/app/liberror-detector-agent.so |
||||
|
- /Users/zhukovasky/IdeaProjects/Datacenter/TECHSOR_dataCenter_receiver/target/app/gc.log:/app/gc.log # Make sure this path is correct for your gc logs |
||||
|
entrypoint: ["sh", "-c", "java $$AGENT_PATH $$JAVA_OPTS -Djavax.net.debug=ssl -jar /app/app.jar --spring.profiles.active=$$env"]
||||
@ -0,0 +1,37 @@ |
|||||
|
package com.techsor.datacenter.receiver; |
||||
|
|
||||
|
import org.slf4j.Logger; |
||||
|
import org.slf4j.LoggerFactory; |
||||
|
import org.slf4j.bridge.SLF4JBridgeHandler; |
||||
|
import org.springframework.boot.SpringApplication; |
||||
|
import org.springframework.boot.autoconfigure.SpringBootApplication; |
||||
|
import org.springframework.boot.autoconfigure.orm.jpa.HibernateJpaAutoConfiguration; |
||||
|
import org.springframework.integration.annotation.IntegrationComponentScan; |
||||
|
import org.springframework.integration.config.EnableIntegration; |
||||
|
import org.springframework.scheduling.annotation.EnableScheduling; |
||||
|
|
||||
|
import jakarta.annotation.PostConstruct; |
||||
|
|
||||
|
@EnableScheduling |
||||
|
@IntegrationComponentScan |
||||
|
@EnableIntegration |
||||
|
@SpringBootApplication(scanBasePackages = {"com.techsor.*"},exclude = { HibernateJpaAutoConfiguration.class}) |
||||
|
public class TechsorDataCenterReceiverApplication { |
||||
|
private static final Logger logger = LoggerFactory.getLogger(TechsorDataCenterReceiverApplication.class); |
||||
|
|
||||
|
public static void main(String[] args) { |
||||
|
logger.info("application started success!!"); |
||||
|
SpringApplication.run(TechsorDataCenterReceiverApplication.class, args); |
||||
|
logger.info("application started success!!"); |
||||
|
} |
||||
|
@PostConstruct |
||||
|
public void init() { |
||||
|
// Remove existing handlers attached to the j.u.l root logger
|
||||
|
SLF4JBridgeHandler.removeHandlersForRootLogger(); |
||||
|
|
||||
|
// Bridge/join j.u.l. to SLF4J
|
||||
|
SLF4JBridgeHandler.install(); |
||||
|
} |
||||
|
|
||||
|
} |
||||
|
|
||||
@ -0,0 +1,60 @@ |
|||||
|
package com.techsor.datacenter.receiver.clients; |
||||
|
|
||||
|
|
||||
|
import com.techsor.datacenter.receiver.config.DeltaClientConfig; |
||||
|
import com.techsor.datacenter.receiver.entity.common.MqttPublisherEntity; |
||||
|
import com.techsor.datacenter.receiver.utils.SslUtil; |
||||
|
|
||||
|
import jakarta.annotation.Resource; |
||||
|
|
||||
|
import org.eclipse.paho.client.mqttv3.MqttClient; |
||||
|
import org.eclipse.paho.client.mqttv3.MqttConnectOptions; |
||||
|
import org.eclipse.paho.client.mqttv3.MqttException; |
||||
|
import org.eclipse.paho.client.mqttv3.persist.MemoryPersistence; |
||||
|
import org.slf4j.Logger; |
||||
|
import org.slf4j.LoggerFactory; |
||||
|
import org.springframework.beans.factory.InitializingBean; |
||||
|
import org.springframework.beans.factory.annotation.Autowired; |
||||
|
import org.springframework.stereotype.Component; |
||||
|
|
||||
|
import java.util.concurrent.ScheduledExecutorService; |
||||
|
|
||||
|
@Component("deltaClientMQTT") |
||||
|
public class DeltaClientMQTTS { |
||||
|
private static final Logger logger = LoggerFactory.getLogger(DeltaClientMQTTS.class); |
||||
|
// public String TOPIC = "Publish_Topic";
|
||||
|
//// public String HOST = "ssl://8.209.255.206:8883";
|
||||
|
// public String HOST = "ssl://127.0.0.1:8883";
|
||||
|
public String randomKey; // a random key
|
||||
|
|
||||
|
@Resource(name = "client") |
||||
|
private MqttClient client; |
||||
|
@Resource |
||||
|
private MqttConnectOptions options; |
||||
|
|
||||
|
@Autowired |
||||
|
private DeltaClientConfig deltaClientConfig; |
||||
|
|
||||
|
@Resource |
||||
|
private DeltaPushCallback deltaPushCallback; |
||||
|
public void reconnect(MqttClient mqttClient) throws MqttException { |
||||
|
mqttClient.reconnect(); |
||||
|
} |
||||
|
|
||||
|
|
||||
|
|
||||
|
public void start() throws Exception { |
||||
|
|
||||
|
|
||||
|
client.setCallback(deltaPushCallback); |
||||
|
|
||||
|
client.connect(options); |
||||
|
//subscribe
|
||||
|
int[] Qos = {1}; |
||||
|
String[] topic1 = {this.deltaClientConfig.getTOPIC()};
||||
|
client.subscribe(topic1, Qos); |
||||
|
} |
||||
|
|
||||
|
|
||||
|
} |
||||
@ -0,0 +1,120 @@ |
|||||
|
package com.techsor.datacenter.receiver.clients; |
||||
|
|
||||
|
import com.google.gson.Gson; |
||||
|
import com.techsor.datacenter.receiver.config.DataCenterEnvConfig; |
||||
|
import com.techsor.datacenter.receiver.constants.CompanyConstants; |
||||
|
import com.techsor.datacenter.receiver.constants.UrlConstants; |
||||
|
import com.techsor.datacenter.receiver.entity.common.BaseTransDataEntity; |
||||
|
import com.techsor.datacenter.receiver.entity.common.MqttPublisherEntity; |
||||
|
import com.techsor.datacenter.receiver.service.MqttHistoryDynamoDBService; |
||||
|
import com.techsor.datacenter.receiver.service.MqttHistoryService; |
||||
|
|
||||
|
import com.techsor.datacenter.receiver.utils.DefaultHttpRequestUtil; |
||||
|
import com.techsor.datacenter.receiver.utils.SpringUtils; |
||||
|
import lombok.SneakyThrows; |
||||
|
import org.eclipse.paho.client.mqttv3.*; |
||||
|
import org.slf4j.Logger; |
||||
|
import org.slf4j.LoggerFactory; |
||||
|
import org.springframework.beans.factory.InitializingBean; |
||||
|
import org.springframework.stereotype.Component; |
||||
|
|
||||
|
import jakarta.annotation.Resource; |
||||
|
|
||||
|
/**
 * Callback class for published messages.
 *
 * It must implement the MqttCallback interface and its methods; this callback class implements MqttCallback.
 * Each client identifier needs its own callback instance. In this example the constructor receives the client identifier and stores it as instance data.
 * Inside the callbacks it is used to identify which instance triggered the callback.
 * Three methods must be implemented in the callback class:
 *
 * public void messageArrived(MqttTopic topic, MqttMessage message) receives a publication for a subscribed topic.
 *
 * public void connectionLost(Throwable cause) is called when the connection is lost.
 *
 * public void deliveryComplete(MqttDeliveryToken token)
 * is called when the delivery token for a published QoS 1 or QoS 2 message is received.
 * The callback is activated by MqttClient.connect.
 *
 */
||||
|
@Component |
||||
|
public class DeltaPushCallback implements MqttCallback, MqttCallbackExtended{ |
||||
|
|
||||
|
|
||||
|
@Resource |
||||
|
private MqttPublisherEntity mqttPublisherEntity; |
||||
|
|
||||
|
@Resource |
||||
|
private MqttClient client; |
||||
|
|
||||
|
@Resource |
||||
|
private MqttHistoryService mqttHistoryService; |
||||
|
@Resource |
||||
|
private DefaultHttpRequestUtil defaultHttpRequestUtil; |
||||
|
@Resource |
||||
|
private DataCenterEnvConfig dataCenterEnvConfig; |
||||
|
|
||||
|
@Resource |
||||
|
MqttHistoryDynamoDBService mqttHistoryDynamoDBService; |
||||
|
private static final Logger logger = LoggerFactory.getLogger(DeltaPushCallback.class); |
||||
|
|
||||
|
|
||||
|
@SneakyThrows |
||||
|
public void connectionLost(Throwable cause) { |
||||
|
// After the connection is lost this is where reconnection would normally happen; since automatic reconnect is enabled, the client reconnects by itself and then enters connectComplete.
|
||||
|
logger.error("Connection Lost,Trying to reconnect... ClientId: "+this.client.getClientId()); |
||||
|
Boolean isConnected = client.isConnected(); |
||||
|
logger.warn("client connect status:"+isConnected); |
||||
|
|
||||
|
} |
||||
|
|
||||
|
/** |
||||
|
* Called when the connection has been established successfully.
||||
|
* @param reconnect |
||||
|
* @param serverURI |
||||
|
*/ |
||||
|
@SneakyThrows |
||||
|
@Override |
||||
|
public void connectComplete(boolean reconnect, String serverURI) { |
||||
|
// Topic subscriptions can be done here.
|
||||
|
logger.info("Connect success"); |
||||
|
Boolean isConnected = client.isConnected(); |
||||
|
logger.warn("client connect status:"+isConnected); |
||||
|
if (isConnected){ |
||||
|
logger.info("Subscribe to :"+mqttPublisherEntity.getTopic()); |
||||
|
client.subscribe(mqttPublisherEntity.getTopic(),0); |
||||
|
} |
||||
|
} |
||||
|
|
||||
|
public void deliveryComplete(IMqttDeliveryToken token) { |
||||
|
logger.debug("deliveryComplete---------" + token.isComplete()); |
||||
|
} |
||||
|
|
||||
|
public void messageArrived(String topic, MqttMessage message) throws Exception { |
||||
|
BaseTransDataEntity mqttHistoryEntity = new BaseTransDataEntity(); |
||||
|
mqttHistoryEntity.setContent(new String(message.getPayload())); |
||||
|
mqttHistoryEntity.setTs(System.currentTimeMillis()+""); |
||||
|
mqttHistoryEntity.setCompany(CompanyConstants.DELTA); |
||||
|
mqttHistoryService.insertHistory(mqttHistoryEntity); |
||||
|
|
||||
|
this.mqttHistoryDynamoDBService.save(mqttHistoryEntity); |
||||
|
|
||||
|
try { |
||||
|
forwardMessage(mqttHistoryEntity); |
||||
|
}catch (Exception e){ |
||||
|
logger.warn("Not correct data"); |
||||
|
} |
||||
|
} |
||||
|
|
||||
|
// Forward the received data to the data forwarding platform.
|
||||
|
private void forwardMessage(BaseTransDataEntity mqttHistoryEntity){ |
||||
|
// Only forward terminal (device) data.
|
||||
|
Gson gson = new Gson(); |
||||
|
String jsonParams = gson.toJson(mqttHistoryEntity); |
||||
|
logger.info("Send Data To: {},{}", this.dataCenterEnvConfig.getReceiveUrl(),jsonParams); |
||||
|
this.defaultHttpRequestUtil.postJson(this.dataCenterEnvConfig.getReceiveUrl(),jsonParams); |
||||
|
|
||||
|
} |
||||
|
|
||||
|
|
||||
|
} |
||||
@ -0,0 +1,66 @@ |
|||||
|
package com.techsor.datacenter.receiver.clients; |
||||
|
|
||||
|
|
||||
|
import com.techsor.datacenter.receiver.entity.common.MqttPublisherEntity; |
||||
|
import org.eclipse.paho.client.mqttv3.MqttClient; |
||||
|
import org.eclipse.paho.client.mqttv3.MqttConnectOptions; |
||||
|
import org.eclipse.paho.client.mqttv3.MqttException; |
||||
|
import org.eclipse.paho.client.mqttv3.persist.MemoryPersistence; |
||||
|
import org.slf4j.Logger; |
||||
|
import org.slf4j.LoggerFactory; |
||||
|
|
||||
|
public class ITAClientMQTT { |
||||
|
private static final Logger logger = LoggerFactory.getLogger(ITAClientMQTT.class); |
||||
|
public String TOPIC = "#"; |
||||
|
public String HOST = ""; |
||||
|
|
||||
|
public String randomKey; // a random key appended to the client ID
|
||||
|
private String clientID; |
||||
|
private MqttClient client; |
||||
|
private MqttConnectOptions options; |
||||
|
private String userName = ""; |
||||
|
private String passWord = ""; |
||||
|
|
||||
|
private MqttPublisherEntity publisherEntity; |
||||
|
|
||||
|
|
||||
|
|
||||
|
public ITAClientMQTT(MqttPublisherEntity publisherEntity, String randomKey){ |
||||
|
this.publisherEntity = publisherEntity; |
||||
|
this.HOST = publisherEntity.getHost(); |
||||
|
this.userName = publisherEntity.getUsername(); |
||||
|
this.passWord = publisherEntity.getPassword(); |
||||
|
this.TOPIC = publisherEntity.getTopic(); |
||||
|
this.randomKey = randomKey; |
||||
|
this.clientID = this.userName+":"+this.passWord+randomKey; |
||||
|
|
||||
|
} |
||||
|
|
||||
|
public void reconnect(MqttClient mqttClient) throws MqttException { |
||||
|
mqttClient.reconnect(); |
||||
|
} |
||||
|
|
||||
|
public void start() throws MqttException { |
||||
|
logger.info("重新连接成功,Connect to MQTT: {} Client ID: {}",this.HOST,clientID); |
||||
|
logger.info("Username: {}",userName); |
||||
|
client = new MqttClient(HOST, clientID, new MemoryPersistence()); |
||||
|
options = new MqttConnectOptions(); |
||||
|
options.setCleanSession(false); |
||||
|
options.setUserName(userName); |
||||
|
options.setPassword(passWord.toCharArray()); |
||||
|
options.setMqttVersion(MqttConnectOptions.MQTT_VERSION_3_1); |
||||
|
// Set Timeout
|
||||
|
options.setConnectionTimeout(20); |
||||
|
// Set mqtt-heartbeat interval
|
||||
|
options.setKeepAliveInterval(10); |
||||
|
options.setAutomaticReconnect(true); |
||||
|
client.setCallback(new ITAPushCallback(publisherEntity,randomKey,client)); |
||||
|
|
||||
|
client.connect(options); |
||||
|
//subscribe
|
||||
|
int[] Qos = {0}; |
||||
|
String[] topic1 = {TOPIC}; |
||||
|
client.subscribe(topic1, Qos); |
||||
|
} |
||||
|
|
||||
|
} |
||||
@ -0,0 +1,121 @@ |
|||||
|
package com.techsor.datacenter.receiver.clients; |
||||
|
|
||||
|
import cn.hutool.json.JSONUtil; |
||||
|
import com.google.gson.Gson; |
||||
|
import com.techsor.datacenter.receiver.config.DataCenterEnvConfig; |
||||
|
import com.techsor.datacenter.receiver.constants.CompanyConstants; |
||||
|
import com.techsor.datacenter.receiver.constants.UrlConstants; |
||||
|
import com.techsor.datacenter.receiver.entity.common.BaseTransDataEntity; |
||||
|
import com.techsor.datacenter.receiver.entity.common.MqttPublisherEntity; |
||||
|
import com.techsor.datacenter.receiver.service.MqttHistoryDynamoDBService; |
||||
|
import com.techsor.datacenter.receiver.service.MqttHistoryService; |
||||
|
import com.techsor.datacenter.receiver.utils.DefaultHttpRequestUtil; |
||||
|
import com.techsor.datacenter.receiver.utils.SpringUtils; |
||||
|
import lombok.SneakyThrows; |
||||
|
import lombok.extern.slf4j.Slf4j; |
||||
|
import org.eclipse.paho.client.mqttv3.*; |
||||
|
|
||||
|
/**
 * Callback class for published messages.
 * <p>
 * It must implement the MqttCallback interface and its methods; this callback class implements MqttCallback.
 * Each client identifier needs its own callback instance. In this example the constructor receives the client identifier and stores it as instance data.
 * Inside the callbacks it is used to identify which instance triggered the callback.
 * Three methods must be implemented in the callback class:
 * <p>
 * public void messageArrived(MqttTopic topic, MqttMessage message) receives a publication for a subscribed topic.
 * <p>
 * public void connectionLost(Throwable cause) is called when the connection is lost.
 * <p>
 * public void deliveryComplete(MqttDeliveryToken token)
 * is called when the delivery token for a published QoS 1 or QoS 2 message is received.
 * The callback is activated by MqttClient.connect.
 */
||||
|
@Slf4j |
||||
|
public class ITAPushCallback implements MqttCallback, MqttCallbackExtended { |
||||
|
private DataCenterEnvConfig dataCenterEnvConfig; |
||||
|
private String randomKey; |
||||
|
private MqttPublisherEntity mqttPublisherEntity; |
||||
|
private MqttClient client; |
||||
|
|
||||
|
private MqttHistoryService mqttHistoryService; |
||||
|
|
||||
|
private DefaultHttpRequestUtil defaultHttpRequestUtil; |
||||
|
|
||||
|
|
||||
|
private MqttHistoryDynamoDBService mqttHistoryDynamoDBService; |
||||
|
|
||||
|
public ITAPushCallback(MqttPublisherEntity mqttPublisherEntity, String randomKey, MqttClient client) { |
||||
|
this.randomKey = randomKey; |
||||
|
this.mqttPublisherEntity = mqttPublisherEntity; |
||||
|
this.client = client; |
||||
|
this.mqttHistoryService = SpringUtils.getBean("mqttHistoryService", MqttHistoryService.class); |
||||
|
this.defaultHttpRequestUtil = SpringUtils.getBean("defaultHttpRequestUtil", DefaultHttpRequestUtil.class); |
||||
|
this.dataCenterEnvConfig = SpringUtils.getBean("dataCenterEnvConfig", DataCenterEnvConfig.class); |
||||
|
this.mqttHistoryDynamoDBService=SpringUtils.getBean("mqttHistoryDynamoDBService",MqttHistoryDynamoDBService.class); |
||||
|
} |
||||
|
|
||||
|
@SneakyThrows |
||||
|
public void connectionLost(Throwable cause) { |
||||
|
// After the connection is lost this is where reconnection would normally happen; since automatic reconnect is enabled, the client reconnects by itself and then enters connectComplete.
|
||||
|
log.error("Connection Lost,Trying to reconnect... ClientId: {}", this.client.getClientId()); |
||||
|
Boolean isConnected = client.isConnected(); |
||||
|
log.warn("client connect status: {}", isConnected); |
||||
|
} |
||||
|
|
||||
|
/** |
||||
|
* Called when the connection has been established successfully.
||||
|
* |
||||
|
* @param reconnect |
||||
|
* @param serverURI |
||||
|
*/ |
||||
|
@SneakyThrows |
||||
|
@Override |
||||
|
public void connectComplete(boolean reconnect, String serverURI) { |
||||
|
// Topic subscriptions can be done here.
|
||||
|
log.info("Connect success"); |
||||
|
Boolean isConnected = client.isConnected(); |
||||
|
log.warn("client connect status: {}", isConnected); |
||||
|
if (isConnected) { |
||||
|
log.info("Subscribe to :{}", mqttPublisherEntity.getTopic()); |
||||
|
client.subscribe(mqttPublisherEntity.getTopic(), 0); |
||||
|
} |
||||
|
} |
||||
|
|
||||
|
public void deliveryComplete(IMqttDeliveryToken token) { |
||||
|
log.info("deliveryComplete--------- {}", token.isComplete()); |
||||
|
} |
||||
|
|
||||
|
public void messageArrived(String topic, MqttMessage message) throws Exception { |
||||
|
log.info("message arrived {}", JSONUtil.toJsonStr(message)); |
||||
|
BaseTransDataEntity mqttHistoryEntity = new BaseTransDataEntity(); |
||||
|
mqttHistoryEntity.setContent(new String(message.getPayload())); |
||||
|
mqttHistoryEntity.setTs(System.currentTimeMillis() + ""); |
||||
|
mqttHistoryEntity.setCompany(CompanyConstants.ZIFISENSE); |
||||
|
|
||||
|
mqttHistoryService.insertHistory(mqttHistoryEntity); |
||||
|
// Save to DynamoDB.
|
||||
|
this.mqttHistoryDynamoDBService.save(mqttHistoryEntity); |
||||
|
try { |
||||
|
forwardMessage(mqttHistoryEntity); |
||||
|
} catch (Exception e) { |
||||
|
log.error("Not correct data:{}", e.getMessage(), e); |
||||
|
} |
||||
|
|
||||
|
|
||||
|
} |
||||
|
|
||||
|
|
||||
|
// Forward the received data to the data forwarding platform.
|
||||
|
private void forwardMessage(BaseTransDataEntity mqttHistoryEntity) { |
||||
|
// Only forward terminal (device) data.
|
||||
|
if (mqttHistoryEntity.getContent().contains("msUid")) { |
||||
|
Gson gson = new Gson(); |
||||
|
String jsonParams = gson.toJson(mqttHistoryEntity); |
||||
|
log.info("Send Data To: {}", UrlConstants.RECEIVER_URL); |
||||
|
log.info("Send Data : {}", jsonParams); |
||||
|
this.defaultHttpRequestUtil.postJson(this.dataCenterEnvConfig.getReceiveUrl(), jsonParams); |
||||
|
} |
||||
|
|
||||
|
} |
||||
|
} |
||||
@ -0,0 +1,50 @@ |
|||||
|
package com.techsor.datacenter.receiver.clients; |
||||
|
|
||||
|
import com.google.gson.Gson; |
||||
|
import com.techsor.datacenter.receiver.constants.CompanyConstants; |
||||
|
import com.techsor.datacenter.receiver.entity.common.BaseTransDataEntity; |
||||
|
import com.techsor.datacenter.receiver.entity.common.JsonResponse; |
||||
|
import com.techsor.datacenter.receiver.entity.metcom.MetcomEntity; |
||||
|
import com.techsor.datacenter.receiver.service.DataTransService; |
||||
|
import com.techsor.datacenter.receiver.service.MqttHistoryDynamoDBService; |
||||
|
import com.techsor.datacenter.receiver.service.RestfulService; |
||||
|
import org.slf4j.Logger; |
||||
|
import org.slf4j.LoggerFactory; |
||||
|
import org.springframework.web.bind.annotation.*; |
||||
|
|
||||
|
import jakarta.annotation.Resource; |
||||
|
|
||||
|
@RestController |
||||
|
@CrossOrigin(originPatterns = "*", methods = {RequestMethod.GET, RequestMethod.POST, RequestMethod.PUT}) |
||||
|
public class MetComClient { |
||||
|
private static final Logger logger = LoggerFactory.getLogger(MetComClient.class); |
||||
|
|
||||
|
@Resource |
||||
|
private RestfulService historyDAO; |
||||
|
|
||||
|
@Resource |
||||
|
private DataTransService dataTransService; |
||||
|
@Resource |
||||
|
private MqttHistoryDynamoDBService mqttHistoryDynamoDBService; |
||||
|
|
||||
|
// MetCom 3D indoor positioning data forwarding endpoint.
|
||||
|
@PutMapping(value = "api/v2/to_dbm/metcom/user/location/{userId}") |
||||
|
public JsonResponse nittan(@PathVariable("userId") String userId, @RequestBody String rawJson) throws Exception { |
||||
|
// Merge the uuid with the other parameters.
|
||||
|
MetcomEntity metcomEntity = new Gson().fromJson(rawJson,MetcomEntity.class); |
||||
|
metcomEntity.setUuid(userId); |
||||
|
rawJson = new Gson().toJson(metcomEntity); |
||||
|
// Record the data forwarding history.
|
||||
|
historyDAO.insertHistory(rawJson, CompanyConstants.METCOM); |
||||
|
|
||||
|
BaseTransDataEntity baseTransDataEntity=new BaseTransDataEntity(); |
||||
|
baseTransDataEntity.setCompany(CompanyConstants.METCOM); |
||||
|
baseTransDataEntity.setContent(rawJson); |
||||
|
baseTransDataEntity.setTs(String.valueOf(System.currentTimeMillis())); |
||||
|
this.mqttHistoryDynamoDBService.save(baseTransDataEntity); |
||||
|
// Forward the data.
|
||||
|
this.dataTransService.transferData(userId,CompanyConstants.METCOM,rawJson); |
||||
|
|
||||
|
return JsonResponse.buildSuccess(""); |
||||
|
} |
||||
|
} |
||||
@ -0,0 +1,39 @@ |
|||||
|
package com.techsor.datacenter.receiver.clients; |
||||
|
|
||||
|
import com.techsor.datacenter.receiver.constants.CompanyConstants; |
||||
|
import com.techsor.datacenter.receiver.entity.common.BaseTransDataEntity; |
||||
|
import com.techsor.datacenter.receiver.entity.common.JsonResponse; |
||||
|
import com.techsor.datacenter.receiver.service.DataTransService; |
||||
|
import com.techsor.datacenter.receiver.service.MqttHistoryDynamoDBService; |
||||
|
import com.techsor.datacenter.receiver.service.RestfulService; |
||||
|
import org.springframework.web.bind.annotation.*; |
||||
|
|
||||
|
import jakarta.annotation.Resource; |
||||
|
|
||||
|
@RestController |
||||
|
@CrossOrigin(originPatterns = "*", methods = {RequestMethod.GET, RequestMethod.POST}) |
||||
|
public class NBIClient { |
||||
|
|
||||
|
@Resource |
||||
|
private RestfulService oviPhoneDAO; |
||||
|
|
||||
|
@Resource |
||||
|
private DataTransService dataTransService; |
||||
|
|
||||
|
@Resource |
||||
|
private MqttHistoryDynamoDBService mqttHistoryDynamoDBService; |
||||
|
// NBI data forwarding test endpoint; forwards the data directly.
|
||||
|
@RequestMapping(value = "api/v1/nbi/raw", method = RequestMethod.POST) |
||||
|
public JsonResponse ReceiveRawData(@RequestBody String rawJson) { |
||||
|
oviPhoneDAO.insertHistory(rawJson,CompanyConstants.NBI); |
||||
|
|
||||
|
BaseTransDataEntity baseTransDataEntity=new BaseTransDataEntity(); |
||||
|
baseTransDataEntity.setCompany(CompanyConstants.NBI); |
||||
|
baseTransDataEntity.setContent(rawJson); |
||||
|
baseTransDataEntity.setTs(String.valueOf(System.currentTimeMillis())); |
||||
|
this.mqttHistoryDynamoDBService.save(baseTransDataEntity); |
||||
|
|
||||
|
this.dataTransService.transferData("",CompanyConstants.NBI,rawJson); |
||||
|
return JsonResponse.buildSuccess(rawJson); |
||||
|
} |
||||
|
} |
||||
@ -0,0 +1,41 @@ |
|||||
|
package com.techsor.datacenter.receiver.clients; |
||||
|
|
||||
|
import com.techsor.datacenter.receiver.constants.CompanyConstants; |
||||
|
import com.techsor.datacenter.receiver.entity.common.BaseTransDataEntity; |
||||
|
import com.techsor.datacenter.receiver.entity.common.JsonResponse; |
||||
|
import com.techsor.datacenter.receiver.service.DataTransService; |
||||
|
import com.techsor.datacenter.receiver.service.MqttHistoryDynamoDBService; |
||||
|
import com.techsor.datacenter.receiver.service.RestfulService; |
||||
|
import org.springframework.web.bind.annotation.*; |
||||
|
|
||||
|
import jakarta.annotation.Resource; |
||||
|
|
||||
|
@RestController |
||||
|
@CrossOrigin(originPatterns = "*", methods = {RequestMethod.GET, RequestMethod.POST}) |
||||
|
public class NittanClient { |
||||
|
|
||||
|
@Resource |
||||
|
private RestfulService historyDAO; |
||||
|
|
||||
|
@Resource |
||||
|
private DataTransService dataTransService; |
||||
|
|
||||
|
@Resource |
||||
|
private MqttHistoryDynamoDBService mqttHistoryDynamoDBService; |
||||
|
|
||||
|
// Nittan data forwarding endpoint.
|
||||
|
@RequestMapping(value = "api/v1/to_dbm/nittan", method = RequestMethod.POST) |
||||
|
public JsonResponse nittan(@RequestBody String rawJson) throws Exception { |
||||
|
// Record the data forwarding history.
|
||||
|
historyDAO.insertHistory(rawJson, CompanyConstants.NITTAN); |
||||
|
|
||||
|
BaseTransDataEntity baseTransDataEntity=new BaseTransDataEntity(); |
||||
|
baseTransDataEntity.setCompany(CompanyConstants.NITTAN); |
||||
|
baseTransDataEntity.setContent(rawJson); |
||||
|
baseTransDataEntity.setTs(String.valueOf(System.currentTimeMillis())); |
||||
|
this.mqttHistoryDynamoDBService.save(baseTransDataEntity); |
||||
|
|
||||
|
this.dataTransService.transferData("",CompanyConstants.NITTAN,rawJson); |
||||
|
return JsonResponse.buildSuccess(""); |
||||
|
} |
||||
|
} |
||||
@ -0,0 +1,49 @@ |
|||||
|
package com.techsor.datacenter.receiver.clients; |
||||
|
|
||||
|
import com.techsor.datacenter.receiver.constants.CompanyConstants; |
||||
|
import com.techsor.datacenter.receiver.entity.common.BaseTransDataEntity; |
||||
|
import com.techsor.datacenter.receiver.entity.common.JsonResponse; |
||||
|
import com.techsor.datacenter.receiver.service.DataTransService; |
||||
|
import com.techsor.datacenter.receiver.service.MqttHistoryDynamoDBService; |
||||
|
import com.techsor.datacenter.receiver.service.RestfulService; |
||||
|
import org.slf4j.Logger; |
||||
|
import org.slf4j.LoggerFactory; |
||||
|
import org.springframework.web.bind.annotation.*; |
||||
|
|
||||
|
import jakarta.annotation.Resource; |
||||
|
|
||||
|
@RestController |
||||
|
@CrossOrigin(originPatterns = "*", methods = {RequestMethod.GET, RequestMethod.POST}) |
||||
|
public class OCRClient { |
||||
|
private static final Logger logger = LoggerFactory.getLogger(OCRClient.class); |
||||
|
|
||||
|
@Resource |
||||
|
private RestfulService historyDAO; |
||||
|
|
||||
|
@Resource |
||||
|
private MqttHistoryDynamoDBService mqttHistoryDynamoDBService; |
||||
|
|
||||
|
@Resource |
||||
|
private DataTransService dataTransService; |
||||
|
/** |
||||
|
* OCR data endpoint
||||
|
* @param rawJson |
||||
|
* @return |
||||
|
* @throws Exception |
||||
|
*/ |
||||
|
@RequestMapping(value = "api/v1/to_dbm/ocr", method = RequestMethod.POST) |
||||
|
public JsonResponse nittan(@RequestBody String rawJson) throws Exception { |
||||
|
// Record the data forwarding history.
|
||||
|
historyDAO.insertHistory(rawJson, CompanyConstants.OCR); |
||||
|
|
||||
|
BaseTransDataEntity baseTransDataEntity=new BaseTransDataEntity(); |
||||
|
baseTransDataEntity.setCompany(CompanyConstants.OCR); |
||||
|
baseTransDataEntity.setContent(rawJson); |
||||
|
baseTransDataEntity.setTs(String.valueOf(System.currentTimeMillis())); |
||||
|
this.mqttHistoryDynamoDBService.save(baseTransDataEntity); |
||||
|
|
||||
|
this.dataTransService.transferData("",CompanyConstants.OCR,rawJson); |
||||
|
|
||||
|
return JsonResponse.buildSuccess(""); |
||||
|
} |
||||
|
} |
||||
@ -0,0 +1,38 @@ |
|||||
|
package com.techsor.datacenter.receiver.clients; |
||||
|
|
||||
|
import com.techsor.datacenter.receiver.constants.CompanyConstants; |
||||
|
import com.techsor.datacenter.receiver.entity.common.BaseTransDataEntity; |
||||
|
import com.techsor.datacenter.receiver.entity.common.JsonResponse; |
||||
|
import com.techsor.datacenter.receiver.service.DataTransService; |
||||
|
import com.techsor.datacenter.receiver.service.MqttHistoryDynamoDBService; |
||||
|
import com.techsor.datacenter.receiver.service.RestfulService; |
||||
|
import org.springframework.web.bind.annotation.*; |
||||
|
|
||||
|
import jakarta.annotation.Resource; |
||||
|
|
||||
|
@RestController |
||||
|
@CrossOrigin(originPatterns = "*", methods = {RequestMethod.GET, RequestMethod.POST}) |
||||
|
public class OVIPhoneClient { |
||||
|
|
||||
|
@Resource |
||||
|
private RestfulService oviPhoneDAO; |
||||
|
|
||||
|
@Resource |
||||
|
private MqttHistoryDynamoDBService mqttHistoryDynamoDBService; |
||||
|
@Resource |
||||
|
private DataTransService dataTransService; |
||||
|
// OVIPhone data forwarding test endpoint; forwards the data directly.
|
||||
|
@RequestMapping(value = "api/v1/oviphone/raw", method = RequestMethod.POST) |
||||
|
public JsonResponse ReceiveRawData(@RequestBody String rawJson) { |
||||
|
oviPhoneDAO.insertHistory(rawJson,CompanyConstants.OVIPHONE); |
||||
|
|
||||
|
BaseTransDataEntity baseTransDataEntity=new BaseTransDataEntity(); |
||||
|
baseTransDataEntity.setCompany(CompanyConstants.OVIPHONE); |
||||
|
baseTransDataEntity.setContent(rawJson); |
||||
|
baseTransDataEntity.setTs(String.valueOf(System.currentTimeMillis())); |
||||
|
this.mqttHistoryDynamoDBService.save(baseTransDataEntity); |
||||
|
|
||||
|
this.dataTransService.transferData("",CompanyConstants.OVIPHONE,rawJson); |
||||
|
return JsonResponse.buildSuccess(rawJson); |
||||
|
} |
||||
|
} |
||||
@ -0,0 +1,59 @@ |
|||||
|
package com.techsor.datacenter.receiver.config; |
||||
|
|
||||
|
import java.io.*; |
||||
|
|
||||
|
import jakarta.servlet.ReadListener; |
||||
|
import jakarta.servlet.ServletInputStream; |
||||
|
import jakarta.servlet.http.HttpServletRequest; |
||||
|
import jakarta.servlet.http.HttpServletRequestWrapper; |
||||
|
|
||||
|
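/**
 * HttpServletRequest wrapper that reads the request body once into memory so it
 * can be re-read later (for example by MyFilter and then by the target controller).
 */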
public class CachedBodyHttpServletRequest extends HttpServletRequestWrapper { |
||||
|
|
||||
|
byte[] cachedBody; |
||||
|
|
||||
|
public CachedBodyHttpServletRequest(HttpServletRequest request) throws IOException { |
||||
|
super(request); |
||||
|
InputStream requestInputStream = request.getInputStream(); |
||||
|
ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream(); |
||||
|
byte[] buffer = new byte[1024]; |
||||
|
int len; |
||||
|
while ((len = requestInputStream.read(buffer)) != -1) { |
||||
|
byteArrayOutputStream.write(buffer, 0, len); |
||||
|
} |
||||
|
this.cachedBody = byteArrayOutputStream.toByteArray(); |
||||
|
} |
||||
|
|
||||
|
@Override |
||||
|
public ServletInputStream getInputStream() throws IOException { |
||||
|
return new CachedBodyServletInputStream(this.cachedBody); |
||||
|
} |
||||
|
|
||||
|
private static class CachedBodyServletInputStream extends ServletInputStream { |
||||
|
|
||||
|
private ByteArrayInputStream byteArrayInputStream; |
||||
|
|
||||
|
public CachedBodyServletInputStream(byte[] data) { |
||||
|
this.byteArrayInputStream = new ByteArrayInputStream(data); |
||||
|
} |
||||
|
|
||||
|
@Override |
||||
|
public boolean isFinished() { |
||||
|
return byteArrayInputStream.available() == 0; |
||||
|
} |
||||
|
|
||||
|
@Override |
||||
|
public boolean isReady() { |
||||
|
return true; |
||||
|
} |
||||
|
|
||||
|
@Override |
||||
|
public void setReadListener(ReadListener readListener) { |
||||
|
throw new UnsupportedOperationException(); |
||||
|
} |
||||
|
|
||||
|
@Override |
||||
|
public int read() throws IOException { |
||||
|
return byteArrayInputStream.read(); |
||||
|
} |
||||
|
} |
||||
|
} |
||||
@ -0,0 +1,95 @@ |
|||||
|
package com.techsor.datacenter.receiver.config; |
||||
|
|
||||
|
import lombok.Data; |
||||
|
import org.springframework.beans.factory.annotation.Value; |
||||
|
import org.springframework.context.annotation.Configuration; |
||||
|
|
||||
|
/** |
||||
|
* System configuration
||||
|
* */ |
||||
|
|
||||
|
@Configuration |
||||
|
public class DataCenterEnvConfig { |
||||
|
|
||||
|
|
||||
|
private String receiveUrl; |
||||
|
|
||||
|
@Value("${data.center.receive.address}") |
||||
|
private String apiAddress; |
||||
|
|
||||
|
@Value("${data.center.receive.api:#{'/v1/main_receiver'}}") |
||||
|
private String apiUrl; |
||||
|
|
||||
|
@Value("${data.center.process.api:#{'/v1/generic/process'}}") |
||||
|
private String processApiUrl; |
||||
|
|
||||
|
@Value("${data.center.ioserver_process.api:#{'/v1/generic/ioserver_process'}}") |
||||
|
private String processIoserverUrl; |
||||
|
|
||||
|
@Value("${data.center.ioserver_process.api:#{'/v1/generic/st150_process'}}") |
||||
|
private String processGW150Url; |
||||
|
|
||||
|
@Value("${data.center.zaiot_process.api:#{'/v1/generic/zaiot_process'}}") |
||||
|
private String zaiotProcessApiUrl; |
||||
|
|
||||
|
|
||||
|
public String getReceiveUrl() { |
||||
|
return apiAddress+apiUrl; |
||||
|
} |
||||
|
|
||||
|
public void setReceiveUrl(String receiveUrl) { |
||||
|
this.receiveUrl = receiveUrl; |
||||
|
} |
||||
|
|
||||
|
public String getApiAddress() { |
||||
|
return apiAddress; |
||||
|
} |
||||
|
|
||||
|
public void setApiAddress(String apiAddress) { |
||||
|
this.apiAddress = apiAddress; |
||||
|
} |
||||
|
|
||||
|
public String getApiUrl() { |
||||
|
return apiUrl; |
||||
|
} |
||||
|
|
||||
|
public void setApiUrl(String apiUrl) { |
||||
|
this.apiUrl = apiUrl; |
||||
|
} |
||||
|
|
||||
|
public String getProcessApiUrl() { |
||||
|
return apiAddress+processApiUrl; |
||||
|
} |
||||
|
|
||||
|
public void setProcessApiUrl(String processApiUrl) { |
||||
|
this.processApiUrl = processApiUrl; |
||||
|
} |
||||
|
|
||||
|
public String getProcessIoserverUrl() { |
||||
|
return apiAddress+processIoserverUrl; |
||||
|
} |
||||
|
|
||||
|
public void setProcessIoserverUrl(String processIoserverUrl) { |
||||
|
this.processIoserverUrl = processIoserverUrl; |
||||
|
} |
||||
|
|
||||
|
public String getZaiotProcessApiUrl() { |
||||
|
return apiAddress+zaiotProcessApiUrl; |
||||
|
} |
||||
|
|
||||
|
public void setZaiotProcessApiUrl(String zaiotProcessApiUrl) { |
||||
|
this.zaiotProcessApiUrl = zaiotProcessApiUrl; |
||||
|
} |
||||
|
|
||||
|
public String getProcessGW150Url() { |
||||
|
return processGW150Url; |
||||
|
} |
||||
|
|
||||
|
public void setProcessGW150Url(String processGW150Url) { |
||||
|
this.processGW150Url = processGW150Url; |
||||
|
} |
||||
|
|
||||
|
public String getGW150ProcessUrl() { |
||||
|
return apiAddress+this.processGW150Url; |
||||
|
} |
||||
|
} |
||||
@ -0,0 +1,87 @@ |
|||||
|
package com.techsor.datacenter.receiver.config; |
||||
|
|
||||
|
import com.techsor.datacenter.receiver.entity.common.MqttPublisherEntity; |
||||
|
import com.techsor.datacenter.receiver.utils.SslUtil; |
||||
|
import lombok.Data; |
||||
|
import org.eclipse.paho.client.mqttv3.MqttClient; |
||||
|
import org.eclipse.paho.client.mqttv3.MqttConnectOptions; |
||||
|
import org.eclipse.paho.client.mqttv3.MqttException; |
||||
|
import org.eclipse.paho.client.mqttv3.persist.MemoryPersistence; |
||||
|
import org.slf4j.Logger; |
||||
|
import org.slf4j.LoggerFactory; |
||||
|
import org.springframework.beans.factory.annotation.Value; |
||||
|
import org.springframework.context.annotation.Bean; |
||||
|
import org.springframework.context.annotation.Configuration; |
||||
|
|
||||
|
import java.util.Random; |
||||
|
|
||||
|
@Configuration |
||||
|
@Data |
||||
|
public class DeltaClientConfig { |
||||
|
private static final Logger logger = LoggerFactory.getLogger(DeltaClientConfig.class); |
||||
|
@Value("${delta.topic:Publish_Topic}") |
||||
|
private String TOPIC; |
||||
|
// public String HOST = "ssl://8.209.255.206:8883";
|
||||
|
@Value("${delta.host:ssl://127.0.0.1:8883}") |
||||
|
private String HOST = "ssl://127.0.0.1:8883"; |
||||
|
@Value("${delta.userName:techsor}") |
||||
|
private String userName = "techsor"; |
||||
|
@Value("${delta.usepassWordrName:techsorAsd123456}") |
||||
|
private String passWord = "techsorAsd123456"; |
||||
|
@Value("${delta.enableSSL:false}") |
||||
|
private boolean enableSSL; |
||||
|
|
||||
|
@Bean |
||||
|
public MqttPublisherEntity deltaMqttPublisherEntity(){ |
||||
|
MqttPublisherEntity publisherEntity = new MqttPublisherEntity(); |
||||
|
publisherEntity.setHost(this.HOST); |
||||
|
publisherEntity.setUsername(this.userName); |
||||
|
publisherEntity.setPassword(this.passWord); |
||||
|
publisherEntity.setTopic(this.TOPIC); |
||||
|
return publisherEntity; |
||||
|
} |
||||
|
@Bean |
||||
|
public MqttConnectOptions options() throws Exception { |
||||
|
|
||||
|
MqttConnectOptions options = new MqttConnectOptions(); |
||||
|
options.setCleanSession(false); |
||||
|
options.setUserName(userName); |
||||
|
options.setPassword(passWord.toCharArray()); |
||||
|
options.setMqttVersion(MqttConnectOptions.MQTT_VERSION_3_1); |
||||
|
options.setHttpsHostnameVerificationEnabled(false); |
||||
|
//Set ssl
|
||||
|
if(enableSSL){ |
||||
|
options.setSocketFactory(SslUtil.getSocketFactory("/ssl/ca.pem", "/ssl/client.pem", "/ssl/client.key", "")); |
||||
|
} |
||||
|
// Set Timeout
|
||||
|
options.setConnectionTimeout(0); |
||||
|
// Set mqtt-heartbeat interval
|
||||
|
options.setKeepAliveInterval(60); |
||||
|
options.setAutomaticReconnect(true); |
||||
|
return options; |
||||
|
} |
||||
|
|
||||
|
@Bean |
||||
|
public String clientID(){ |
||||
|
String clientID = this.userName+":"+this.passWord+getRandoms(); |
||||
|
return clientID; |
||||
|
} |
||||
|
@Bean |
||||
|
public MqttClient client(String clientID) throws MqttException { |
||||
|
logger.info("Connect to MQTTs:"+this.HOST+" Client ID:"+clientID); |
||||
|
logger.info("Username:"+userName); |
||||
|
MqttClient client = new MqttClient(HOST, clientID, new MemoryPersistence()); |
||||
|
return client; |
||||
|
|
||||
|
} |
||||
|
|
||||
|
|
||||
|
public int getRandoms(){ |
||||
|
// Create a Random instance
|
||||
|
Random random = new Random(); |
||||
|
|
||||
|
// Generate a random number between 100 and 999 (inclusive)
|
||||
|
int randomNumber = random.nextInt(900) + 100; |
||||
|
return randomNumber; |
||||
|
} |
||||
|
} |
||||
@ -0,0 +1,76 @@ |
|||||
|
//package com.techsor.datacenter.receiver.config;
|
||||
|
//
|
||||
|
//
|
||||
|
//import com.amazonaws.auth.AWSCredentials;
|
||||
|
//import com.amazonaws.auth.AWSCredentialsProvider;
|
||||
|
//import com.amazonaws.auth.AWSStaticCredentialsProvider;
|
||||
|
//import com.amazonaws.auth.BasicAWSCredentials;
|
||||
|
//import com.amazonaws.regions.Regions;
|
||||
|
//import com.amazonaws.services.dynamodbv2.AmazonDynamoDB;
|
||||
|
//import com.amazonaws.services.dynamodbv2.AmazonDynamoDBClientBuilder;
|
||||
|
//import com.amazonaws.services.dynamodbv2.datamodeling.DynamoDBMapper;
|
||||
|
//import com.amazonaws.services.dynamodbv2.datamodeling.DynamoDBMapperConfig;
|
||||
|
//import org.socialsignin.spring.data.dynamodb.core.DynamoDBTemplate;
|
||||
|
//import org.socialsignin.spring.data.dynamodb.mapping.DynamoDBMappingContext;
|
||||
|
//import org.socialsignin.spring.data.dynamodb.repository.config.DynamoDBMapperConfigFactory;
|
||||
|
//import org.springframework.beans.factory.annotation.Value;
|
||||
|
//import org.springframework.context.annotation.Bean;
|
||||
|
//import org.springframework.context.annotation.Configuration;
|
||||
|
//
|
||||
|
////@Configuration
|
||||
|
////public class DynamoDBConfig {
|
||||
|
////
|
||||
|
//// @Value("${amazon.aws.accesskey}")
|
||||
|
//// private String amazonAWSAccessKey;
|
||||
|
////
|
||||
|
//// @Value("${amazon.aws.secretkey}")
|
||||
|
//// private String amazonAWSSecretKey;
|
||||
|
////
|
||||
|
//// public AWSCredentialsProvider amazonAWSCredentialsProvider() {
|
||||
|
//// return new AWSStaticCredentialsProvider(amazonAWSCredentials());
|
||||
|
//// }
|
||||
|
////
|
||||
|
//// @Bean
|
||||
|
//// public AWSCredentials amazonAWSCredentials() {
|
||||
|
//// return new BasicAWSCredentials(amazonAWSAccessKey, amazonAWSSecretKey);
|
||||
|
//// }
|
||||
|
//// @Value("${amazon.dynamodb.tableName:mqtt_history}")
|
||||
|
//// private String dynamoDBTableName ;
|
||||
|
////
|
||||
|
//// @Bean
|
||||
|
//// public DynamoDBMapperConfig.TableNameOverride tableNameOverrider() {
|
||||
|
//// return DynamoDBMapperConfig.TableNameOverride.withTableNameReplacement(this.dynamoDBTableName);
|
||||
|
//// }
|
||||
|
////
|
||||
|
//// @Bean
|
||||
|
//// public DynamoDBMapperConfig dynamoCustomDBMapperConfig(DynamoDBMapperConfig.TableNameOverride tableNameOverrider) {
|
||||
|
//// DynamoDBMapperConfig.Builder builder = new DynamoDBMapperConfig.Builder();
|
||||
|
//// builder.withTableNameOverride(tableNameOverrider);
|
||||
|
//// return builder.build();
|
||||
|
////
|
||||
|
//// }
|
||||
|
////
|
||||
|
//// @Bean
|
||||
|
//// public DynamoDBMapper dynamoCustomDBMapper(AmazonDynamoDB amazonDynamoDB, DynamoDBMapperConfig dynamoCustomDBMapperConfig) {
|
||||
|
//// return new DynamoDBMapper(amazonDynamoDB, dynamoCustomDBMapperConfig);
|
||||
|
//// }
|
||||
|
////
|
||||
|
//// @Bean
|
||||
|
//// public AmazonDynamoDB amazonDynamoDB() {
|
||||
|
//// return AmazonDynamoDBClientBuilder.standard().withCredentials(amazonAWSCredentialsProvider())
|
||||
|
//// .withRegion(Regions.AP_NORTHEAST_1).build();
|
||||
|
//// }
|
||||
|
////
|
||||
|
//// @Bean
|
||||
|
//// public DynamoDBMappingContext dynamoDBMappingContext() {
|
||||
|
//// return new DynamoDBMappingContext();
|
||||
|
//// }
|
||||
|
////
|
||||
|
//// @Bean
|
||||
|
//// public DynamoDBTemplate dynamoDBTemplate(AmazonDynamoDB amazonDynamoDB, DynamoDBMapper dynamoCustomDBMapper ,DynamoDBMapperConfig dynamoCustomDBMapperConfig) {
|
||||
|
////
|
||||
|
//// return new DynamoDBTemplate(amazonDynamoDB, dynamoCustomDBMapper,dynamoCustomDBMapperConfig);
|
||||
|
//// }
|
||||
|
//
|
||||
|
//
|
||||
|
//}
|
||||
@ -0,0 +1,16 @@ |
|||||
|
//package com.techsor.datacenter.receiver.config;
|
||||
|
//
|
||||
|
//import org.springframework.context.annotation.Bean;
|
||||
|
//import org.springframework.context.annotation.Configuration;
|
||||
|
//import org.springframework.jdbc.core.JdbcTemplate;
|
||||
|
//
|
||||
|
//import javax.sql.DataSource;
|
||||
|
//
|
||||
|
//@Configuration
|
||||
|
//public class JdbcTemplateConfig {
|
||||
|
//
|
||||
|
// @Bean
|
||||
|
// public JdbcTemplate jdbcTemplate(DataSource dataSource){
|
||||
|
// return new JdbcTemplate(dataSource);
|
||||
|
// }
|
||||
|
//}
|
||||
@ -0,0 +1,87 @@ |
|||||
|
package com.techsor.datacenter.receiver.config; |
||||
|
|
||||
|
import com.google.gson.Gson; |
||||
|
import com.jayway.jsonpath.JsonPath; |
||||
|
import com.techsor.datacenter.receiver.entity.datasource.DatasourceConfigEntity; |
||||
|
import com.techsor.datacenter.receiver.service.GlobalStateService; |
||||
|
import org.apache.commons.lang3.StringUtils; |
||||
|
import org.springframework.beans.factory.annotation.Autowired; |
||||
|
import org.springframework.stereotype.Component; |
||||
|
import org.springframework.web.filter.GenericFilterBean; |
||||
|
|
||||
|
import jakarta.servlet.FilterChain; |
||||
|
import jakarta.servlet.ServletException; |
||||
|
import jakarta.servlet.ServletRequest; |
||||
|
import jakarta.servlet.ServletResponse; |
||||
|
import jakarta.servlet.http.HttpServletRequest; |
||||
|
import jakarta.servlet.http.HttpServletResponse; |
||||
|
import java.io.BufferedReader; |
||||
|
import java.io.IOException; |
||||
|
import java.io.InputStream; |
||||
|
import java.io.InputStreamReader; |
||||
|
import java.nio.charset.StandardCharsets; |
||||
|
import java.util.HashMap; |
||||
|
import java.util.Map; |
||||
|
|
||||
|
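/**
 * Servlet filter that intercepts requests whose URI matches a registered generic
 * data source. It caches the request body, resolves the device id with the JsonPath
 * expression configured in DatasourceConfigEntity, and forwards the request to
 * /api/generic/process with the resolved MyDeviceId parameter; all other requests
 * pass through unchanged.
 */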
@Component |
||||
|
public class MyFilter extends GenericFilterBean { |
||||
|
@Autowired |
||||
|
private GlobalStateService globalStateService; |
||||
|
|
||||
|
@Override |
||||
|
public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain) |
||||
|
throws IOException, ServletException { |
||||
|
|
||||
|
HttpServletRequest httpServletRequest=(HttpServletRequest)request; |
||||
|
|
||||
|
HttpServletResponse httpServletResponse=(HttpServletResponse)response; |
||||
|
String uri=httpServletRequest.getRequestURI(); |
||||
|
|
||||
|
if (this.globalStateService.checkUrlExist(uri)) { |
||||
|
|
||||
|
|
||||
|
StringBuilder stringBuilder = new StringBuilder(); |
||||
|
// Wrap the original HttpServletRequest so the body can be read more than once
|
||||
|
CachedBodyHttpServletRequest wrappedRequest = new CachedBodyHttpServletRequest(httpServletRequest); |
||||
|
// Read the cached request body
|
||||
|
String body = new String(wrappedRequest.cachedBody, StandardCharsets.UTF_8); |
||||
|
body=StringUtils.replaceAll(body, "\n", ""); |
||||
|
body=StringUtils.replaceAll(body, "\t", ""); |
||||
|
body=StringUtils.replaceAll(body, "\\s+", ""); |
||||
|
DatasourceConfigEntity dataSrcEntity = this.globalStateService.getDatasourceConfig(uri); |
||||
|
if(StringUtils.isEmpty(dataSrcEntity.getDeviceIdPosition())){ |
||||
|
httpServletResponse.setStatus(200); |
||||
|
Map<String,String> errorMap=new HashMap<>(); |
||||
|
errorMap.put("code","-1"); |
||||
|
errorMap.put("msg","deviceId position id null"); |
||||
|
Gson gson=new Gson(); |
||||
|
httpServletResponse.setContentType("application/json;charset=UTF-8"); |
||||
|
httpServletResponse.getWriter().write(gson.toJson(errorMap)); |
||||
|
httpServletResponse.getWriter().flush(); |
||||
|
return; |
||||
|
} |
||||
|
Gson currentGson=new Gson(); |
||||
|
Map resultMao=currentGson.fromJson(body, Map.class); |
||||
|
String resultBody=currentGson.toJson(resultMao); |
||||
|
String deviceId= JsonPath.read(resultBody,dataSrcEntity.getDeviceIdPosition()); |
||||
|
if(StringUtils.isEmpty(deviceId)){ |
||||
|
httpServletResponse.setStatus(200); |
||||
|
Map<String,String> errorMap=new HashMap<>(); |
||||
|
httpServletResponse.setContentType("application/json;charset=UTF-8"); |
||||
|
errorMap.put("code","-1"); |
||||
|
errorMap.put("msg","deviceId is null"); |
||||
|
Gson gson=new Gson(); |
||||
|
httpServletResponse.getWriter().write(gson.toJson(errorMap)); |
||||
|
httpServletResponse.getWriter().flush(); |
||||
|
return; |
||||
|
} |
||||
|
String contextPath=httpServletRequest.getContextPath(); |
||||
|
wrappedRequest.getRequestDispatcher(contextPath+"/api/generic/process?MyDeviceId="+deviceId).forward(wrappedRequest, response); |
||||
|
|
||||
|
}else{ |
||||
|
chain.doFilter(request, response); |
||||
|
} |
||||
|
|
||||
|
|
||||
|
} |
||||
|
} |
||||
@ -0,0 +1,137 @@ |
|||||
|
package com.techsor.datacenter.receiver.config; |
||||
|
import com.fasterxml.jackson.databind.ser.impl.SimpleBeanPropertyFilter; |
||||
|
import com.fasterxml.jackson.databind.ser.impl.SimpleFilterProvider; |
||||
|
import com.techsor.datacenter.receiver.listener.RedisNotificationMessageSubscriber; |
||||
|
import org.apache.commons.pool2.impl.GenericObjectPoolConfig; |
||||
|
import com.fasterxml.jackson.annotation.JsonAutoDetect; |
||||
|
import com.fasterxml.jackson.annotation.JsonTypeInfo; |
||||
|
import com.fasterxml.jackson.annotation.PropertyAccessor; |
||||
|
import com.fasterxml.jackson.databind.ObjectMapper; |
||||
|
import com.fasterxml.jackson.databind.jsontype.impl.LaissezFaireSubTypeValidator; |
||||
|
import org.springframework.beans.factory.annotation.Value; |
||||
|
import org.springframework.context.annotation.Bean; |
||||
|
import org.springframework.context.annotation.Configuration; |
||||
|
import org.springframework.data.redis.connection.RedisConnectionFactory; |
||||
|
import org.springframework.data.redis.connection.RedisPassword; |
||||
|
import org.springframework.data.redis.connection.RedisStandaloneConfiguration; |
||||
|
import org.springframework.data.redis.connection.lettuce.LettuceClientConfiguration; |
||||
|
import org.springframework.data.redis.connection.lettuce.LettuceConnectionFactory; |
||||
|
import org.springframework.data.redis.connection.lettuce.LettucePoolingClientConfiguration; |
||||
|
import org.springframework.data.redis.core.RedisTemplate; |
||||
|
import org.springframework.data.redis.listener.PatternTopic; |
||||
|
import org.springframework.data.redis.listener.RedisMessageListenerContainer; |
||||
|
import org.springframework.data.redis.listener.adapter.MessageListenerAdapter; |
||||
|
import org.springframework.data.redis.serializer.GenericJackson2JsonRedisSerializer; |
||||
|
import org.springframework.data.redis.serializer.Jackson2JsonRedisSerializer; |
||||
|
import org.springframework.data.redis.serializer.RedisSerializer; |
||||
|
import org.springframework.data.redis.serializer.StringRedisSerializer; |
||||
|
import org.springframework.integration.redis.util.RedisLockRegistry; |
||||
|
|
||||
|
import java.time.Duration; |
||||
|
|
||||
|
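/**
 * Redis configuration: a pooled Lettuce connection factory, a RedisTemplate with
 * String keys and Jackson-serialized values, a RedisLockRegistry, and a pub/sub
 * listener container subscribed to the "notificationReceiver" topic.
 */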
@Configuration |
||||
|
public class RedisConfig { |
||||
|
@Value("${spring.redis.database}") |
||||
|
private int database; |
||||
|
|
||||
|
@Value("${spring.redis.host}") |
||||
|
private String host; |
||||
|
|
||||
|
@Value("${spring.redis.password}") |
||||
|
private String password; |
||||
|
|
||||
|
@Value("${spring.redis.port}") |
||||
|
private int port; |
||||
|
|
||||
|
@Value("${spring.redis.timeout}") |
||||
|
private long timeout; |
||||
|
|
||||
|
@Value("${spring.redis.lettuce.shutdown-timeout}") |
||||
|
private long shutDownTimeout; |
||||
|
|
||||
|
@Value("${spring.redis.lettuce.pool.max-idle}") |
||||
|
private int maxIdle; |
||||
|
|
||||
|
@Value("${spring.redis.lettuce.pool.min-idle}") |
||||
|
private int minIdle; |
||||
|
|
||||
|
@Value("${spring.redis.lettuce.pool.max-active}") |
||||
|
private int maxActive; |
||||
|
|
||||
|
@Value("${spring.redis.lettuce.pool.max-wait}") |
||||
|
private long maxWait; |
||||
|
|
||||
|
Jackson2JsonRedisSerializer<Object> jackson2JsonRedisSerializer = new Jackson2JsonRedisSerializer<>(Object.class); |
||||
|
|
||||
|
@Bean |
||||
|
public LettuceConnectionFactory lettuceConnectionFactory() { |
||||
|
GenericObjectPoolConfig genericObjectPoolConfig = new GenericObjectPoolConfig(); |
||||
|
genericObjectPoolConfig.setMaxIdle(maxIdle); |
||||
|
genericObjectPoolConfig.setMinIdle(minIdle); |
||||
|
genericObjectPoolConfig.setMaxTotal(maxActive); |
||||
|
genericObjectPoolConfig.setMaxWaitMillis(maxWait); |
||||
|
genericObjectPoolConfig.setTimeBetweenEvictionRunsMillis(100); |
||||
|
RedisStandaloneConfiguration redisStandaloneConfiguration = new RedisStandaloneConfiguration(); |
||||
|
redisStandaloneConfiguration.setDatabase(database); |
||||
|
redisStandaloneConfiguration.setHostName(host); |
||||
|
redisStandaloneConfiguration.setPort(port); |
||||
|
redisStandaloneConfiguration.setPassword(RedisPassword.of(password)); |
||||
|
LettuceClientConfiguration clientConfig = LettucePoolingClientConfiguration.builder() |
||||
|
.commandTimeout(Duration.ofMillis(timeout)) |
||||
|
.shutdownTimeout(Duration.ofMillis(shutDownTimeout)) |
||||
|
.poolConfig(genericObjectPoolConfig) |
||||
|
.build(); |
||||
|
|
||||
|
LettuceConnectionFactory factory = new LettuceConnectionFactory(redisStandaloneConfiguration, clientConfig); |
||||
|
|
||||
|
return factory; |
||||
|
} |
||||
|
|
||||
|
@Bean |
||||
|
public RedisTemplate<String, Object> redisTemplate(LettuceConnectionFactory lettuceConnectionFactory) { |
||||
|
RedisTemplate<String, Object> template = new RedisTemplate<>(); |
||||
|
template.setConnectionFactory(lettuceConnectionFactory); |
||||
|
// Use Jackson2JsonRedisSerializer instead of the default JdkSerializationRedisSerializer to serialize and deserialize Redis values
|
||||
|
ObjectMapper mapper = new ObjectMapper(); |
||||
|
mapper.setVisibility(PropertyAccessor.ALL, JsonAutoDetect.Visibility.ANY); |
||||
|
mapper.activateDefaultTyping(LaissezFaireSubTypeValidator.instance, |
||||
|
ObjectMapper.DefaultTyping.NON_FINAL, JsonTypeInfo.As.PROPERTY); |
||||
|
jackson2JsonRedisSerializer.setObjectMapper(mapper); |
||||
|
StringRedisSerializer stringRedisSerializer = new StringRedisSerializer(); |
||||
|
// Use String serialization for keys
|
||||
|
template.setKeySerializer(stringRedisSerializer); |
||||
|
// Also use String serialization for hash keys
|
||||
|
template.setHashKeySerializer(stringRedisSerializer); |
||||
|
// Use Jackson serialization for values
|
||||
|
template.setValueSerializer(jackson2JsonRedisSerializer); |
||||
|
// Also use Jackson serialization for hash values
|
||||
|
template.setHashValueSerializer(jackson2JsonRedisSerializer); |
||||
|
template.afterPropertiesSet(); |
||||
|
return template; |
||||
|
} |
||||
|
|
||||
|
@Value("${redis.lock.expire}") |
||||
|
private Integer lockAttemptTimeout;
||||
|
|
||||
|
@Bean |
||||
|
public RedisLockRegistry redisLockRegistry(RedisConnectionFactory redisConnectionFactory){ |
||||
|
return new RedisLockRegistry(redisConnectionFactory, "redis-receiver-lock", lockAttemptTimeout);
||||
|
} |
||||
|
|
||||
|
|
||||
|
@Bean |
||||
|
RedisMessageListenerContainer container(RedisConnectionFactory connectionFactory, |
||||
|
MessageListenerAdapter listenerAdapter) { |
||||
|
RedisMessageListenerContainer container = new RedisMessageListenerContainer(); |
||||
|
container.setConnectionFactory(connectionFactory); |
||||
|
container.addMessageListener(listenerAdapter, new PatternTopic("notificationReceiver")); |
||||
|
return container; |
||||
|
} |
||||
|
|
||||
|
@Bean |
||||
|
MessageListenerAdapter listenerAdapter(RedisNotificationMessageSubscriber subscriber) { |
||||
|
return new MessageListenerAdapter(subscriber); |
||||
|
} |
||||
|
|
||||
|
} |
||||
|
|
||||
@ -0,0 +1,65 @@ |
|||||
|
package com.techsor.datacenter.receiver.config; |
||||
|
|
||||
|
import okhttp3.ConnectionPool; |
||||
|
import okhttp3.OkHttpClient; |
||||
|
import org.springframework.beans.factory.annotation.Value; |
||||
|
import org.springframework.context.annotation.Bean; |
||||
|
import org.springframework.context.annotation.Configuration; |
||||
|
import org.springframework.http.client.ClientHttpRequestFactory; |
||||
|
import org.springframework.http.client.OkHttp3ClientHttpRequestFactory; |
||||
|
import org.springframework.web.client.RestTemplate; |
||||
|
|
||||
|
import java.util.concurrent.TimeUnit; |
||||
|
|
||||
|
/*** |
||||
|
* RestTemplate configuration backed by an OkHttp connection factory |
||||
|
* **/ |
||||
|
@Configuration |
||||
|
public class RestTemplateConfig { |
||||
|
|
||||
|
|
||||
|
@Value("${ok.http.connect-timeout}") |
||||
|
private Integer connectTimeout; |
||||
|
|
||||
|
@Value("${ok.http.read-timeout}") |
||||
|
private Integer readTimeout; |
||||
|
|
||||
|
@Value("${ok.http.write-timeout}") |
||||
|
private Integer writeTimeout; |
||||
|
|
||||
|
@Value("${ok.http.max-idle-connections}") |
||||
|
private Integer maxIdleConnections; |
||||
|
|
||||
|
@Value("${ok.http.keep-alive-duration}") |
||||
|
private Long keepAliveDuration; |
||||
|
|
||||
|
|
||||
|
/** |
||||
|
* Declare the RestTemplate bean |
||||
|
*/ |
||||
|
@Bean |
||||
|
public RestTemplate httpRestTemplate() { |
||||
|
ClientHttpRequestFactory factory = httpRequestFactory(); |
||||
|
RestTemplate restTemplate = new RestTemplate(factory); |
||||
|
return restTemplate; |
||||
|
} |
||||
|
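//Illustrative usage sketch (the URL is an example only): callers inject this RestTemplate and every request |
//goes through the pooled OkHttp client configured below, e.g. |
//  String body = httpRestTemplate.getForObject("https://example.com/ping", String.class); |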
|
||||
|
public ClientHttpRequestFactory httpRequestFactory() { |
||||
|
return new OkHttp3ClientHttpRequestFactory(okHttpConfigClient()); |
||||
|
} |
||||
|
|
||||
|
public OkHttpClient okHttpConfigClient(){ |
||||
|
return new OkHttpClient().newBuilder() |
||||
|
.connectionPool(pool()) |
||||
|
.connectTimeout(connectTimeout, TimeUnit.SECONDS) |
||||
|
.readTimeout(readTimeout, TimeUnit.SECONDS) |
||||
|
.writeTimeout(writeTimeout, TimeUnit.SECONDS) |
||||
|
.hostnameVerifier((hostname, session) -> true) |
||||
|
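//NOTE: the verifier above accepts any hostname, i.e. hostname verification is disabled; acceptable only for trusted endpoints |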
.build(); |
||||
|
} |
||||
|
|
||||
|
public ConnectionPool pool() { |
||||
|
return new ConnectionPool(maxIdleConnections, keepAliveDuration, TimeUnit.SECONDS); |
||||
|
} |
||||
|
|
||||
|
} |
||||
@ -0,0 +1,56 @@ |
|||||
|
package com.techsor.datacenter.receiver.config; |
||||
|
|
||||
|
import org.springframework.context.annotation.Bean; |
||||
|
import org.springframework.context.annotation.Configuration; |
||||
|
import org.springframework.scheduling.TaskScheduler; |
||||
|
import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor; |
||||
|
import org.springframework.core.task.TaskExecutor; |
||||
|
import org.springframework.scheduling.concurrent.ThreadPoolTaskScheduler; |
||||
|
|
||||
|
import java.util.concurrent.Executor; |
||||
|
import java.util.concurrent.ThreadPoolExecutor; |
||||
|
|
||||
|
@Configuration |
||||
|
public class TaskExecutorConfig { |
||||
|
|
||||
|
@Bean("threadPoolTaskExecutor") |
||||
|
public TaskExecutor threadPoolTaskExecutor() { |
||||
|
ThreadPoolTaskExecutor executor = new ThreadPoolTaskExecutor(); |
||||
|
executor.setCorePoolSize(4); |
||||
|
executor.setMaxPoolSize(10); |
||||
|
executor.setThreadNamePrefix("my_executor_thread"); |
||||
|
executor.initialize(); |
||||
|
return executor; |
||||
|
} |
||||
|
|
||||
|
@Bean("taskScheduler") |
||||
|
public TaskScheduler taskScheduler(){ |
||||
|
ThreadPoolTaskScheduler threadPoolTaskScheduler |
||||
|
= new ThreadPoolTaskScheduler(); |
||||
|
threadPoolTaskScheduler.setPoolSize(5); |
||||
|
threadPoolTaskScheduler.setThreadNamePrefix( |
||||
|
"ThreadPoolTaskScheduler"); |
||||
|
return threadPoolTaskScheduler; |
||||
|
} |
||||
|
|
||||
|
|
||||
|
|
||||
|
|
||||
|
@Bean("postSenderThreadTaskExecutor") |
||||
|
public Executor postSenderThreadTaskExecutor() { |
||||
|
int cpuCores = Runtime.getRuntime().availableProcessors(); |
||||
|
int poolSize = cpuCores * 2; // Assumes I/O-bound work; tune to the actual workload
|
||||
|
|
||||
|
ThreadPoolTaskExecutor executor = new ThreadPoolTaskExecutor(); |
||||
|
executor.setCorePoolSize(poolSize); |
||||
|
executor.setMaxPoolSize(poolSize * 2); // Adjust further as needed
|
||||
|
executor.setQueueCapacity(100); // Adjust further as needed
|
||||
|
executor.setThreadNamePrefix("Async-"); |
||||
|
executor.setKeepAliveSeconds(60); // Keep-alive time; adjust if needed
|
||||
|
executor.setRejectedExecutionHandler(new ThreadPoolExecutor.CallerRunsPolicy()); // Rejection policy: run the task in the caller's thread when saturated
|
||||
|
executor.initialize(); |
||||
|
return executor; |
||||
|
} |
||||
|
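//Worked example of the sizing above, assuming an 8-core host: corePoolSize = 8 * 2 = 16, maxPoolSize = 16 * 2 = 32, |
//queueCapacity = 100; once both are exhausted, CallerRunsPolicy makes the submitting thread run the task itself, |
//which naturally throttles producers. |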
|
||||
|
|
||||
|
} |
||||
@ -0,0 +1,186 @@ |
|||||
|
package com.techsor.datacenter.receiver.config.datasource; |
||||
|
|
||||
|
|
||||
|
import com.zaxxer.hikari.HikariDataSource; |
||||
|
|
||||
|
import jakarta.persistence.EntityManagerFactory; |
||||
|
|
||||
|
import org.slf4j.Logger; |
||||
|
import org.slf4j.LoggerFactory; |
||||
|
import org.springframework.beans.factory.annotation.Qualifier; |
||||
|
import org.springframework.beans.factory.annotation.Value; |
||||
|
import org.springframework.boot.jdbc.DataSourceBuilder; |
||||
|
import org.springframework.boot.orm.jpa.EntityManagerFactoryBuilder; |
||||
|
import org.springframework.context.annotation.Bean; |
||||
|
import org.springframework.context.annotation.Configuration; |
||||
|
import org.springframework.context.annotation.Primary; |
||||
|
import org.springframework.jdbc.core.JdbcTemplate; |
||||
|
import org.springframework.orm.jpa.JpaTransactionManager; |
||||
|
import org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean; |
||||
|
import org.springframework.orm.jpa.vendor.HibernateJpaVendorAdapter; |
||||
|
import org.springframework.transaction.PlatformTransactionManager; |
||||
|
|
||||
|
import javax.sql.DataSource; |
||||
|
import java.util.HashMap; |
||||
|
import java.util.Map; |
||||
|
|
||||
|
@Configuration |
||||
|
public class DataSourceAdminConfig { |
||||
|
private static final Logger logger= LoggerFactory.getLogger(DataSourceAdminConfig.class); |
||||
|
|
||||
|
@Value("${spring.datasource.admin.name}") |
||||
|
private String name; |
||||
|
|
||||
|
@Value("${spring.datasource.admin.url}") |
||||
|
private String url; |
||||
|
|
||||
|
@Value("${spring.datasource.admin.username}") |
||||
|
private String username; |
||||
|
|
||||
|
@Value("${spring.datasource.admin.password}") |
||||
|
private String password; |
||||
|
|
||||
|
@Value("${spring.datasource.admin.driverClassName}") |
||||
|
private String driverClassName; |
||||
|
|
||||
|
|
||||
|
|
||||
|
@Value("${spring.datasource.admin.hikari.schema}") |
||||
|
private String schema; |
||||
|
|
||||
|
@Value("${spring.datasource.admin.hikari.minimum-idle}") |
||||
|
private Integer mininumIdle; |
||||
|
|
||||
|
@Value("${spring.datasource.admin.hikari.maximum-pool-size}") |
||||
|
private Integer maximuPoolSize; |
||||
|
|
||||
|
@Value("${spring.datasource.admin.hikari.connection-timeout}") |
||||
|
private Integer connectionTimeout; |
||||
|
|
||||
|
@Value("${dynamic.jdbc.url}") |
||||
|
private String dynamicJdbcUrl; |
||||
|
|
||||
|
/** |
||||
|
* Retrieve the admin data source. |
||||
|
*/ |
||||
|
@Primary |
||||
|
@Bean |
||||
|
public DataSource adminDatasource() { |
||||
|
HikariDataSource adminDatasource = DataSourceBuilder.create() |
||||
|
.url(url) |
||||
|
.username(username) |
||||
|
.password(password).driverClassName(driverClassName) |
||||
|
.type(HikariDataSource.class) |
||||
|
.build(); |
||||
|
adminDatasource.setSchema(schema); |
||||
|
adminDatasource.setMinimumIdle(mininumIdle); |
||||
|
adminDatasource.setMaximumPoolSize(maximuPoolSize); |
||||
|
adminDatasource.setConnectionTimeout(connectionTimeout); |
||||
|
return adminDatasource; |
||||
|
} |
||||
|
|
||||
|
|
||||
|
@Bean |
||||
|
public DataSource thirdDatasource() { |
||||
|
String dbUrl=String.format(dynamicJdbcUrl,"data_center_aeon_admin"); |
||||
|
HikariDataSource adminDatasource = DataSourceBuilder.create() |
||||
|
.url(dbUrl) |
||||
|
.username(username) |
||||
|
.password(password).driverClassName(driverClassName) |
||||
|
.type(HikariDataSource.class) |
||||
|
.build(); |
||||
|
adminDatasource.setSchema("data_center_aeon_admin"); |
||||
|
adminDatasource.setMinimumIdle(mininumIdle); |
||||
|
adminDatasource.setMaximumPoolSize(maximuPoolSize); |
||||
|
adminDatasource.setConnectionTimeout(connectionTimeout); |
||||
|
return adminDatasource; |
||||
|
} |
||||
|
|
||||
|
/** |
||||
|
* Construct a JdbcTemplate for accessing the dynamic data source. |
||||
|
*/ |
||||
|
@Bean |
||||
|
public JdbcTemplate jdbcTemplate(@Qualifier("adminDatasource") DataSource adminDatasource) { |
||||
|
return new JdbcTemplate(adminDatasource); |
||||
|
} |
||||
|
|
||||
|
|
||||
|
|
||||
|
@Bean |
||||
|
public DataSource dynamicDataSource(JdbcTemplate jdbcTemplate,@Qualifier("adminDatasource") DataSource adminDatasource,@Qualifier("thirdDatasource") DataSource thirdDatasource) { |
||||
|
DynamicRouteDataSource dynamicDataSource = new DynamicRouteDataSource(); |
||||
|
Map<Object, Object> targetDataSources = new HashMap<>(); |
||||
|
|
||||
|
String sql=" SELECT " + |
||||
|
" bcom.id,bcom.parent_id, " + |
||||
|
" bcom.company_name companyName " + |
||||
|
" FROM " + |
||||
|
" data_center_aeon_admin.basic_company bcom " + |
||||
|
" WHERE (bcom.parent_id=1 or bcom.parent_id=-1) and bcom.flag!=1"; |
||||
|
|
||||
|
jdbcTemplate.query(sql,rs->{ |
||||
|
HikariDataSource dataSource1 = new HikariDataSource(); |
||||
|
String dbName="data_center_aeon_"+rs.getInt("id"); |
||||
|
String dbUrl=String.format(dynamicJdbcUrl,dbName); |
||||
|
dataSource1.setJdbcUrl(dbUrl); |
||||
|
dataSource1.setUsername(username); |
||||
|
dataSource1.setPassword(password); |
||||
|
dataSource1.setDriverClassName(driverClassName); |
||||
|
dataSource1.setSchema(dbName); |
||||
|
dataSource1.setMinimumIdle(mininumIdle); |
||||
|
dataSource1.setMaximumPoolSize(maximuPoolSize); |
||||
|
dataSource1.setConnectionTimeout(connectionTimeout); |
||||
|
if (rs.getInt("parent_id")==1 || rs.getInt("parent_id")==-1){ |
||||
|
targetDataSources.put("dataSourceForCompany_"+rs.getInt("id"), dataSource1); |
||||
|
logger.info("Put dataSourceForCompany_"+rs.getInt("id")+" -- URL:"+dbUrl); |
||||
|
}else{ |
||||
|
|
||||
|
} |
||||
|
}); |
||||
|
targetDataSources.put("dataSourceForCompany_0", thirdDatasource); |
||||
|
|
||||
|
dynamicDataSource.setTargetDataSources(targetDataSources); |
||||
|
dynamicDataSource.setDefaultTargetDataSource(adminDatasource); // Set the default data source
|
||||
|
return dynamicDataSource; |
||||
|
} |
||||
|
|
||||
|
|
||||
|
@Bean |
||||
|
public JdbcTemplate dynamicJdbcTemplate(@Qualifier("dynamicDataSource") DataSource dynamicDataSource) { |
||||
|
return new JdbcTemplate(dynamicDataSource); |
||||
|
} |
||||
|
|
||||
|
|
||||
|
|
||||
|
// Register this bean so that an EntityManagerFactoryBuilder is available in the Spring container
|
||||
|
@Bean |
||||
|
public EntityManagerFactoryBuilder entityManagerFactoryBuilder() { |
||||
|
HibernateJpaVendorAdapter vendorAdapter = new HibernateJpaVendorAdapter(); |
||||
|
// If you need specific JPA settings, enable the lines below:
|
||||
|
// vendorAdapter.setGenerateDdl(true);
|
||||
|
// vendorAdapter.setShowSql(true);
|
||||
|
|
||||
|
return new EntityManagerFactoryBuilder(vendorAdapter, new HashMap<>(), null); |
||||
|
} |
||||
|
|
||||
|
@Primary |
||||
|
@Bean(name = {"adminEntityManagerFactory", "entityManagerFactory"}) |
||||
|
public LocalContainerEntityManagerFactoryBean adminEntityManagerFactory( |
||||
|
EntityManagerFactoryBuilder builder, |
||||
|
@Qualifier("adminDatasource") DataSource adminDatasource) { |
||||
|
return builder |
||||
|
.dataSource(adminDatasource) |
||||
|
.packages("com.techsor.datacenter.receiver.entity") |
||||
|
.persistenceUnit("adminPU") |
||||
|
.build(); |
||||
|
} |
||||
|
|
||||
|
@Primary |
||||
|
@Bean(name = "adminTransactionManager") |
||||
|
public PlatformTransactionManager adminTransactionManager( |
||||
|
@Qualifier("adminEntityManagerFactory") EntityManagerFactory adminEntityManagerFactory) { |
||||
|
return new JpaTransactionManager(adminEntityManagerFactory); |
||||
|
} |
||||
|
|
||||
|
|
||||
|
} |
||||
@ -0,0 +1,18 @@ |
|||||
|
package com.techsor.datacenter.receiver.config.datasource; |
||||
|
|
||||
|
public class DataSourceContextHolder { |
||||
|
|
||||
|
private static final ThreadLocal<String> contextHolder = new ThreadLocal<>(); |
||||
|
|
||||
|
public static void setCurrentDataSourceKey(String dataSourceKey) { |
||||
|
contextHolder.set(dataSourceKey); |
||||
|
} |
||||
|
|
||||
|
public static String getCurrentDataSourceKey() { |
||||
|
return contextHolder.get(); |
||||
|
} |
||||
|
|
||||
|
public static void clearCurrentDataSourceKey() { |
||||
|
contextHolder.remove(); |
||||
|
} |
||||
|
} |
||||
@ -0,0 +1,13 @@ |
|||||
|
package com.techsor.datacenter.receiver.config.datasource; |
||||
|
|
||||
|
import org.springframework.jdbc.datasource.lookup.AbstractRoutingDataSource; |
||||
|
|
||||
|
|
||||
|
public class DynamicRouteDataSource extends AbstractRoutingDataSource { |
||||
|
|
||||
|
@Override |
||||
|
protected Object determineCurrentLookupKey() { |
||||
|
// Retrieve the key of the data source to be used by the current thread.
|
||||
|
return DataSourceContextHolder.getCurrentDataSourceKey(); |
||||
|
} |
||||
|
} |
||||
@ -0,0 +1,42 @@ |
|||||
|
package com.techsor.datacenter.receiver.config.kafka; |
||||
|
|
||||
|
import com.techsor.datacenter.receiver.config.kafka.ZiFiClient; |
||||
|
import com.techsor.datacenter.receiver.config.kafka.imp.ClientFactory; |
||||
|
import com.techsor.datacenter.receiver.config.kafka.imp.ClientType; |
||||
|
public class Main { |
||||
|
public static void main(String[] args) { |
||||
|
|
||||
|
String host; |
||||
|
String apiKey; |
||||
|
String apiSecret; |
||||
|
String companyCode; |
||||
|
|
||||
|
//hostname: the server address to connect to; for real use, change the address in \com\zifisense\zetag\mq\api\RegionEnum.java
|
||||
|
host = "https://zetagintl.zifisense.com:9093"; |
||||
|
//apiKey: a company can now have multiple API keys; in V1 this was the company code
|
||||
|
apiKey = "7c5554faf5744135b310a81a13bb487e"; |
||||
|
// apiSecret matching the apiKey; in v1 this was equivalent to the company secret
|
||||
|
apiSecret = "691b57705c644b6ab080a89de7949f27"; |
||||
|
//Company code
|
||||
|
companyCode = "00ebe81f74b34ce38b41266993487f3c"; |
||||
|
|
||||
|
//Create the client
|
||||
|
ZiFiClient c = ClientFactory.createClient(ClientType.KAFKA, host, apiKey, apiSecret, companyCode); |
||||
|
//Subscribe to topics
|
||||
|
c.subscribe(); |
||||
|
/* In v1 a topic also had to be specified; that approach is deprecated |
||||
|
// Describe which data to fetch
|
||||
|
String topic= "zetag-heartbeat-all"; |
||||
|
c.subscribe(topic); |
||||
|
*/ |
||||
|
while (true) { |
||||
|
//Process received data in a loop
|
||||
|
c.poll().forEach(message->{ |
||||
|
System.out.printf("id = %s, value = %s%n", message.getMessageId(), message.getBody()); |
||||
|
}); |
||||
|
//The next batch can only be consumed after a commit
|
||||
|
c.commit(); |
||||
|
} |
||||
|
|
||||
|
} |
||||
|
} |
||||
@ -0,0 +1,22 @@ |
|||||
|
package com.techsor.datacenter.receiver.config.kafka; |
||||
|
|
||||
|
public enum RegionEnum { |
||||
|
|
||||
|
CN("zetagos.zifisense.com:9093"); |
||||
|
|
||||
|
RegionEnum(String url) { |
||||
|
this.url = url; |
||||
|
} |
||||
|
|
||||
|
private String url; |
||||
|
|
||||
|
public String getUrl() { |
||||
|
return url; |
||||
|
} |
||||
|
|
||||
|
public void setUrl(String url) { |
||||
|
this.url = url; |
||||
|
} |
||||
|
|
||||
|
|
||||
|
} |
||||
@ -0,0 +1,30 @@ |
|||||
|
package com.techsor.datacenter.receiver.config.kafka; |
||||
|
|
||||
|
import com.techsor.datacenter.receiver.config.kafka.RegionEnum; |
||||
|
import com.techsor.datacenter.receiver.config.kafka.model.Message; |
||||
|
|
||||
|
import java.util.Collection; |
||||
|
|
||||
|
public abstract class ZiFiClient { |
||||
|
|
||||
|
public abstract void init(com.techsor.datacenter.receiver.config.kafka.RegionEnum region, String apiKey, String apiSecret, String companyCode); |
||||
|
|
||||
|
public abstract void init(RegionEnum region, String apiKey, String apiSecret, String companyCode, String certPath); |
||||
|
|
||||
|
public abstract void init(String host, String apiKey, String apiSecret, String companyCode); |
||||
|
|
||||
|
public abstract void init(String host, String apiKey, String apiSecret, String companyCode, String certPath); |
||||
|
|
||||
|
public abstract void subscribe(String topic); |
||||
|
|
||||
|
public abstract void subscribe(); |
||||
|
|
||||
|
public abstract Collection<Message> poll(); |
||||
|
|
||||
|
public abstract void commit(); |
||||
|
|
||||
|
|
||||
|
|
||||
|
|
||||
|
|
||||
|
} |
||||
@ -0,0 +1,44 @@ |
|||||
|
package com.techsor.datacenter.receiver.config.kafka.imp; |
||||
|
|
||||
|
import com.techsor.datacenter.receiver.config.kafka.RegionEnum; |
||||
|
import com.techsor.datacenter.receiver.config.kafka.ZiFiClient; |
||||
|
import com.techsor.datacenter.receiver.config.kafka.imp.ClientType; |
||||
|
|
||||
|
public class ClientFactory { |
||||
|
|
||||
|
private static Object loadImplement(String className) |
||||
|
{ |
||||
|
try { |
||||
|
ClassLoader cl = ClientFactory.class.getClassLoader(); |
||||
|
Class<?> implClass; |
||||
|
implClass = Class.forName(className, true, cl); |
||||
|
return implClass.newInstance(); |
||||
|
} catch (ClassNotFoundException e) { |
||||
|
throw new RuntimeException("Cannot load class " + className, e); |
||||
|
} catch (InstantiationException e) { |
||||
|
throw new RuntimeException("Cannot instantiate class " + className, e); |
||||
|
} catch (IllegalAccessException e) { |
||||
|
throw new RuntimeException("Cannot access class " + className, e); |
||||
|
} catch (SecurityException e) { |
||||
|
throw new RuntimeException("Cannot access class " + className, e); |
||||
|
} |
||||
|
} |
||||
|
|
||||
|
public static ZiFiClient createClient(com.techsor.datacenter.receiver.config.kafka.imp.ClientType type, RegionEnum region, String apiKey, String apiSecret, String companyCode){ |
||||
|
ZiFiClient client = (ZiFiClient)loadImplement(type.getName()); |
||||
|
client.init(region, apiKey, apiSecret, companyCode); |
||||
|
return client; |
||||
|
} |
||||
|
|
||||
|
public static ZiFiClient createClient(com.techsor.datacenter.receiver.config.kafka.imp.ClientType type, String host, String apiKey, String apiSecret, String companyCode){ |
||||
|
ZiFiClient client = (ZiFiClient)loadImplement(type.getName()); |
||||
|
client.init(host, apiKey, apiSecret, companyCode); |
||||
|
return client; |
||||
|
} |
||||
|
|
||||
|
public static ZiFiClient createClient(ClientType type, RegionEnum region, String apiKey, String apiSecret, String companyCode, String certPath){ |
||||
|
ZiFiClient client = (ZiFiClient)loadImplement(type.getName()); |
||||
|
client.init(region, apiKey, apiSecret, companyCode, certPath); |
||||
|
return client; |
||||
|
} |
||||
|
} |
||||
@ -0,0 +1,21 @@ |
|||||
|
package com.techsor.datacenter.receiver.config.kafka.imp; |
||||
|
|
||||
|
public enum ClientType { |
||||
|
|
||||
|
KAFKA("com.techsor.datacenter.receiver.config.kafka.imp.KafkaZiFiClient"); |
||||
|
|
||||
|
ClientType(String name) { |
||||
|
this.name = name; |
||||
|
} |
||||
|
|
||||
|
private String name; |
||||
|
|
||||
|
public String getName() { |
||||
|
return name; |
||||
|
} |
||||
|
|
||||
|
public void setName(String name) { |
||||
|
this.name = name; |
||||
|
} |
||||
|
|
||||
|
} |
||||
@ -0,0 +1,25 @@ |
|||||
|
package com.techsor.datacenter.receiver.config.kafka.imp; |
||||
|
|
||||
|
import java.util.HashMap; |
||||
|
import java.util.Map; |
||||
|
|
||||
|
import org.apache.kafka.clients.consumer.ConsumerRecord; |
||||
|
|
||||
|
import com.techsor.datacenter.receiver.config.kafka.model.Message; |
||||
|
|
||||
|
public class KafkaMessage extends Message { |
||||
|
|
||||
|
private static final String TOPIC_NAME = "topicName"; |
||||
|
private static final String PARTITION = "partition"; |
||||
|
private static final String TIME = "time"; |
||||
|
|
||||
|
public KafkaMessage(ConsumerRecord<String, String> record) { |
||||
|
setBody(record.value()); |
||||
|
setMessageId(String.valueOf(record.offset())); |
||||
|
Map<String,String> head = new HashMap<>(); |
||||
|
head.put(TOPIC_NAME, record.topic()); |
||||
|
head.put(PARTITION, String.valueOf(record.partition())); |
||||
|
head.put(TIME, String.valueOf(record.timestamp())); |
setHeaders(head); |
||||
|
} |
||||
|
|
||||
|
} |
||||
@ -0,0 +1,138 @@ |
|||||
|
package com.techsor.datacenter.receiver.config.kafka.imp; |
||||
|
|
||||
|
import com.techsor.datacenter.receiver.config.kafka.RegionEnum; |
||||
|
import com.techsor.datacenter.receiver.config.kafka.ZiFiClient; |
||||
|
import com.techsor.datacenter.receiver.config.kafka.model.Message; |
||||
|
import org.apache.kafka.clients.consumer.ConsumerConfig; |
||||
|
import org.apache.kafka.clients.consumer.ConsumerRecord; |
||||
|
import org.apache.kafka.clients.consumer.ConsumerRecords; |
||||
|
import org.apache.kafka.clients.consumer.KafkaConsumer; |
||||
|
import org.slf4j.Logger; |
||||
|
import org.slf4j.LoggerFactory; |
||||
|
|
||||
|
import java.io.File; |
||||
|
import java.io.FileOutputStream; |
||||
|
import java.io.InputStream; |
||||
|
import java.nio.file.Files; |
||||
|
import java.time.Duration; |
||||
|
import java.util.ArrayList; |
||||
|
import java.util.Collection; |
||||
|
import java.util.Properties; |
||||
|
import java.util.regex.Pattern; |
||||
|
|
||||
|
public class KafkaZiFiClient extends ZiFiClient { |
||||
|
|
||||
|
private static final Logger logger = LoggerFactory.getLogger(KafkaZiFiClient.class); |
||||
|
|
||||
|
private KafkaConsumer<String, String> consumer; |
||||
|
private static final Pattern TOPIC_PATTERN_V2 = Pattern.compile(".*-v2"); |
||||
|
|
||||
|
private KafkaConsumer<String, String> createConsumer(String url, String apiKey, String apiSecret, String certPath) { |
||||
|
Properties props = new Properties(); |
||||
|
props.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, url); |
||||
|
props.setProperty(ConsumerConfig.GROUP_ID_CONFIG, apiKey); |
||||
|
props.setProperty(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "false"); |
||||
|
props.setProperty(ConsumerConfig.METADATA_MAX_AGE_CONFIG, "60000"); |
||||
|
props.setProperty(ConsumerConfig.MAX_POLL_INTERVAL_MS_CONFIG, "300000"); |
||||
|
props.setProperty(ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG, "20000"); |
||||
|
props.setProperty("security.protocol", "SASL_SSL"); |
||||
|
props.setProperty("ssl.truststore.location", certPath); |
||||
|
props.setProperty("ssl.truststore.password", "zifisense"); |
||||
|
props.setProperty("sasl.mechanism", "PLAIN"); |
||||
|
props.setProperty("sasl.jaas.config", "org.apache.kafka.common.security.plain.PlainLoginModule required username=\"" + apiKey + "\" password=\"" + apiSecret + "\";"); |
||||
|
props.setProperty("ssl.endpoint.identification.algorithm", ""); |
||||
|
props.setProperty(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringDeserializer"); |
||||
|
props.setProperty(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringDeserializer"); |
||||
|
return new KafkaConsumer<>(props); |
||||
|
} |
||||
|
|
||||
|
@Override |
||||
|
public void init(RegionEnum region, String apiKey, String apiSecret, String companyCode) { |
||||
|
this.init(region, apiKey, apiSecret, companyCode, getFilePath()); |
||||
|
} |
||||
|
|
||||
|
@Override |
||||
|
public void init(String host, String apiKey, String apiSecret, String companyCode) { |
||||
|
this.init(host, apiKey, apiSecret, companyCode, getFilePath()); |
||||
|
} |
||||
|
|
||||
|
@Override |
||||
|
public void init(String host, String apiKey, String apiSecret, String companyCode, String certPath) { |
||||
|
this.consumer = createConsumer(host, apiKey, apiSecret, certPath); |
||||
|
} |
||||
|
|
||||
|
@Override |
||||
|
public void init(RegionEnum region, String apiKey, String apiSecret, String companyCode, String certPath) { |
||||
|
this.consumer = createConsumer(region.getUrl(), apiKey, apiSecret, certPath); |
||||
|
} |
||||
|
|
||||
|
public String getFilePath() { |
||||
|
// String path = KafkaZiFiClient.class.getClassLoader().getResource("certificate/client.truststore.jks").getPath();
|
||||
|
// if (path.startsWith("/")) {
|
||||
|
// return path.substring(1);
|
||||
|
// } else if (path.startsWith("file:/")) {
|
||||
|
// return path.substring(6);
|
||||
|
// } else {
|
||||
|
// return path;
|
||||
|
// }
|
||||
|
try { |
||||
|
// Get the certificate file stream from inside the JAR
|
||||
|
InputStream inputStream = KafkaZiFiClient.class.getClassLoader().getResourceAsStream("certificate/client.truststore.jks"); |
||||
|
if (inputStream == null) { |
||||
|
throw new RuntimeException("Certificate file not found!"); |
||||
|
} |
||||
|
|
||||
|
// 创建临时文件
|
||||
|
File tempFile = Files.createTempFile("client.truststore", ".jks").toFile(); |
||||
|
tempFile.deleteOnExit(); // Deleted automatically when the JVM exits
|
||||
|
|
||||
|
// Write the certificate stream into the temporary file
|
||||
|
try (FileOutputStream outputStream = new FileOutputStream(tempFile)) { |
||||
|
byte[] buffer = new byte[1024]; // copy buffer
|
||||
|
int bytesRead; |
||||
|
while ((bytesRead = inputStream.read(buffer)) != -1) { |
||||
|
outputStream.write(buffer, 0, bytesRead); |
||||
|
} |
||||
|
} |
||||
|
|
||||
|
// Return the absolute path of the temporary file
|
||||
|
logger.info("truststore path:{}", tempFile.getAbsolutePath()); |
||||
|
return tempFile.getAbsolutePath(); |
||||
|
|
||||
|
} catch (Exception e) { |
||||
|
throw new RuntimeException("Failed to extract certificate file", e); |
||||
|
} |
||||
|
} |
||||
|
|
||||
|
/** |
||||
|
* Subscribes to Kafka v1 interface data; the v1 interface distinguishes data by topic. |
||||
|
* |
||||
|
* @deprecated This method cannot receive v2 interface data; use {@link #subscribe()} for the v2 interface. |
||||
|
*/ |
||||
|
@Deprecated |
||||
|
@Override |
||||
|
public void subscribe(String topic) { |
||||
|
consumer.subscribe(Pattern.compile(".*-v1-" + topic)); |
||||
|
} |
||||
|
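//Example of the two subscription modes (topic name is illustrative): subscribe("zetag-heartbeat-all") matches |
//topics ending in "-v1-zetag-heartbeat-all", while subscribe() matches every topic ending in "-v2". |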
|
||||
|
@Override |
||||
|
public void subscribe() { |
||||
|
consumer.subscribe(TOPIC_PATTERN_V2); |
||||
|
} |
||||
|
|
||||
|
@Override |
||||
|
public Collection<Message> poll() { |
||||
|
Collection<Message> messages = new ArrayList<Message>(); |
||||
|
ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(100)); |
||||
|
for (ConsumerRecord<String, String> record : records) { |
||||
|
messages.add(new KafkaMessage(record)); |
||||
|
} |
||||
|
return messages; |
||||
|
} |
||||
|
|
||||
|
@Override |
||||
|
public void commit() { |
||||
|
consumer.commitSync(); |
||||
|
} |
||||
|
|
||||
|
} |
||||
@ -0,0 +1,38 @@ |
|||||
|
package com.techsor.datacenter.receiver.config.kafka.model; |
||||
|
|
||||
|
import java.util.Map; |
||||
|
|
||||
|
public class Message { |
||||
|
|
||||
|
private String body; |
||||
|
|
||||
|
private Map<String,String> headers; |
||||
|
|
||||
|
private String messageId; |
||||
|
|
||||
|
|
||||
|
public String getMessageId() { |
||||
|
return messageId; |
||||
|
} |
||||
|
|
||||
|
public void setMessageId(String messageId) { |
||||
|
this.messageId = messageId; |
||||
|
} |
||||
|
|
||||
|
public String getBody() { |
||||
|
return body; |
||||
|
} |
||||
|
|
||||
|
public void setBody(String body) { |
||||
|
this.body = body; |
||||
|
} |
||||
|
|
||||
|
public Map<String, String> getHeaders() { |
||||
|
return headers; |
||||
|
} |
||||
|
|
||||
|
public void setHeaders(Map<String, String> headers) { |
||||
|
this.headers = headers; |
||||
|
} |
||||
|
|
||||
|
} |
||||
@ -0,0 +1,10 @@ |
|||||
|
package com.techsor.datacenter.receiver.constants; |
||||
|
|
||||
|
public class CodeConstants { |
||||
|
|
||||
|
public static int CODE_SUCCESS=200; |
||||
|
public static int CODE_SERVER_ERROR=500; |
||||
|
public static int CODE_WRONG_VERIFYCODE=601; |
||||
|
public static int CODE_USER_NOT_REGISTERED=602; |
||||
|
|
||||
|
} |
||||
@ -0,0 +1,11 @@ |
|||||
|
package com.techsor.datacenter.receiver.constants; |
||||
|
|
||||
|
public class CompanyConstants { |
||||
|
public final static String ZIFISENSE="zifisense"; |
||||
|
public final static String NBI="nbi"; |
||||
|
public final static String OVIPHONE="oviphone"; |
||||
|
public final static String NITTAN="nittan"; |
||||
|
public final static String OCR="ocr"; |
||||
|
public final static String DELTA="delta"; |
||||
|
public final static String METCOM="metcom"; |
||||
|
} |
||||
@ -0,0 +1,19 @@ |
|||||
|
package com.techsor.datacenter.receiver.constants; |
||||
|
|
||||
|
public class MsgConstants { |
||||
|
|
||||
|
public static String MSG_SERVER_ERROR="服务器问题,请联系管理员"; |
||||
|
public static String MSG_WRONG_VERIFYCODE="验证码错误"; |
||||
|
public static String MSG_USER_NOT_REGISTERED="用户未注册,请注册"; |
||||
|
|
||||
|
|
||||
|
public static String REDIS_COMMAND_NOTIFICATION_TYPE="notification"; |
||||
|
|
||||
|
|
||||
|
public static String REIDS_COMMAND_SWITCH_TYPE="switch"; |
||||
|
|
||||
|
public static final String SWITCH_STATUS_PREFIX="switch:stats:status"; |
||||
|
|
||||
|
|
||||
|
public static final String REDIS_COMMAND_STAT_EVENT="startStat"; |
||||
|
} |
||||
@ -0,0 +1,7 @@ |
|||||
|
package com.techsor.datacenter.receiver.constants; |
||||
|
|
||||
|
|
||||
|
|
||||
|
public class UrlConstants { |
||||
|
public static String RECEIVER_URL="http://localhost:8201/v1/main_receiver"; |
||||
|
} |
||||
@ -0,0 +1,43 @@ |
|||||
|
package com.techsor.datacenter.receiver.controller; |
||||
|
|
||||
|
import com.alibaba.fastjson2.JSON; |
||||
|
import org.springframework.core.io.ClassPathResource; |
||||
|
import org.springframework.core.io.Resource; |
||||
|
import org.springframework.web.bind.annotation.GetMapping; |
||||
|
import org.springframework.web.bind.annotation.RestController; |
||||
|
|
||||
|
import java.io.IOException; |
||||
|
import java.lang.management.ManagementFactory; |
||||
|
import java.lang.management.RuntimeMXBean; |
||||
|
import java.util.HashMap; |
||||
|
import java.util.Map; |
||||
|
import java.util.Properties; |
||||
|
|
||||
|
@RestController |
||||
|
public class HealthController { |
||||
|
@GetMapping("/healthcheck") |
||||
|
public String healthcheck(){ |
||||
|
return "ok"; |
||||
|
} |
||||
|
|
||||
|
|
||||
|
@GetMapping("/health") |
||||
|
public String health(){ |
||||
|
RuntimeMXBean runtimeMXBean = ManagementFactory.getRuntimeMXBean(); |
||||
|
long startTime = runtimeMXBean.getStartTime(); |
||||
|
Map<String,String> resultMap=new HashMap<>(); |
||||
|
resultMap.put("status","UP"); |
||||
|
resultMap.put("startUpDate",String.valueOf(startTime)); |
||||
|
return JSON.toJSONString(resultMap); |
||||
|
} |
||||
|
|
||||
|
|
||||
|
@GetMapping("/version") |
||||
|
public String getVersion() throws IOException { |
||||
|
Properties properties = new Properties(); |
||||
|
Resource currentResource = new ClassPathResource("git.properties"); |
||||
|
properties.load(currentResource.getInputStream()); |
||||
|
String version=properties.getProperty("git.commit.id.abbrev"); |
||||
|
return version; |
||||
|
} |
||||
|
} |
||||
@ -0,0 +1,43 @@ |
|||||
|
package com.techsor.datacenter.receiver.controller; |
||||
|
|
||||
|
|
||||
|
import com.google.common.collect.Maps; |
||||
|
import com.techsor.datacenter.receiver.service.IDataReceiveService; |
||||
|
import org.apache.commons.collections.MapUtils; |
||||
|
import org.springframework.core.task.TaskExecutor; |
||||
|
import org.springframework.web.bind.annotation.*; |
||||
|
|
||||
|
import jakarta.annotation.Resource; |
||||
|
import java.util.List; |
||||
|
import java.util.Map; |
||||
|
|
||||
|
|
||||
|
/** |
||||
|
* Notification receiver, used to trigger ingestion of data from the local MQTT sink |
||||
|
* **/ |
||||
|
@RequestMapping("/notification") |
||||
|
@RestController |
||||
|
public class NotificationSinkController { |
||||
|
|
||||
|
|
||||
|
@Resource |
||||
|
private IDataReceiveService dataReceiveService; |
||||
|
|
||||
|
|
||||
|
@Resource(name="threadPoolTaskExecutor") |
||||
|
private TaskExecutor threadPoolTaskExecutor; |
||||
|
/** |
||||
|
* Notification endpoint exposed to Lambda, used to trigger data-source listening |
||||
|
* */ |
||||
|
@PostMapping("/receive") |
||||
|
public Object receiveNotification(@RequestParam(name="state") String state, |
||||
|
@RequestBody List<Long> params){ |
||||
|
|
||||
|
this.threadPoolTaskExecutor.execute(() -> dataReceiveService.receiveCurrentDataSrc(state,params)); |
||||
|
|
||||
|
|
||||
|
Map resultMap=Maps.newHashMap(); |
||||
|
resultMap.put("result",state); |
||||
|
return resultMap; |
||||
|
} |
||||
|
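//Illustrative call (values are examples only): |
//  POST /notification/receive?state=0   with JSON body [1, 2, 3] |
//The IDs in the body are handed to dataReceiveService on the injected thread pool, so the endpoint returns immediately. |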
} |
||||
@ -0,0 +1,32 @@ |
|||||
|
package com.techsor.datacenter.receiver.controller; |
||||
|
|
||||
|
|
||||
|
import com.google.gson.Gson; |
||||
|
import com.techsor.datacenter.receiver.service.DataTransService; |
||||
|
import org.springframework.web.bind.annotation.*; |
||||
|
|
||||
|
import jakarta.annotation.Resource; |
||||
|
import java.util.HashMap; |
||||
|
import java.util.Map; |
||||
|
|
||||
|
@RequestMapping("/api/generic") |
||||
|
@RestController |
||||
|
public class ReceiverController { |
||||
|
|
||||
|
@Resource |
||||
|
private DataTransService dataTransService; |
||||
|
|
||||
|
|
||||
|
//TODO
|
||||
|
@PostMapping("/process") |
||||
|
public Object process(@RequestParam("MyDeviceId")String deviceId,@RequestBody(required = false) Object params){ |
||||
|
//TODO
|
||||
|
Gson gson=new Gson(); |
||||
|
String json=gson.toJson(params); |
||||
|
this.dataTransService.transferData(deviceId,json); |
||||
|
Map<String,String> resultMap=new HashMap<>(); |
||||
|
resultMap.put("code","0"); |
||||
|
resultMap.put("msg","success"); |
||||
|
return resultMap; |
||||
|
} |
||||
|
} |
||||
@ -0,0 +1,25 @@ |
|||||
|
package com.techsor.datacenter.receiver.entity.common; |
||||
|
|
||||
|
import com.amazonaws.services.dynamodbv2.datamodeling.DynamoDBAutoGeneratedKey; |
||||
|
import com.amazonaws.services.dynamodbv2.datamodeling.DynamoDBHashKey; |
||||
|
import com.amazonaws.services.dynamodbv2.datamodeling.DynamoDBTable; |
||||
|
import lombok.Data; |
||||
|
|
||||
|
import java.util.UUID; |
||||
|
|
||||
|
|
||||
|
@Data |
||||
|
public class BaseTransDataEntity { |
||||
|
|
||||
|
@DynamoDBHashKey |
||||
|
@DynamoDBAutoGeneratedKey() |
||||
|
private UUID hashId; |
||||
|
|
||||
|
private Integer id; |
||||
|
private String content; |
||||
|
private String ts; |
||||
|
private String company; //zifisense: ZiFiSense, nbi: Nongbo (NBI), oviphone: oviphone
|
||||
|
|
||||
|
private String dateKey; //Date, format: yyyy-MM-dd
|
||||
|
|
||||
|
} |
||||
@ -0,0 +1,15 @@ |
|||||
|
package com.techsor.datacenter.receiver.entity.common; |
||||
|
|
||||
|
|
||||
|
import lombok.Data; |
||||
|
|
||||
|
@Data |
||||
|
public class CommonTransDataEntity { |
||||
|
|
||||
|
private Integer id; |
||||
|
private String content; |
||||
|
private String ts; |
||||
|
private String dataSrcCode; //Data source code
|
||||
|
|
||||
|
|
||||
|
} |
||||
@ -0,0 +1,57 @@ |
|||||
|
package com.techsor.datacenter.receiver.entity.common; |
||||
|
|
||||
|
public class JsonResponse { |
||||
|
private int code; |
||||
|
private Object data; |
||||
|
private String msg; |
||||
|
|
||||
|
public JsonResponse() { |
||||
|
} |
||||
|
|
||||
|
public JsonResponse(int code, Object data) { |
||||
|
this.code = code; |
||||
|
this.data = data; |
||||
|
} |
||||
|
|
||||
|
public JsonResponse(int code, Object data, String msg) { |
||||
|
this.code = code; |
||||
|
this.data = data; |
||||
|
this.msg = msg; |
||||
|
} |
||||
|
|
||||
|
public static JsonResponse buildSuccess(Object data) { |
||||
|
return new JsonResponse(200, data); |
||||
|
} |
||||
|
|
||||
|
public static JsonResponse buildError(String msg) { |
||||
|
return new JsonResponse(400, "", msg); |
||||
|
} |
||||
|
|
||||
|
public static JsonResponse buildError(int code, String msg) { |
||||
|
return new JsonResponse(code, "", msg); |
||||
|
} |
||||
|
|
||||
|
public int getCode() { |
||||
|
return code; |
||||
|
} |
||||
|
|
||||
|
public void setCode(int code) { |
||||
|
this.code = code; |
||||
|
} |
||||
|
|
||||
|
public Object getData() { |
||||
|
return data; |
||||
|
} |
||||
|
|
||||
|
public void setData(Object data) { |
||||
|
this.data = data; |
||||
|
} |
||||
|
|
||||
|
public String getMsg() { |
||||
|
return msg; |
||||
|
} |
||||
|
|
||||
|
public void setMsg(String msg) { |
||||
|
this.msg = msg; |
||||
|
} |
||||
|
} |
||||
@ -0,0 +1,16 @@ |
|||||
|
package com.techsor.datacenter.receiver.entity.common; |
||||
|
|
||||
|
import lombok.Data; |
||||
|
|
||||
|
@Data |
||||
|
public class KafkaClientConfig { |
||||
|
|
||||
|
private String host; |
||||
|
|
||||
|
private String apiKey; |
||||
|
|
||||
|
private String apiSecret; |
||||
|
|
||||
|
private String companyCode; |
||||
|
|
||||
|
} |
||||
@ -0,0 +1,37 @@ |
|||||
|
package com.techsor.datacenter.receiver.entity.common; |
||||
|
|
||||
|
import lombok.Data; |
||||
|
|
||||
|
import java.util.Map; |
||||
|
|
||||
|
@Data |
||||
|
public class KafkaZetaMessage { |
||||
|
private Payload payload; |
||||
|
private Source source; |
||||
|
|
||||
|
@lombok.Data |
||||
|
public static class Payload { |
||||
|
private Data data; |
||||
|
private String identify; |
||||
|
} |
||||
|
|
||||
|
@lombok.Data |
||||
|
public static class Data { |
||||
|
private long upTime; |
||||
|
private long apTime; |
||||
|
private String parsedData; // JSON string, can be parsed into ParsedData object
|
||||
|
private String apUid; |
||||
|
private String rawData; |
||||
|
private String msUid; |
||||
|
} |
||||
|
|
||||
|
@lombok.Data |
||||
|
public static class Source { |
||||
|
private long createTime; |
||||
|
private long pushTime; |
||||
|
private String topicName; |
||||
|
} |
||||
|
|
||||
|
|
||||
|
} |
||||
|
|
||||
@ -0,0 +1,37 @@ |
|||||
|
package com.techsor.datacenter.receiver.entity.common; |
||||
|
|
||||
|
import lombok.Data; |
||||
|
|
||||
|
import java.util.List; |
||||
|
|
||||
|
@Data |
||||
|
public class MqttClientConfig { |
||||
|
|
||||
|
private String url; |
||||
|
|
||||
|
private String clientId; |
||||
|
|
||||
|
private String username; |
||||
|
|
||||
|
private String password; |
||||
|
|
||||
|
private String topic; |
||||
|
|
||||
|
private boolean sslState; |
||||
|
|
||||
|
private Integer connectionTimeout; |
||||
|
|
||||
|
|
||||
|
private String description; |
||||
|
|
||||
|
//1
|
||||
|
private String caPath; |
||||
|
|
||||
|
//2
|
||||
|
private String clientCrtPath; |
||||
|
|
||||
|
//3
|
||||
|
private String clientKeyPath; |
||||
|
|
||||
|
private Integer Qos; |
||||
|
} |
||||
@ -0,0 +1,14 @@ |
|||||
|
package com.techsor.datacenter.receiver.entity.common; |
||||
|
|
||||
|
import lombok.Data; |
||||
|
import org.springframework.context.annotation.Bean; |
||||
|
|
||||
|
@Data |
||||
|
public class MqttPublisherEntity { |
||||
|
private Integer id; |
||||
|
private String host; |
||||
|
private String username; |
||||
|
private String password; |
||||
|
private String topic; |
||||
|
private String description; |
||||
|
} |
||||
@ -0,0 +1,9 @@ |
|||||
|
package com.techsor.datacenter.receiver.entity.common; |
||||
|
|
||||
|
import lombok.Data; |
||||
|
|
||||
|
@Data |
||||
|
public class MqttStartStatusEntity { |
||||
|
private String HOST; |
||||
|
private String status; |
||||
|
} |
||||
@ -0,0 +1,12 @@ |
|||||
|
package com.techsor.datacenter.receiver.entity.common; |
||||
|
|
||||
|
import lombok.Data; |
||||
|
|
||||
|
import java.util.List; |
||||
|
|
||||
|
@Data |
||||
|
public class ResponseEntity { |
||||
|
private int code; |
||||
|
private String msg; |
||||
|
private List<Object> data; |
||||
|
} |
||||
@ -0,0 +1,15 @@ |
|||||
|
package com.techsor.datacenter.receiver.entity.common; |
||||
|
|
||||
|
|
||||
|
import lombok.Data; |
||||
|
|
||||
|
@Data |
||||
|
public class ZAIotTransDataEntity { |
||||
|
|
||||
|
private String content; |
||||
|
private String ts; |
||||
|
private String deviceId; |
||||
|
private String identify; |
||||
|
|
||||
|
|
||||
|
} |
||||
@ -0,0 +1,33 @@ |
|||||
|
package com.techsor.datacenter.receiver.entity.company; |
||||
|
|
||||
|
|
||||
|
import lombok.Data; |
||||
|
|
||||
|
/** |
||||
|
* create table basic_company |
||||
|
* ( |
||||
|
* id bigint auto_increment |
||||
|
* primary key, |
||||
|
* parent_id bigint null comment 'Parent company ID', |
||||
|
* company_name varchar(500) charset utf8mb4 null, |
||||
|
* flag int default 0 null comment '0 - active, 1 - deleted', |
||||
|
* create_time bigint null, |
||||
|
* creator_id bigint null, |
||||
|
* modify_time bigint null, |
||||
|
* modifier_id bigint null |
||||
|
* ); |
||||
|
* |
||||
|
* **/ |
||||
|
@Data |
||||
|
public class CompanyEntity { |
||||
|
|
||||
|
private Long id; |
||||
|
|
||||
|
private Long parentId; |
||||
|
|
||||
|
private String companyName; |
||||
|
|
||||
|
private Integer flag; |
||||
|
|
||||
|
|
||||
|
} |
||||
@ -0,0 +1,46 @@ |
|||||
|
package com.techsor.datacenter.receiver.entity.datasource; |
||||
|
|
||||
|
import lombok.Data; |
||||
|
|
||||
|
/** |
||||
|
* create table data_src_config |
||||
|
* ( |
||||
|
* id bigint auto_increment |
||||
|
* primary key, |
||||
|
* code varchar(100) null comment 'Data source identification code', |
||||
|
* api_key varchar(100) null comment 'api_key', |
||||
|
* name varchar(100) null comment 'Data source name', |
||||
|
* method_type varchar(2) null comment 'Source method (0: MQTT; 1: RESTful)', |
||||
|
* params varchar(1000) null comment 'Connection parameters', |
||||
|
* state varchar(2) null comment 'State (0: active; 1: frozen)', |
||||
|
* created_by bigint null, |
||||
|
* created_timestamp timestamp null, |
||||
|
* updated_by bigint null, |
||||
|
* updated_timestamp timestamp null, |
||||
|
* company_id bigint not null |
||||
|
* ) |
||||
|
* comment 'Data source configuration table'; |
||||
|
* */ |
||||
|
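//Note: besides the method_type values in the DDL comment above, the startup listener also handles method_type = 2 (Kafka). |
//The params column holds a JSON blob deserialized into MqttClientConfig or KafkaClientConfig, for example (illustrative only): |
//  {"url":"ssl://broker.example.com:8883","clientId":"receiver-1","topic":"sensors/#","sslState":true} |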
|
||||
|
@Data |
||||
|
public class DatasourceConfigEntity { |
||||
|
|
||||
|
private Long id; |
||||
|
|
||||
|
private String code; |
||||
|
|
||||
|
private String apiKey; |
||||
|
|
||||
|
private String name; |
||||
|
|
||||
|
private String methodType; |
||||
|
|
||||
|
private String params; |
||||
|
|
||||
|
private String state; |
||||
|
|
||||
|
private Long companyId; |
||||
|
|
||||
|
private String deviceIdPosition; |
||||
|
|
||||
|
} |
||||
@ -0,0 +1,12 @@ |
|||||
|
package com.techsor.datacenter.receiver.entity.datasource; |
||||
|
|
||||
|
import lombok.Data; |
||||
|
|
||||
|
@Data |
||||
|
public class RestfulParams { |
||||
|
|
||||
|
private String url; |
||||
|
|
||||
|
|
||||
|
private Object params; |
||||
|
} |
||||
@ -0,0 +1,23 @@ |
|||||
|
package com.techsor.datacenter.receiver.entity.dto; |
||||
|
|
||||
|
import lombok.Data; |
||||
|
|
||||
|
import java.util.List; |
||||
|
|
||||
|
/**** |
||||
|
* Command message; commandType indicates the type |
||||
|
* commandType=='notification': a notification to handle MQTT-related information |
||||
|
* commandType=='switch': a notification to toggle the Redis switch; value is on or off |
||||
|
* ***/ |
||||
|
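//Example payloads matching the description above (values are illustrative only): |
//  {"commandType":"notification","state":"0","params":[101,102]} |
//  {"commandType":"switch","value":"on"} |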
@Data |
||||
|
public class RedisCommandReceiverDTO { |
||||
|
|
||||
|
|
||||
|
private String commandType; |
||||
|
|
||||
|
private String value; |
||||
|
|
||||
|
private String state; |
||||
|
|
||||
|
private List<Long> params; |
||||
|
} |
||||
@ -0,0 +1,24 @@ |
|||||
|
package com.techsor.datacenter.receiver.entity.metcom; |
||||
|
|
||||
|
import lombok.Data; |
||||
|
|
||||
|
@Data |
||||
|
//{
//  "measured" : [measurement time (unix timestamp)],
//  "spaceId" : "[DBM space code]",
//  "latitude" : [latitude (deg)],
//  "longitude" : [longitude (deg)],
//  "floor" : [floor number],
//  "hae" : [height above ellipsoid (m)],
//  "hat" : [height above ground (m)]
//}
|
||||
|
public class MetcomEntity { |
||||
|
private String uuid; |
||||
|
private Long measured; |
||||
|
private String spaceId; |
||||
|
private double latitude; |
||||
|
private double longitude; |
||||
|
private int floor; |
||||
|
private float hae; |
||||
|
private float hat; |
||||
|
} |
||||
@ -0,0 +1,13 @@ |
|||||
|
package com.techsor.datacenter.receiver.entity.nittan; |
||||
|
|
||||
|
import lombok.Data; |
||||
|
|
||||
|
@Data |
||||
|
public class AuthEntity{ |
||||
|
private String subscription_id; |
||||
|
private String signature; |
||||
|
private String code; |
||||
|
private String version; |
||||
|
private Integer timestamp; |
||||
|
|
||||
|
} |
||||
@ -0,0 +1,9 @@ |
|||||
|
package com.techsor.datacenter.receiver.entity.nittan; |
||||
|
|
||||
|
import lombok.Data; |
||||
|
|
||||
|
@Data |
||||
|
public class DeviceInfoEntity{ |
||||
|
private String device_name; |
||||
|
private String device_class; |
||||
|
} |
||||
@ -0,0 +1,10 @@ |
|||||
|
package com.techsor.datacenter.receiver.entity.nittan; |
||||
|
|
||||
|
import lombok.Data; |
||||
|
|
||||
|
@Data |
||||
|
public class FacilityInfoEntity{ |
||||
|
private String facility_id; |
||||
|
private String facility_maker; |
||||
|
private String facility_mode; |
||||
|
} |
||||
@ -0,0 +1,8 @@ |
|||||
|
package com.techsor.datacenter.receiver.entity.nittan; |
||||
|
|
||||
|
import lombok.Data; |
||||
|
|
||||
|
@Data |
||||
|
public class LocationInfoEntity{ |
||||
|
private String location_address; |
||||
|
} |
||||
@ -0,0 +1,20 @@ |
|||||
|
package com.techsor.datacenter.receiver.entity.nittan; |
||||
|
|
||||
|
import lombok.Data; |
||||
|
|
||||
|
import java.util.List; |
||||
|
|
||||
|
@Data |
||||
|
public class NittanEntity { |
||||
|
private AuthEntity auth; |
||||
|
private String cmd; |
||||
|
private FacilityInfoEntity facility_info; |
||||
|
private List<NoticeInfoEntity> notice_info; |
||||
|
} |
||||
|
|
||||
|
|
||||
|
|
||||
|
|
||||
|
|
||||
|
|
||||
|
|
||||
@ -0,0 +1,12 @@ |
|||||
|
package com.techsor.datacenter.receiver.entity.nittan; |
||||
|
|
||||
|
import lombok.Data; |
||||
|
|
||||
|
@Data |
||||
|
public class NoticeInfoEntity { |
||||
|
private String notice_type; |
||||
|
private String notice_level; |
||||
|
private String notice_status; |
||||
|
private LocationInfoEntity location_info; |
||||
|
private DeviceInfoEntity device_info; |
||||
|
} |
||||
@ -0,0 +1,113 @@ |
|||||
|
package com.techsor.datacenter.receiver.listener; |
||||
|
|
||||
|
import com.google.gson.Gson; |
||||
|
import com.techsor.datacenter.receiver.config.datasource.DataSourceContextHolder; |
||||
|
import com.techsor.datacenter.receiver.entity.common.KafkaClientConfig; |
||||
|
import com.techsor.datacenter.receiver.entity.common.MqttClientConfig; |
||||
|
import com.techsor.datacenter.receiver.entity.company.CompanyEntity; |
||||
|
import com.techsor.datacenter.receiver.entity.datasource.DatasourceConfigEntity; |
||||
|
import com.techsor.datacenter.receiver.service.GlobalStateService; |
||||
|
import com.techsor.datacenter.receiver.service.IDataSourceConfigService; |
||||
|
import com.techsor.datacenter.receiver.service.IMQTTService; |
||||
|
import com.techsor.datacenter.receiver.service.ZetaKafkaService; |
||||
|
|
||||
|
import lombok.extern.slf4j.Slf4j; |
||||
|
import org.apache.commons.lang3.StringUtils; |
||||
|
import org.slf4j.Logger; |
||||
|
import org.slf4j.LoggerFactory; |
||||
|
import org.springframework.beans.factory.annotation.Value; |
||||
|
import org.springframework.boot.context.event.ApplicationStartedEvent; |
||||
|
import org.springframework.context.ApplicationListener; |
||||
|
import org.springframework.stereotype.Component; |
||||
|
|
||||
|
import jakarta.annotation.Resource; |
||||
|
import java.util.ArrayList; |
||||
|
import java.util.List; |
||||
|
|
||||
|
|
||||
|
@Slf4j |
||||
|
@Component |
||||
|
public class DataSourceConfigListener implements ApplicationListener<ApplicationStartedEvent> { |
||||
|
private static final Logger logger= LoggerFactory.getLogger(DataSourceConfigListener.class); |
||||
|
@Resource |
||||
|
private GlobalStateService globalStateService; |
||||
|
@Resource |
||||
|
private IMQTTService mqttService; |
||||
|
@Resource |
||||
|
private IDataSourceConfigService dataSourceConfigService; |
||||
|
@Resource |
||||
|
private ZetaKafkaService zetaKafkaService; |
||||
|
|
||||
|
@Value("${base.companyId}") |
||||
|
private Integer baseCompanyId; |
||||
|
|
||||
|
/** |
||||
|
* Application startup event listener. Invoked automatically after the application has started; starts or stops data-receiving clients according to the data source configurations stored in the database. |
||||
|
* |
||||
|
* @param event the application started event. |
||||
|
*/ |
||||
|
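//Sketch of the dispatch implemented below: state "0" starts a source, any other state stops it; |
//method_type "0" -> MQTT client, "1" -> HTTP (registered with GlobalStateService), "2" -> Kafka client. |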
@Override |
||||
|
public void onApplicationEvent(ApplicationStartedEvent event) { |
||||
|
if (!(event instanceof ApplicationStartedEvent)){ |
||||
|
return; |
||||
|
} |
||||
|
|
||||
|
// Clearing data source to use the default one.
|
||||
|
DataSourceContextHolder.clearCurrentDataSourceKey(); |
||||
|
|
||||
|
|
||||
|
//Get companyId and parentId
|
||||
|
List<CompanyEntity> companyEntityList=this.dataSourceConfigService.getRelatedTopCompany(baseCompanyId.toString()); |
||||
|
List<Long> companyIdList = new ArrayList<>(); |
||||
|
for (CompanyEntity companyItem : companyEntityList){ |
||||
|
companyIdList.add(companyItem.getId()); |
||||
|
if (companyItem.getParentId()!=-1){ |
||||
|
companyIdList.add(companyItem.getParentId()); |
||||
|
} |
||||
|
} |
||||
|
|
||||
|
DataSourceContextHolder.clearCurrentDataSourceKey(); |
||||
|
|
||||
|
companyIdList.forEach(iter-> { |
||||
|
logger.info("Not Error, debug Log:dataSourceForCompany" + iter); |
||||
|
DataSourceContextHolder.setCurrentDataSourceKey("dataSourceForCompany_" + iter); |
||||
|
List<DatasourceConfigEntity> resultList = this.dataSourceConfigService.getDataSourceConfigWithState(); |
||||
|
logger.info("All datasource:"+new Gson().toJson(resultList)); |
||||
|
for (int i = 0; i < resultList.size(); i++) { |
||||
|
DatasourceConfigEntity item = resultList.get(i); |
||||
|
// Check the data source state; "0" means this data source should be started.
|
||||
|
if (StringUtils.equals("0", item.getState())) { |
||||
|
//Check the method type: 0 = MQTT
|
||||
|
if (StringUtils.equals("0", item.getMethodType())) { |
||||
|
//mqtt
|
||||
|
Gson gson = new Gson(); |
||||
|
MqttClientConfig mqttClientConfig = gson.fromJson(item.getParams(), MqttClientConfig.class); |
||||
|
this.mqttService.addMQTTConsumerClient(iter,item.getId(), mqttClientConfig, item); |
||||
|
} else if (StringUtils.equals("1",item.getMethodType())) { |
||||
|
// For HTTP sources, register the configuration with the global state service.
|
||||
|
this.globalStateService.addDatasourceConfig(item); |
||||
|
} else if (StringUtils.equals("2",item.getMethodType())) { |
||||
|
//kafka
|
||||
|
Gson gson=new Gson(); |
||||
|
KafkaClientConfig kafkaClientConfig = gson.fromJson(item.getParams(), KafkaClientConfig.class); |
||||
|
this.zetaKafkaService.addKafkaClient(iter, item.getId(), kafkaClientConfig, item); |
||||
|
} |
||||
|
} else { |
||||
|
|
||||
|
if (StringUtils.equals("0", item.getMethodType())) { |
||||
|
//mqtt
|
||||
|
this.mqttService.removeMQTTConsumerClient(item.getId()); |
||||
|
} else { |
||||
|
this.globalStateService.removeDatasourceConfig(item); |
||||
|
} |
||||
|
|
||||
|
} |
||||
|
} |
||||
|
DataSourceContextHolder.clearCurrentDataSourceKey(); |
||||
|
}); |
||||
|
|
||||
|
} |
||||
|
|
||||
|
|
||||
|
|
||||
|
} |
||||
@ -0,0 +1,31 @@ |
|||||
|
package com.techsor.datacenter.receiver.listener; |
||||
|
|
||||
|
import com.techsor.datacenter.receiver.clients.DeltaClientMQTTS; |
||||
|
import org.slf4j.Logger; |
||||
|
import org.slf4j.LoggerFactory; |
||||
|
import org.springframework.boot.context.event.ApplicationStartedEvent; |
||||
|
import org.springframework.context.ApplicationListener; |
||||
|
import org.springframework.stereotype.Component; |
||||
|
|
||||
|
import jakarta.annotation.Resource; |
||||
|
|
||||
|
@Component |
||||
|
public class DeltaMQTTListener implements ApplicationListener<ApplicationStartedEvent> { |
||||
|
private static final Logger logger= LoggerFactory.getLogger(DeltaMQTTListener.class); |
||||
|
|
||||
|
@Resource |
||||
|
private DeltaClientMQTTS deltaClientMQTT; |
||||
|
@Override |
||||
|
public void onApplicationEvent(ApplicationStartedEvent event) { |
||||
|
if (!(event instanceof ApplicationStartedEvent)){ |
||||
|
return; |
||||
|
} |
||||
|
|
||||
|
try { |
||||
|
deltaClientMQTT.start(); |
||||
|
} catch (Exception e) { |
||||
|
logger.error("DeltaClientMQTTS runtime exception",e); |
||||
|
// throw new RuntimeException(e);
|
||||
|
} |
||||
|
} |
||||
|
} |
||||
@ -0,0 +1,63 @@ |
|||||
|
package com.techsor.datacenter.receiver.listener; |
||||
|
|
||||
|
import com.amazonaws.services.dynamodbv2.AmazonDynamoDB; |
||||
|
import com.amazonaws.services.dynamodbv2.datamodeling.DynamoDBMapper; |
||||
|
import com.amazonaws.services.dynamodbv2.datamodeling.DynamoDBQueryExpression; |
||||
|
import com.amazonaws.services.dynamodbv2.model.AttributeValue; |
||||
|
import com.amazonaws.services.dynamodbv2.model.CreateTableRequest; |
||||
|
import com.amazonaws.services.dynamodbv2.model.ProvisionedThroughput; |
||||
|
import com.amazonaws.services.dynamodbv2.model.QueryRequest; |
||||
|
import com.amazonaws.services.dynamodbv2.util.TableUtils; |
||||
|
import com.techsor.datacenter.receiver.entity.common.BaseTransDataEntity; |
||||
|
import org.slf4j.Logger; |
||||
|
import org.slf4j.LoggerFactory; |
||||
|
import org.socialsignin.spring.data.dynamodb.core.DynamoDBTemplate; |
||||
|
import org.springframework.beans.factory.annotation.Value; |
||||
|
import org.springframework.boot.context.event.ApplicationStartedEvent; |
||||
|
import org.springframework.context.ApplicationListener; |
||||
|
import org.springframework.stereotype.Component; |
||||
|
|
||||
|
import jakarta.annotation.Resource; |
||||
|
import java.util.HashMap; |
||||
|
|
||||
|
//@Component
|
||||
|
public class DynamoDBListener implements ApplicationListener<ApplicationStartedEvent> { |
||||
|
private static final Logger logger= LoggerFactory.getLogger(DynamoDBListener.class); |
||||
|
|
||||
|
@Resource |
||||
|
AmazonDynamoDB amazonDynamoDB; |
||||
|
|
||||
|
@Resource |
||||
|
DynamoDBMapper dynamoDBMapper; |
||||
|
@Value("${amazon.dynamodb.tableName:mqtt_history}") |
||||
|
private String dynamoDBTableName; |
||||
|
|
||||
|
|
||||
|
/*** |
||||
|
* Generates the create-table request using DynamoDBMapper#generateCreateTableRequest |
||||
|
* for the BaseTransDataEntity class, specifying read/write throughput. |
||||
|
* Waits until the table becomes active to ensure it is fully created and usable. |
||||
|
* */ |
||||
|
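//Illustrative follow-up (not performed here): once the table is active, an entity could be written with |
//  dynamoDBMapper.save(new BaseTransDataEntity()); |
//ProvisionedThroughput(1L, 1L) below provisions 1 read and 1 write capacity unit. |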
@Override |
||||
|
public void onApplicationEvent(ApplicationStartedEvent event) { |
||||
|
if (!(event instanceof ApplicationStartedEvent)){ |
||||
|
return; |
||||
|
} |
||||
|
logger.info("DynamoDBListner started successfully!!!"); |
||||
|
// Generate the create-table request using DynamoDBMapper's generateCreateTableRequest method
|
||||
|
// for the BaseTransDataEntity class, specifying read/write throughput.
|
||||
|
CreateTableRequest ctr = dynamoDBMapper.generateCreateTableRequest(BaseTransDataEntity.class) |
||||
|
.withProvisionedThroughput(new ProvisionedThroughput(1L, 1L)); |
||||
|
ctr.setTableName(dynamoDBTableName); |
||||
|
TableUtils.createTableIfNotExists(amazonDynamoDB, ctr); |
||||
|
try { |
||||
|
// Wait for the table to become active to ensure it is fully created and ready to use.
|
||||
|
TableUtils.waitUntilActive(amazonDynamoDB, ctr.getTableName()); |
||||
|
} catch (InterruptedException e) { |
||||
|
throw new RuntimeException(e); |
||||
|
} |
||||
|
logger.info("DynamoDBListener createTableIfNotExists successfully!!!"); |
||||
|
|
||||
|
logger.info("DynamoDBListener save successfully!!!"); |
||||
|
} |
||||
|
} |
||||
@ -0,0 +1,60 @@ |
|||||
|
package com.techsor.datacenter.receiver.listener; |
||||
|
|
||||
|
import com.techsor.datacenter.receiver.clients.ITAClientMQTT; |
||||
|
import com.techsor.datacenter.receiver.entity.common.MqttPublisherEntity; |
||||
|
import com.techsor.datacenter.receiver.entity.common.MqttStartStatusEntity; |
||||
|
import com.techsor.datacenter.receiver.service.MqttPublisherService; |
||||
|
import org.eclipse.paho.client.mqttv3.MqttException; |
||||
|
import org.slf4j.Logger; |
||||
|
import org.slf4j.LoggerFactory; |
||||
|
import org.springframework.boot.context.event.ApplicationStartedEvent; |
||||
|
import org.springframework.context.ApplicationListener; |
||||
|
import org.springframework.stereotype.Component; |
||||
|
|
||||
|
import jakarta.annotation.Resource; |
||||
|
import java.util.List; |
||||
|
import java.util.Random; |
||||
|
|
||||
|
//@Component
|
||||
|
public class MqttPublisherListener implements ApplicationListener<ApplicationStartedEvent> { |
||||
|
|
||||
|
private static final Logger logger=LoggerFactory.getLogger(MqttPublisherListener.class); |
||||
|
|
||||
|
@Resource |
||||
|
private MqttPublisherService mqttPublisherService; |
||||
|
|
||||
|
|
||||
|
@Override |
||||
|
public void onApplicationEvent(ApplicationStartedEvent event) { |
||||
|
if (!(event instanceof ApplicationStartedEvent)){ |
||||
|
return; |
||||
|
} |
||||
|
|
||||
|
List publisherList = mqttPublisherService.getList(); |
||||
|
|
||||
|
for (int i=0;i<publisherList.size();i++){ |
||||
|
ITAClientMQTT clientMQTT = new ITAClientMQTT((MqttPublisherEntity) publisherList.get(i),getRandoms()+""); |
||||
|
MqttStartStatusEntity statusItem = new MqttStartStatusEntity(); |
||||
|
statusItem.setHOST(((MqttPublisherEntity) publisherList.get(i)).getHost()); |
||||
|
try { |
||||
|
clientMQTT.start(); |
||||
|
statusItem.setStatus("Success"); |
||||
|
}catch (MqttException e) { |
||||
|
logger.error("Failed,please check log. {}",e); |
||||
|
statusItem.setStatus("Failed,please check log."); |
||||
|
|
||||
|
} |
||||
|
} |
||||
|
|
||||
|
|
||||
|
} |
||||
|
|
||||
|
public int getRandoms(){ |
||||
|
// Create a Random instance
|
||||
|
Random random = new Random(); |
||||
|
|
||||
|
// Generate a random number between 100 and 999 (inclusive of 100, exclusive of 1000)
|
||||
|
int randomNumber = random.nextInt(900) + 100; |
||||
|
return randomNumber; |
||||
|
} |
||||
|
} |
||||
@ -0,0 +1,71 @@ |
|||||
|
package com.techsor.datacenter.receiver.listener; |
||||
|
|
||||
|
import com.alibaba.fastjson2.JSON; |
||||
|
import com.google.gson.Gson; |
||||
|
import com.google.gson.GsonBuilder; |
||||
|
import com.techsor.datacenter.receiver.constants.MsgConstants; |
||||
|
import com.techsor.datacenter.receiver.entity.dto.RedisCommandReceiverDTO; |
||||
|
import com.techsor.datacenter.receiver.service.GlobalSwitchStatusComponent; |
||||
|
import com.techsor.datacenter.receiver.service.GuavaRedisCache; |
||||
|
import com.techsor.datacenter.receiver.service.IDataReceiveService; |
||||
|
import com.techsor.datacenter.receiver.utils.JsonUtils; |
||||
|
import lombok.extern.slf4j.Slf4j; |
||||
|
import org.apache.commons.lang3.StringUtils; |
||||
|
import org.apache.commons.text.StringEscapeUtils; |
||||
|
import org.springframework.core.task.TaskExecutor; |
||||
|
import org.springframework.data.redis.connection.Message; |
||||
|
import org.springframework.data.redis.connection.MessageListener; |
||||
|
import org.springframework.stereotype.Component; |
||||
|
|
||||
|
import jakarta.annotation.Resource; |
||||
|
import java.util.Objects; |
||||
|
|
||||
|
@Slf4j |
||||
|
@Component |
||||
|
public class RedisNotificationMessageSubscriber implements MessageListener { |
||||
|
@Resource |
||||
|
private IDataReceiveService dataReceiveService; |
||||
|
|
||||
|
@Resource |
||||
|
private GlobalSwitchStatusComponent globalSwitchStatusComponent; |
||||
|
@Resource(name="threadPoolTaskExecutor") |
||||
|
private TaskExecutor threadPoolTaskExecutor; |
||||
|
|
||||
|
@Resource |
||||
|
private GuavaRedisCache guavaRedisCache; |
||||
|
@Override |
||||
|
public void onMessage(Message message, byte[] pattern) { |
||||
|
String commandMessage = new String(message.getBody()); |
||||
|
if (StringUtils.isEmpty(commandMessage)){ |
||||
|
log.warn("Empty message received."); |
||||
|
return; |
||||
|
} |
||||
|
log.info("Message received: {}" , commandMessage); |
||||
|
commandMessage = commandMessage.substring(1, commandMessage.length() - 1); |
||||
|
commandMessage=StringEscapeUtils.unescapeJson(commandMessage); |
||||
|
RedisCommandReceiverDTO redisCommandReceiverDTO=JSON.parseObject(commandMessage, RedisCommandReceiverDTO.class); |
||||
|
|
||||
|
if (redisCommandReceiverDTO==null){ |
||||
|
log.warn("Failed to parse message: {}",commandMessage); |
||||
|
return; |
||||
|
} |
||||
|
|
||||
|
if (StringUtils.equals(redisCommandReceiverDTO.getCommandType(), MsgConstants.REDIS_COMMAND_NOTIFICATION_TYPE)){ |
||||
|
this.threadPoolTaskExecutor.execute(() -> dataReceiveService.receiveCurrentDataSrc(redisCommandReceiverDTO.getState(),redisCommandReceiverDTO.getParams())); |
||||
|
}else{ |
||||
|
// Redis statistics event, triggered every 5 minutes
|
||||
|
log.warn("redisCommandReceiverDTO.getCommandType():"+redisCommandReceiverDTO.getCommandType()); |
||||
|
log.warn("MsgConstants.REDIS_COMMAND_STAT_EVENT:"+MsgConstants.REDIS_COMMAND_STAT_EVENT); |
||||
|
if (StringUtils.equalsIgnoreCase(redisCommandReceiverDTO.getCommandType(), MsgConstants.REDIS_COMMAND_STAT_EVENT)) { |
||||
|
this.guavaRedisCache.syncToRedis(); |
||||
|
}else{ |
||||
|
if (StringUtils.equals(redisCommandReceiverDTO.getCommandType(), MsgConstants.REIDS_COMMAND_SWITCH_TYPE)){ |
||||
|
String value=redisCommandReceiverDTO.getValue(); |
||||
|
this.globalSwitchStatusComponent.updateSwitchStatus(Objects.equals(value, "on")); |
||||
|
}else{ |
||||
|
log.warn("Unknown command type: {}",redisCommandReceiverDTO.getCommandType()); |
||||
|
} |
||||
|
} |
||||
|
} |
||||
|
} |
||||
|
} |
||||
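For reference, a minimal producer-side sketch of a command that the subscriber above can parse. The channel name and the literal command type value are placeholders (assumptions); the field names commandType, state and params are taken from the handling code, and the payload is published as a JSON string value because the subscriber strips one pair of outer quotes and unescapes the text before parsing.

import org.springframework.data.redis.core.RedisTemplate;

public class CommandPublisherSketch {

    private final RedisTemplate<String, Object> redisTemplate;

    public CommandPublisherSketch(RedisTemplate<String, Object> redisTemplate) {
        this.redisTemplate = redisTemplate;
    }

    public void publishNotification() {
        // "NOTIFICATION" and the channel name are illustrative placeholders,
        // not the actual MsgConstants values.
        String command = "{\"commandType\":\"NOTIFICATION\",\"state\":\"0\",\"params\":[1,2]}";
        redisTemplate.convertAndSend("receiver:command:channel", command);
    }
}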
@ -0,0 +1,33 @@ |
|||||
|
package com.techsor.datacenter.receiver.listener; |
||||
|
|
||||
|
import com.techsor.datacenter.receiver.constants.MsgConstants; |
||||
|
import com.techsor.datacenter.receiver.service.GlobalSwitchStatusComponent; |
||||
|
import org.springframework.boot.context.event.ApplicationStartedEvent; |
||||
|
import org.springframework.context.ApplicationListener; |
||||
|
import org.springframework.data.redis.core.RedisTemplate; |
||||
|
import org.springframework.stereotype.Component; |
||||
|
|
||||
|
import jakarta.annotation.Resource; |
||||
|
import java.util.Objects; |
||||
|
|
||||
|
@Component |
||||
|
public class RedisStatsSwitchListener implements ApplicationListener<ApplicationStartedEvent> { |
||||
|
|
||||
|
@Resource |
||||
|
private RedisTemplate<String,Object> redisTemplate; |
||||
|
|
||||
|
@Resource |
||||
|
private GlobalSwitchStatusComponent globalSwitchStatusComponent; |
||||
|
@Override |
||||
|
public void onApplicationEvent(ApplicationStartedEvent event) { |
||||
|
if (!(event instanceof ApplicationStartedEvent)){ |
||||
|
return; |
||||
|
} |
||||
|
|
||||
|
Object switchStatus = redisTemplate.opsForValue().get(MsgConstants.SWITCH_STATUS_PREFIX); |
||||
|
|
||||
|
if (Objects.nonNull(switchStatus)){ |
||||
|
this.globalSwitchStatusComponent.updateSwitchStatus(Boolean.parseBoolean(String.valueOf(switchStatus))); |
||||
|
} |
||||
|
} |
||||
|
} |
||||
@ -0,0 +1,141 @@ |
|||||
|
package com.techsor.datacenter.receiver.service; |
||||
|
|
||||
|
import com.google.gson.Gson; |
||||
|
import com.google.gson.JsonObject; |
||||
|
import com.google.gson.JsonParser; |
||||
|
import com.techsor.datacenter.receiver.config.DataCenterEnvConfig; |
||||
|
import com.techsor.datacenter.receiver.constants.CompanyConstants; |
||||
|
import com.techsor.datacenter.receiver.constants.UrlConstants; |
||||
|
import com.techsor.datacenter.receiver.entity.common.BaseTransDataEntity; |
||||
|
import com.techsor.datacenter.receiver.entity.common.CommonTransDataEntity; |
||||
|
import com.techsor.datacenter.receiver.entity.common.ZAIotTransDataEntity; |
||||
|
import com.techsor.datacenter.receiver.utils.DefaultHttpRequestUtil; |
||||
|
import com.techsor.datacenter.receiver.utils.MyHTTPResponse; |
||||
|
import lombok.extern.slf4j.Slf4j; |
||||
|
import org.apache.commons.lang3.StringUtils; |
||||
|
import org.slf4j.Logger; |
||||
|
import org.slf4j.LoggerFactory; |
||||
|
import org.springframework.beans.factory.annotation.Autowired; |
||||
|
import org.springframework.data.redis.core.RedisTemplate; |
||||
|
import org.springframework.stereotype.Service; |
||||
|
|
||||
|
import java.util.Objects; |
||||
|
import java.util.concurrent.TimeUnit; |
||||
|
|
||||
|
import jakarta.annotation.Resource; |
||||
|
|
||||
|
@Slf4j |
||||
|
@Service |
||||
|
public class DataTransService { |
||||
|
|
||||
|
private static final Logger logger= LoggerFactory.getLogger(DataTransService.class); |
||||
|
@Resource |
||||
|
private DefaultHttpRequestUtil defaultHttpRequestUtil; |
||||
|
|
||||
|
@Autowired |
||||
|
private RedisTemplate redisTemplate; |
||||
|
|
||||
|
@Resource |
||||
|
private DataCenterEnvConfig dataCenterEnvConfig; |
||||
|
|
||||
|
public void transferData(String userId,String companyName,String rawJson){ |
||||
|
logger.warn("Start Process:{},",rawJson); |
||||
|
BaseTransDataEntity baseTransDataEntity = new BaseTransDataEntity(); |
||||
|
baseTransDataEntity.setCompany(companyName); |
||||
|
baseTransDataEntity.setTs(System.currentTimeMillis()+""); |
||||
|
baseTransDataEntity.setContent(rawJson); |
||||
|
Gson gson = new Gson(); |
||||
|
String jsonParams = gson.toJson(baseTransDataEntity); |
||||
|
if(StringUtils.isEmpty(userId)){ |
||||
|
logger.debug("Send Data :{} ,To: {}", jsonParams,dataCenterEnvConfig.getReceiveUrl()); |
||||
|
}else{ |
||||
|
logger.debug("Send Data :{} by:{},To: {}", jsonParams,userId,dataCenterEnvConfig.getReceiveUrl()); |
||||
|
} |
||||
|
|
||||
|
this.defaultHttpRequestUtil.postJson(dataCenterEnvConfig.getReceiveUrl(),jsonParams); |
||||
|
} |
||||
|
|
||||
|
@Resource |
||||
|
private DuplicateDataProcessor duplicateDataProcessor; |
||||
|
|
||||
|
public MyHTTPResponse transferData(String srcCode,String rawJson){ |
||||
|
String redisKey = Objects.toString(srcCode, "") + Objects.toString(rawJson, ""); |
||||
|
boolean redisResp = redisTemplate.opsForValue().setIfAbsent(redisKey, System.currentTimeMillis(), 60, TimeUnit.SECONDS); |
||||
|
|
||||
|
MyHTTPResponse response=new MyHTTPResponse<>(); |
||||
|
if (redisResp) { |
||||
|
CommonTransDataEntity baseTransDataEntity = new CommonTransDataEntity(); |
||||
|
baseTransDataEntity.setDataSrcCode(srcCode); |
||||
|
baseTransDataEntity.setTs(System.currentTimeMillis()+""); |
||||
|
baseTransDataEntity.setContent(rawJson); |
||||
|
Gson gson = new Gson(); |
||||
|
String jsonParams = gson.toJson(baseTransDataEntity); |
||||
|
|
||||
|
try{ |
||||
|
if (rawJson.contains("PNs") && rawJson.contains("Objs")){ |
||||
|
// Handle IoServer data separately
|
||||
|
logger.debug("CustomSend Data :{} ,To: {}", jsonParams,dataCenterEnvConfig.getProcessIoserverUrl()); |
||||
|
response = this.defaultHttpRequestUtil.postJson(dataCenterEnvConfig.getProcessIoserverUrl(), jsonParams); |
||||
|
}else if (isGW150(rawJson)){ |
||||
|
// Handle ST-150 gateway data separately
|
||||
|
logger.debug("CustomSend Data :{} ,To: {}", jsonParams,dataCenterEnvConfig.getProcessIoserverUrl()); |
||||
|
response = this.defaultHttpRequestUtil.postJson(dataCenterEnvConfig.getGW150ProcessUrl(), jsonParams); |
||||
|
}else{ |
||||
|
logger.debug("CustomSend Data :{} ,To: {}", jsonParams,dataCenterEnvConfig.getProcessApiUrl()); |
||||
|
response = this.defaultHttpRequestUtil.postJson(dataCenterEnvConfig.getProcessApiUrl(), jsonParams); |
||||
|
} |
||||
|
}catch (Exception e){ |
||||
|
response.setCode(500); |
||||
|
log.error("transferData error:{}",rawJson,e); |
||||
|
} |
||||
|
logger.debug("transferData response:{}",new Gson().toJson(response)); |
||||
|
} |
||||
|
return response; |
||||
|
} |
||||
|
|
||||
|
public MyHTTPResponse transferDataZaiot(String deviceId, String rawJson, String identify) { |
||||
|
String redisKey = Objects.toString(deviceId, "") + Objects.toString(rawJson, ""); |
||||
|
boolean redisResp = redisTemplate.opsForValue().setIfAbsent(redisKey, System.currentTimeMillis(), 60, TimeUnit.SECONDS); |
||||
|
|
||||
|
if (redisResp) { |
||||
|
MyHTTPResponse response=new MyHTTPResponse<>(); |
||||
|
ZAIotTransDataEntity zAIotTransDataEntity = new ZAIotTransDataEntity(); |
||||
|
zAIotTransDataEntity.setDeviceId(deviceId); |
||||
|
zAIotTransDataEntity.setTs(System.currentTimeMillis()+""); |
||||
|
zAIotTransDataEntity.setContent(rawJson); |
||||
|
zAIotTransDataEntity.setIdentify(identify); |
||||
|
|
||||
|
Gson gson = new Gson(); |
||||
|
String zaiotProcessJsonParams = gson.toJson(zAIotTransDataEntity); |
||||
|
logger.debug("zaiot_process Data :{} ,To: {}", zaiotProcessJsonParams, dataCenterEnvConfig.getZaiotProcessApiUrl()); |
||||
|
try{ |
||||
|
response = this.defaultHttpRequestUtil.postJson(dataCenterEnvConfig.getZaiotProcessApiUrl(), zaiotProcessJsonParams); |
||||
|
}catch (Exception e){ |
||||
|
response.setCode(500); |
||||
|
logger.error("zaiot_process error:{}",zaiotProcessJsonParams,e); |
||||
|
} |
||||
|
return response; |
||||
|
} |
||||
|
return null; |
||||
|
} |
||||
|
|
||||
|
/** |
||||
|
* Checks whether the JSON string contains the specific fields that identify the GW150 data format. |
||||
|
* |
||||
|
* @param jsonStr the JSON string to check |
||||
|
* @return true if the JSON string contains both the "d" and "ts" fields, otherwise false |
||||
|
*/ |
||||
|
private Boolean isGW150(String jsonStr){ |
||||
|
// Parse the JSON string into a JsonObject so its fields can be checked
|
||||
|
JsonObject json = JsonParser.parseString(jsonStr).getAsJsonObject(); |
||||
|
|
||||
|
// 检查JSON对象中是否包含字段"d"
|
||||
|
boolean hasD = json.has("d"); |
||||
|
// 检查JSON对象中是否包含字段"ts"
|
||||
|
boolean hasTs = json.has("ts"); |
||||
|
|
||||
|
// 如果同时存在字段"d"和"ts",则认为是GW150数据格式,返回true,否则返回false
|
||||
|
return hasD && hasTs; |
||||
|
} |
||||
|
|
||||
|
} |
||||
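As a quick illustration of the routing rules above (IoServer payloads contain both "PNs" and "Objs", ST-150 gateway payloads contain both "d" and "ts", everything else goes to the default process API), here is a minimal, self-contained sketch of the GW150 field check. The sample payloads are invented for illustration only.

import com.google.gson.JsonObject;
import com.google.gson.JsonParser;

public class Gw150CheckSketch {

    // Mirrors the isGW150 check: a payload is treated as ST-150 data
    // when it contains both a "d" and a "ts" top-level field.
    static boolean looksLikeGw150(String jsonStr) {
        JsonObject json = JsonParser.parseString(jsonStr).getAsJsonObject();
        return json.has("d") && json.has("ts");
    }

    public static void main(String[] args) {
        System.out.println(looksLikeGw150("{\"d\":{\"temp\":21},\"ts\":1700000000}")); // true
        System.out.println(looksLikeGw150("{\"deviceId\":\"abc\",\"value\":1}"));      // false
    }
}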
@ -0,0 +1,84 @@ |
|||||
|
package com.techsor.datacenter.receiver.service; |
||||
|
|
||||
|
|
||||
|
import cn.hutool.crypto.digest.DigestAlgorithm; |
||||
|
import cn.hutool.crypto.digest.DigestUtil; |
||||
|
import com.techsor.datacenter.receiver.utils.RedisUtils; |
||||
|
import lombok.extern.slf4j.Slf4j; |
||||
|
import org.apache.commons.lang3.StringUtils; |
||||
|
import org.slf4j.Logger; |
||||
|
import org.slf4j.LoggerFactory; |
||||
|
import org.springframework.integration.redis.util.RedisLockRegistry; |
||||
|
import org.springframework.stereotype.Service; |
||||
|
|
||||
|
import jakarta.annotation.Resource; |
||||
|
import java.util.concurrent.TimeUnit; |
||||
|
import java.util.concurrent.locks.Lock; |
||||
|
|
||||
|
/** |
||||
|
* Handles data deduplication, using a Redis distributed lock to guarantee uniqueness. |
||||
|
*/ |
||||
|
@Slf4j |
||||
|
@Service |
||||
|
public class DuplicateDataProcessor { |
||||
|
|
||||
|
private final static Logger myLogger = LoggerFactory.getLogger(DuplicateDataProcessor.class); |
||||
|
|
||||
|
@Resource |
||||
|
private RedisLockRegistry redisLockRegistry; |
||||
|
|
||||
|
@Resource |
||||
|
private RedisUtils redisUtils; |
||||
|
|
||||
|
/** |
||||
|
* Removes duplicate data. |
||||
|
* |
||||
|
* @param content the data content to deduplicate |
||||
|
* @return true if the data is unique, false if it is a duplicate |
||||
|
*/ |
||||
|
public boolean removeDuplicateData(String content) { |
||||
|
// Derive the lock key name from the content hash
|
||||
|
String lockKeyName= DigestUtil.digester(DigestAlgorithm.SHA1).digestHex(content); |
||||
|
// Guard against an empty key under concurrent access
|
||||
|
if (StringUtils.isEmpty(lockKeyName)){ |
||||
|
return false; |
||||
|
} |
||||
|
// Result flag
|
||||
|
boolean result=false; |
||||
|
// Acquire the distributed lock and perform the deduplication check
|
||||
|
|
||||
|
Lock lock=this.redisLockRegistry.obtain("receiver_"+lockKeyName); |
||||
|
try{ |
||||
|
if (lock.tryLock(10, TimeUnit.SECONDS)){ |
||||
|
log.debug("get lock success"); |
||||
|
// Key already present: the data is a duplicate
|
||||
|
if (this.redisUtils.hasKey("receiver_"+lockKeyName)){ |
||||
|
|
||||
|
result=false; |
||||
|
}else{ |
||||
|
// No entry yet: record it in Redis
|
||||
|
result=true; |
||||
|
this.redisUtils.add("receiver_"+lockKeyName,content,3600L,TimeUnit.SECONDS); |
||||
|
log.debug("add data to redis success,lockKeyName:{},content:{}",lockKeyName,content); |
||||
|
} |
||||
|
}else{ |
||||
|
log.debug("get lock failed {}",content); |
||||
|
return false; |
||||
|
} |
||||
|
}catch (InterruptedException e){ |
||||
|
// myLogger.error("an exception caught here :{}",e.getMessage(),e);
|
||||
|
result=false; |
||||
|
}finally { |
||||
|
try{ |
||||
|
lock.unlock(); |
||||
|
}catch (Exception e){ |
||||
|
// myLogger.error("an exception caught here :{}",e.getMessage(),e);
|
||||
|
} |
||||
|
} |
||||
|
|
||||
|
return result; |
||||
|
} |
||||
|
|
||||
|
|
||||
|
|
||||
|
} |
||||
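A minimal usage sketch of the deduplication helper above, assuming it is injected into a caller in the same package that persists incoming payloads; the caller class and its persistence step are hypothetical.

import jakarta.annotation.Resource;
import org.springframework.stereotype.Service;

@Service
public class DedupAwareSaverSketch {

    @Resource
    private DuplicateDataProcessor duplicateDataProcessor;

    public void saveIfFirstSeen(String content) {
        // true means the content has not been seen within the Redis TTL window,
        // so it is safe to persist; false means it is a duplicate and is skipped.
        if (duplicateDataProcessor.removeDuplicateData(content)) {
            // persist(content); // hypothetical persistence call
        }
    }
}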
@ -0,0 +1,37 @@ |
|||||
|
package com.techsor.datacenter.receiver.service; |
||||
|
|
||||
|
import com.google.gson.Gson; |
||||
|
import com.techsor.datacenter.receiver.entity.datasource.DatasourceConfigEntity; |
||||
|
import com.techsor.datacenter.receiver.entity.datasource.RestfulParams; |
||||
|
import org.springframework.stereotype.Service; |
||||
|
|
||||
|
import java.util.Map; |
||||
|
import java.util.concurrent.ConcurrentHashMap; |
||||
|
|
||||
|
@Service |
||||
|
public class GlobalStateService { |
||||
|
|
||||
|
private Map<String, DatasourceConfigEntity> datasourceConfigMap = new ConcurrentHashMap<>(); |
||||
|
|
||||
|
public void addDatasourceConfig(DatasourceConfigEntity datasourceConfigEntity) { |
||||
|
String params= datasourceConfigEntity.getParams(); |
||||
|
Gson gson = new Gson(); |
||||
|
RestfulParams restfulParams=gson.fromJson(params, RestfulParams.class); |
||||
|
datasourceConfigMap.put(restfulParams.getUrl(), datasourceConfigEntity); |
||||
|
} |
||||
|
|
||||
|
|
||||
|
public DatasourceConfigEntity getDatasourceConfig(String url){ |
||||
|
return this.datasourceConfigMap.get(url); |
||||
|
} |
||||
|
public boolean checkUrlExist(String url){ |
||||
|
return this.datasourceConfigMap.containsKey(url); |
||||
|
} |
||||
|
|
||||
|
public void removeDatasourceConfig(DatasourceConfigEntity datasourceConfigEntity) { |
||||
|
String params= datasourceConfigEntity.getParams(); |
||||
|
Gson gson = new Gson(); |
||||
|
RestfulParams restfulParams=gson.fromJson(params, RestfulParams.class); |
||||
|
datasourceConfigMap.remove(restfulParams.getUrl()); |
||||
|
} |
||||
|
} |
||||
@ -0,0 +1,27 @@ |
|||||
|
package com.techsor.datacenter.receiver.service; |
||||
|
|
||||
|
import org.springframework.stereotype.Component; |
||||
|
|
||||
|
import jakarta.annotation.PostConstruct; |
||||
|
|
||||
|
import java.util.concurrent.atomic.AtomicBoolean; |
||||
|
|
||||
|
@Component |
||||
|
public class GlobalSwitchStatusComponent { |
||||
|
|
||||
|
private AtomicBoolean switchStatus; |
||||
|
|
||||
|
@PostConstruct |
||||
|
public void init(){ |
||||
|
this.switchStatus=new AtomicBoolean(false); |
||||
|
} |
||||
|
|
||||
|
|
||||
|
public void updateSwitchStatus(boolean switchStatus){ |
||||
|
this.switchStatus.set(switchStatus); |
||||
|
} |
||||
|
|
||||
|
public boolean getSwitchStatus(){ |
||||
|
return this.switchStatus.get(); |
||||
|
} |
||||
|
} |
||||
@ -0,0 +1,87 @@ |
|||||
|
package com.techsor.datacenter.receiver.service; |
||||
|
|
||||
|
import com.alibaba.fastjson2.JSON; |
||||
|
import com.google.common.cache.CacheBuilder; |
||||
|
import com.google.common.cache.CacheLoader; |
||||
|
import com.google.common.cache.LoadingCache; |
||||
|
import lombok.extern.slf4j.Slf4j; |
||||
|
import org.slf4j.MDC; |
||||
|
import org.springframework.beans.factory.annotation.Autowired; |
||||
|
import org.springframework.data.redis.core.RedisTemplate; |
||||
|
import org.springframework.scheduling.annotation.Scheduled; |
||||
|
import org.springframework.stereotype.Component; |
||||
|
|
||||
|
import jakarta.annotation.PostConstruct; |
||||
|
import java.time.Duration; |
||||
|
import java.time.LocalDate; |
||||
|
import java.time.ZoneId; |
||||
|
import java.time.ZonedDateTime; |
||||
|
import java.time.format.DateTimeFormatter; |
||||
|
import java.util.Objects; |
||||
|
import java.util.UUID; |
||||
|
import java.util.concurrent.TimeUnit; |
||||
|
|
||||
|
import static com.google.common.cache.CacheBuilder.newBuilder; |
||||
|
|
||||
|
@Slf4j |
||||
|
@Component |
||||
|
public class GuavaRedisCache { |
||||
|
|
||||
|
private final LoadingCache<String, Integer> cache; |
||||
|
private final RedisTemplate<String, Object> redisTemplate; |
||||
|
|
||||
|
|
||||
|
private final GlobalSwitchStatusComponent globalSwitchStatusComponent; |
||||
|
@Autowired |
||||
|
public GuavaRedisCache(RedisTemplate<String, Object> redisTemplate, GlobalSwitchStatusComponent globalSwitchStatusComponent ) { |
||||
|
this.redisTemplate = redisTemplate; |
||||
|
this.globalSwitchStatusComponent = globalSwitchStatusComponent; |
||||
|
this.cache = CacheBuilder.newBuilder() |
||||
|
.maximumSize(10000) |
||||
|
.expireAfterWrite(7, TimeUnit.MINUTES) |
||||
|
.concurrencyLevel(1000) |
||||
|
.build(new CacheLoader<String,Integer>() { |
||||
|
@Override |
||||
|
public Integer load(String s) throws Exception { |
||||
|
return 0; |
||||
|
} |
||||
|
}); |
||||
|
} |
||||
|
|
||||
|
public void incrementDailyDeviceIdCount(String deviceId) { |
||||
|
String key = getKeyForToday(); |
||||
|
cache.put(key + ":" + deviceId, cache.getUnchecked(key + ":" + deviceId) + 1); |
||||
|
} |
||||
|
|
||||
|
public String getKeyForToday() { |
||||
|
LocalDate today = LocalDate.now(); |
||||
|
// Use the Japan time zone
|
||||
|
ZoneId japanZone = ZoneId.of("Asia/Tokyo"); |
||||
|
|
||||
|
// Get the current date and time in the Japan time zone
|
||||
|
ZonedDateTime nowInJapan = ZonedDateTime.now(japanZone); |
||||
|
DateTimeFormatter formatter = DateTimeFormatter.ofPattern("yyyy-MM-dd"); |
||||
|
LocalDate todayInJapan = nowInJapan.toLocalDate(); |
||||
|
return "receiver:device:counts:" + formatter.format(yesterdayInJapan); |
||||
|
} |
||||
|
|
||||
|
// Sync the Guava cache counters to Redis
|
||||
|
public void syncToRedis() { |
||||
|
String requestId =UUID.randomUUID().toString(); |
||||
|
MDC.put("requestId", requestId); |
||||
|
log.info("Syncing cache to Redis=============================>"); |
||||
|
log.info("Current Cache Size Data {}", cache.size()); |
||||
|
log.info("Current Cache Value :{}", JSON.toJSON(cache.asMap())); |
||||
|
cache.asMap().forEach((key, value) -> { |
||||
|
String[] parts = key.split(":"); |
||||
|
String redisKey = parts[0] + ":" + parts[1] + ":" + parts[2] + ":" + parts[3]; |
||||
|
String deviceId = parts[4]; |
||||
|
redisTemplate.opsForHash().increment(redisKey, deviceId, value); |
||||
|
redisTemplate.expire(redisKey, Duration.ofDays(2)); // expire the key after 2 days
|
||||
|
}); |
||||
|
log.info("Syncing cache to Redis done"); |
||||
|
this.cache.invalidateAll(); |
||||
|
MDC.clear(); |
||||
|
} |
||||
|
|
||||
|
} |
||||
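The sync step above assumes each cache key has the form receiver:device:counts:<yyyy-MM-dd>:<deviceId>, which is why syncToRedis splits on ":" and recombines the first four parts as the Redis hash key and uses the fifth as the hash field. A small standalone sketch of that split, with an invented device id:

public class CacheKeySplitSketch {
    public static void main(String[] args) {
        String key = "receiver:device:counts:2024-01-01:device-123"; // illustrative only
        String[] parts = key.split(":");
        // Recombine the date-scoped Redis hash key and extract the device id field
        String redisKey = parts[0] + ":" + parts[1] + ":" + parts[2] + ":" + parts[3];
        String deviceId = parts[4];
        System.out.println(redisKey); // receiver:device:counts:2024-01-01
        System.out.println(deviceId); // device-123
    }
}

Note that a device id containing ":" would produce more than five parts and break this recombination; whether that can happen depends on the upstream id format.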
@ -0,0 +1,12 @@ |
|||||
|
package com.techsor.datacenter.receiver.service; |
||||
|
|
||||
|
import java.util.List; |
||||
|
|
||||
|
public interface IDataReceiveService { |
||||
|
|
||||
|
|
||||
|
/**** |
||||
|
* Receives notifications triggered by AWS Lambda, used to start or stop listening to data sources. |
||||
|
* */ |
||||
|
public void receiveCurrentDataSrc(String state, List<Long> params); |
||||
|
} |
||||
@ -0,0 +1,19 @@ |
|||||
|
package com.techsor.datacenter.receiver.service; |
||||
|
|
||||
|
|
||||
|
import com.techsor.datacenter.receiver.entity.company.CompanyEntity; |
||||
|
import com.techsor.datacenter.receiver.entity.datasource.DatasourceConfigEntity; |
||||
|
|
||||
|
import java.util.List; |
||||
|
|
||||
|
public interface IDataSourceConfigService { |
||||
|
|
||||
|
|
||||
|
public List<DatasourceConfigEntity> getDataSourceConfig(List<Long> datasourceIds) ; |
||||
|
|
||||
|
|
||||
|
public List<DatasourceConfigEntity> getDataSourceConfigWithState() ; |
||||
|
|
||||
|
public List<CompanyEntity> getRelatedTopCompany(String baseCompanyId); |
||||
|
|
||||
|
} |
||||
@ -0,0 +1,14 @@ |
|||||
|
package com.techsor.datacenter.receiver.service; |
||||
|
|
||||
|
import com.techsor.datacenter.receiver.entity.common.MqttClientConfig; |
||||
|
import com.techsor.datacenter.receiver.entity.datasource.DatasourceConfigEntity; |
||||
|
|
||||
|
public interface IMQTTService { |
||||
|
|
||||
|
// Add an MQTT consumer client
|
||||
|
void addMQTTConsumerClient(Long companyId,Long id, MqttClientConfig config, DatasourceConfigEntity datasourceConfigEntity); |
||||
|
|
||||
|
// Remove an MQTT consumer client by its id
|
||||
|
public void removeMQTTConsumerClient(Long id); |
||||
|
} |
||||
|
|
||||
@ -0,0 +1,98 @@ |
|||||
|
package com.techsor.datacenter.receiver.service; |
||||
|
|
||||
|
|
||||
|
import lombok.extern.slf4j.Slf4j; |
||||
|
import org.apache.commons.io.FileUtils; |
||||
|
import org.apache.commons.lang3.StringUtils; |
||||
|
import software.amazon.awssdk.auth.credentials.AwsBasicCredentials; |
||||
|
import software.amazon.awssdk.auth.credentials.StaticCredentialsProvider; |
||||
|
import software.amazon.awssdk.core.sync.RequestBody; |
||||
|
import software.amazon.awssdk.regions.Region; |
||||
|
import software.amazon.awssdk.services.s3.S3Client; |
||||
|
import software.amazon.awssdk.services.s3.model.GetObjectRequest; |
||||
|
import org.springframework.beans.factory.annotation.Value; |
||||
|
import org.springframework.stereotype.Service; |
||||
|
|
||||
|
import java.io.File; |
||||
|
import java.io.IOException; |
||||
|
import java.io.OutputStream; |
||||
|
import java.nio.file.Path; |
||||
|
import java.nio.file.Paths; |
||||
|
|
||||
|
/** |
||||
|
* Service for handling MQTT certificates, including downloading certificate files from AWS S3. |
||||
|
*/ |
||||
|
@Slf4j |
||||
|
@Service |
||||
|
public class MQTTCrtService { |
||||
|
|
||||
|
|
||||
|
@Value("${amazon.aws.accesskey}") |
||||
|
private String awsAccessKeyId; |
||||
|
|
||||
|
@Value("${amazon.aws.secretkey}") |
||||
|
private String awsAccessSecret; |
||||
|
|
||||
|
@Value("${amazon.aws.bucket}") |
||||
|
private String awsBucketAddress; |
||||
|
|
||||
|
|
||||
|
|
||||
|
|
||||
|
public static final String CERT_FILE_PATH="tokyobuilding-certs/"; |
||||
|
|
||||
|
/** |
||||
|
* Downloads the file at the given path from AWS S3 to the local file system. |
||||
|
* |
||||
|
* @param filePath the file path in S3 |
||||
|
* @return the local path of the downloaded file |
||||
|
*/ |
||||
|
public String getFilePath(String filePath) { |
||||
|
if (StringUtils.isEmpty(filePath)) { |
||||
|
return ""; |
||||
|
} |
||||
|
String bucket = awsBucketAddress; |
||||
|
|
||||
|
// Build the GetObject request
|
||||
|
GetObjectRequest getObjectRequest = GetObjectRequest.builder() |
||||
|
.bucket(bucket) |
||||
|
.key(CERT_FILE_PATH + filePath) |
||||
|
.build(); |
||||
|
String tmpDir = System.getProperty("java.io.tmpdir"); |
||||
|
// Download the file to a local path
|
||||
|
Region region = Region.AP_NORTHEAST_1; |
||||
|
|
||||
|
S3Client s3 = S3Client |
||||
|
.builder().region(region) |
||||
|
.credentialsProvider(StaticCredentialsProvider |
||||
|
.create(AwsBasicCredentials.create(awsAccessKeyId, awsAccessSecret))) |
||||
|
.build(); |
||||
|
|
||||
|
String localPath = tmpDir + File.separator + "certs" + File.separator + System.currentTimeMillis() + File.separator + filePath; |
||||
|
|
||||
|
s3.getObject(getObjectRequest, (response, inputStream) -> { |
||||
|
if (response.sdkHttpResponse().isSuccessful()) { |
||||
|
// Handle the response here
|
||||
|
Path path = Paths.get(localPath); |
||||
|
if (path.getParent() != null) { |
||||
|
FileUtils.forceMkdir(path.getParent().toFile()); |
||||
|
} |
||||
|
|
||||
|
try (OutputStream outputStream = FileUtils.openOutputStream(path.toFile())) { |
||||
|
byte[] bytes = new byte[1024]; |
||||
|
int len; |
||||
|
while ((len = inputStream.read(bytes)) != -1) { |
||||
|
outputStream.write(bytes, 0, len); |
||||
|
} |
||||
|
} catch (IOException e) { |
||||
|
log.error("下载文件失败", e); |
||||
|
} |
||||
|
} else { |
||||
|
throw new RuntimeException("Failed to get object from AWS S3"); |
||||
|
} |
||||
|
return null; |
||||
|
}); |
||||
|
|
||||
|
return localPath; |
||||
|
} |
||||
|
} |
||||
@ -0,0 +1,42 @@ |
|||||
|
package com.techsor.datacenter.receiver.service; |
||||
|
|
||||
|
import com.techsor.datacenter.receiver.entity.common.BaseTransDataEntity; |
||||
|
import com.techsor.datacenter.receiver.utils.DateUtils; |
||||
|
import lombok.extern.slf4j.Slf4j; |
||||
|
|
||||
|
import org.socialsignin.spring.data.dynamodb.core.DynamoDBTemplate; |
||||
|
import org.springframework.stereotype.Component; |
||||
|
|
||||
|
import jakarta.annotation.Resource; |
||||
|
import java.util.Objects; |
||||
|
|
||||
|
//@Component
|
||||
|
//@Repository
|
||||
|
//@EnableScan
|
||||
|
//public interface MqttHistoryDynamoDBService extends DynamoDBCrudRepository<BaseTransDataEntity, UUID> {
|
||||
|
//
|
||||
|
//}
|
||||
|
@Slf4j |
||||
|
@Component |
||||
|
public class MqttHistoryDynamoDBService { |
||||
|
|
||||
|
//@Resource
|
||||
|
private DynamoDBTemplate dynamoDBTemplate; |
||||
|
|
||||
|
@Resource |
||||
|
private DuplicateDataProcessor duplicateDataProcessor; |
||||
|
public void save(BaseTransDataEntity baseTransDataEntity) { |
||||
|
// if (Objects.isNull(baseTransDataEntity)){
|
||||
|
// return;
|
||||
|
// }
|
||||
|
//
|
||||
|
// boolean flag=this.duplicateDataProcessor.removeDuplicateData(baseTransDataEntity.getContent());
|
||||
|
// log.debug("save data to dynamoDB flag:{},{}",baseTransDataEntity.getContent(),flag);
|
||||
|
// if (!flag){
|
||||
|
// return;
|
||||
|
// }
|
||||
|
// baseTransDataEntity.setDateKey(DateUtils.getCurrentDate());
|
||||
|
// this.dynamoDBTemplate.save(baseTransDataEntity);
|
||||
|
// log.debug("save data to dynamoDB success");
|
||||
|
} |
||||
|
} |
||||
@ -0,0 +1,33 @@ |
|||||
|
package com.techsor.datacenter.receiver.service; |
||||
|
|
||||
|
import com.techsor.datacenter.receiver.entity.common.BaseTransDataEntity; |
||||
|
import org.slf4j.Logger; |
||||
|
import org.slf4j.LoggerFactory; |
||||
|
import org.springframework.jdbc.core.JdbcTemplate; |
||||
|
import org.springframework.stereotype.Service; |
||||
|
|
||||
|
import jakarta.annotation.Resource; |
||||
|
|
||||
|
@Service("mqttHistoryService") |
||||
|
public class MqttHistoryService { |
||||
|
private static final Logger logger = LoggerFactory.getLogger(MqttHistoryService.class); |
||||
|
|
||||
|
@Resource |
||||
|
private JdbcTemplate jdbcTemplate; |
||||
|
|
||||
|
public void insertHistory(BaseTransDataEntity mqttHistoryEntity){ |
||||
|
logger.info("insertHistory is disabled now "); |
||||
|
// Build the SQL statement (parameterized, so content quoting is handled by JDBC)
|
||||
|
String sql = "insert into mqtt_history(content,ts) values(\'"+mqttHistoryEntity.getContent()+"\',\'"+mqttHistoryEntity.getTs()+"\')"; |
||||
|
logger.info("Execute SQL: {}",sql); |
||||
|
// Execute the update with bound parameters
|
||||
|
int result = jdbcTemplate.update(sql, mqttHistoryEntity.getContent(), mqttHistoryEntity.getTs()); |
||||
|
if (result==1){ |
||||
|
logger.info("Save succeed."); |
||||
|
}else{ |
||||
|
logger.warn("Save failed, content:"+mqttHistoryEntity.getContent()); |
||||
|
} |
||||
|
} |
||||
|
|
||||
|
|
||||
|
} |
||||
@ -0,0 +1,40 @@ |
|||||
|
package com.techsor.datacenter.receiver.service; |
||||
|
|
||||
|
import com.techsor.datacenter.receiver.entity.common.MqttPublisherEntity; |
||||
|
import org.springframework.beans.factory.annotation.Autowired; |
||||
|
import org.springframework.jdbc.core.JdbcTemplate; |
||||
|
import org.springframework.jdbc.core.RowMapper; |
||||
|
import org.springframework.stereotype.Service; |
||||
|
|
||||
|
import jakarta.annotation.Resource; |
||||
|
import java.sql.ResultSet; |
||||
|
import java.sql.SQLException; |
||||
|
import java.util.List; |
||||
|
|
||||
|
@Service |
||||
|
public class MqttPublisherService { |
||||
|
|
||||
|
@Resource |
||||
|
private JdbcTemplate jdbcTemplate ; |
||||
|
|
||||
|
public List<MqttPublisherEntity> getList(){ |
||||
|
String sql = "SELECT * FROM mqtt_publisher"; |
||||
|
List<MqttPublisherEntity> dataList = (List<MqttPublisherEntity>) jdbcTemplate.query(sql, new RowMapper<MqttPublisherEntity>(){ |
||||
|
@Override |
||||
|
public MqttPublisherEntity mapRow(ResultSet rs, int rowNum) throws SQLException { |
||||
|
MqttPublisherEntity dataItem = new MqttPublisherEntity(); |
||||
|
dataItem.setId(rs.getInt("id")); |
||||
|
dataItem.setHost(rs.getString("host")); |
||||
|
dataItem.setUsername(rs.getString("username")); |
||||
|
dataItem.setPassword(rs.getString("password")); |
||||
|
dataItem.setTopic(rs.getString("topic")); |
||||
|
dataItem.setDescription(rs.getString("description")); |
||||
|
return dataItem; |
||||
|
} |
||||
|
}); |
||||
|
|
||||
|
return dataList; |
||||
|
} |
||||
|
|
||||
|
|
||||
|
} |
||||
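The explicit RowMapper above works as written; if the mqtt_publisher column names line up with the entity's property names (id, host, username, password, topic, description), the same query could be expressed more compactly with Spring's BeanPropertyRowMapper. A sketch, assuming that naming does line up:

import java.util.List;
import org.springframework.jdbc.core.BeanPropertyRowMapper;
import org.springframework.jdbc.core.JdbcTemplate;
import com.techsor.datacenter.receiver.entity.common.MqttPublisherEntity;

public class MqttPublisherQuerySketch {

    public List<MqttPublisherEntity> getList(JdbcTemplate jdbcTemplate) {
        // BeanPropertyRowMapper maps result-set columns to bean properties by name.
        return jdbcTemplate.query(
                "SELECT * FROM mqtt_publisher",
                new BeanPropertyRowMapper<>(MqttPublisherEntity.class));
    }
}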
@ -0,0 +1,29 @@ |
|||||
|
package com.techsor.datacenter.receiver.service; |
||||
|
|
||||
|
import org.apache.commons.text.StringEscapeUtils; |
||||
|
import org.slf4j.Logger; |
||||
|
import org.slf4j.LoggerFactory; |
||||
|
import org.springframework.jdbc.core.JdbcTemplate; |
||||
|
import org.springframework.stereotype.Service; |
||||
|
|
||||
|
import jakarta.annotation.Resource; |
||||
|
|
||||
|
@Service |
||||
|
public class RestfulService { |
||||
|
|
||||
|
@Resource |
||||
|
private JdbcTemplate jdbcTemplate; |
||||
|
private static final Logger logger = LoggerFactory.getLogger(RestfulService.class); |
||||
|
|
||||
|
|
||||
|
public void insertHistory(String content,String company) { |
||||
|
|
||||
|
content = StringEscapeUtils.escapeJson(content); |
||||
|
String sql = "insert into restful_history (content,company,ts) values (\""+content+"\",\""+company+"\",\""+System.currentTimeMillis()/1000+"\")"; |
||||
|
logger.debug("insertHistory : "+sql); |
||||
|
logger.debug("From : "+company); |
||||
|
|
||||
|
jdbcTemplate.update(sql, content, company, String.valueOf(System.currentTimeMillis() / 1000)); |
||||
|
} |
||||
|
|
||||
|
} |
||||
@ -0,0 +1,15 @@ |
|||||
|
package com.techsor.datacenter.receiver.service; |
||||
|
|
||||
|
import com.techsor.datacenter.receiver.entity.common.KafkaClientConfig; |
||||
|
import com.techsor.datacenter.receiver.entity.datasource.DatasourceConfigEntity; |
||||
|
import org.springframework.stereotype.Service; |
||||
|
|
||||
|
@Service |
||||
|
public interface ZetaKafkaService { |
||||
|
|
||||
|
|
||||
|
void addKafkaClient(Long companyId, Long configId, KafkaClientConfig config, DatasourceConfigEntity datasourceConfigEntity); |
||||
|
|
||||
|
public void removeKafkaClient(Long companyId,Long configId); |
||||
|
|
||||
|
} |
||||
@ -0,0 +1,76 @@ |
|||||
|
package com.techsor.datacenter.receiver.service.impl; |
||||
|
|
||||
|
import com.google.gson.Gson; |
||||
|
import com.techsor.datacenter.receiver.config.datasource.DataSourceContextHolder; |
||||
|
import com.techsor.datacenter.receiver.entity.common.MqttClientConfig; |
||||
|
import com.techsor.datacenter.receiver.entity.datasource.DatasourceConfigEntity; |
||||
|
import com.techsor.datacenter.receiver.service.GlobalStateService; |
||||
|
import com.techsor.datacenter.receiver.service.IDataReceiveService; |
||||
|
import com.techsor.datacenter.receiver.service.IDataSourceConfigService; |
||||
|
import com.techsor.datacenter.receiver.service.IMQTTService; |
||||
|
import lombok.extern.slf4j.Slf4j; |
||||
|
import org.apache.commons.lang3.StringUtils; |
||||
|
import org.springframework.beans.factory.annotation.Value; |
||||
|
import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor; |
||||
|
import org.springframework.stereotype.Service; |
||||
|
|
||||
|
import jakarta.annotation.Resource; |
||||
|
import java.util.List; |
||||
|
|
||||
|
/** |
||||
|
* Data receiving service implementation that manages the receiving state of data sources. |
||||
|
*/ |
||||
|
@Slf4j |
||||
|
@Service |
||||
|
public class DataReceiveServiceImpl implements IDataReceiveService { |
||||
|
|
||||
|
|
||||
|
@Resource |
||||
|
private IDataSourceConfigService dataSourceConfigService; |
||||
|
|
||||
|
@Resource |
||||
|
private GlobalStateService globalStateService; |
||||
|
@Resource |
||||
|
private IMQTTService mqttService; |
||||
|
|
||||
|
@Value("${base.companyId}") |
||||
|
private Long baseCompanyId; |
||||
|
|
||||
|
|
||||
|
/** |
||||
|
* Starts or stops the corresponding data receiving clients based on the given state and datasource id list. |
||||
|
* |
||||
|
* @param state the desired state of the data sources; "0" means start, any other value means stop |
||||
|
* @param params the list of datasource ids to process |
||||
|
*/ |
||||
|
@Override |
||||
|
public void receiveCurrentDataSrc(String state, List<Long> params) { |
||||
|
DataSourceContextHolder.setCurrentDataSourceKey("dataSourceForCompany_" + baseCompanyId); |
||||
|
List<DatasourceConfigEntity> resultList = this.dataSourceConfigService.getDataSourceConfig(params); |
||||
|
|
||||
|
for (int i = 0; i < resultList.size(); i++) { |
||||
|
DatasourceConfigEntity item = resultList.get(i); |
||||
|
if (StringUtils.equals("0",state)){ |
||||
|
// Determine the source type: MQTT or HTTP
|
||||
|
if (StringUtils.equals("0",item.getMethodType())) { |
||||
|
//mqtt
|
||||
|
Gson gson=new Gson(); |
||||
|
MqttClientConfig mqttClientConfig=gson.fromJson(item.getParams(), MqttClientConfig.class); |
||||
|
this.mqttService.addMQTTConsumerClient(baseCompanyId,item.getId(),mqttClientConfig,item); |
||||
|
}else{ |
||||
|
//http
|
||||
|
this.globalStateService.addDatasourceConfig(item); |
||||
|
} |
||||
|
}else{ |
||||
|
|
||||
|
if (StringUtils.equals("0",item.getMethodType())) { |
||||
|
//mqtt
|
||||
|
this.mqttService.removeMQTTConsumerClient(item.getId()); |
||||
|
}else{ |
||||
|
this.globalStateService.removeDatasourceConfig(item); |
||||
|
} |
||||
|
|
||||
|
} |
||||
|
} |
||||
|
} |
||||
|
} |
||||
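For orientation, a minimal sketch of how the service above is driven: state "0" starts the matching clients (MQTT when method_type is "0", otherwise the HTTP datasource registry), and any other state stops or removes them. The caller and the ids below are illustrative only.

import java.util.List;
import com.techsor.datacenter.receiver.service.IDataReceiveService;

public class ReceiveStateSketch {

    // dataReceiveService would be the injected IDataReceiveService implementation.
    void toggle(IDataReceiveService dataReceiveService) {
        dataReceiveService.receiveCurrentDataSrc("0", List.of(1L, 2L)); // start datasources 1 and 2
        dataReceiveService.receiveCurrentDataSrc("1", List.of(1L));     // stop datasource 1
    }
}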
@ -0,0 +1,115 @@ |
|||||
|
package com.techsor.datacenter.receiver.service.impl; |
||||
|
|
||||
|
import com.techsor.datacenter.receiver.entity.company.CompanyEntity; |
||||
|
import com.techsor.datacenter.receiver.entity.datasource.DatasourceConfigEntity; |
||||
|
import com.techsor.datacenter.receiver.service.IDataSourceConfigService; |
||||
|
import lombok.extern.slf4j.Slf4j; |
||||
|
import org.apache.commons.collections.CollectionUtils; |
||||
|
import org.springframework.jdbc.core.JdbcTemplate; |
||||
|
import org.springframework.stereotype.Service; |
||||
|
|
||||
|
import jakarta.annotation.Resource; |
||||
|
import java.util.ArrayList; |
||||
|
import java.util.List; |
||||
|
|
||||
|
|
||||
|
@Slf4j |
||||
|
@Service |
||||
|
public class DataSourceConfigServiceImpl implements IDataSourceConfigService { |
||||
|
|
||||
|
|
||||
|
@Resource |
||||
|
private JdbcTemplate jdbcTemplate; |
||||
|
@Resource |
||||
|
private JdbcTemplate dynamicJdbcTemplate; |
||||
|
|
||||
|
|
||||
|
/** |
||||
|
* create table data_src_config |
||||
|
* ( |
||||
|
* id bigint auto_increment |
||||
|
* primary key, |
||||
|
* code varchar(100) null comment 'data source identification code', |
||||
|
* api_key varchar(100) null comment 'api_key', |
||||
|
* name varchar(100) null comment 'data source name', |
||||
|
* method_type varchar(2) null comment 'source method (0: MQTT; 1: RESTFUL)', |
||||
|
* params varchar(1000) null comment 'connection parameters', |
||||
|
* state varchar(2) null comment 'state (0: active; 1: frozen)', |
||||
|
* created_by bigint null, |
||||
|
* created_timestamp timestamp null, |
||||
|
* updated_by bigint null, |
||||
|
* updated_timestamp timestamp null, |
||||
|
* company_id bigint not null |
||||
|
* ) |
||||
|
* comment 'data source configuration table'; |
||||
|
* */ |
||||
|
@Override |
||||
|
public List<DatasourceConfigEntity> getDataSourceConfig(List<Long> datasourceIds) { |
||||
|
|
||||
|
if (CollectionUtils.isEmpty(datasourceIds)){ |
||||
|
return new ArrayList<>(); |
||||
|
} |
||||
|
// Query the datasource configurations for the given ids
|
||||
|
String sql="select * from data_src_config where id in (%s) "; |
||||
|
|
||||
|
StringBuffer sb=new StringBuffer(); |
||||
|
datasourceIds.stream().forEach(iter->{ |
||||
|
sb.append(iter).append(","); |
||||
|
}); |
||||
|
sb.deleteCharAt(sb.length()-1); |
||||
|
sql=String.format(sql,sb.toString()); |
||||
|
|
||||
|
List<DatasourceConfigEntity> resultList = this.dynamicJdbcTemplate.query(sql, |
||||
|
(rs, rowNum) -> { |
||||
|
DatasourceConfigEntity item = new DatasourceConfigEntity(); |
||||
|
item.setId(rs.getLong("id")); |
||||
|
item.setCode(rs.getString("code")); |
||||
|
item.setApiKey(rs.getString("api_key")); |
||||
|
item.setName(rs.getString("name")); |
||||
|
item.setMethodType(rs.getString("method_type")); |
||||
|
item.setParams(rs.getString("params")); |
||||
|
item.setState(rs.getString("state")); |
||||
|
item.setDeviceIdPosition(rs.getString("device_id_position")); |
||||
|
return item; |
||||
|
}); |
||||
|
|
||||
|
return resultList; |
||||
|
} |
||||
|
|
||||
|
@Override |
||||
|
public List<DatasourceConfigEntity> getDataSourceConfigWithState() { |
||||
|
|
||||
|
// Query the activated datasource configurations
|
||||
|
String sql="select * from data_src_config where state='0' and flag=0 "; |
||||
|
|
||||
|
List<DatasourceConfigEntity> resultList = this.dynamicJdbcTemplate.query(sql, |
||||
|
(rs, rowNum) -> { |
||||
|
DatasourceConfigEntity item = new DatasourceConfigEntity(); |
||||
|
item.setId(rs.getLong("id")); |
||||
|
item.setCode(rs.getString("code")); |
||||
|
item.setApiKey(rs.getString("api_key")); |
||||
|
item.setName(rs.getString("name")); |
||||
|
item.setMethodType(rs.getString("method_type")); |
||||
|
item.setParams(rs.getString("params")); |
||||
|
item.setState(rs.getString("state")); |
||||
|
item.setDeviceIdPosition(rs.getString("device_id_position")); |
||||
|
return item; |
||||
|
}); |
||||
|
|
||||
|
return resultList; |
||||
|
} |
||||
|
|
||||
|
@Override |
||||
|
public List<CompanyEntity> getRelatedTopCompany(String baseCompanyId) { |
||||
|
String sql="SELECT bcom.id,bcom.parent_id FROM data_center_aeon_admin.basic_company bcom WHERE bcom.id="+baseCompanyId+" and bcom.flag!=1"; |
||||
|
List<CompanyEntity> dataList = this.jdbcTemplate.query(sql, (rs, rowNum) -> { |
||||
|
CompanyEntity item = new CompanyEntity(); |
||||
|
item.setId(rs.getLong("id")); |
||||
|
item.setParentId(rs.getLong("parent_id")); |
||||
|
return item; |
||||
|
}); |
||||
|
return dataList; |
||||
|
} |
||||
|
|
||||
|
|
||||
|
} |
||||
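The IN clause in getDataSourceConfig above is assembled by concatenating the ids into the SQL string. Since the ids are Longs supplied by internal callers this is workable, but a parameterized variant avoids the manual formatting entirely. A sketch using NamedParameterJdbcTemplate, which expands a collection parameter into the IN list; the wiring of the template bean is assumed:

import java.util.List;
import java.util.Map;
import org.springframework.jdbc.core.namedparam.NamedParameterJdbcTemplate;

public class DataSourceConfigQuerySketch {

    public List<Long> findIds(NamedParameterJdbcTemplate namedTemplate, List<Long> datasourceIds) {
        // ":ids" is expanded to (?, ?, ...) by NamedParameterJdbcTemplate.
        String sql = "select id from data_src_config where id in (:ids)";
        return namedTemplate.queryForList(sql, Map.of("ids", datasourceIds), Long.class);
    }
}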
@ -0,0 +1,465 @@ |
|||||
|
package com.techsor.datacenter.receiver.service.impl; |
||||
|
|
||||
|
import com.alibaba.fastjson2.JSON; |
||||
|
import com.google.gson.Gson; |
||||
|
import com.google.gson.JsonArray; |
||||
|
import com.google.gson.JsonElement; |
||||
|
import com.jayway.jsonpath.JsonPath; |
||||
|
import com.techsor.datacenter.receiver.entity.common.MqttClientConfig; |
||||
|
import com.techsor.datacenter.receiver.entity.datasource.DatasourceConfigEntity; |
||||
|
import com.techsor.datacenter.receiver.service.DataTransService; |
||||
|
import com.techsor.datacenter.receiver.service.GuavaRedisCache; |
||||
|
import com.techsor.datacenter.receiver.service.IMQTTService; |
||||
|
import com.techsor.datacenter.receiver.service.MQTTCrtService; |
||||
|
import com.techsor.datacenter.receiver.utils.JsonUtils; |
||||
|
import com.techsor.datacenter.receiver.utils.MyHTTPResponse; |
||||
|
import com.techsor.datacenter.receiver.utils.RandomUtils; |
||||
|
import com.techsor.datacenter.receiver.utils.SslUtil; |
||||
|
import lombok.extern.slf4j.Slf4j; |
||||
|
import org.apache.commons.lang3.StringUtils; |
||||
|
import org.apache.commons.lang3.tuple.Pair; |
||||
|
import org.eclipse.paho.client.mqttv3.MqttConnectOptions; |
||||
|
import org.springframework.beans.factory.annotation.Value; |
||||
|
import org.springframework.data.redis.core.RedisTemplate; |
||||
|
import org.springframework.integration.mqtt.core.DefaultMqttPahoClientFactory; |
||||
|
import org.springframework.integration.mqtt.core.MqttPahoClientFactory; |
||||
|
import org.springframework.integration.mqtt.inbound.MqttPahoMessageDrivenChannelAdapter; |
||||
|
import org.springframework.integration.mqtt.support.DefaultPahoMessageConverter; |
||||
|
import org.springframework.messaging.Message; |
||||
|
import org.springframework.messaging.support.AbstractMessageChannel; |
||||
|
import org.springframework.scheduling.TaskScheduler; |
||||
|
import org.springframework.stereotype.Service; |
||||
|
|
||||
|
import jakarta.annotation.Resource; |
||||
|
import javax.net.ssl.SSLContext; |
||||
|
import javax.net.ssl.SSLSocketFactory; |
||||
|
import javax.net.ssl.TrustManager; |
||||
|
import javax.net.ssl.X509TrustManager; |
||||
|
import java.security.SecureRandom; |
||||
|
import java.security.cert.X509Certificate; |
||||
|
import java.time.Duration; |
||||
|
import java.time.LocalDate; |
||||
|
import java.time.ZoneId; |
||||
|
import java.time.ZonedDateTime; |
||||
|
import java.time.format.DateTimeFormatter; |
||||
|
import java.util.*; |
||||
|
import java.util.concurrent.*; |
||||
|
|
||||
|
/** |
||||
|
* MQTT service implementation that manages adding and removing MQTT consumer clients and processes MQTT messages. |
||||
|
*/ |
||||
|
@Slf4j |
||||
|
@Service |
||||
|
public class MQTTServiceImpl implements IMQTTService { |
||||
|
|
||||
|
|
||||
|
private MqttConsumerCache consumerCache = new MqttConsumerCache(2000); // Max 2000 consumers in cache
|
||||
|
|
||||
|
@Resource |
||||
|
private DataTransService dataTransService; |
||||
|
|
||||
|
@Value("${mqtt.keepAliveInterval}") |
||||
|
private Integer keepAliveInterval; |
||||
|
|
||||
|
@Resource |
||||
|
MQTTCrtService mqttCrtService; |
||||
|
@Resource |
||||
|
private TaskScheduler taskScheduler; |
||||
|
|
||||
|
@Resource |
||||
|
private RedisTemplate<String, Object> redisTemplate; |
||||
|
|
||||
|
@Resource |
||||
|
private Executor postSenderThreadTaskExecutor; |
||||
|
|
||||
|
@Resource |
||||
|
private GuavaRedisCache guavaRedisCache; |
||||
|
/** |
||||
|
* Creates an MQTT client factory instance from the given MQTT client configuration. |
||||
|
* |
||||
|
* @param config the MQTT client configuration |
||||
|
* @return the created MQTT client factory instance |
||||
|
* @throws Exception if an error occurs while creating the factory |
||||
|
*/ |
||||
|
private MqttPahoClientFactory mqttClientFactory(MqttClientConfig config) throws Exception { |
||||
|
DefaultMqttPahoClientFactory factory = new DefaultMqttPahoClientFactory(); |
||||
|
MqttConnectOptions options = new MqttConnectOptions(); |
||||
|
options.setServerURIs(new String[]{config.getUrl()}); |
||||
|
options.setUserName(config.getUsername()); |
||||
|
options.setPassword(config.getPassword().toCharArray()); |
||||
|
options.setMqttVersion(MqttConnectOptions.MQTT_VERSION_3_1); |
||||
|
|
||||
|
if(config.isSslState()){ |
||||
|
String caCrtFile=config.getCaPath(); |
||||
|
String crtFile=config.getClientCrtPath(); |
||||
|
String keyFile=config.getClientKeyPath(); |
||||
|
|
||||
|
String localCaCrtFilePath=this.mqttCrtService.getFilePath(caCrtFile); |
||||
|
String localCrtFile=this.mqttCrtService.getFilePath(crtFile); |
||||
|
String localKeyFile=this.mqttCrtService.getFilePath(keyFile); |
||||
|
options.setSocketFactory(SslUtil.getSocketFactory(localCaCrtFilePath, localCrtFile, localKeyFile, "")); |
||||
|
options.setHttpsHostnameVerificationEnabled(false); |
||||
|
} |
||||
|
if (Objects.nonNull(config.getConnectionTimeout())) { |
||||
|
if (config.getConnectionTimeout() > 0){ |
||||
|
options.setConnectionTimeout(config.getConnectionTimeout()); |
||||
|
}else { |
||||
|
options.setConnectionTimeout(1000); |
||||
|
} |
||||
|
} |
||||
|
// Set mqtt-heartbeat interval
|
||||
|
options.setKeepAliveInterval(this.keepAliveInterval); |
||||
|
options.setAutomaticReconnect(true); |
||||
|
factory.setConnectionOptions(options); |
||||
|
return factory; |
||||
|
} |
||||
|
|
||||
|
/** |
||||
|
* Adds an MQTT consumer client to the consumer cache and starts it. |
||||
|
* |
||||
|
* @param id the unique identifier of the consumer client |
||||
|
* @param config the MQTT client configuration |
||||
|
* @param datasourceConfigEntity the datasource configuration entity |
||||
|
*/ |
||||
|
@Override |
||||
|
public void addMQTTConsumerClient(Long companyId,Long id, MqttClientConfig config, DatasourceConfigEntity datasourceConfigEntity) { |
||||
|
if (!consumerCache.containsKey(String.valueOf(id))) { |
||||
|
MqttConsumer consumer = null; |
||||
|
try { |
||||
|
if (Objects.isNull(config)){ |
||||
|
return; |
||||
|
} |
||||
|
consumer = new MqttConsumer(config, mqttClientFactory(config),this.dataTransService, |
||||
|
this.taskScheduler,datasourceConfigEntity,this.postSenderThreadTaskExecutor, |
||||
|
this.redisTemplate, |
||||
|
this.guavaRedisCache |
||||
|
); |
||||
|
} catch (Exception e) { |
||||
|
// throw new RuntimeException(e);
|
||||
|
log.error("addMQTTConsumerClient error:{}",e.getMessage()); |
||||
|
return; |
||||
|
} |
||||
|
consumerCache.put(String.valueOf(id), consumer); |
||||
|
consumer.start(); |
||||
|
} |
||||
|
} |
||||
|
/** |
||||
|
* Removes the specified MQTT consumer client from the cache and stops it. |
||||
|
* |
||||
|
* @param id the unique identifier of the consumer client to remove |
||||
|
*/ |
||||
|
@Override |
||||
|
public void removeMQTTConsumerClient(Long id) { |
||||
|
String key=String.valueOf(id); |
||||
|
if (this.consumerCache.containsKey(key)){ |
||||
|
this.consumerCache.get(key).stop(); |
||||
|
this.consumerCache.remove(key); |
||||
|
} |
||||
|
} |
||||
|
} |
||||
|
|
||||
|
|
||||
|
/** |
||||
|
* MQTT consumer that receives and processes MQTT messages. |
||||
|
*/ |
||||
|
@Slf4j |
||||
|
class MqttConsumer { |
||||
|
private MqttPahoMessageDrivenChannelAdapter adapter; |
||||
|
private MqttClientConfig config; |
||||
|
|
||||
|
|
||||
|
private DatasourceConfigEntity datasourceConfigEntity; |
||||
|
|
||||
|
private Executor postSenderThreadTaskExecutor; |
||||
|
|
||||
|
|
||||
|
private DataTransService dataTransService; |
||||
|
|
||||
|
// Use a ConcurrentLinkedQueue instead of a List to store requests that need to be retried
|
||||
|
private final ConcurrentLinkedQueue<Pair<String, String>> retryRequests = new ConcurrentLinkedQueue<>(); |
||||
|
private final ScheduledExecutorService scheduleRetryTaskExecutor = Executors.newScheduledThreadPool(1); |
||||
|
private static final int MAX_RETRY_ATTEMPTS = 5; |
||||
|
private final Map<Pair<String, String>, Integer> retryAttempts = new ConcurrentHashMap<>(); |
||||
|
|
||||
|
private RedisTemplate<String, Object> redisTemplate; |
||||
|
|
||||
|
|
||||
|
private GuavaRedisCache guavaRedisCache; |
||||
|
/** |
||||
|
* Constructs a new MQTT consumer instance. |
||||
|
* Initializes the MQTT consumer and sets up the message handling logic. |
||||
|
* |
||||
|
* @param config the MQTT client configuration, including connection info and client-specific settings |
||||
|
* @param clientFactory the factory that creates MQTT clients with connection options built from the configuration |
||||
|
* @param dataTransService the data transfer service that forwards received MQTT messages |
||||
|
* @param taskScheduler the task scheduler used for asynchronous tasks such as message retransmission |
||||
|
* @param datasourceConfigEntity the datasource configuration entity containing processing rules and target info |
||||
|
* @param postSenderThreadTaskExecutor the executor for background tasks such as posting messages to their destination |
||||
|
*/ |
||||
|
public MqttConsumer(MqttClientConfig config, MqttPahoClientFactory clientFactory, |
||||
|
DataTransService dataTransService, |
||||
|
TaskScheduler taskScheduler, |
||||
|
DatasourceConfigEntity datasourceConfigEntity, |
||||
|
Executor postSenderThreadTaskExecutor, |
||||
|
RedisTemplate<String, Object> redisTemplate, |
||||
|
GuavaRedisCache guavaRedisCache |
||||
|
) { |
||||
|
this.config = config; |
||||
|
if (StringUtils.isBlank(config.getClientId())) { |
||||
|
config.setClientId(UUID.randomUUID().toString()); |
||||
|
} |
||||
|
String clientId = config.getClientId()+ RandomUtils.unique3DigitGenerator()+UUID.randomUUID().toString(); |
||||
|
String topics=config.getTopic(); |
||||
|
if (StringUtils.contains(topics,",")){ |
||||
|
String[] topicArrays=topics.split(","); |
||||
|
this.adapter = new MqttPahoMessageDrivenChannelAdapter(clientId, clientFactory, topicArrays); |
||||
|
}else{ |
||||
|
this.adapter = new MqttPahoMessageDrivenChannelAdapter(clientId, clientFactory, config.getTopic()); |
||||
|
} |
||||
|
|
||||
|
adapter.setCompletionTimeout(5000); |
||||
|
adapter.setRecoveryInterval(5000); |
||||
|
adapter.setConverter(new DefaultPahoMessageConverter()); |
||||
|
if (!Objects.nonNull(config.getQos())){ |
||||
|
adapter.setQos(1); |
||||
|
}else{ |
||||
|
adapter.setQos(config.getQos()); |
||||
|
} |
||||
|
this.dataTransService = dataTransService; |
||||
|
this.datasourceConfigEntity=datasourceConfigEntity; |
||||
|
this.postSenderThreadTaskExecutor=postSenderThreadTaskExecutor; |
||||
|
adapter.setTaskScheduler(taskScheduler); |
||||
|
final DataTransService finalDataTransService=dataTransService; |
||||
|
this.redisTemplate=redisTemplate; |
||||
|
this.guavaRedisCache=guavaRedisCache; |
||||
|
adapter.setOutputChannel(new AbstractMessageChannel() { |
||||
|
@Override |
||||
|
protected boolean sendInternal(Message<?> message, long timeout) { |
||||
|
// Handle the received message here
|
||||
|
log.debug("Received Message:{}", message.getPayload()); |
||||
|
if (message.getPayload() == null) { |
||||
|
return true; |
||||
|
} |
||||
|
// Extract the device id from the message based on the configured device id position
|
||||
|
String deviceIdPosition = datasourceConfigEntity.getDeviceIdPosition(); |
||||
|
|
||||
|
// Skip the message if no device id position is configured
|
||||
|
if (StringUtils.isEmpty(deviceIdPosition)) { |
||||
|
log.warn("deviceIdPosition is null:==>{}", datasourceConfigEntity); |
||||
|
return true; |
||||
|
} |
||||
|
String[] deviceIdPositionArrays; |
||||
|
if (JsonUtils.isJsonArray(deviceIdPosition)){ |
||||
|
deviceIdPositionArrays= JSON.parseArray(deviceIdPosition,String.class).toArray(new String[0]); |
||||
|
}else { |
||||
|
deviceIdPositionArrays=new String[]{deviceIdPosition}; |
||||
|
} |
||||
|
// The payload may contain multiple device ids to resolve
|
||||
|
|
||||
|
Gson gson = new Gson(); |
||||
|
String payloadStr = message.getPayload() instanceof String |
||||
|
? (String) message.getPayload() |
||||
|
: gson.toJson(message.getPayload()); |
||||
|
|
||||
|
String trimData = StringUtils.trim(payloadStr); |
||||
|
if (!JsonUtils.isJson(trimData)) { |
||||
|
log.warn("data is not json:==>{}", datasourceConfigEntity, trimData); |
||||
|
return true; |
||||
|
} |
||||
|
if (JsonUtils.isJsonArray(trimData)) { |
||||
|
|
||||
|
postSenderThreadTaskExecutor.execute(() -> { |
||||
|
JsonArray resultJsonArrays = gson.fromJson(trimData, JsonArray.class); |
||||
|
for (JsonElement element : resultJsonArrays) { |
||||
|
String currentJsonValue = gson.toJson(element); |
||||
|
// Resolve the device id position that matches this payload
|
||||
|
String finalDevicePosition=extractDevicePosition(currentJsonValue,deviceIdPositionArrays); |
||||
|
String deviceId = JsonPath.read(currentJsonValue, finalDevicePosition); |
||||
|
|
||||
|
if (StringUtils.isEmpty(deviceId)) { |
||||
|
log.warn("deviceId is null:==>{}", datasourceConfigEntity); |
||||
|
continue; |
||||
|
} |
||||
|
// Count the message for this device
|
||||
|
guavaRedisCache.incrementDailyDeviceIdCount(deviceId); |
||||
|
MyHTTPResponse response=finalDataTransService.transferData(deviceId, currentJsonValue); |
||||
|
if (response.getCode()!=200){ |
||||
|
log.error("transferData error:{}",currentJsonValue); |
||||
|
synchronized (retryRequests){ |
||||
|
retryRequests.add(Pair.of(deviceId,currentJsonValue)); |
||||
|
} |
||||
|
} |
||||
|
} |
||||
|
}); |
||||
|
} else { |
||||
|
if(StringUtils.isEmpty(trimData)){ |
||||
|
return true; |
||||
|
} |
||||
|
if (!JsonUtils.isJson(trimData)) { |
||||
|
return true; |
||||
|
} |
||||
|
String deviceId = JsonPath.read(trimData, deviceIdPosition); |
||||
|
if (StringUtils.isEmpty(deviceId)) { |
||||
|
log.warn("deviceId is null:==>{}", datasourceConfigEntity); |
||||
|
return true; |
||||
|
} |
||||
|
guavaRedisCache.incrementDailyDeviceIdCount(deviceId); |
||||
|
MyHTTPResponse response=finalDataTransService.transferData(deviceId, trimData); |
||||
|
if (response.getCode()!=200){ |
||||
|
log.error("transferData error:{}",trimData); |
||||
|
synchronized (retryRequests){ |
||||
|
retryRequests.add(Pair.of(deviceId,trimData)); |
||||
|
} |
||||
|
} |
||||
|
} |
||||
|
|
||||
|
return true; |
||||
|
} |
||||
|
}); |
||||
|
// The scheduled task only processes the head of the queue, to avoid handling the whole queue at once
|
||||
|
scheduleRetryTaskExecutor.scheduleWithFixedDelay(() -> { |
||||
|
if (!retryRequests.isEmpty()) { |
||||
|
Pair<String, String> request = retryRequests.poll(); // retrieve and remove the head of the queue
|
||||
|
if (request != null) { |
||||
|
retryFailedMessage(request); |
||||
|
} |
||||
|
} |
||||
|
}, 0, 5, TimeUnit.SECONDS); |
||||
|
} |
||||
|
|
||||
|
|
||||
|
private void retryFailedMessage(Pair<String, String> request) { |
||||
|
Integer attempts = retryAttempts.getOrDefault(request, 0); |
||||
|
if (attempts >= MAX_RETRY_ATTEMPTS) { |
||||
|
log.error("Reached max retry attempts, discarding it."); |
||||
|
retryAttempts.remove(request); |
||||
|
} else { |
||||
|
try { |
||||
|
sendAndRetry(request); |
||||
|
} catch (Exception e) { |
||||
|
log.error("Error retrying, request: {}", request, e); |
||||
|
} |
||||
|
} |
||||
|
} |
||||
|
|
||||
|
|
||||
|
private void sendAndRetry(Pair<String, String> request) { |
||||
|
MyHTTPResponse response = dataTransService.transferData(request.getLeft(), request.getRight()); |
||||
|
if (response.getCode() != 200) { |
||||
|
log.error("Transfer data failed, retrying... Request: {}", request); |
||||
|
log.error("Response: {}", new Gson().toJson(response)); |
||||
|
Integer attempts = retryAttempts.compute(request, (k, v) -> (v == null) ? 1 : v + 1); |
||||
|
if (attempts < MAX_RETRY_ATTEMPTS) { |
||||
|
retryRequests.offer(request); // re-queue at the tail
|
||||
|
} else { |
||||
|
log.error("Reached max retry attempts, discarding request: {}", request); |
||||
|
retryAttempts.remove(request); |
||||
|
} |
||||
|
} else { |
||||
|
log.info("Data transferred successfully on retry. Request: {}", request); |
||||
|
retryAttempts.remove(request); |
||||
|
} |
||||
|
} |
||||
|
|
||||
|
private String extractDevicePosition(String rootCurrentJsonValue,String[] deviceIdPositionArrays){ |
||||
|
String result=""; |
||||
|
for (int i = 0; i < deviceIdPositionArrays.length; i++) { |
||||
|
try{ |
||||
|
Object value=JsonPath.read(rootCurrentJsonValue,deviceIdPositionArrays[i]); |
||||
|
if (!Objects.isNull(value)){ |
||||
|
result = deviceIdPositionArrays[i]; |
||||
|
break; |
||||
|
} |
||||
|
}catch (Exception e){ |
||||
|
|
||||
|
} |
||||
|
} |
||||
|
return result; |
||||
|
} |
||||
|
public MqttClientConfig getConfig() { |
||||
|
return this.config; |
||||
|
} |
||||
|
|
||||
|
public String getKeyForToday() { |
||||
|
LocalDate today = LocalDate.now(); |
||||
|
// Use the Japan time zone
|
||||
|
ZoneId japanZone = ZoneId.of("Asia/Tokyo"); |
||||
|
|
||||
|
// Get the current date and time in the Japan time zone
|
||||
|
ZonedDateTime nowInJapan = ZonedDateTime.now(japanZone); |
||||
|
DateTimeFormatter formatter = DateTimeFormatter.ofPattern("yyyy-MM-dd"); |
||||
|
// Current date in Japan
|
||||
|
LocalDate todayInJapan = nowInJapan.toLocalDate(); |
||||
|
return "receiver:device:counts:" + formatter.format(yesterdayInJapan); |
||||
|
} |
||||
|
|
||||
|
|
||||
|
/** |
||||
|
* Starts the MQTT consumer client and begins receiving and processing messages. |
||||
|
*/ |
||||
|
public void start() { |
||||
|
this.adapter.start(); |
||||
|
log.info("MqttConsumer start success:{}", this.config); |
||||
|
} |
||||
|
/** |
||||
|
* Stops the MQTT consumer client and stops receiving and processing messages. |
||||
|
*/ |
||||
|
public void stop() { |
||||
|
log.info("MqttConsumer stop success:{}", this.config); |
||||
|
safeDisconnectMqttClient(); |
||||
|
} |
||||
|
private void safeDisconnectMqttClient() { |
||||
|
final int maxRetries = 3; // maximum number of retries
|
||||
|
int retryCount = 0; |
||||
|
|
||||
|
while (true) { |
||||
|
try { |
||||
|
if (this.adapter != null && this.adapter.isRunning()) { |
||||
|
this.adapter.stop(); // try to stop the adapter
|
||||
|
this.adapter.destroy(); // destroy the adapter
|
||||
|
log.info("MQTT 客户端成功断开!!!!!!!!"); |
||||
|
break; // exit the loop on success
|
||||
|
} |
||||
|
} catch (Exception e) { |
||||
|
log.info("尝试断开 MQTT 客户端时发生错误: " + e.getMessage()); |
||||
|
retryCount++; |
||||
|
if (retryCount >= maxRetries) { |
||||
|
log.info("尝试断开 MQTT 客户端失败达到最大次数,停止重试。"); |
||||
|
break; // reached the maximum number of retries, exit the loop
|
||||
|
} |
||||
|
try { |
||||
|
Thread.sleep(2000); // wait 2 seconds before retrying
|
||||
|
} catch (InterruptedException ie) { |
||||
|
Thread.currentThread().interrupt(); // restore the interrupt status
|
||||
|
log.info("重试等待被中断。"); |
||||
|
break; |
||||
|
} |
||||
|
log.info("重试断开 MQTT 客户端。.............................."); |
||||
|
} |
||||
|
} |
||||
|
} |
||||
|
} |
||||
|
/** |
||||
|
* Cache for MQTT consumer client instances. |
||||
|
* When the cache reaches its maximum capacity, the oldest consumer client is automatically removed and stopped. |
||||
|
*/ |
||||
|
class MqttConsumerCache extends LinkedHashMap<String, MqttConsumer> { |
||||
|
private final int maxSize; |
||||
|
|
||||
|
public MqttConsumerCache(int maxSize) { |
||||
|
super(maxSize + 1, 1.0f, true); |
||||
|
this.maxSize = maxSize; |
||||
|
} |
||||
|
|
||||
|
@Override |
||||
|
protected boolean removeEldestEntry(Map.Entry<String, MqttConsumer> eldest) { |
||||
|
boolean shouldRemove = size() > maxSize; |
||||
|
|
||||
|
if (shouldRemove) { |
||||
|
MqttConsumer consumer = eldest.getValue(); |
||||
|
consumer.stop(); |
||||
|
} |
||||
|
|
||||
|
return shouldRemove; |
||||
|
} |
||||
|
} |
||||
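MqttConsumerCache relies on LinkedHashMap's access-order constructor and its removeEldestEntry hook to evict (and stop) the least-recently-used consumer once the cache exceeds maxSize. A tiny standalone sketch of that eviction behaviour, with strings standing in for consumers:

import java.util.LinkedHashMap;
import java.util.Map;

public class LruSketch {
    public static void main(String[] args) {
        int maxSize = 2;
        // accessOrder=true makes get() refresh an entry's position, just like the cache above.
        LinkedHashMap<String, String> cache = new LinkedHashMap<>(maxSize + 1, 1.0f, true) {
            @Override
            protected boolean removeEldestEntry(Map.Entry<String, String> eldest) {
                return size() > maxSize; // the real cache also stops the evicted consumer here
            }
        };
        cache.put("1", "a");
        cache.put("2", "b");
        cache.get("1");      // refresh "1"
        cache.put("3", "c"); // evicts "2", the least recently used entry
        System.out.println(cache.keySet()); // [1, 3]
    }
}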
Some files were not shown because too many files changed in this diff