commit a89ff65254
8 changed files with 598 additions and 0 deletions
@@ -0,0 +1,39 @@
target/
!.mvn/wrapper/maven-wrapper.jar
!**/src/main/**/target/
!**/src/test/**/target/

### IntelliJ IDEA ###
.idea
.idea/modules.xml
.idea/jarRepositories.xml
.idea/compiler.xml
.idea/libraries/
*.iws
*.iml
*.ipr

### Eclipse ###
.apt_generated
.classpath
.factorypath
.project
.settings
.springBeans
.sts4-cache

### NetBeans ###
/nbproject/private/
/nbbuild/
/dist/
/nbdist/
/.nb-gradle/
build/
!**/src/main/**/build/
!**/src/test/**/build/

### VS Code ###
.vscode/

### Mac OS ###
.DS_Store
@@ -0,0 +1,103 @@
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
  <modelVersion>4.0.0</modelVersion>
  <groupId>com.zhukovasky</groupId>
  <artifactId>dashboard-statistics-lambda</artifactId>
  <packaging>jar</packaging>
  <version>1.0-SNAPSHOT</version>
  <name>dashboard-statistics-lambda Maven Webapp</name>
  <url>http://maven.apache.org</url>
  <dependencies>
    <dependency>
      <groupId>junit</groupId>
      <artifactId>junit</artifactId>
      <version>3.8.1</version>
      <scope>test</scope>
    </dependency>

    <!-- https://mvnrepository.com/artifact/com.amazonaws/aws-java-sdk-lambda -->
    <!-- AWS Lambda Core -->
    <dependency>
      <groupId>com.amazonaws</groupId>
      <artifactId>aws-lambda-java-core</artifactId>
      <version>1.4.0</version>
    </dependency>

    <!-- AWS SDK for S3 -->
    <dependency>
      <groupId>software.amazon.awssdk</groupId>
      <artifactId>s3</artifactId>
      <version>2.35.4</version>
      <scope>compile</scope>
    </dependency>

    <!-- MySQL JDBC -->
    <dependency>
      <groupId>mysql</groupId>
      <artifactId>mysql-connector-java</artifactId>
      <version>8.0.33</version>
    </dependency>

    <!-- CSV handling -->
    <dependency>
      <groupId>org.apache.commons</groupId>
      <artifactId>commons-csv</artifactId>
      <version>1.14.1</version>
    </dependency>

    <!-- https://mvnrepository.com/artifact/software.amazon.awssdk/url-connection-client -->
    <dependency>
      <groupId>software.amazon.awssdk</groupId>
      <artifactId>url-connection-client</artifactId>
      <version>2.34.9</version>
      <scope>compile</scope>
    </dependency>

    <!-- Logging -->
    <dependency>
      <groupId>org.slf4j</groupId>
      <artifactId>slf4j-api</artifactId>
      <version>1.7.36</version>
    </dependency>
    <dependency>
      <groupId>ch.qos.logback</groupId>
      <artifactId>logback-classic</artifactId>
      <version>1.2.12</version>
    </dependency>

  </dependencies>
  <build>
    <finalName>dashboard-statistics-lambda</finalName>
    <plugins>

      <plugin>
        <groupId>org.apache.maven.plugins</groupId>
        <artifactId>maven-dependency-plugin</artifactId>
        <executions>
          <execution>
            <phase>package</phase>
            <goals>
              <goal>copy-dependencies</goal>
            </goals>
            <configuration>
              <outputDirectory>${project.build.directory}/lib</outputDirectory>
              <overWriteReleases>false</overWriteReleases>
              <overWriteSnapshots>true</overWriteSnapshots>
              <excludeTransitive>false</excludeTransitive>
              <includeScope>compile</includeScope>
            </configuration>
          </execution>
        </executions>
      </plugin>

      <plugin>
        <groupId>org.apache.maven.plugins</groupId>
        <artifactId>maven-compiler-plugin</artifactId>
        <configuration>
          <source>17</source>
          <target>17</target>
        </configuration>
      </plugin>
    </plugins>
  </build>
</project>
@@ -0,0 +1,89 @@
package com.dashboard.aws.lambda.handler;

import com.amazonaws.services.lambda.runtime.Context;
import com.amazonaws.services.lambda.runtime.RequestHandler;
import com.dashboard.aws.lambda.service.MySQLService;
import com.dashboard.aws.lambda.service.S3Service;
import com.dashboard.aws.lambda.util.CsvUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.time.LocalDate;
import java.time.ZoneId;
import java.util.*;

public class BackupHandler implements RequestHandler<Map<String, Object>, String> {

    private static final Logger logger = LoggerFactory.getLogger(BackupHandler.class);

    private final MySQLService mysqlService = new MySQLService();
    private final S3Service s3Service = new S3Service();

    @Override
    public String handleRequest(Map<String, Object> event, Context context) {
        try {
            LocalDate targetDate = LocalDate.now(ZoneId.of("Asia/Tokyo")).minusDays(3);
            long cutoff = targetDate.atStartOfDay(ZoneId.of("Asia/Tokyo")).toInstant().toEpochMilli();

            // Look up the company IDs that qualify for backup
            List<Long> companyIds = mysqlService.getActiveCompanyIds();
            logger.info("company id list: {}", companyIds);

            for (Long companyId : companyIds) {
                String schema = "data_center_dongjian_" + companyId;

                for (String table : List.of("dashboard_record_accumulate", "dashboard_record_measure")) {
                    // Query the rows older than the cutoff
                    List<Map<String, Object>> rows = mysqlService.queryOldData(schema, table, cutoff);
                    if (rows.isEmpty()) {
                        logger.info("[{}][{}] no data....", schema, table);
                        continue;
                    }

                    // Group rows by year/month/day
                    Map<String, List<Map<String, Object>>> grouped = new HashMap<>();
                    for (Map<String, Object> row : rows) {
                        int y = (int) row.get("date_year");
                        int m = (int) row.get("date_month");
                        int d = (int) row.get("date_day");
                        // Zero-pad month and day to two digits
                        String monthStr = String.format("%02d", m);
                        String dayStr = String.format("%02d", d);
                        String key = String.format("%d-%s-%s", y, monthStr, dayStr);

                        grouped.computeIfAbsent(key, k -> new ArrayList<>()).add(row);
                    }

                    // Upload each group
                    boolean allSuccess = true;
                    for (Map.Entry<String, List<Map<String, Object>>> entry : grouped.entrySet()) {
                        String dateKey = entry.getKey();
                        byte[] csvBytes = CsvUtil.toCsvBytes(entry.getValue());
                        String s3Key = String.format("%s/%s/%s.csv", table, companyId, dateKey);
                        try {
                            s3Service.upload(s3Key, csvBytes);
                            logger.info("[{}][{}] backup success -> {}", schema, table, s3Key);
                        } catch (Exception e) {
                            allSuccess = false;
                            logger.error("[{}][{}] backup failed -> {}", schema, table, s3Key, e);
                        }
                    }

                    // Delete the old rows only if every upload succeeded
                    if (allSuccess) {
                        mysqlService.deleteOldData(schema, table, cutoff);
                        logger.info("[{}][{}] delete old data", schema, table);
                    } else {
                        logger.error("[{}][{}] not all files uploaded successfully, skip delete", schema, table);
                    }
                }
            }

            return "backup process finished";

        } catch (Exception e) {
            logger.error("backup error", e);
            return "backup error:" + e.getMessage();
        }
    }
}
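
For reference (not part of the commit): a minimal, standalone sketch of the cutoff and object-key derivation the handler performs. The company ID and date below are illustrative values only.

import java.time.LocalDate;
import java.time.ZoneId;

public class CutoffAndKeySketch {
    public static void main(String[] args) {
        // Same cutoff rule as the handler: midnight (JST) three days ago, in epoch milliseconds.
        ZoneId tokyo = ZoneId.of("Asia/Tokyo");
        LocalDate targetDate = LocalDate.now(tokyo).minusDays(3);
        long cutoff = targetDate.atStartOfDay(tokyo).toInstant().toEpochMilli();

        // Object key layout used for each upload: <table>/<companyId>/<yyyy-MM-dd>.csv
        String s3Key = String.format("%s/%s/%s.csv", "dashboard_record_measure", 42L, "2024-01-05");

        System.out.println("cutoff = " + cutoff);
        System.out.println("key    = " + s3Key); // dashboard_record_measure/42/2024-01-05.csv
    }
}
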
@@ -0,0 +1,77 @@
package com.dashboard.aws.lambda.service;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.sql.*;
import java.util.*;

public class MySQLService {

    private static final Logger logger = LoggerFactory.getLogger(MySQLService.class);

    private static final String MYSQL_URL = System.getenv("DB_URL"); // single shared instance URL
    private static final String DB_USER = System.getenv("DB_USER");
    private static final String DB_PASSWORD = System.getenv("DB_PASSWORD");

    /**
     * Fetch the company IDs that qualify for backup.
     */
    public List<Long> getActiveCompanyIds() throws SQLException {
        String sql = """
                SELECT id FROM data_center_new.basic_company
                WHERE flag != 1
                AND (parent_id = -1 OR parent_id = 1)
                """;

        try (Connection conn = DriverManager.getConnection(MYSQL_URL, DB_USER, DB_PASSWORD);
             PreparedStatement ps = conn.prepareStatement(sql);
             ResultSet rs = ps.executeQuery()) {

            List<Long> ids = new ArrayList<>();
            while (rs.next()) {
                ids.add(rs.getLong("id"));
            }
            return ids;
        }
    }

    public List<Map<String, Object>> queryOldData(String schema, String table, long cutoff) throws SQLException {
        String sql = String.format("SELECT * FROM %s.%s WHERE upload_at < ?", schema, table);
        logger.info("query sql: {}, cutoff: {}", sql, cutoff);

        try (Connection conn = DriverManager.getConnection(MYSQL_URL, DB_USER, DB_PASSWORD);
             PreparedStatement ps = conn.prepareStatement(sql)) {

            ps.setLong(1, cutoff);

            ResultSet rs = ps.executeQuery();
            ResultSetMetaData meta = rs.getMetaData();
            List<Map<String, Object>> list = new ArrayList<>();

            while (rs.next()) {
                Map<String, Object> row = new LinkedHashMap<>();
                for (int i = 1; i <= meta.getColumnCount(); i++) {
                    row.put(meta.getColumnName(i), rs.getObject(i));
                }
                list.add(row);
            }
            return list;
        }
    }

    public void deleteOldData(String schema, String table, long cutoff) throws SQLException {
        String sql = String.format("DELETE FROM %s.%s WHERE upload_at < ?", schema, table);
        logger.info("deleteOldData sql: {}, cutoff: {}", sql, cutoff);

        try (Connection conn = DriverManager.getConnection(MYSQL_URL, DB_USER, DB_PASSWORD);
             PreparedStatement ps = conn.prepareStatement(sql)) {

            ps.setLong(1, cutoff);
            int deleted = ps.executeUpdate();
            logger.info("[{}.{}] deleted {} rows", schema, table, deleted);
        }
    }

}
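
For reference (not part of the commit): a minimal local smoke test for MySQLService. It assumes DB_URL (a JDBC URL), DB_USER and DB_PASSWORD are exported in the environment; the class name is illustrative.

import com.dashboard.aws.lambda.service.MySQLService;

import java.sql.SQLException;
import java.util.List;

public class MySQLServiceSmokeTest {
    public static void main(String[] args) throws SQLException {
        // Requires DB_URL, DB_USER and DB_PASSWORD to be set before running.
        MySQLService service = new MySQLService();
        List<Long> ids = service.getActiveCompanyIds();
        System.out.println("active company ids: " + ids);
    }
}
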
@@ -0,0 +1,83 @@
package com.dashboard.aws.lambda.service;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import software.amazon.awssdk.auth.credentials.AwsBasicCredentials;
import software.amazon.awssdk.auth.credentials.StaticCredentialsProvider;
import software.amazon.awssdk.core.sync.RequestBody;
import software.amazon.awssdk.http.urlconnection.UrlConnectionHttpClient;
import software.amazon.awssdk.regions.Region;
import software.amazon.awssdk.services.s3.S3Client;
import software.amazon.awssdk.services.s3.model.PutObjectRequest;
import software.amazon.awssdk.services.s3.model.PutObjectResponse;

import java.util.Objects;

public class S3Service {

    private static final Logger logger = LoggerFactory.getLogger(S3Service.class);

    String accessKey = System.getenv("ACCESS_KEY_ID");
    String secretKey = System.getenv("SECRET_ACCESS_KEY");
    String bucketName = System.getenv("S3_BUCKET");

    private final S3Client s3;
    private final String bucket;

    public S3Service() {

        Objects.requireNonNull(accessKey, "environment variable ACCESS_KEY_ID must not be null");
        Objects.requireNonNull(secretKey, "environment variable SECRET_ACCESS_KEY must not be null");
        Objects.requireNonNull(bucketName, "environment variable S3_BUCKET must not be null");

        this.bucket = bucketName;

        this.s3 = S3Client.builder()
                .httpClientBuilder(UrlConnectionHttpClient.builder())
                .credentialsProvider(StaticCredentialsProvider.create(
                        AwsBasicCredentials.create(accessKey, secretKey)
                ))
                .region(Region.AP_NORTHEAST_1) // Tokyo region
                .build();
    }

    public S3Service(String accessKey, String secretKey, String bucketName) {
        Objects.requireNonNull(accessKey, "accessKey must not be null");
        Objects.requireNonNull(secretKey, "secretKey must not be null");
        Objects.requireNonNull(bucketName, "bucket name must not be null");

        this.bucket = bucketName;

        this.s3 = S3Client.builder()
                .credentialsProvider(StaticCredentialsProvider.create(
                        AwsBasicCredentials.create(accessKey, secretKey)
                ))
                .region(Region.AP_NORTHEAST_1) // Tokyo region
                .build();
    }

    /**
     * Upload a byte array to the given key.
     */
    public void upload(String key, byte[] content) {
        Objects.requireNonNull(key, "S3 key must not be null");
        Objects.requireNonNull(content, "upload content must not be null");

        PutObjectRequest request = PutObjectRequest.builder()
                .bucket(bucket)
                .key(key)
                .build();

        PutObjectResponse response = s3.putObject(request, RequestBody.fromBytes(content));
        logger.info("upload success: {}, ETag={}", key, response.eTag());
    }

    /**
     * Close the S3Client.
     */
    public void close() {
        if (s3 != null) {
            s3.close();
        }
    }
}
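
For reference (not part of the commit): a minimal usage sketch of S3Service via its explicit-credentials constructor. The credential placeholders, bucket name and object key are illustrative; the Lambda itself uses the no-arg constructor and reads the environment variables instead.

import com.dashboard.aws.lambda.service.S3Service;

import java.nio.charset.StandardCharsets;

public class S3UploadSketch {
    public static void main(String[] args) {
        // Placeholder credentials and bucket; replace with real values before running.
        S3Service s3Service = new S3Service("<access-key>", "<secret-key>", "example-backup-bucket");
        byte[] payload = "id,value\n1,100\n".getBytes(StandardCharsets.UTF_8);
        s3Service.upload("dashboard_record_measure/42/2024-01-05.csv", payload);
        s3Service.close();
    }
}
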
@@ -0,0 +1,165 @@
package com.dashboard.aws.lambda.util;

import java.math.BigDecimal;

/**
 * Utility class for BigDecimal-based addition, subtraction, multiplication,
 * division, rounding and related operations.
 *
 * Because Java's primitive floating-point types cannot represent decimal
 * values exactly, this class provides precise decimal arithmetic, including
 * add/subtract/multiply/divide and rounding.
 */
public class Arith {
    // Default scale for division
    private static final int DEF_DIV_SCALE = 10;

    // This class must not be instantiated
    private Arith() {
    }

    /**
     * Exact addition.
     * @param v1 augend
     * @param v2 addend
     * @return the sum of the two arguments
     */
    public static double add(double v1, double v2) {
        BigDecimal b1 = new BigDecimal(Double.toString(v1));
        BigDecimal b2 = new BigDecimal(Double.toString(v2));
        return b1.add(b2).doubleValue();
    }

    /**
     * Exact subtraction.
     * @param v1 minuend
     * @param v2 subtrahend
     * @return the difference of the two arguments
     */
    public static double sub(double v1, double v2) {
        BigDecimal b1 = new BigDecimal(Double.toString(v1));
        BigDecimal b2 = new BigDecimal(Double.toString(v2));
        return b1.subtract(b2).doubleValue();
    }

    /**
     * Exact multiplication.
     * @param v1 multiplicand
     * @param v2 multiplier
     * @return the product of the two arguments
     */
    public static double mul(double v1, double v2) {
        BigDecimal b1 = new BigDecimal(Double.toString(v1));
        BigDecimal b2 = new BigDecimal(Double.toString(v2));
        return b1.multiply(b2).doubleValue();
    }

    /**
     * (Relatively) exact division. When the division does not terminate,
     * the result is kept to 10 decimal places and rounded half up.
     * @param v1 dividend
     * @param v2 divisor
     * @return the quotient of the two arguments
     */
    public static double div(double v1, double v2) {
        return div(v1, v2, DEF_DIV_SCALE);
    }

    /**
     * (Relatively) exact division. When the division does not terminate,
     * the scale parameter controls the precision; digits beyond it are
     * rounded half up.
     * @param v1 dividend
     * @param v2 divisor
     * @param scale number of decimal places to keep
     * @return the quotient of the two arguments
     */
    public static double div(double v1, double v2, int scale) {
        if (scale < 0) {
            throw new IllegalArgumentException(
                "The scale must be a positive integer or zero");
        }
        BigDecimal b1 = new BigDecimal(Double.toString(v1));
        BigDecimal b2 = new BigDecimal(Double.toString(v2));
        return b1.divide(b2, scale, BigDecimal.ROUND_HALF_UP).doubleValue();
    }

    /**
     * Exact rounding to a given number of decimal places (half up).
     * @param v the value to round
     * @param scale number of decimal places to keep
     * @return the rounded result
     */
    public static double round(double v, int scale) {
        if (scale < 0) {
            throw new IllegalArgumentException(
                "The scale must be a positive integer or zero");
        }
        BigDecimal b = new BigDecimal(Double.toString(v));
        BigDecimal one = new BigDecimal("1");
        return b.divide(one, scale, BigDecimal.ROUND_HALF_UP).doubleValue();
    }

    /**
     * Exact conversion to float.
     * @param v the value to convert
     * @return the converted value
     */
    public static float convertsToFloat(double v) {
        BigDecimal b = BigDecimal.valueOf(v);
        return b.floatValue();
    }

    /**
     * Exact conversion to int (truncates, no rounding).
     * @param v the value to convert
     * @return the converted value
     */
    public static int convertsToInt(double v) {
        BigDecimal b = BigDecimal.valueOf(v);
        return b.intValue();
    }

    /**
     * Exact conversion to long.
     * @param v the value to convert
     * @return the converted value
     */
    public static long convertsToLong(double v) {
        BigDecimal b = BigDecimal.valueOf(v);
        return b.longValue();
    }

    /**
     * Return the larger of two values.
     * @param v1 first value to compare
     * @param v2 second value to compare
     * @return the larger of the two values
     */
    public static double returnMax(double v1, double v2) {
        BigDecimal b1 = BigDecimal.valueOf(v1);
        BigDecimal b2 = BigDecimal.valueOf(v2);
        return b1.max(b2).doubleValue();
    }

    /**
     * Return the smaller of two values.
     * @param v1 first value to compare
     * @param v2 second value to compare
     * @return the smaller of the two values
     */
    public static double returnMin(double v1, double v2) {
        BigDecimal b1 = BigDecimal.valueOf(v1);
        BigDecimal b2 = BigDecimal.valueOf(v2);
        return b1.min(b2).doubleValue();
    }

    /**
     * Exact comparison of two values.
     * @param v1 first value to compare
     * @param v2 second value to compare
     * @return 0 if the two values are equal, 1 if the first is larger, -1 otherwise
     */
    public static int compareTo(double v1, double v2) {
        BigDecimal b1 = BigDecimal.valueOf(v1);
        BigDecimal b2 = BigDecimal.valueOf(v2);
        return b1.compareTo(b2);
    }

}
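
For reference (not part of the commit): a short demonstration of why this class exists, contrasting plain double arithmetic with the BigDecimal-backed helpers; the class name and sample values are illustrative.

import com.dashboard.aws.lambda.util.Arith;

public class ArithDemo {
    public static void main(String[] args) {
        // Plain double arithmetic carries binary rounding error.
        System.out.println(0.05 + 0.01);           // not exactly 0.06
        System.out.println(Arith.add(0.05, 0.01)); // 0.06

        System.out.println(Arith.div(1, 3, 4));    // 0.3333 (half-up at scale 4)
        System.out.println(Arith.round(2.675, 2)); // 2.68
    }
}
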
@@ -0,0 +1,29 @@
package com.dashboard.aws.lambda.util;

import java.io.ByteArrayOutputStream;
import java.io.OutputStreamWriter;
import java.io.PrintWriter;
import java.nio.charset.StandardCharsets;
import java.util.List;
import java.util.Map;

public class CsvUtil {
    public static byte[] toCsvBytes(List<Map<String, Object>> rows) {
        if (rows == null || rows.isEmpty()) return new byte[0];
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        // Write through an explicit UTF-8 writer so the bytes do not depend on the platform default charset
        PrintWriter writer = new PrintWriter(new OutputStreamWriter(out, StandardCharsets.UTF_8));

        // Header row
        List<String> headers = rows.get(0).keySet().stream().toList();
        writer.println(String.join(",", headers));

        // Data rows
        for (Map<String, Object> row : rows) {
            writer.println(String.join(",", headers.stream()
                    .map(h -> String.valueOf(row.getOrDefault(h, "")))
                    .toList()));
        }

        writer.flush();
        return out.toByteArray();
    }
}
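
For reference (not part of the commit): a minimal usage sketch of CsvUtil. Note that values are joined with plain commas, so fields containing commas or newlines are not quoted or escaped; the sample row below is illustrative.

import com.dashboard.aws.lambda.util.CsvUtil;

import java.nio.charset.StandardCharsets;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

public class CsvUtilDemo {
    public static void main(String[] args) {
        // LinkedHashMap preserves column order, matching how MySQLService builds each row.
        Map<String, Object> row = new LinkedHashMap<>();
        row.put("id", 1L);
        row.put("date_year", 2024);
        row.put("value", 99.5);

        byte[] bytes = CsvUtil.toCsvBytes(List.of(row));
        System.out.print(new String(bytes, StandardCharsets.UTF_8));
        // id,date_year,value
        // 1,2024,99.5
    }
}
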
@@ -0,0 +1,13 @@
<configuration>
    <property name="LOG_LEVEL" value="${LOG_LEVEL:-INFO}" />

    <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
        <encoder>
            <pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n</pattern>
        </encoder>
    </appender>

    <root level="${LOG_LEVEL}">
        <appender-ref ref="STDOUT" />
    </root>
</configuration>