This commit is contained in:
2023-02-11 13:36:14 +08:00
23 changed files with 419 additions and 97 deletions

View File

@@ -0,0 +1,98 @@
package org.nl.config;
import com.alibaba.druid.filter.FilterChain;
import com.alibaba.druid.filter.FilterEventAdapter;
import com.alibaba.druid.proxy.jdbc.JdbcParameter;
import com.alibaba.druid.proxy.jdbc.PreparedStatementProxy;
import com.alibaba.druid.proxy.jdbc.ResultSetProxy;
import com.alibaba.druid.proxy.jdbc.StatementProxy;
import com.alibaba.druid.sql.SQLUtils;
import com.alibaba.druid.util.JdbcUtils;
import com.alibaba.fastjson.JSON;
import com.mysql.cj.jdbc.result.ResultSetImpl;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.MDC;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;
/**
 * Druid {@link FilterEventAdapter} that logs the SQL actually executed, with
 * bound parameter values substituted into the statement text, but only for
 * threads that carry a {@code "traceId"} entry in the SLF4J MDC.
 *
 * @author ZZQ
 * @date 2023/2/10 11:27
 */
@Slf4j
public class DruidFilter extends FilterEventAdapter {
    @Override
    public int preparedStatement_executeUpdate(FilterChain chain, PreparedStatementProxy statement) throws SQLException {
        // No extra behavior; delegates straight to the filter chain.
        return super.preparedStatement_executeUpdate(chain, statement);
    }

    @Override
    public int statement_executeUpdate(FilterChain chain, StatementProxy statement, String sql) throws SQLException {
        // No extra behavior; delegates straight to the filter chain.
        return super.statement_executeUpdate(chain, statement, sql);
    }

    /**
     * After any statement execution, logs the update SQL (parameters
     * substituted) when the current thread has a traceId and the statement
     * reported an update count greater than zero.
     *
     * @param statement the executed statement proxy
     * @param sql       the raw SQL text that was executed
     * @param result    whether execution produced a result set
     */
    @Override
    protected void statementExecuteAfter(StatementProxy statement, String sql, boolean result) {
        String traceId = MDC.get("traceId");
        String executeSql = sql;
        int count = 0;
        try {
            count = statement.getUpdateCount();
        } catch (Exception ex) {
            // Best effort only: some statements cannot report an update count.
            log.debug("[----SQL----] could not read update count: {}", ex.getMessage());
        }
        if (StringUtils.isNotEmpty(traceId) && count > 0) {
            try {
                if (statement.getParametersSize() > 0) {
                    List<Object> params = new ArrayList<>();
                    for (JdbcParameter value : statement.getParameters().values()) {
                        params.add(value.getValue());
                    }
                    executeSql = SQLUtils.format(executeSql, JdbcUtils.MYSQL, params);
                }
            } catch (Exception ex) {
                // Fall back to the raw SQL; logging must never break execution.
                log.warn("[-SQL解析异常-][{}]", ex.getMessage(), ex);
            }
            log.info("[----SQL----][update][ SQL: {} ]", executeSql);
        }
        super.statementExecuteAfter(statement, sql, result);
    }

    /**
     * Intercepts result-set retrieval to log SELECT statements together with
     * the number of rows returned, again only when a traceId is present.
     *
     * @throws SQLException propagated from the underlying filter chain
     */
    @Override
    public ResultSetProxy statement_getResultSet(FilterChain chain, StatementProxy statement) throws SQLException {
        ResultSetProxy rs = super.statement_getResultSet(chain, statement);
        String traceId = MDC.get("traceId");
        if (StringUtils.isNotEmpty(traceId)) {
            String executeSql = statement.getLastExecuteSql();
            int result = 0;
            try {
                if (rs != null) {
                    // NOTE(review): MySQL-specific unwrap; guarded so that a
                    // non-MySQL driver fails the log line, not the query itself.
                    ResultSetImpl rss = rs.getResultSetRaw().unwrap(ResultSetImpl.class);
                    result = rss.getRows().size();
                }
                if (statement.getParametersSize() > 0) {
                    List<Object> params = new ArrayList<>();
                    for (JdbcParameter value : statement.getParameters().values()) {
                        params.add(value.getValue());
                    }
                    executeSql = SQLUtils.format(executeSql, JdbcUtils.MYSQL, params);
                }
            } catch (Exception ex) {
                log.warn("[-SQL解析异常-][{}]", ex.getMessage(), ex);
            }
            log.info("[----SQL----][select][执行结果:{}][ SQL: {} ]", result, executeSql);
        }
        return rs;
    }
}

View File

@@ -116,21 +116,20 @@ public class LogAspect {
}
}
}catch (Exception ex){
StringBuffer errorInfo = new StringBuffer();
errorInfo.append(ex.getMessage()).append("\n");
StringBuffer errorStack = new StringBuffer();
String errorMsg = ex.getMessage();
int x = 0;
StackTraceElement[] stackTrace = ex.getStackTrace();
if (stackTrace!=null && stackTrace.length>0){
errorInfo.append("---堆栈信息:");
for (StackTraceElement stack : stackTrace) {
x++;
errorInfo.append(stack.toString()).append("\n");
errorStack.append(stack.toString()).append(" | ");
if (x>10){
break;
}
}
}
log.error("[--requestError--][请求接口:{}][请求参数:{}]异常信息 :{}", url,params, errorInfo.toString());
log.error("[-requestError-][请求接口:{}]【异常信息:{}】[请求参数:{}][异常堆栈:{}]", url,errorMsg,params, errorStack.toString());
Log log = new Log("ERROR", System.currentTimeMillis() - comming);
log.setExceptionDetail(ThrowableUtil.getStackTrace(ex).getBytes());
logService.save(getUsername(), StringUtils.getBrowser(request), StringUtils.getIp(request), (ProceedingJoinPoint) joinPoint, log);

View File

@@ -23,10 +23,10 @@ public class EsLogController {
private final EsLogService esLogService;
@GetMapping("/labels")
@GetMapping("/labels/{type}")
@ApiOperation("获取标签")
public ResponseEntity<Object> labelsValues() {
return new ResponseEntity<>(esLogService.getLabelsValues(), HttpStatus.OK);
public ResponseEntity<Object> labelsValues(@PathVariable String type) {
return new ResponseEntity<>(esLogService.getLabelsValues(type), HttpStatus.OK);
}
@PostMapping("/query")

View File

@@ -16,7 +16,7 @@ public interface EsLogService {
* 获取labels和values树
* @return
*/
JSONArray getLabelsValues();
JSONArray getLabelsValues(String type);
/**
* 日志查询

View File

@@ -27,6 +27,10 @@ public class LogQuery {
* 日志级别
*/
private String logLevel;
/**
* 系统标签
*/
private String system;
/**
* 是否只查询Http相关请求
*/

View File

@@ -8,9 +8,13 @@ import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import lombok.RequiredArgsConstructor;
import org.apache.commons.lang3.StringUtils;
import org.apache.poi.ss.formula.functions.T;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.index.query.BoolQueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.script.Script;
import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.Aggregations;
import org.elasticsearch.search.aggregations.bucket.terms.Terms;
import org.nl.common.domain.query.PageQuery;
import org.nl.common.enums.LevelEnum;
import org.nl.modules.logging.repository.EsLogRepository;
@@ -21,12 +25,19 @@ import org.nl.wms.ext.acs.service.impl.AcsToWmsServiceImpl;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Sort;
import org.springframework.data.elasticsearch.core.ElasticsearchRestTemplate;
import org.springframework.data.elasticsearch.core.aggregation.impl.AggregatedPageImpl;
import org.springframework.data.elasticsearch.core.query.FetchSourceFilter;
import org.springframework.data.elasticsearch.core.query.NativeSearchQueryBuilder;
import org.springframework.stereotype.Service;
import org.springframework.util.CollectionUtils;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.function.Consumer;
import java.util.function.Function;
/**
* @author ldjun
@@ -40,6 +51,8 @@ public class EsLogServiceImpl implements EsLogService {
private final EsLogRepository esLogRepository;
private final ElasticsearchRestTemplate elasticsearchRestTemplate;
@Override
public Page query(LogQuery logQuery){
Page<T> page = new Page<>();
@@ -60,6 +73,9 @@ public class EsLogServiceImpl implements EsLogService {
if (StringUtils.isNotEmpty(logQuery.getLogLevel())){
query.must().add(QueryBuilders.matchQuery("logLevel", LevelEnum.checkLevel(logQuery.getLogLevel())));
}
if (StringUtils.isNotEmpty(logQuery.getSystem())){
query.must().add(QueryBuilders.matchQuery("system", logQuery.getSystem()));
}
if (logQuery.getIsRequest()){
query.must().add(QueryBuilders.existsQuery("requestMethod"));
}
@@ -86,13 +102,23 @@ public class EsLogServiceImpl implements EsLogService {
@Override
public JSONArray getLabelsValues() {
public JSONArray getLabelsValues(String type) {
JSONArray result = new JSONArray();
for (LevelEnum value : LevelEnum.values()) {
JSONObject level = new JSONObject();
level.put("label", value.name());
level.put("value", value.name());
result.add(level);
FetchSourceFilter fetchSourceFilter = new FetchSourceFilter(new String[]{type}, null);
NativeSearchQueryBuilder queryBuilder = new NativeSearchQueryBuilder();
queryBuilder.withCollapseField(type+".keyword");
queryBuilder.withSourceFilter(fetchSourceFilter);
queryBuilder.addAggregation(AggregationBuilders.terms(type).field(type+".keyword").size(100));
Aggregations agg = elasticsearchRestTemplate.query(queryBuilder.build(), SearchResponse::getAggregations);
Terms terms = agg.get(type);
List<? extends Terms.Bucket> buckets = terms.getBuckets();
if (!CollectionUtils.isEmpty(buckets)){
buckets.stream().map(Terms.Bucket::getKeyAsString).forEach(v-> {
JSONObject item = new JSONObject();
item.put("label", v);
item.put("value", v);
result.add(item);
});
}
return result;
}

View File

@@ -98,7 +98,7 @@
IF 输入.flag = "2"
QUERY
SELECT
SELECT DISTINCT
dtl.check_code,
(
CASE

View File

@@ -17,6 +17,8 @@
输入.pcsn TYPEAS s_string
输入.struct_id TYPEAS s_string
输入.iostorinv_id TYPEAS s_string
输入.width TYPEAS s_string
输入.thickness TYPEAS s_string
[临时表]
--这边列出来的临时表就会在运行期动态创建
@@ -121,4 +123,55 @@
ENDSELECT
ENDQUERY
ENDIF
ENDIF
--flag=5: distinct width/thickness pairs of the sub-packages linked (via box_no)
--to the distribution rows of the given outbound/inbound inventory order.
IF 输入.flag = "5"
QUERY
SELECT
sub.width,
sub.thickness
FROM
ST_IVT_IOStorInvDis dis
LEFT JOIN pdm_bi_subpackagerelation sub ON sub.package_box_sn = dis.box_no
WHERE
1 = 1
OPTION 输入.iostorinv_id <> ""
dis.iostorinv_id = 输入.iostorinv_id
ENDOPTION
group by sub.width,sub.thickness
ENDSELECT
ENDQUERY
ENDIF
--flag=6: per-box material name and total net weight for the given order,
--optionally filtered by width and thickness.
IF 输入.flag = "6"
QUERY
SELECT
sub.package_box_sn AS box_no,
MAX( sub.product_description ) AS material_name,
SUM( sub.net_weight ) AS qty
FROM
ST_IVT_IOStorInvDis dis
LEFT JOIN pdm_bi_subpackagerelation sub ON sub.container_name = dis.pcsn
WHERE
1 = 1
OPTION 输入.iostorinv_id <> ""
dis.iostorinv_id = 输入.iostorinv_id
ENDOPTION
OPTION 输入.width <> ""
sub.width = 输入.width
ENDOPTION
OPTION 输入.thickness <> ""
--fix: compare against thickness, not width (copy-paste defect)
sub.thickness = 输入.thickness
ENDOPTION
group by sub.package_box_sn
ENDSELECT
ENDQUERY
ENDIF

View File

@@ -0,0 +1 @@
druid.filters.DruidFilter=org.nl.config.DruidFilter

View File

@@ -23,10 +23,10 @@ spring:
db-type: com.alibaba.druid.pool.DruidDataSource
driverClassName: net.sf.log4jdbc.sql.jdbcapi.DriverSpy
# url: jdbc:log4jdbc:mysql://${DB_HOST:10.1.3.91}:${DB_PORT:3306}/${DB_NAME:lms_test2}?serverTimezone=Asia/Shanghai&characterEncoding=utf8&useSSL=false&useOldAliasMetadataBehavior=true
url: jdbc:log4jdbc:mysql://${DB_HOST:127.0.0.1}:${DB_PORT:3306}/${DB_NAME:hl_tb_lms}?serverTimezone=Asia/Shanghai&characterEncoding=utf8&useSSL=false&useOldAliasMetadataBehavior=true
url: jdbc:log4jdbc:mysql://${DB_HOST:127.0.0.1}:${DB_PORT:3306}/${DB_NAME:lms}?serverTimezone=Asia/Shanghai&characterEncoding=utf8&useSSL=false&useOldAliasMetadataBehavior=true
# username: ${DB_USER:root}
username: ${DB_USER:root}
password: ${DB_PWD:Root.123456}
password: ${DB_PWD:942464Yy}
# 初始连接数
initial-size: 5
# 最小连接数
@@ -58,16 +58,17 @@ spring:
enabled: true
url-pattern: /druid/*
reset-enable: false
filter:
stat:
enabled: true
# 记录慢SQL
log-slow-sql: true
slow-sql-millis: 1000
merge-sql: true
wall:
config:
multi-statement-allow: true
filters:
DruidFilter,stat
# stat:
# enabled: true
# # 记录慢SQL
# log-slow-sql: true
# slow-sql-millis: 1000
# merge-sql: true
# wall:
# config:
# multi-statement-allow: true
redis:
#数据库索引
database: ${REDIS_DB:15}

View File

@@ -59,16 +59,17 @@ spring:
enabled: true
url-pattern: /druid/*
reset-enable: false
filter:
stat:
enabled: true
# 记录慢SQL
log-slow-sql: true
slow-sql-millis: 1000
merge-sql: true
wall:
config:
multi-statement-allow: true
filters:
DruidFilter,stat
# stat:
# enabled: true
# # 记录慢SQL
# log-slow-sql: true
# slow-sql-millis: 1000
# merge-sql: true
# wall:
# config:
# multi-statement-allow: true
redis:
#数据库索引
database: ${REDIS_DB:14}