rev:替换lucene

This commit is contained in:
2024-01-02 17:21:53 +08:00
parent 93bf10bd79
commit d967103c17
37 changed files with 274 additions and 993 deletions

View File

View File

@@ -423,6 +423,11 @@
<!-- </dependency>-->
<!-- Lucence核心包 -->
<dependency>
<groupId>com.yomahub</groupId>
<artifactId>tlog-all-spring-boot-starter</artifactId>
<version>1.5.0</version>
</dependency>
<dependency>
<groupId>org.apache.lucene</groupId>
<artifactId>lucene-core</artifactId>

View File

@@ -261,7 +261,7 @@ public class HailiangEngravingInDeviceDriver extends AbstractOpcDeviceDriver imp
if (mode != last_mode) {
feedDeviceStatusFlag = false;
logServer.deviceExecuteLog(this.device_code, "", "", SignalEnum.getNameByCode("mode") + SignalEnum.getDescByCode("mode") +""+ last_mode + "->" + mode);
lucene.deviceExecuteLog(new LuceneLogDto(this.device_code, ItemProtocol.item_mode, mode, last_mode));
//lucene.deviceExecuteLog(new LuceneLogDto(this.device_code, ItemProtocol.item_mode, mode, last_mode));
}
if (gd_move != last_gd_move) {
logServer.deviceExecuteLog(this.device_code, "", "", SignalEnum.getNameByCode("move") + SignalEnum.getDescByCode("move") +""+ last_gd_move + "->" + gd_move);

View File

@@ -3,10 +3,12 @@ package org.nl.acs.opc;
import cn.hutool.core.date.DateUtil;
import cn.hutool.core.util.ObjectUtil;
import cn.hutool.core.util.StrUtil;
import com.alibaba.fastjson.JSON;
import lombok.extern.slf4j.Slf4j;
import org.nl.acs.udw.UnifiedDataAccessor;
import org.nl.acs.udw.UnifiedDataAccessorFactory;
import org.nl.acs.udw.UnifiedDataAppService;
import org.nl.modules.lucene.enums.LogTypeEnum;
import org.nl.modules.lucene.service.LuceneExecuteLogService;
import org.nl.modules.lucene.service.dto.LuceneLogDto;
import org.nl.modules.wql.util.SpringContextHolder;
@@ -358,6 +360,7 @@ public class DeviceOpcProtocolRunable implements Runnable, DataCallback, ServerC
}
@Override
public void connectionStateChanged(boolean connected) {
if (!connected) {
this.server = null;
@@ -380,6 +383,7 @@ public class DeviceOpcProtocolRunable implements Runnable, DataCallback, ServerC
}
@Override
public void changed(Item item, ItemState itemState) {
String itemId = item.getId();
@@ -410,8 +414,9 @@ public class DeviceOpcProtocolRunable implements Runnable, DataCallback, ServerC
}
private void logItemChanged(String itemId, UnifiedDataAccessor accessor_value, Object value, OpcItemDto itemDto) {
LuceneExecuteLogService lucene = SpringContextHolder.getBean(LuceneExecuteLogService.class);
Object his = accessor_value.getValue(itemId);
itemDto.setHis_item_value(his);
itemDto.setItem_value(value);
List<String> relate_items = itemDto.getRelate_items();
if (relate_items != null && !relate_items.isEmpty()) {
StringBuilder sb = new StringBuilder();
@@ -422,14 +427,22 @@ public class DeviceOpcProtocolRunable implements Runnable, DataCallback, ServerC
Object obj = accessor_value.getValue(relate);
sb.append("key:" + relate + "value:" + obj + ";");
}
// log.warn("设备:{}信号{}变更从{}->{};信号快照:{}", new Object[]{itemDto.getDevice_code(), itemId, his, value, sb});
if (!itemDto.getItem_code().endsWith("heartbeat")) {
lucene.deviceExecuteLog(new LuceneLogDto(itemDto.getDevice_code(), itemDto.getItem_code().substring(itemDto.getItem_code().lastIndexOf(".")+1), String.valueOf(his), String.valueOf(value)));
if (!itemDto.getItem_code().endsWith("heartbeat") && !itemDto.getItem_code().endsWith("time")) {
// 存在上次点位值为null情况 则不记录日志
LuceneLogDto luceneLogDto = new LuceneLogDto(itemDto.getOpc_server_code(), itemDto.getOpc_plc_code(), itemDto.getDevice_code(), itemDto.getItem_code().substring(itemDto.getItem_code().lastIndexOf(".") + 1),
String.valueOf(itemDto.getHis_item_value()), String.valueOf(itemDto.getItem_value()));
luceneLogDto.setLogType(LogTypeEnum.DEVICE_LOG.getDesc());
log.info("{}", JSON.toJSONString(luceneLogDto));
}
} else {
// log.warn("设备:{}信号{}变更从{}->{};信号快照:{}", new Object[]{itemDto.getDevice_code(), itemId, his, value});
if (!itemDto.getItem_code().endsWith("heartbeat")) {
lucene.deviceExecuteLog(new LuceneLogDto(itemDto.getDevice_code(), itemDto.getItem_code().substring(itemDto.getItem_code().lastIndexOf(".")+1), String.valueOf(his), String.valueOf(value)));
// log.info("设备:{}信号{}变更从{}->{};信号快照:{}", new Object[]{itemDto.getDevice_code(), itemId, his, value});
// 存在上次点位值为null情况 则不记录日志
if (!itemDto.getItem_code().endsWith("heartbeat") && !itemDto.getItem_code().endsWith("time")) {
LuceneLogDto luceneLogDto = new LuceneLogDto(itemDto.getOpc_server_code(), itemDto.getOpc_plc_code(), itemDto.getDevice_code(), itemDto.getItem_code().substring(itemDto.getItem_code().lastIndexOf(".") + 1),
String.valueOf(itemDto.getHis_item_value()), String.valueOf(itemDto.getItem_value()));
luceneLogDto.setLogType(LogTypeEnum.DEVICE_LOG.getDesc());
log.info("{}", JSON.toJSONString(luceneLogDto));
}
}
}

View File

@@ -11,6 +11,7 @@ public class OpcItemDto {
private String opc_plc_code;
private String item_code;
private Object item_value;
private Object his_item_value;
private Boolean need_log = Boolean.valueOf(false);
private List<String> relate_items = new ArrayList();
@@ -86,4 +87,11 @@ public class OpcItemDto {
this.relate_items = relate_items;
}
public Object getHis_item_value() {
return his_item_value;
}
public void setHis_item_value(Object his_item_value) {
this.his_item_value = his_item_value;
}
}

View File

@@ -1,49 +0,0 @@
package org.nl.modules.lucene.common;
import ch.qos.logback.classic.LoggerContext;
import ch.qos.logback.classic.encoder.PatternLayoutEncoder;
import ch.qos.logback.core.rolling.RollingFileAppender;
import ch.qos.logback.core.rolling.TimeBasedRollingPolicy;
import cn.hutool.core.date.DateTime;
import cn.hutool.core.date.DateUtil;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.nio.charset.Charset;
/**
 * Builds logback {@link RollingFileAppender} instances at runtime, one per
 * dynamic name, so each logical log stream rolls into its own dated file.
 */
public class DynamicLogAppender {
    /**
     * Creates and starts a rolling file appender for the given dynamic name.
     *
     * @param oldLogPath  base directory holding all dynamic log folders
     * @param dynamicName appender name, also used as the sub-folder name
     * @return a started appender writing to {@code <oldLogPath>/<dynamicName>/<yyyy-MM-dd>.log}
     */
    public RollingFileAppender getAppender(String oldLogPath, String dynamicName) {
        LoggerContext context = (LoggerContext) LoggerFactory.getILoggerFactory();
        RollingFileAppender appender = new RollingFileAppender();
        // The appender name doubles as the cache key used by callers.
        appender.setName(dynamicName);
        appender.setContext(context);
        // Fix: compose the path with File instead of a hard-coded "\\" so the
        // appender also works on non-Windows hosts.
        File logDir = new File(oldLogPath, dynamicName);
        String fileName = DateUtil.format(new DateTime(), "yyyy-MM-dd") + ".log";
        appender.setFile(new File(logDir, fileName).getAbsolutePath());
        // Line layout for the log output.
        PatternLayoutEncoder encoder = new PatternLayoutEncoder();
        encoder.setContext(context);
        encoder.setPattern("%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n");
        encoder.setCharset(Charset.forName("UTF-8"));
        encoder.start();
        // Daily time-based rollover policy.
        TimeBasedRollingPolicy policy = new TimeBasedRollingPolicy();
        // Fix: the old pattern concatenated oldLogPath + dynamicName with no
        // separator, producing e.g. "logs/acsdevice.%d{...}.log" fused into the
        // parent path; build it via File so a separator is always present.
        policy.setFileNamePattern(new File(oldLogPath, dynamicName + ".%d{yyyy-MM-dd}.log").getPath());
        policy.setParent(appender); // the policy must know its owning appender
        policy.setContext(context);
        policy.start();
        // Wire the two child components into the appender and start it.
        appender.setRollingPolicy(policy);
        appender.setEncoder(encoder);
        appender.start();
        return appender;
    }
}

View File

@@ -1,35 +0,0 @@
package org.nl.modules.lucene.common;
import ch.qos.logback.classic.Logger;
import ch.qos.logback.classic.LoggerContext;
import ch.qos.logback.core.rolling.RollingFileAppender;
import org.slf4j.LoggerFactory;
import java.util.HashMap;
import java.util.Map;
/**
 * Caches one logback {@link Logger} per dynamic name, building the logger
 * (and its rolling file appender) lazily on first request.
 */
public class DynamicLogger {
    // Base directory under which every dynamic log file is written.
    String logPath;

    public DynamicLogger(String logPath) {
        this.logPath = logPath;
    }

    // Shared logger cache: built once, then served from memory.
    private static Map<String, Logger> container = new HashMap<>();

    /**
     * Returns the cached logger for {@code dynamicName}, creating it on demand.
     * Fix: the static HashMap was read and written without synchronization, so
     * two threads requesting the same name could race and attach duplicate
     * appenders; guard all access with the map's monitor.
     */
    public Logger getLogger(String dynamicName) {
        synchronized (container) {
            Logger logger = container.get(dynamicName);
            if (logger == null) {
                logger = build(dynamicName);
                container.put(dynamicName, logger);
            }
            return logger;
        }
    }

    // Builds a Logger and wires the dynamic rolling appender onto it.
    private Logger build(String dynamicName) {
        RollingFileAppender runTaskAppender = new DynamicLogAppender().getAppender(this.logPath, dynamicName);
        LoggerContext context = (LoggerContext) LoggerFactory.getILoggerFactory();
        Logger logger = context.getLogger(dynamicName);
        logger.addAppender(runTaskAppender);
        return logger;
    }
}

View File

@@ -1,178 +0,0 @@
package org.nl.modules.lucene.common;
import com.alibaba.fastjson.JSONObject;
import org.apache.commons.io.FileUtils;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.TextField;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
import org.springframework.beans.factory.annotation.Value;
import org.wltea.analyzer.lucene.IKAnalyzer;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.nio.file.Paths;
import java.util.Set;
/**
 * Lucene indexer: wraps an {@link IndexWriter} and indexes plain files, flat
 * JSON documents and individual log messages using the IK (Chinese) analyzer.
 */
public class Indexer {
    /** The single write-index instance owned by this indexer. */
    private IndexWriter writer;

    public IndexWriter getWriter() {
        return writer;
    }

    /**
     * Opens an {@link IndexWriter} over the given index directory.
     *
     * @param indexDir file-system path of the lucene index
     * @throws Exception if the directory cannot be opened
     */
    public Indexer(String indexDir) throws Exception {
        Directory dir = FSDirectory.open(Paths.get(indexDir));
        // IK analyzer for Chinese tokenisation; the standard analyzer would
        // strip whitespace and stop words such as "is", "a", "the".
        Analyzer analyzer = new IKAnalyzer();
        IndexWriterConfig config = new IndexWriterConfig(analyzer);
        writer = new IndexWriter(dir, config);
    }

    /**
     * Indexes every file directly under {@code dataDir}.
     *
     * @param dataDir directory whose files are indexed
     * @return the number of documents in the index
     * @throws Exception on I/O or indexing failure
     */
    public int indexAll(String dataDir) throws Exception {
        File[] files = new File(dataDir).listFiles();
        if (null != files) {
            for (File file : files) {
                indexFile(file);
            }
        }
        // Fix: numRamDocs() only counts documents still buffered in RAM and
        // under-reports after flushes; numDocs() is the real document count.
        return writer.numDocs();
    }

    /**
     * Indexes a single file.
     */
    private void indexFile(File file) throws Exception {
        System.out.println("索引文件的路径:" + file.getCanonicalPath());
        Document doc = getDocument(file);
        // Fix: removed the dead statement that called
        // FileUtils.readFileToString(null, "UTF-8") - it threw a
        // NullPointerException on every invocation and the Field it built was
        // never added to the document anyway.
        writer.addDocument(doc);
    }

    /**
     * Builds the document for one file; each field is comparable to a column
     * of a database row.
     */
    private Document getDocument(File file) throws Exception {
        Document doc = new Document();
        // File content. A TextField over a Reader is tokenized but not stored;
        // lucene consumes (and closes) the reader when the doc is indexed.
        doc.add(new TextField("contents", new FileReader(file)));
        // File name, stored so it can be returned by searches.
        doc.add(new TextField("fileName", file.getName(), Field.Store.YES));
        // Full canonical path, stored as well.
        doc.add(new TextField("fullPath", file.getCanonicalPath(), Field.Store.YES));
        return doc;
    }

    /**
     * Converts a flat JSON object into a document with one stored text field
     * per JSON key.
     */
    public Document json2Doc(String strDoc) {
        Document doc = new Document();
        JSONObject jsonDoc = JSONObject.parseObject(strDoc);
        Set<String> keys = jsonDoc.keySet();
        for (String key : keys) {
            doc.add(new TextField(key, jsonDoc.getString(key), Field.Store.YES));
        }
        return doc;
    }

    /**
     * Appends a single log message to the default index directory.
     * Kept for backward compatibility; delegates to the parameterized overload.
     */
    public void addLogIndex(String msg) throws IOException {
        addLogIndex(msg, "D:\\lucene\\index");
    }

    /**
     * Appends a single log message to the index at {@code indexDir}.
     * Generalization: the original hard-coded the index path.
     */
    public void addLogIndex(String msg, String indexDir) throws IOException {
        // Step 1: open the index directory.
        Directory directory = FSDirectory.open(new File(indexDir).toPath());
        long startTime = System.currentTimeMillis();
        // Fix: try-with-resources - the old version leaked the writer when
        // addDocument/commit threw before close().
        try (IndexWriter indexWriter = new IndexWriter(directory, new IndexWriterConfig(new IKAnalyzer(false)))) {
            Document document = new Document();
            document.add(new TextField("fieldContent", msg, Field.Store.YES));
            indexWriter.addDocument(document);
            long endTime = System.currentTimeMillis();
            System.out.println("建立索引"+ "共耗时" + (endTime-startTime) + "毫秒");
            indexWriter.commit();
        }
        System.out.println("建立索引成功-----关闭资源");
    }

    // NOTE(review): this class is created with `new`, not managed by Spring,
    // so @Value is never resolved here and logUrl stays null - confirm before
    // relying on it.
    @Value("${logging.file.path}")
    private String logUrl;

    /**
     * Developer utility: rebuilds the index from the files under D:\testlog.
     */
    public static void main(String[] args) throws IOException {
        Directory directory = FSDirectory.open(new File("D:\\lucene\\index").toPath());
        long startTime = System.currentTimeMillis();
        // Fix: close the writer even when indexing throws.
        try (IndexWriter indexWriter = new IndexWriter(directory, new IndexWriterConfig(new IKAnalyzer(false)))) {
            indexWriter.deleteAll(); // wipe the existing index first
            File file = new File("D:\\testlog");
            File[] files = file.listFiles();
            if (files != null) { // fix: avoid NPE when the directory is missing
                for (File item : files) {
                    // Fix: the old loop never closed its readers.
                    try (BufferedReader bufferedReader = new BufferedReader(new FileReader(item))) {
                        String strLine;
                        while (null != (strLine = bufferedReader.readLine())) {
                            Document document = new Document();
                            document.add(new TextField("fieldContent", strLine, Field.Store.YES));
                            indexWriter.addDocument(document);
                        }
                    }
                }
            }
            long endTime = System.currentTimeMillis();
            System.out.println("建立索引"+ "共耗时" + (endTime-startTime) + "毫秒");
            indexWriter.commit();
        }
        System.out.println("建立索引成功-----关闭资源");
    }
}

View File

@@ -1,66 +0,0 @@
//package org.nl.modules.lucene.common;
//
//import cn.hutool.core.date.DateUtil;
//import org.apache.lucene.index.CorruptIndexException;
//import org.apache.lucene.index.IndexWriter;
//import org.apache.lucene.index.IndexWriterConfig;
//import org.apache.lucene.store.Directory;
//import org.apache.lucene.store.FSDirectory;
//import org.nl.modules.lucene.config.UrlConfig;
//import org.wltea.analyzer.lucene.IKAnalyzer;
//
//import java.io.File;
//import java.io.IOException;
//import java.text.ParseException;
//import java.text.SimpleDateFormat;
//import java.util.Date;
//
//public class LuceneIndexWriter {
// private static volatile IndexWriter indexWriter;
//
// static {
// try {
// Directory directory = FSDirectory.open(new File(UrlConfig.luceneUrl).toPath());
// IndexWriterConfig config = new IndexWriterConfig(new IKAnalyzer());
// indexWriter = new IndexWriter(directory, config);
// } catch (Exception e) {
// e.printStackTrace();
// }
// /**当当前线程结束时自动关闭IndexWriter使用Runtime对象*/
//// Runtime.getRuntime().addShutdownHook(new Thread(){
//// @Override
//// public void run() {
//// try {
//// closeIndexWriter();
//// } catch (Exception e) {
//// e.printStackTrace();
//// }
//// }
//// });
// }
// /**在线程结束时自动关闭IndexWriter*/
// public static IndexWriter getIndexWriter() {
// return indexWriter;
// }
// /**关闭IndexWriter
// * @throws IOException
// * @throws CorruptIndexException */
// public static void closeIndexWriter() throws Exception {
// if(indexWriter != null) {
//// indexWriter.close();
// }
// }
//
// public static void main(String[] args) throws IOException {
// indexWriter.deleteAll();
// }
//
// public static String getDate(String timeString) throws ParseException {
// SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSX");//时间格式
// Date date = sdf.parse(timeString);
// timeString = DateUtil.format(date, "yyyy-MM-dd HH:mm:ss.SSS");//格式化后的时间
// return timeString;
// }
//
//
//}

View File

@@ -1,85 +0,0 @@
package org.nl.modules.lucene.common;
import cn.hutool.core.date.DateUtil;
import cn.hutool.core.util.ObjectUtil;
import com.alibaba.fastjson.JSON;
import lombok.extern.slf4j.Slf4j;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
import org.nl.acs.AcsConfig;
import org.nl.acs.auto.run.AbstractAutoRunnable;
import org.nl.modules.system.service.ParamService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import org.wltea.analyzer.lucene.IKAnalyzer;
import java.io.File;
import java.io.IOException;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;
@Slf4j
@Component
public class LuceneServiceAutoRun extends AbstractAutoRunnable {
@Autowired
private ParamService paramService;
public LuceneServiceAutoRun() {
}
@Override
public String getCode() {
return LuceneServiceAutoRun.class.getSimpleName();
}
@Override
public String getName() {
return "Lucene索引服务";
}
private static volatile IndexWriter indexWriter;
@Override
public void autoRun() throws IOException {
try {
String luceneUrl = paramService.findByCode(AcsConfig.LUCENEURL).getValue();
Directory directory = FSDirectory.open(new File(luceneUrl).toPath());
IndexWriterConfig config = new IndexWriterConfig(new IKAnalyzer());
indexWriter = new IndexWriter(directory, config);
} catch (Exception e) {
log.error("Lucene索引服务出现异常:{}", JSON.toJSONString(e));
if (ObjectUtil.isNotEmpty(indexWriter)) {
indexWriter.close();
}
}
}
/**在线程结束时会自动关闭IndexWriter*/
public static IndexWriter getIndexWriter() {
return indexWriter;
}
@Override
public void stop() {
super.after();
try {
indexWriter.close();
} catch (Exception e) {
e.printStackTrace();
}
}
public static void main(String[] args) throws IOException {
indexWriter.deleteAll();
}
public static String getDate(String timeString) throws ParseException {
SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSX");//时间格式
Date date = sdf.parse(timeString);
timeString = DateUtil.format(date, "yyyy-MM-dd HH:mm:ss.SSS");//格式化后的时间
return timeString;
}
}

View File

@@ -1,140 +0,0 @@
package org.nl.modules.lucene.common;
import cn.hutool.core.date.DateTime;
import cn.hutool.core.date.DateUtil;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import lombok.extern.slf4j.Slf4j;
import org.apache.lucene.document.Document;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.*;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
import org.apache.lucene.util.BytesRef;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.List;
import java.util.Map;
/**
 * Lucene query helper: runs a paged, time-sorted boolean search over the log
 * index and returns the matching log entries as JSON.
 */
@Slf4j
public class Searcher {
    /**
     * Searches the index at {@code indexDir}.
     *
     * @param indexDir  index directory on disk
     * @param ext       unused by this implementation (kept for compatibility)
     * @param whereJson filter map: paging ("page", "size"), optional time range
     *                  ("begin_time"/"end_time"), exact-match fields
     *                  (device_code, method, status_code) and wildcard filters
     *                  (requestparam, responseparam, blurry)
     * @return JSON object with "content" (current page of hits) and
     *         "totalElements" (total hit count)
     * @throws Exception on index access or date-parse failure
     */
    public static Map<String, Object> search(String indexDir, String ext,Map whereJson) throws Exception {
        // Open the directory where the index lives and build the searcher.
        Directory dir = FSDirectory.open(Paths.get(indexDir));
        IndexReader reader = DirectoryReader.open(dir);
        IndexSearcher searcher = new IndexSearcher(reader);
        // The standard analyzer would drop whitespace / stop words; both
        // analyzer-based query parsing paths are disabled here.
        // Analyzer analyzer = new StandardAnalyzer();
        // Analyzer analyzer = new IKAnalyzer(false);
        // QueryParser queryParser = new QueryParser("fieldContent", analyzer);
        // Record the query start time.
        long startTime = System.currentTimeMillis();
        // Lucene has no built-in paging, so paging is done manually: fetch the
        // top `end` hits sorted by time, then slice [start, end).
        int pageSize = Integer.parseInt(whereJson.get("size").toString());// page size
        int pageNum = Integer.parseInt(whereJson.get("page").toString());// current page number
        int start = pageNum * pageSize;// first row of the current page
        int end = start + pageSize;// one past the last row of the page (exclusive)
        TopDocs docs = null;
        BooleanQuery.Builder booleanQueryBuilder = new BooleanQuery.Builder();
        // Time-range filter: defaults to [1970-01-01, now] when bounds are
        // absent; otherwise bounds are normalized via getDate().
        String startDate = (String) whereJson.get("begin_time");
        String endDate = (String) whereJson.get("end_time");
        Calendar calendar=Calendar.getInstance();
        calendar.set(1970, 0, 1);
        if (startDate == null){
            startDate = DateUtil.format(calendar.getTime(),"yyyy-MM-dd HH:mm:ss.SSS");
        }else{
            startDate = LuceneServiceAutoRun.getDate(startDate);
        }
        if (endDate == null){
            endDate = DateUtil.format(new DateTime(),"yyyy-MM-dd HH:mm:ss.SSS");
        } else {
            endDate = LuceneServiceAutoRun.getDate(endDate);
        }
        // Inclusive range on the stored "logTime" string field (the format is
        // zero-padded, so lexicographic order matches chronological order).
        TermRangeQuery termRangeQuery = new TermRangeQuery("logTime", new BytesRef(startDate), new BytesRef(endDate), true, true);
        booleanQueryBuilder.add(termRangeQuery,BooleanClause.Occur.MUST);
        // Exact-match filters (all MUST clauses, i.e. ANDed together).
        if (whereJson.get("device_code") != null){
            Query termQuery = new TermQuery(new Term("device_code", (String) whereJson.get("device_code")));
            booleanQueryBuilder.add(termQuery,BooleanClause.Occur.MUST);
        }
        if (whereJson.get("method") != null){
            Query termQuery = new TermQuery(new Term("method", (String) whereJson.get("method")));
            booleanQueryBuilder.add(termQuery,BooleanClause.Occur.MUST);
        }
        if (whereJson.get("status_code") != null){
            Query termQuery = new TermQuery(new Term("status_code", (String) whereJson.get("status_code")));
            booleanQueryBuilder.add(termQuery,BooleanClause.Occur.MUST);
        }
        // Substring filters via leading+trailing wildcard queries.
        if (whereJson.get("requestparam") != null){
            WildcardQuery query = new WildcardQuery(new Term("requestparam", "*"+(String) whereJson.get("requestparam")+"*"));
            booleanQueryBuilder.add(query,BooleanClause.Occur.MUST);
        }
        if (whereJson.get("responseparam") != null){
            WildcardQuery query = new WildcardQuery(new Term("responseparam", "*"+(String) whereJson.get("responseparam")+"*"));
            booleanQueryBuilder.add(query,BooleanClause.Occur.MUST);
        }
        if (whereJson.get("blurry") != null) {
            WildcardQuery query = new WildcardQuery(new Term("fieldContent", "*"+(String) whereJson.get("blurry")+"*"));
            booleanQueryBuilder.add(query, BooleanClause.Occur.MUST);
        }
        // Sort: SortField(name, type, reverse) - reverse=true gives newest
        // first on the NumericDocValues "logTime" field.
        Sort sort = new Sort(new SortField("logTime",SortField.Type.LONG,true));
        docs = searcher.search(booleanQueryBuilder.build(), end, sort);
        // docs = searcher.search(booleanQueryBuilder.build(), end, Sort.INDEXORDER);
        // Record the query end time.
        long endTime = System.currentTimeMillis();
        log.info("匹配{}共耗时{}毫秒",booleanQueryBuilder.build(),(endTime-startTime));
        log.info("查询到{}条日志文件", docs.totalHits.value);
        List<String> list = new ArrayList<>();
        ScoreDoc[] scoreDocs = docs.scoreDocs;
        // Clamp the page end to the actual hit count; if start >= end the page
        // loop below simply yields no rows.
        if (end > docs.totalHits.value) end = (int) docs.totalHits.value;
        JSONArray array = new JSONArray();
        for (int i = start; i < end; i++) {
            ScoreDoc scoreDoc = scoreDocs[i];
            Document doc = reader.document(scoreDoc.doc);
            JSONObject object = new JSONObject();
            object.put("content",doc.get("fieldContent"));
            object.put("device_code",doc.get("device_code"));
            object.put("logTime",doc.get("logTime"));
            object.put("method",doc.get("method"));
            object.put("status_code",doc.get("status_code"));
            object.put("requestparam",doc.get("requestparam"));
            object.put("responseparam",doc.get("responseparam"));
            // Only return rows that actually carry log content.
            if(doc.get("fieldContent") != null) {
                array.add(object);
            }
        }
        for(Object logDto:array){
            log.info(logDto.toString());
        }
        // NOTE(review): the reader is not closed if an earlier statement
        // throws - confirm whether that leak is acceptable here.
        reader.close();
        JSONObject jo = new JSONObject();
        jo.put("content", array);
        jo.put("totalElements", docs.totalHits.value);
        return jo;
    }

    /** Ad-hoc manual test entry point (whereJson=null will NPE in search). */
    public static void main(String[] args) {
        String indexDir = "D:\\lucene\\index";
        // Sample search string.
        String q = "07.832";
        Map whereJson = null;
        try {
            search(indexDir, q,whereJson);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}

View File

@@ -1,21 +0,0 @@
package org.nl.modules.lucene.config;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
/**
 * Initializes static configuration at start-up: copies the configured lucene
 * index path into the {@link UrlConfig} static holder.
 * (The previous javadoc used the {@code @deprecated} tag by mistake; this
 * class is not deprecated.)
 */
@Configuration
public class StaticConfig {
    // Log index directory, injected from the "lucene.index.path" property.
    @Value("${lucene.index.path}")
    private String luceneDir;
    /**
     * Declared as a bean only to force evaluation during context start-up;
     * the int return value is meaningless.
     */
    @Bean
    public int initStatic() {
        UrlConfig.setLuceneUrl(luceneDir);
        return 0;
    }
}

View File

@@ -1,13 +0,0 @@
package org.nl.modules.lucene.config;
/**
 * Global static holder for the lucene index directory. Populated once during
 * application start-up and read by the indexing/search components.
 */
public class UrlConfig {
    // Lucene index path; null until the setter has been called.
    public static String luceneUrl;

    /** Returns the configured lucene index directory (null before init). */
    public static String getLuceneUrl() {
        return UrlConfig.luceneUrl;
    }

    /** Stores the lucene index directory for global access. */
    public static void setLuceneUrl(String luceneUrl) {
        UrlConfig.luceneUrl = luceneUrl;
    }
}

View File

@@ -26,11 +26,6 @@ public class LuceneController {
private final LuceneService luceneService;
@GetMapping("/labels/values")
@ApiOperation("获取标签")
public ResponseEntity<Object> labelsValues() {
return new ResponseEntity<>(luceneService.getLabelsValues(), HttpStatus.OK);
}
@GetMapping("/getAll")

View File

@@ -3,8 +3,6 @@ package org.nl.modules.lucene.service;
import org.nl.modules.lucene.service.dto.LuceneLogDto;
import java.io.IOException;
public interface LuceneExecuteLogService {
/**
* 设备光电变化实时光电信号
@@ -27,7 +25,7 @@ public interface LuceneExecuteLogService {
*
* @param luceneLogDto 日志结果对象
*/
void interfaceExecuteLog(LuceneLogDto luceneLogDto) throws IOException;
void interfaceExecuteLog(LuceneLogDto luceneLogDto);
/**
* 设备执行日志,会保留历史记录

View File

@@ -7,11 +7,6 @@ import java.util.Map;
public interface LuceneService {
/**
* 获取labels和values树
* @return
*/
JSONArray getLabelsValues();
/**
* 获取数据分页

View File

@@ -83,39 +83,24 @@ public class LuceneLogDto {
+ home;
}
public LuceneLogDto(final String device_code, final String item, final String last_signal,
final String signal) {
super();
this.device_code = device_code;
this.content = "信号 ["
+ item + "] 发生变更 "
+ last_signal + " -> "
+ signal;
}
public LuceneLogDto(final String device_code, final String remark) {
super();
this.device_code = device_code;
this.content = "设备 ["
+ device_code
+ "] : "
+ "] - "
+ remark;
}
public LuceneLogDto(final String device_code, final String item, final int last_info,
final int info) {
public LuceneLogDto(final LuceneLogDto dto) {
super();
String info_china = "";
switch(info){
case 0: info_china = "未联机";
case 1: info_china = "已联机";
case 2: break;
default:;
}
this.device_code = device_code;
this.content = "信号 ["
+item + "] 发生变更 "
+ last_info + " -> "
+ info;
this.method = method;
this.content = "设备 ["
+ device_code
+ "] - "
+ remark;
}
}

View File

@@ -1,23 +1,15 @@
package org.nl.modules.lucene.service.impl;
import cn.hutool.core.date.DateTime;
import cn.hutool.core.date.DateUtil;
import cn.hutool.core.util.ObjectUtil;
import com.alibaba.fastjson.JSON;
import lombok.RequiredArgsConstructor;
import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;
import org.apache.lucene.document.*;
import org.apache.lucene.index.IndexWriter;
import org.nl.modules.lucene.common.LuceneServiceAutoRun;
import org.nl.modules.lucene.enums.LogTypeEnum;
import org.nl.modules.lucene.service.LuceneExecuteLogService;
import org.nl.modules.lucene.service.dto.LuceneLogDto;
import org.slf4j.MDC;
import org.springframework.stereotype.Service;
import java.io.IOException;
import java.util.concurrent.atomic.AtomicInteger;
/**
* @author jlm
* @description 服务实现
@@ -27,65 +19,22 @@ import java.util.concurrent.atomic.AtomicInteger;
@RequiredArgsConstructor
@Slf4j
public class LuceneExecuteLogServiceImpl implements LuceneExecuteLogService {
private static AtomicInteger count = new AtomicInteger(0);
@Override
public void deviceItemValue(String device_code, String key, String value) {
String now = DateUtil.now();
}
@SneakyThrows
@Override
public void deviceExecuteLog(LuceneLogDto luceneLogDto) {
luceneLogDto.setLogType(LogTypeEnum.DEVICE_LOG.getDesc());
addIndex(luceneLogDto);
log.info("{}", JSON.toJSONString(luceneLogDto));
}
@Override
public void interfaceExecuteLog(LuceneLogDto luceneLogDto) throws IOException {
public void interfaceExecuteLog(LuceneLogDto luceneLogDto) {
luceneLogDto.setLogType(LogTypeEnum.INTERFACE_LOG.getDesc());
addIndex(luceneLogDto);
}
private void addIndex(LuceneLogDto luceneLogDto) throws IOException {
IndexWriter indexWriter = LuceneServiceAutoRun.getIndexWriter();
//创建一个Document对象
Document document = new Document();
try {
//记录索引开始时间
long startTime = System.currentTimeMillis();
//向document对象中添加域。
if (ObjectUtil.isNotEmpty(luceneLogDto.getDevice_code())) {
document.add(new StringField("device_code", luceneLogDto.getDevice_code(), Field.Store.YES));
// document.add(new TextField("device_code", luceneLogDto.getDevice_code(), Field.Store.YES));
}
if (ObjectUtil.isNotEmpty(luceneLogDto.getContent())) {
document.add(new StringField("fieldContent", luceneLogDto.getContent(), Field.Store.YES));
}
if (ObjectUtil.isNotEmpty(luceneLogDto.getMethod())) {
document.add(new StringField("method", luceneLogDto.getMethod(), Field.Store.YES));
}
if (ObjectUtil.isNotEmpty(luceneLogDto.getStatus_code())) {
document.add(new StringField("status_code", luceneLogDto.getStatus_code(), Field.Store.YES));
}
if (ObjectUtil.isNotEmpty(luceneLogDto.getRequestparam())) {
document.add(new StringField("requestparam", luceneLogDto.getRequestparam(), Field.Store.YES));
}
if (ObjectUtil.isNotEmpty(luceneLogDto.getResponseparam())) {
document.add(new StringField("responseparam", luceneLogDto.getResponseparam(), Field.Store.YES));
}
document.add(new StringField("logType", luceneLogDto.getLogType(), Field.Store.YES));
document.add(new StringField("logTime", DateUtil.format(new DateTime(), "yyyy-MM-dd HH:mm:ss.SSS"), Field.Store.YES));
document.add(new NumericDocValuesField("logTime",System.currentTimeMillis()));//排序
indexWriter.addDocument(document);
//记录索引结束时间
long endTime = System.currentTimeMillis();
log.info("建立索引共耗时{}毫秒", endTime - startTime);
indexWriter.commit();
// MDC.put("DEVICECODE", luceneLogDto.getDevice_code());
} catch (Exception e) {
log.error(e.getMessage(), e);
}
log.info("{}", JSON.toJSONString(luceneLogDto));
}
@Override

View File

@@ -1,18 +1,29 @@
package org.nl.modules.lucene.service.impl;
import cn.hutool.core.util.CharsetUtil;
import cn.hutool.http.HttpUtil;
import com.alibaba.fastjson.JSONArray;
import cn.hutool.core.date.DateTime;
import cn.hutool.core.date.DateUtil;
import com.alibaba.fastjson.JSONObject;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.nl.modules.lucene.common.Searcher;
import org.apache.lucene.document.Document;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.Term;
import org.apache.lucene.queryparser.classic.QueryParser;
import org.apache.lucene.search.*;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
import org.apache.lucene.util.BytesRef;
import org.nl.modules.lucene.service.LuceneService;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.data.domain.Pageable;
import org.springframework.stereotype.Service;
import org.wltea.analyzer.lucene.IKAnalyzer;
import java.util.Map;
import java.nio.file.Paths;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.*;
@Service
@@ -20,61 +31,102 @@ import java.util.Map;
@Slf4j
public class LuceneServiceImpl implements LuceneService {
//日志索引目录
@Value("${lucene.index.path}")
private String luceneUrl;
/**
* 获取labels和values树
*
* @return
*/
@Override
public JSONArray getLabelsValues() {
JSONArray result = new JSONArray();
// 获取所有标签
// String labelString = HttpUtil.get(lokiUrl + "/labels", CharsetUtil.CHARSET_UTF_8);
// JSONObject parse = (JSONObject) JSONObject.parse(labelString);
// JSONArray labels = parse.getJSONArray("data");
// for (int i=0; i<labels.size(); i++) {
// // 获取标签下的所有值
// String valueString = HttpUtil.get(lokiUrl + "/label/" + labels.getString(i) + "/values", CharsetUtil.CHARSET_UTF_8);
// JSONObject parse2 = (JSONObject) JSONObject.parse(valueString);
// JSONArray values = parse2.getJSONArray("data");
// JSONArray children = new JSONArray();
// // 组成树形状态 两级
// for (int j=0; j<values.size(); j++) {
// JSONObject leaf = new JSONObject();
// leaf.put("label", values.getString(j));
// leaf.put("value", values.getString(j));
// children.add(leaf);
// }
//
// JSONObject node = new JSONObject();
// node.put("label", labels.getString(i));
// node.put("value", labels.getString(i));
// node.put("children", children);
// result.add(node);
// }
return result;
}
@Override
public Map<String, Object> getAll(Map whereJson, Pageable page) {
JSONObject jo = new JSONObject();
//获取要查询的路径,也就是索引所在的位置
try {
JSONObject jsonObject = (JSONObject) Searcher.search(luceneUrl, "", whereJson);
JSONArray array = jsonObject.getJSONArray("content");
int totalElements = Integer.parseInt(jsonObject.get("totalElements").toString());
jo.put("content", array);
jo.put("totalElements", totalElements);
FSDirectory directory = FSDirectory.open(Paths.get("C:\\acs\\lucene\\index"));
DirectoryReader open = DirectoryReader.open(directory);
IndexSearcher searcher = new IndexSearcher(open);
// 实际上Lucene本身不支持分页。因此我们需要自己进行逻辑分页。我们要准备分页参数
int pageSize = Integer.parseInt(whereJson.get("size").toString());// 每页条数
int pageNum = Integer.parseInt(whereJson.get("page").toString());// 当前页码
BooleanQuery.Builder booleanQueryBuilder = new BooleanQuery.Builder();
//时间范围查询
String startDate = (String) whereJson.get("begin_time");
String endDate = (String) whereJson.get("end_time");
if (startDate == null){
Calendar calendar=Calendar.getInstance();
calendar.set(1970, 0, 1);
startDate = DateUtil.format(calendar.getTime(),"yyyy-MM-dd HH:mm:ss.SSS");
}else{
startDate = getDate(startDate);
}
if (endDate == null){
endDate = DateUtil.format(new DateTime(),"yyyy-MM-dd HH:mm:ss.SSS");
} else {
endDate = getDate(endDate);
}
TermRangeQuery termRangeQuery = new TermRangeQuery("logTime", new BytesRef(startDate), new BytesRef(endDate), true, true);
booleanQueryBuilder.add(termRangeQuery,BooleanClause.Occur.MUST);
if (whereJson.get("device_code") != null){
Query termQuery = new TermQuery(new Term("device_code", (String) whereJson.get("device_code")));
booleanQueryBuilder.add(termQuery,BooleanClause.Occur.MUST);
}
if (whereJson.get("method") != null){
Query termQuery = new TermQuery(new Term("method", (String) whereJson.get("method")));
booleanQueryBuilder.add(termQuery,BooleanClause.Occur.MUST);
}
if (whereJson.get("status_code") != null){
Query termQuery = new TermQuery(new Term("status_code", (String) whereJson.get("status_code")));
booleanQueryBuilder.add(termQuery,BooleanClause.Occur.MUST);
}
if (whereJson.get("requestparam") != null){
WildcardQuery query = new WildcardQuery(new Term("requestparam", "*"+(String) whereJson.get("requestparam")+"*"));
booleanQueryBuilder.add(query,BooleanClause.Occur.MUST);
}
if (whereJson.get("responseparam") != null){
WildcardQuery query = new WildcardQuery(new Term("responseparam", "*"+(String) whereJson.get("responseparam")+"*"));
booleanQueryBuilder.add(query,BooleanClause.Occur.MUST);
}
if (whereJson.get("blurry") != null) {
WildcardQuery query = new WildcardQuery(new Term("fieldContent", "*"+(String) whereJson.get("blurry")+"*"));
booleanQueryBuilder.add(query, BooleanClause.Occur.MUST);
}
TopFieldCollector collector = TopFieldCollector.create(new Sort(new SortField("logTime", SortField.Type.LONG,true)), 20000, 0);
searcher.search(booleanQueryBuilder.build(), collector);
TopDocs topDocs = collector.topDocs(pageNum*pageSize, pageSize);
int totalSize = collector.getTotalHits();
ScoreDoc[] scoreDocs = topDocs.scoreDocs;
List<JSONObject> list = new ArrayList<>();
for (ScoreDoc scoreDoc : scoreDocs) {
Document doc = open.document(scoreDoc.doc);
JSONObject object = new JSONObject();
object.put("content",doc.get("fieldContent"));
object.put("device_code",doc.get("device_code"));
object.put("logTime",doc.get("logTime"));
object.put("method",doc.get("method"));
object.put("status_code",doc.get("status_code"));
object.put("requestparam",doc.get("requestparam"));
object.put("responseparam",doc.get("responseparam"));
if(doc.get("fieldContent") != null) {
list.add(object);
}
}
open.close();
directory.close();
JSONObject jo = new JSONObject();
jo.put("content", list);
jo.put("totalElements", totalSize);
return jo;
} catch (Exception e) {
log.error("索引查询为空", e);
throw new NullPointerException("索引查询为空");
}
}
return jo;
/**
 * Converts a timestamp string in the pattern
 * {@code yyyy-MM-dd'T'HH:mm:ss.SSSX} (ISO-style with zone offset) into the
 * index's {@code yyyy-MM-dd HH:mm:ss.SSS} form.
 *
 * @param timeString the incoming ISO-style timestamp
 * @return the reformatted timestamp
 * @throws ParseException if {@code timeString} does not match the expected pattern
 */
public static String getDate(String timeString) throws ParseException {
    SimpleDateFormat isoFormat = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSX");
    Date parsed = isoFormat.parse(timeString);
    return DateUtil.format(parsed, "yyyy-MM-dd HH:mm:ss.SSS");
}
}

View File

@@ -17,7 +17,7 @@ spring:
username: ${DB_USER:root}
# password: ${DB_PWD:P@ssw0rd}
# password: ${DB_PWD:Root.123456}
password: ${DB_PWD:123456}
password: ${DB_PWD:password}
# 初始连接数
initial-size: 5
@@ -136,11 +136,13 @@ file:
avatarMaxSize: 5
logging:
file:
path: D:\acs_log
path: /Users/onepiece/myFile/acs_logs
config: classpath:logback-spring.xml
lucene:
index:
path: D:\lucene\index
path: /lucene
# /Users/onepiece/myFile/lucene/index
# Sa-Token配置
sa-token:
@@ -163,5 +165,5 @@ sa-token:
token-prefix: Bearer
loki:
url: http://127.0.0.1:3100/loki/api/v1
url: http://10.211.55.3:3100/loki/api/v1
systemName: acs

View File

@@ -158,6 +158,6 @@ sa-token:
# token 前缀
token-prefix: Bearer
loki:
url: http://localhost:3100/loki/api/v1
systemName: acs
#loki:
# url: http://localhost:3100/loki/api/v1
# systemName: acs

View File

@@ -78,3 +78,7 @@ security:
- /api/localStorage/pictures
# 参数
- /api/param/getValueByCode
lucene:
index:
path: C:\acs\lucene\index

View File

@@ -3,7 +3,7 @@
<springProperty scope="context" name="logPath" source="logging.file.path" defaultValue="logs"/>
<property name="LOG_HOME" value="${logPath}"/>
<!-- 按照每天生成日志文件 -->
<appender name="FILE14" class="ch.qos.logback.core.rolling.RollingFileAppender">
<appender name="AcsToLiKuServiceImpl" class="ch.qos.logback.core.rolling.RollingFileAppender">
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<!--日志文件输出的文件名-->
<FileNamePattern>${LOG_HOME}/ACS请求立库/%d{yyyy-MM-dd}.%i.log</FileNamePattern>
@@ -22,12 +22,12 @@
</appender>
<!-- <logger name="org.nl.start.Init" level="info" additivity="false">
<appender-ref ref="FILE3"/>
</logger>-->
<!-- 打印sql -->
<logger name="org.nl.acs.ext.wms.service.impl.AcsToLiKuServiceImpl" level="info" additivity="false">
<appender-ref ref="FILE14"/>
<appender name="asyncFileAppender" class="ch.qos.logback.classic.AsyncAppender">
<appender-ref ref="AcsToLiKuServiceImpl" />
<!-- 设置队列大小,根据您的需求调整 -->
<queueSize>512</queueSize>
</appender>
<logger name="org.nl.acs.ext.wms.service.impl.AcsToLiKuServiceImpl" level="info" additivity="true">
<appender-ref ref="asyncFileAppender"/>
</logger>
</included>

View File

@@ -3,7 +3,7 @@
<springProperty scope="context" name="logPath" source="logging.file.path" defaultValue="logs"/>
<property name="LOG_HOME" value="${logPath}"/>
<!-- 按照每天生成日志文件 -->
<appender name="FILE12" class="ch.qos.logback.core.rolling.RollingFileAppender">
<appender name="AcsToWmsServiceImpl" class="ch.qos.logback.core.rolling.RollingFileAppender">
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<!--日志文件输出的文件名-->
<FileNamePattern>${LOG_HOME}/ACS请求LMS/%d{yyyy-MM-dd}.%i.log</FileNamePattern>
@@ -22,12 +22,12 @@
</appender>
<!-- <logger name="org.nl.start.Init" level="info" additivity="false">
<appender-ref ref="FILE3"/>
</logger>-->
<!-- 打印sql -->
<logger name="org.nl.acs.ext.wms.service.impl.AcsToWmsServiceImpl" level="info" additivity="false">
<appender-ref ref="FILE12"/>
<appender name="asyncFileAppender" class="ch.qos.logback.classic.AsyncAppender">
<appender-ref ref="AcsToWmsServiceImpl" />
<!-- 设置队列大小,根据您的需求调整 -->
<queueSize>512</queueSize>
</appender>
<logger name="org.nl.acs.ext.wms.service.impl.AcsToWmsServiceImpl" level="info" additivity="true">
<appender-ref ref="asyncFileAppender"/>
</logger>
</included>

View File

@@ -3,7 +3,7 @@
<springProperty scope="context" name="logPath" source="logging.file.path" defaultValue="logs"/>
<property name="LOG_HOME" value="${logPath}"/>
<!-- 按照每天生成日志文件 -->
<appender name="FILE11" class="ch.qos.logback.core.rolling.RollingFileAppender">
<appender name="AgvNdcOneDeviceDriver" class="ch.qos.logback.core.rolling.RollingFileAppender">
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<!--日志文件输出的文件名-->
<FileNamePattern>${LOG_HOME}/PS20-RT20NDC驱动/%d{yyyy-MM-dd}.%i.log</FileNamePattern>
@@ -22,12 +22,12 @@
</appender>
<!-- <logger name="org.nl.start.Init" level="info" additivity="false">
<appender-ref ref="FILE3"/>
</logger>-->
<!-- 打印sql -->
<logger name="org.nl.acs.device_driver.basedriver.agv.ndcone.AgvNdcOneDeviceDriver" level="info" additivity="false">
<appender-ref ref="FILE11"/>
<appender name="asyncFileAppender" class="ch.qos.logback.classic.AsyncAppender">
<appender-ref ref="AgvNdcOneDeviceDriver" />
<!-- 设置队列大小,根据您的需求调整 -->
<queueSize>512</queueSize>
</appender>
<logger name="org.nl.acs.device_driver.basedriver.agv.ndcone.AgvNdcOneDeviceDriver" level="info" additivity="true">
<appender-ref ref="asyncFileAppender"/>
</logger>
</included>

View File

@@ -3,7 +3,7 @@
<springProperty scope="context" name="logPath" source="logging.file.path" defaultValue="logs"/>
<property name="LOG_HOME" value="${logPath}"/>
<!-- 按照每天生成日志文件 -->
<appender name="FILE19" class="ch.qos.logback.core.rolling.RollingFileAppender">
<appender name="AgvNdcTwoDeviceDriver" class="ch.qos.logback.core.rolling.RollingFileAppender">
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<!--日志文件输出的文件名-->
<FileNamePattern>${LOG_HOME}/诺宝NDC驱动/%d{yyyy-MM-dd}.%i.log</FileNamePattern>
@@ -22,12 +22,12 @@
</appender>
<!-- <logger name="org.nl.start.Init" level="info" additivity="false">
<appender-ref ref="FILE3"/>
</logger>-->
<!-- 打印sql -->
<logger name="org.nl.acs.device_driver.basedriver.agv.ndctwo.AgvNdcTwoDeviceDriver" level="info" additivity="false">
<appender-ref ref="FILE19"/>
<appender name="asyncFileAppender" class="ch.qos.logback.classic.AsyncAppender">
<appender-ref ref="AgvNdcTwoDeviceDriver" />
<!-- 设置队列大小,根据您的需求调整 -->
<queueSize>512</queueSize>
</appender>
<logger name="org.nl.acs.device_driver.basedriver.agv.ndctwo.AgvNdcTwoDeviceDriver" level="info" additivity="true">
<appender-ref ref="asyncFileAppender"/>
</logger>
</included>

View File

@@ -3,7 +3,7 @@
<springProperty scope="context" name="logPath" source="logging.file.path" defaultValue="logs"/>
<property name="LOG_HOME" value="${logPath}"/>
<!-- 按照每天生成日志文件 -->
<appender name="FILE3" class="ch.qos.logback.core.rolling.RollingFileAppender">
<appender name="AutoCreateInst" class="ch.qos.logback.core.rolling.RollingFileAppender">
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<!--日志文件输出的文件名-->
<FileNamePattern>${LOG_HOME}/自动创建诺宝AGV指令/%d{yyyy-MM-dd}.%i.log</FileNamePattern>
@@ -21,12 +21,12 @@
</encoder>
</appender>
<!-- <logger name="org.nl.start.Init" level="info" additivity="false">
<appender-ref ref="FILE3"/>
</logger>-->
<!-- 打印sql -->
<logger name="org.nl.modules.quartz.task.AutoCreateInst" level="info" additivity="false">
<appender-ref ref="FILE3"/>
<appender name="asyncFileAppender" class="ch.qos.logback.classic.AsyncAppender">
<appender-ref ref="AutoCreateInst" />
<!-- 设置队列大小,根据您的需求调整 -->
<queueSize>512</queueSize>
</appender>
<logger name="org.nl.modules.quartz.task.AutoCreateInst" level="info" additivity="true">
<appender-ref ref="asyncFileAppender"/>
</logger>
</included>

View File

@@ -3,7 +3,7 @@
<springProperty scope="context" name="logPath" source="logging.file.path" defaultValue="logs"/>
<property name="LOG_HOME" value="${logPath}"/>
<!-- 按照每天生成日志文件 -->
<appender name="FILE3" class="ch.qos.logback.core.rolling.RollingFileAppender">
<appender name="AutoCreateAgvOneInst" class="ch.qos.logback.core.rolling.RollingFileAppender">
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<!--日志文件输出的文件名-->
<FileNamePattern>${LOG_HOME}/自动创建RT20-PS20AGV指令/%d{yyyy-MM-dd}.%i.log</FileNamePattern>
@@ -21,12 +21,12 @@
</encoder>
</appender>
<!-- <logger name="org.nl.start.Init" level="info" additivity="false">
<appender-ref ref="FILE3"/>
</logger>-->
<!-- 打印sql -->
<logger name="org.nl.modules.quartz.task.AutoCreateAgvOneInst" level="info" additivity="false">
<appender-ref ref="FILE3"/>
<appender name="asyncFileAppender" class="ch.qos.logback.classic.AsyncAppender">
<appender-ref ref="AutoCreateAgvOneInst" />
<!-- 设置队列大小,根据您的需求调整 -->
<queueSize>512</queueSize>
</appender>
<logger name="org.nl.modules.quartz.task.AutoCreateAgvOneInst" level="info" additivity="true">
<appender-ref ref="asyncFileAppender"/>
</logger>
</included>

View File

@@ -3,7 +3,7 @@
<springProperty scope="context" name="logPath" source="logging.file.path" defaultValue="logs"/>
<property name="LOG_HOME" value="${logPath}"/>
<!-- 按照每天生成日志文件 -->
<appender name="FILE15" class="ch.qos.logback.core.rolling.RollingFileAppender">
<appender name="LiKuToAcsServiceImpl" class="ch.qos.logback.core.rolling.RollingFileAppender">
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<!--日志文件输出的文件名-->
<FileNamePattern>${LOG_HOME}/立库请求ACS/%d{yyyy-MM-dd}.%i.log</FileNamePattern>
@@ -22,12 +22,12 @@
</appender>
<!-- <logger name="org.nl.start.Init" level="info" additivity="false">
<appender-ref ref="FILE3"/>
</logger>-->
<!-- 打印sql -->
<logger name="org.nl.acs.ext.wms.service.impl.LiKuToAcsServiceImpl" level="info" additivity="false">
<appender-ref ref="FILE15"/>
<appender name="asyncFileAppender" class="ch.qos.logback.classic.AsyncAppender">
<appender-ref ref="LiKuToAcsServiceImpl" />
<!-- 设置队列大小,根据您的需求调整 -->
<queueSize>512</queueSize>
</appender>
<logger name="org.nl.acs.ext.wms.service.impl.LiKuToAcsServiceImpl" level="info" additivity="true">
<appender-ref ref="asyncFileAppender"/>
</logger>
</included>

View File

@@ -1,34 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<included>
<springProperty scope="context" name="logPath" source="logging.file.path" defaultValue="logs"/>
<property name="LOG_HOME" value="${logPath}"/>
<define name="DEVICECODE" class="org.nl.modules.logging.DeviceCodeDir"/>
<!-- 按照每天生成日志文件 -->
<appender name="FILE_LUCENE" class="ch.qos.logback.core.rolling.RollingFileAppender">
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<!--日志文件输出的文件名-->
<FileNamePattern>${LOG_HOME}/lucene/${DEVICECODE}/%d{yyyy-MM-dd}.%i.log</FileNamePattern>
<!--日志文件保留天数-->
<maxHistory>15</maxHistory>
<!--单个日志最大容量 至少10MB才能看得出来-->
<maxFileSize>200MB</maxFileSize>
<!--所有日志最多占多大容量-->
<totalSizeCap>2GB</totalSizeCap>
</rollingPolicy>
<encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
<!--格式化输出:%d表示日期%thread表示线程名%-5level级别从左显示5个字符宽度%msg日志消息%n是换行符-->
<pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{50} - %msg%n</pattern>
<charset>${log.charset}</charset>
</encoder>
</appender>
<!-- <logger name="org.nl.start.Init" level="info" additivity="false">
<appender-ref ref="FILE3"/>
</logger>-->
<!-- 打印sql -->
<logger name="org.nl.modules.lucene.service.impl.LuceneExecuteLogServiceImpl" level="info" additivity="false">
<appender-ref ref="FILE_LUCENE"/>
</logger>
</included>

View File

@@ -3,7 +3,7 @@
<springProperty scope="context" name="logPath" source="logging.file.path" defaultValue="logs"/>
<property name="LOG_HOME" value="${logPath}"/>
<!-- 按照每天生成日志文件 -->
<appender name="FILE13" class="ch.qos.logback.core.rolling.RollingFileAppender">
<appender name="WmsToAcsServiceImpl" class="ch.qos.logback.core.rolling.RollingFileAppender">
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<!--日志文件输出的文件名-->
<FileNamePattern>${LOG_HOME}/LMS下发ACS/%d{yyyy-MM-dd}.%i.log</FileNamePattern>
@@ -22,12 +22,12 @@
</appender>
<!-- <logger name="org.nl.start.Init" level="info" additivity="false">
<appender-ref ref="FILE3"/>
</logger>-->
<!-- 打印sql -->
<logger name="org.nl.acs.ext.wms.service.impl.WmsToAcsServiceImpl" level="info" additivity="false">
<appender-ref ref="FILE13"/>
<appender name="asyncFileAppender" class="ch.qos.logback.classic.AsyncAppender">
<appender-ref ref="WmsToAcsServiceImpl" />
<!-- 设置队列大小,根据您的需求调整 -->
<queueSize>512</queueSize>
</appender>
<logger name="org.nl.acs.ext.wms.service.impl.WmsToAcsServiceImpl" level="info" additivity="true">
<appender-ref ref="asyncFileAppender"/>
</logger>
</included>

View File

@@ -3,7 +3,7 @@
<springProperty scope="context" name="logPath" source="logging.file.path" defaultValue="logs"/>
<property name="LOG_HOME" value="${logPath}"/>
<!-- 按照每天生成日志文件 -->
<appender name="FILE20" class="ch.qos.logback.core.rolling.RollingFileAppender">
<appender name="NDCAgvServiceImpl" class="ch.qos.logback.core.rolling.RollingFileAppender">
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<!--日志文件输出的文件名-->
<FileNamePattern>${LOG_HOME}/下发NDC系统数据/%d{yyyy-MM-dd}.%i.log</FileNamePattern>
@@ -22,12 +22,12 @@
</appender>
<!-- <logger name="org.nl.start.Init" level="info" additivity="false">
<appender-ref ref="FILE3"/>
</logger>-->
<!-- 打印sql -->
<logger name="org.nl.acs.agv.server.impl.NDCAgvServiceImpl" level="info" additivity="false">
<appender-ref ref="FILE20"/>
<appender name="asyncFileAppender" class="ch.qos.logback.classic.AsyncAppender">
<appender-ref ref="NDCAgvServiceImpl" />
<!-- 设置队列大小,根据您的需求调整 -->
<queueSize>512</queueSize>
</appender>
<logger name="org.nl.acs.agv.server.impl.NDCAgvServiceImpl" level="info" additivity="true">
<appender-ref ref="asyncFileAppender"/>
</logger>
</included>

View File

@@ -3,7 +3,7 @@
<springProperty scope="context" name="logPath" source="logging.file.path" defaultValue="logs"/>
<property name="LOG_HOME" value="${logPath}"/>
<!-- 按照每天生成日志文件 -->
<appender name="FILE18" class="ch.qos.logback.core.rolling.RollingFileAppender">
<appender name="NDCSocketConnectionAutoRun" class="ch.qos.logback.core.rolling.RollingFileAppender">
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<!--日志文件输出的文件名-->
<FileNamePattern>${LOG_HOME}/诺宝NDC系统/%d{yyyy-MM-dd}.%i.log</FileNamePattern>
@@ -22,12 +22,12 @@
</appender>
<!-- <logger name="org.nl.start.Init" level="info" additivity="false">
<appender-ref ref="FILE3"/>
</logger>-->
<!-- 打印sql -->
<logger name="org.nl.acs.auto.run.NDCSocketConnectionAutoRun" level="info" additivity="false">
<appender-ref ref="FILE18"/>
<appender name="asyncFileAppender" class="ch.qos.logback.classic.AsyncAppender">
<appender-ref ref="NDCSocketConnectionAutoRun" />
<!-- 设置队列大小,根据您的需求调整 -->
<queueSize>512</queueSize>
</appender>
<logger name="org.nl.acs.auto.run.NDCSocketConnectionAutoRun" level="info" additivity="true">
<appender-ref ref="asyncFileAppender"/>
</logger>
</included>

View File

@@ -3,7 +3,7 @@
<springProperty scope="context" name="logPath" source="logging.file.path" defaultValue="logs"/>
<property name="LOG_HOME" value="${logPath}"/>
<!-- 按照每天生成日志文件 -->
<appender name="FILE10" class="ch.qos.logback.core.rolling.RollingFileAppender">
<appender name="OneNDCSocketConnectionAutoRun" class="ch.qos.logback.core.rolling.RollingFileAppender">
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<!--日志文件输出的文件名-->
<FileNamePattern>${LOG_HOME}/PS20-RT20NDC系统/%d{yyyy-MM-dd}.%i.log</FileNamePattern>
@@ -22,12 +22,12 @@
</appender>
<!-- <logger name="org.nl.start.Init" level="info" additivity="false">
<appender-ref ref="FILE3"/>
</logger>-->
<!-- 打印sql -->
<logger name="org.nl.acs.auto.run.OneNDCSocketConnectionAutoRun" level="info" additivity="false">
<appender-ref ref="FILE10"/>
<appender name="asyncFileAppender" class="ch.qos.logback.classic.AsyncAppender">
<appender-ref ref="OneNDCSocketConnectionAutoRun" />
<!-- 设置队列大小,根据您的需求调整 -->
<queueSize>512</queueSize>
</appender>
<logger name="org.nl.acs.auto.run.OneNDCSocketConnectionAutoRun" level="info" additivity="true">
<appender-ref ref="asyncFileAppender"/>
</logger>
</included>

View File

@@ -3,7 +3,7 @@
<springProperty scope="context" name="logPath" source="logging.file.path" defaultValue="logs"/>
<property name="LOG_HOME" value="${logPath}"/>
<!-- 按照每天生成日志文件 -->
<appender name="FILE16" class="ch.qos.logback.core.rolling.RollingFileAppender">
<appender name="OpcUtl" class="ch.qos.logback.core.rolling.RollingFileAppender">
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<!--日志文件输出的文件名-->
<FileNamePattern>${LOG_HOME}/下发KEP信号/%d{yyyy-MM-dd}.%i.log</FileNamePattern>
@@ -22,12 +22,12 @@
</appender>
<!-- <logger name="org.nl.start.Init" level="info" additivity="false">
<appender-ref ref="FILE3"/>
</logger>-->
<!-- 打印sql -->
<logger name="org.nl.acs.opc.OpcUtl" level="info" additivity="false">
<appender-ref ref="FILE16"/>
<appender name="asyncFileAppender" class="ch.qos.logback.classic.AsyncAppender">
<appender-ref ref="OpcUtl" />
<!-- 设置队列大小,根据您的需求调整 -->
<queueSize>512</queueSize>
</appender>
<logger name="org.nl.acs.opc.OpcUtl" level="info" additivity="true">
<appender-ref ref="asyncFileAppender"/>
</logger>
</included>

View File

@@ -14,13 +14,13 @@ https://juejin.cn/post/6844903775631572999
<property name="log.pattern"
value="%black(%contextName-) %red(%d{yyyy-MM-dd HH:mm:ss.SSS}) %green([%thread]) %highlight(%-5level) %boldMagenta(%logger{36}) - %gray(%msg%n)"/>
<springProperty scope="context" name="logPath" source="logging.file.path" defaultValue="logs"/>
<!-- <springProperty scope="context" name="lokiUrl" source="loki.url"/>-->
<!-- <springProperty scope="context" name="lokiUrl" source="loki.url"/>-->
<springProperty scope="context" name="systemName" source="loki.systemName"/>
<!-- <property name="LOKI_URL" value="${lokiUrl}"/>-->
<!-- <property name="LOKI_URL" value="${lokiUrl}"/>-->
<property name="SYSTEM_NAME" value="${systemName}"/>
<!--定义日志文件的存储地址 勿在 LogBack 的配置中使用相对路径-->
<property name="LOG_HOME" value="${logPath}"/>
<include resource="log/Lucene.xml"/>
<!--<include resource="log/Lucene.xml"/>-->
<include resource="log/OneNDCSocketConnectionAutoRun.xml"/>
<include resource="log/NDCSocketConnectionAutoRun.xml"/>
<include resource="log/AgvNdcOneDeviceDriver.xml"/>
@@ -33,10 +33,9 @@ https://juejin.cn/post/6844903775631572999
<include resource="log/LkToAcs.xml"/>
<appender name="CONSOLE" class="ch.qos.logback.core.ConsoleAppender">
<withJansi>true</withJansi>
<encoder>
<pattern>${log.pattern}</pattern>
<!-- <charset>${log.charset}</charset>-->
<charset>${log.charset}</charset>
</encoder>
</appender>
@@ -56,99 +55,30 @@ https://juejin.cn/post/6844903775631572999
<encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
<!--格式化输出:%d表示日期%thread表示线程名%-5level级别从左显示5个字符宽度%msg日志消息%n是换行符-->
<pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{50} - %msg%n</pattern>
<charset>${log.charset}</charset>
</encoder>
</appender>
<!--异步到文件-->
<appender name="asyncFileAppender" class="ch.qos.logback.classic.AsyncAppender">
<discardingThreshold>0</discardingThreshold>
<queueSize>500</queueSize>
<queueSize>200</queueSize>
<appender-ref ref="FILE"/>
</appender>
<!--添加loki-->
<!-- <appender name="lokiAppender" class="com.github.loki4j.logback.Loki4jAppender">-->
<!-- <batchTimeoutMs>1000</batchTimeoutMs>-->
<!-- <http class="com.github.loki4j.logback.ApacheHttpSender">-->
<!-- <url>${LOKI_URL}/push</url>-->
<!-- </http>-->
<!-- <format>-->
<!-- <label>-->
<!-- <pattern>-->
<!-- system=${SYSTEM_NAME},level=%level,logType=%X{log_file_type:-logType},device=%X{device_code_log:-device}-->
<!-- </pattern>-->
<!-- </label>-->
<!-- <message>-->
<!-- <pattern>${log.pattern}</pattern>-->
<!-- </message>-->
<!-- <sortByTime>true</sortByTime>-->
<!-- </format>-->
<!-- </appender>-->
<appender name="ERROR" class="ch.qos.logback.core.rolling.RollingFileAppender">
<filter class="ch.qos.logback.classic.filter.ThresholdFilter">
<level>error</level>
</filter>
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<!--日志文件输出的文件名-->
<FileNamePattern>${LOG_HOME}/ERROR/%d{yyyy-MM-dd}.%i.log</FileNamePattern>
<!--日志文件保留天数-->
<maxHistory>15</maxHistory>
<!--单个日志最大容量 至少10MB才能看得出来-->
<maxFileSize>200MB</maxFileSize>
<!--所有日志最多占多大容量-->
<totalSizeCap>20GB</totalSizeCap>
</rollingPolicy>
<encoder>
<pattern>${log.pattern}</pattern>
<charset>${log.charset}</charset>
</encoder>
<appender name="luceneAppender" class="org.nl.modules.lucene.config.LuceneAppender"/>
<appender name="asyncLuceneAppender" class="org.nl.modules.lucene.config.AsyncLuceneAppender">
<appender-ref ref="luceneAppender"/>
<!-- 设置队列大小,根据您的需求调整 -->
<queueSize>512</queueSize>
</appender>
<!--开发环境:打印控制台-->
<springProfile name="dev">
<root level="info">
<appender-ref ref="asyncLuceneAppender"/>
<appender-ref ref="CONSOLE"/>
<!-- <appender-ref ref="lokiAppender"/>-->
<appender-ref ref="asyncFileAppender"/>
</root>
<!--logmanage -->
<logger name="org.nl.acs.log.service.impl.DeviceExecuteLogServiceImpl" level="info" additivity="false">
<appender-ref ref="CONSOLE"/>
<!-- <appender-ref ref="lokiAppender"/>-->
</logger>
<logger name="jdbc.resultsettable" level="ERROR" additivity="false">
<appender-ref ref="CONSOLE"/>
<!-- <appender-ref ref="lokiAppender"/>-->
</logger>
<logger name="org.openscada.opc.lib.da.Server" level="ERROR" additivity="false">
<appender-ref ref="CONSOLE"/>
<!-- <appender-ref ref="lokiAppender"/>-->
</logger>
<!--logmanage -->
<logger name="jdbc.audit" level="ERROR" additivity="false">
<appender-ref ref="CONSOLE"/>
</logger>
<logger name="jdbc.resultset" level="ERROR" additivity="false">
<appender-ref ref="CONSOLE"/>
</logger>
<logger name="springfox.documentation" level="ERROR" additivity="false">
<appender-ref ref="CONSOLE"/>
</logger>
<logger name="jdbc.sqlonly" level="ERROR" additivity="false">
<appender-ref ref="CONSOLE"/>
</logger>
<logger name="jdbc.sqltiming" level="ERROR" additivity="false">
<appender-ref ref="CONSOLE"/>
</logger>
<logger name="org.jinterop" level="ERROR" additivity="false">
<appender-ref ref="CONSOLE"/>
</logger>
</springProfile>
<!--测试环境:打印控制台-->
<springProfile name="test">
<!-- 打印sql -->
@@ -157,49 +87,14 @@ https://juejin.cn/post/6844903775631572999
</logger>
<root level="info">
<appender-ref ref="CONSOLE"/>
</root>
</springProfile>
<!--生产环境:打印控制台和输出到文件-->
<springProfile name="prod">
<root level="info">
<appender-ref ref="ERROR"/>
<!-- <appender-ref ref="lokiAppender"/>-->
<appender-ref ref="asyncLuceneAppender"/>
<appender-ref ref="asyncFileAppender"/>
</root>
<!--logmanage -->
<logger name="org.nl.acs.log.service.impl.DeviceExecuteLogServiceImpl" level="info" additivity="false">
<!-- <appender-ref ref="lokiAppender"/>-->
</logger>
<logger name="jdbc.resultsettable" level="ERROR" additivity="false">
<appender-ref ref="CONSOLE"/>
<!-- <appender-ref ref="lokiAppender"/>-->
</logger>
<logger name="org.openscada.opc.lib.da.Server" level="ERROR" additivity="false">
<appender-ref ref="CONSOLE"/>
<!-- <appender-ref ref="lokiAppender"/>-->
</logger>
<!--logmanage -->
<logger name="jdbc.audit" level="ERROR" additivity="false">
<appender-ref ref="CONSOLE"/>
</logger>
<logger name="jdbc.resultset" level="ERROR" additivity="false">
<appender-ref ref="CONSOLE"/>
</logger>
<logger name="springfox.documentation" level="ERROR" additivity="false">
<appender-ref ref="CONSOLE"/>
</logger>
<logger name="jdbc.sqlonly" level="ERROR" additivity="false">
<appender-ref ref="CONSOLE"/>
</logger>
<logger name="jdbc.sqltiming" level="ERROR" additivity="false">
<appender-ref ref="CONSOLE"/>
</logger>
<logger name="org.jinterop" level="ERROR" additivity="false">
<appender-ref ref="CONSOLE"/>
</logger>
</springProfile>
</configuration>