2 Commits

Author SHA1 Message Date
0db93f21cf feat: 导出 2025-05-30 10:45:59 +08:00
dd918ba7a6 feat: shardingsphere主从复制,去除dynamic-datasource多数据源 2025-05-27 17:04:44 +08:00
6 changed files with 200 additions and 62 deletions

View File

@@ -1 +1,3 @@
# 诺力开发平台
## 主从复制（Master-Slave Replication）
<img src="./doc/img.png">

BIN
nladmin-system/doc/img.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 455 KiB

View File

@@ -32,6 +32,12 @@
</properties>
<dependencies>
<!-- 主从复制 -->
<dependency>
<groupId>org.apache.shardingsphere</groupId>
<artifactId>shardingsphere-jdbc-core-spring-boot-starter</artifactId>
<version>5.1.1</version>
</dependency>
<dependency>
<groupId>com.dameng</groupId>
<artifactId>DmJdbcDriver18</artifactId>
@@ -76,11 +82,11 @@
<version>11.2.0.4</version>
</dependency>
<!-- dynamic-datasource -->
<dependency>
<groupId>com.baomidou</groupId>
<artifactId>dynamic-datasource-spring-boot-starter</artifactId>
<version>4.1.3</version>
</dependency>
<!-- <dependency>-->
<!-- <groupId>com.baomidou</groupId>-->
<!-- <artifactId>dynamic-datasource-spring-boot-starter</artifactId>-->
<!-- <version>4.1.3</version>-->
<!-- </dependency>-->
<!-- https://onew.me/logback/2018/09/17/logback_win.html-->
<dependency>
<groupId>org.fusesource.jansi</groupId>

View File

@@ -19,12 +19,18 @@ import cn.hutool.core.io.IoUtil;
import cn.hutool.core.util.IdUtil;
import cn.hutool.poi.excel.BigExcelWriter;
import cn.hutool.poi.excel.ExcelUtil;
import com.google.common.collect.Lists;
import org.apache.poi.ss.usermodel.Cell;
import org.apache.poi.ss.usermodel.Row;
import org.apache.poi.ss.usermodel.Sheet;
import org.apache.poi.util.IOUtils;
import org.apache.poi.xssf.streaming.SXSSFSheet;
import org.apache.poi.xssf.streaming.SXSSFWorkbook;
import org.nl.common.exception.BadRequestException;
import org.nl.config.language.LangProcess;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.util.CollectionUtils;
import org.springframework.web.multipart.MultipartFile;
import javax.servlet.ServletOutputStream;
@@ -34,9 +40,7 @@ import java.io.*;
import java.security.MessageDigest;
import java.text.DecimalFormat;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.*;
/**
* File工具类扩展 hutool 工具包
@@ -45,6 +49,7 @@ import java.util.Map;
* @date 2018-12-27
*/
public class FileUtil extends cn.hutool.core.io.FileUtil {
private static final Integer BATCH_WRITE_EXCEL_ROW_AMOUNT = 500;
private static final Logger log = LoggerFactory.getLogger(FileUtil.class);
@@ -227,6 +232,86 @@ public class FileUtil extends cn.hutool.core.io.FileUtil {
IoUtil.close(out);
}
/**
* 流导出
* @param list
* @param response
* @throws IOException
*/
/**
 * Streams a list of row maps to the HTTP response as a single XLSX download.
 * <p>
 * Uses {@link SXSSFWorkbook} so only {@code BATCH_WRITE_EXCEL_ROW_AMOUNT} rows
 * are kept in memory at a time, preventing OOM on large exports.
 *
 * @param list               data rows; each map is keyed by an Excel column name
 *                           from {@code columnExcelNameArr} (missing keys produce blank cells)
 * @param columnExcelNameArr header titles, also used as lookup keys into each row map
 * @param response           target HTTP response; content type and attachment header are set here
 * @throws BadRequestException if writing the workbook fails for any reason
 */
public void downloadExcelIO(List<Map<String, String>> list, String[] columnExcelNameArr, HttpServletResponse response) {
    ServletOutputStream out = null;
    SXSSFWorkbook wb = null;
    try {
        // Streaming workbook: keeps only a small window of rows in memory.
        wb = new SXSSFWorkbook(BATCH_WRITE_EXCEL_ROW_AMOUNT);
        Sheet sheet = wb.createSheet();
        // Header row built from the supplied column names.
        Row titleRow = sheet.createRow(0);
        for (int col = 0; col < columnExcelNameArr.length; col++) {
            titleRow.createCell(col).setCellValue(columnExcelNameArr[col]);
        }
        // Data rows: one row per map, cells looked up by column name.
        if (!CollectionUtils.isEmpty(list)) {
            int rowNum = 1;
            for (Map<String, String> columnMap : list) {
                Row dataRow = sheet.createRow(rowNum++);
                for (int col = 0; col < columnExcelNameArr.length; col++) {
                    dataRow.createCell(col).setCellValue(columnMap.get(columnExcelNameArr[col]));
                }
            }
        }
        // Plain generated file name. The original put the full server-side temp
        // path (SYS_TEM_DIR + uuid + ".xlsx") into Content-Disposition, leaking
        // the server directory layout to the client.
        String fileName = IdUtil.fastSimpleUUID() + ".xlsx";
        response.setContentType("application/vnd.openxmlformats-officedocument.spreadsheetml.sheet;charset=utf-8");
        response.setHeader("Content-Disposition", "attachment;filename=" + fileName);
        out = response.getOutputStream();
        wb.write(out);
        out.flush();
    } catch (Exception ex) {
        // Log with the stack trace (the original dropped the cause), then
        // surface the same exception type callers already handle.
        log.warn(ex.getMessage(), ex);
        throw new BadRequestException(ex.getMessage());
    } finally {
        if (wb != null) {
            try {
                // Must dispose to delete the temp files SXSSF spills to disk.
                wb.dispose();
            } catch (Exception e) {
                log.warn(e.getMessage());
            }
        }
        if (out != null) {
            try {
                // Close exactly once; the original closed in an inner finally
                // and then flushed the already-closed stream in an outer one.
                out.close();
            } catch (IOException e) {
                log.warn(e.getMessage());
            }
        }
    }
}
public static String getFileType(String type) {
String documents = "txt doc pdf ppt pps xlsx xls docx";
String music = "mp3 wav wma mpa ram ra aac aif m4a";
@@ -343,5 +428,4 @@ public class FileUtil extends cn.hutool.core.io.FileUtil {
/**
 * Returns the MD5 hex digest of a file's contents.
 *
 * @param file file to hash
 * @return MD5 digest of the file's bytes
 */
public static String getMd5(File file) {
    // Read the whole file, then reuse the byte[] overload for the digest.
    final byte[] fileBytes = getByte(file);
    return getMd5(fileBytes);
}
}

View File

@@ -9,6 +9,12 @@ nl:
username: root
password: 12356
database: nl-platform
slave:
ip: 127.0.0.1
port: 3306
username: root
password: 12356
database: nl_platform # NOTE(review): master uses "nl-platform" (hyphen, line above) — confirm the underscore here is intentional and not a typo
redis:
ip: 127.0.0.1
port: 6379

View File

@@ -12,62 +12,102 @@ spring:
exclude: com.alibaba.druid.spring.boot.autoconfigure.DruidDataSourceAutoConfigure
messages:
basename: language/login/login,language/error/error,language/buss/buss,language/task/task
datasource:
druid:
initial-size: 5 #初始化时建立物理连接的个数
min-idle: 15 #最小连接池数量
maxActive: 30 #最大连接池数量
maxWait: 3000 #获取连接时最大等待时间,单位毫秒
#申请连接的时候检测如果空闲时间大于timeBetweenEvictionRunsMillis执行validationQuery检测连接是否有效。
test-while-idle: true
time-between-eviction-runs-millis: 300000 #既作为检测的间隔时间又作为test-while-idle执行的依据
min-evictable-idle-time-millis: 900000 #销毁线程时检测当前连接的最后活动时间和当前时间差大于该值时,关闭当前连接
#用来检测连接是否有效的sql
#mysql中为 select 'x'
#oracle中为 select 1 from dual
validation-query: SELECT 'x'
test-on-borrow: true #申请连接时会执行validationQuery检测连接是否有效,开启会降低性能,默认为true
test-on-return: false #归还连接时会执行validationQuery检测连接是否有效,开启会降低性能,默认为true
exception-sorter: true #当数据库抛出不可恢复的异常时,抛弃该连接
pool-prepared-statements: true #是否缓存preparedStatement,mysql5.5+建议开启
max-pool-prepared-statement-per-connection-size: 20 #当值大于20时poolPreparedStatements会自动修改为true
#通过connectProperties属性来打开mergeSql功能慢SQL记录
connection-properties: druid.stat.mergeSql=true;druid.stat.slowSqlMillis=5000
# use-global-data-source-stat: true #合并多个DruidDataSource的监控数据
#filters通过别名的方式配置扩展插件常用的插件有
#监控统计用的filter:stat 日志用的filter:log4j 防御sql注入的filter:wall
filter:
stat:
shardingsphere:
datasource:
names: master,slave
master:
type: com.alibaba.druid.pool.DruidDataSource
driver-class-name: com.mysql.cj.jdbc.Driver
# 初始连接数
initial-size: 20
# 最小连接数
min-idle: 30
# 最大连接数
max-active: 300
# 是否自动回收超时连接
socket-timeout: 10
query-time-out: 7
transaction-query-timeout: 30
# 获取连接超时时间
max-wait: 4000
# 连接有效性检测时间
time-between-eviction-runs-millis: 60000
# 连接在池中最小生存的时间
min-evictable-idle-time-millis: 300000
# 连接在池中最大生存的时间
max-evictable-idle-time-millis: 900000
# 指明连接是否被空闲连接回收器(如果有)进行检验.如果检测失败,则连接将被从池中去除
test-while-idle: true
# 指明是否在从池中取出连接前进行检验,如果检验失败, 则从池中去除连接并尝试取出另一个
test-on-borrow: true
# 是否在归还到池中前进行检验
test-on-return: false
# 检测连接是否有效
validation-query: select 1
# 配置监控统计
webStatFilter:
enabled: true
# 记录慢SQL
log-slow-sql: true
slow-sql-millis: 1000
merge-sql: true
wall:
config:
multi-statement-allow: true
#设置访问druid监控页面的拦截路径及账号和密码,默认没有
stat-view-servlet:
enabled: true
url-pattern: /druid/*
login-username: admin
login-password: admin
dynamic:
primary: mysql
datasource:
mysql:
driver-class-name: net.sf.log4jdbc.sql.jdbcapi.DriverSpy
url: jdbc:log4jdbc:mysql://${DB_HOST:${nl.config.mysql.ip}}:${DB_PORT:${nl.config.mysql.port}}/${DB_NAME:${nl.config.mysql.database}}?serverTimezone=Asia/Shanghai&characterEncoding=utf8&useSSL=false&useOldAliasMetadataBehavior=true
username: ${DB_USER:${nl.config.mysql.username}}
password: ${DB_PWD:${nl.config.mysql.password}}
type: com.alibaba.druid.pool.DruidDataSource
druid:
stat-view-servlet:
enabled: true
url-pattern: /druid/*
reset-enable: false
filters:
DruidFilter,stat
initial-size: 5 #初始化时建立物理连接的个数
min-idle: 15 #最小连接池数量
maxActive: 30 #最大连接池数量
maxWait: 3000 #获取连接时最大等待时间,单位毫秒
url: jdbc:mysql://${DB_HOST:${nl.config.mysql.ip}}:${DB_PORT:${nl.config.mysql.port}}/${DB_NAME:${nl.config.mysql.database}}?serverTimezone=Asia/Shanghai&characterEncoding=utf8&useSSL=false&useOldAliasMetadataBehavior=true
username: ${DB_USER:${nl.config.mysql.username}}
password: ${DB_PWD:${nl.config.mysql.password}}
slave:
type: com.alibaba.druid.pool.DruidDataSource
driver-class-name: com.mysql.cj.jdbc.Driver
# 初始连接数
initial-size: 20
# 最小连接数
min-idle: 30
# 最大连接数
max-active: 300
# 是否自动回收超时连接
socket-timeout: 10
query-time-out: 7
transaction-query-timeout: 30
# 获取连接超时时间
max-wait: 4000
# 连接有效性检测时间
time-between-eviction-runs-millis: 60000
# 连接在池中最小生存的时间
min-evictable-idle-time-millis: 300000
# 连接在池中最大生存的时间
max-evictable-idle-time-millis: 900000
# 指明连接是否被空闲连接回收器(如果有)进行检验.如果检测失败,则连接将被从池中去除
test-while-idle: true
# 指明是否在从池中取出连接前进行检验,如果检验失败, 则从池中去除连接并尝试取出另一个
test-on-borrow: true
# 是否在归还到池中前进行检验
test-on-return: false
# 检测连接是否有效
validation-query: select 1
# 配置监控统计
webStatFilter:
enabled: true
stat-view-servlet:
enabled: true
url-pattern: /druid/*
reset-enable: false
filters:
DruidFilter,stat
url: jdbc:mysql://${DB_HOST:${nl.config.slave.ip}}:${DB_PORT:${nl.config.slave.port}}/${DB_NAME:${nl.config.slave.database}}?serverTimezone=Asia/Shanghai&characterEncoding=utf8&useSSL=false&useOldAliasMetadataBehavior=true
username: ${DB_USER:${nl.config.slave.username}}
password: ${DB_PWD:${nl.config.slave.password}}
rules:
readwrite-splitting:
data-sources:
db:
type: Static
props:
#接口有事务,读写分离不生效,默认全部使用主库
write-data-source-name: master
read-data-source-names: slave
#负载均衡算法名称
load-balancer-name: round-robin
flyway:
#开启
enabled: false