Commit d90e12c3 authored by lisong

Merge branch 'develop_tas_new_patrol_sharding' into develop_tzs_new_patrol

# Conflicts:
#	amos-boot-system-tzs/amos-boot-module-tzspatrol/amos-boot-module-tzspatrol-biz/src/main/java/com/yeejoin/amos/patrol/business/service/impl/PlanTaskServiceImpl.java
parents f48ba754 48715d71
...@@ -80,6 +80,17 @@
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>com.baomidou</groupId>
<artifactId>dynamic-datasource-spring-boot-starter</artifactId>
<version>4.2.0</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>org.apache.shardingsphere</groupId>
<artifactId>shardingsphere-jdbc-core-spring-boot-starter</artifactId>
<version>5.2.1</version>
</dependency>
<!-- Add fastjson dependency. -->
<dependency>
<groupId>com.alibaba</groupId>
......
...@@ -291,6 +291,9 @@ public interface PlanTaskMapper extends BaseMapper {
List<Check> findCheck();
void truncateTable(@Param("tableName")String tableName);
List<CheckInput> findCheckInput();
List<CheckShot> findCheckShot();
......
...@@ -7,6 +7,14 @@ public interface RepositoryTs {
// Batch save method
<S> Iterable<S> batchSave(Iterable<S> var1);
/**
 * Batch save routed to the sharding (split-database/split-table) data source
 * @param var1
 * @param <S>
 * @return
 */
<S> Iterable<S> batchSaveBySharding(Iterable<S> var1);
// Batch update method
<S> Iterable<S> batchUpdate(Iterable<S> var1);
<S> Iterable<S> batchSaveNoAsync(Iterable<S> var1);
......
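For orientation, here is a minimal sketch (not part of the commit) of how a caller is expected to use the new method; `ArchiveSketch` is a hypothetical name, while `RepositoryTs` is the interface above and the pattern mirrors the archival code in PlanTaskServiceImpl further down in this commit.

```java
import com.yeejoin.amos.patrol.business.dao.repository.RepositoryTs;

// Hypothetical caller, for illustration only.
public class ArchiveSketch {

    private final RepositoryTs repositoryTs;

    public ArchiveSketch(RepositoryTs repositoryTs) {
        this.repositoryTs = repositoryTs;
    }

    public <S> Iterable<S> archiveToHistory(Iterable<S> historyEntities) {
        // batchSave writes through the default (tzs) data source; batchSaveBySharding is
        // routed via @DS to the ShardingSphere data source that owns the p_*_history_N tables.
        return repositoryTs.batchSaveBySharding(historyEntities);
    }
}
```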
...@@ -4,6 +4,7 @@ import cn.hutool.core.date.DateTime;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.baomidou.dynamic.datasource.annotation.DS;
import com.google.common.base.Joiner;
import com.yeejoin.amos.boot.biz.common.constants.RuleConstant;
import com.yeejoin.amos.boot.biz.common.utils.DateUtils;
...@@ -51,6 +52,9 @@ import com.yeejoin.amos.patrol.business.service.intfc.IPlanTaskService;
import com.yeejoin.amos.patrol.business.util.MyByteArrayMultipartFile;
import com.yeejoin.amos.patrol.business.util.PlanTaskUtil;
import com.yeejoin.amos.patrol.business.util.WordTemplateUtils;
import com.yeejoin.amos.patrol.business.vo.*;
import com.yeejoin.amos.patrol.common.enums.*;
import com.yeejoin.amos.patrol.config.DataSourceConfiguration;
import com.yeejoin.amos.patrol.business.vo.CalDateVo;
import com.yeejoin.amos.patrol.business.vo.CodeOrderVo;
import com.yeejoin.amos.patrol.business.vo.DefectVo;
...@@ -2538,7 +2542,8 @@ public class PlanTaskServiceImpl implements IPlanTaskService {
return d;
}).collect(Collectors.toList());
- repositoryTs.batchSave(collect);
+ repositoryTs.batchSaveBySharding(collect);
planTaskMapper.truncateTable("p_plan_task");
log.info("Archiving of p_plan_task table completed ========");
}
...@@ -2551,7 +2556,8 @@ public class PlanTaskServiceImpl implements IPlanTaskService {
BeanUtils.copyProperties(e, d);
return d;
}).collect(Collectors.toList());
- repositoryTs.batchSave(collect);
+ repositoryTs.batchSaveBySharding(collect);
planTaskMapper.truncateTable("p_plan_task_detail");
log.info("Archiving of p_plan_task_detail table completed ========");
}
...@@ -2564,7 +2570,8 @@ public class PlanTaskServiceImpl implements IPlanTaskService {
BeanUtils.copyProperties(e, d);
return d;
}).collect(Collectors.toList());
- repositoryTs.batchSave(collect);
+ repositoryTs.batchSaveBySharding(collect);
planTaskMapper.truncateTable("p_check");
log.info("Archiving of p_check table completed ========");
}
...@@ -2577,7 +2584,8 @@ public class PlanTaskServiceImpl implements IPlanTaskService {
BeanUtils.copyProperties(e, d);
return d;
}).collect(Collectors.toList());
- repositoryTs.batchSave(collect);
+ repositoryTs.batchSaveBySharding(collect);
planTaskMapper.truncateTable("p_check_input");
log.info("Archiving of p_check_input table completed ========");
}
...@@ -2591,7 +2599,8 @@ public class PlanTaskServiceImpl implements IPlanTaskService {
return d;
}).collect(Collectors.toList());
- repositoryTs.batchSave(collect);
+ repositoryTs.batchSaveBySharding(collect);
planTaskMapper.truncateTable("p_check_shot");
log.info("Archiving of p_check_shot table completed ========");
}
......
package com.yeejoin.amos.patrol.business.service.impl;
import com.baomidou.dynamic.datasource.annotation.DS;
import com.yeejoin.amos.patrol.business.dao.repository.RepositoryTs;
import com.yeejoin.amos.patrol.config.DataSourceConfiguration;
import org.springframework.scheduling.annotation.Async;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext;
import java.util.Iterator;
...@@ -43,6 +44,28 @@ public class RepositoryTImpl implements RepositoryTs {
}
@Override
@Transactional
@Async("asyncServiceExecutor")
@DS(DataSourceConfiguration.SHARDING_DATA_SOURCE_NAME)
public <S> Iterable<S> batchSaveBySharding(Iterable<S> var1) {
Iterator<S> iterator = var1.iterator();
int index = 0;
// Persist in batches: flush and clear the persistence context every BATCH_SIZE entities
while (iterator.hasNext()) {
em.persist(iterator.next());
index++;
if (index % BATCH_SIZE == 0) {
em.flush();
em.clear();
}
}
// Flush whatever remains of the last partial batch
if (index % BATCH_SIZE != 0) {
em.flush();
em.clear();
}
return var1;
}
@Override
public <S> Iterable<S> batchUpdate(Iterable<S> var1) {
Iterator<S> iterator = var1.iterator();
int index = 0;
......
package com.yeejoin.amos.patrol.config;
import com.baomidou.dynamic.datasource.spring.boot.autoconfigure.DynamicDataSourceProperties;
import com.baomidou.dynamic.datasource.DynamicRoutingDataSource;
import com.baomidou.dynamic.datasource.creator.DefaultDataSourceCreator;
import com.baomidou.dynamic.datasource.provider.AbstractDataSourceProvider;
import com.baomidou.dynamic.datasource.provider.DynamicDataSourceProvider;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Primary;
import javax.sql.DataSource;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* @author LiuLin
* @date 2023年12月01日 09:57
*/
@Configuration
public class DataSourceConfiguration {
private final DynamicDataSourceProperties properties;
private final DefaultDataSourceCreator dataSourceCreator;
private final DataSource shardingSphereDataSource;
public static final String SHARDING_DATA_SOURCE_NAME = "gits_sharding";
public DataSourceConfiguration(DynamicDataSourceProperties properties,
DefaultDataSourceCreator dataSourceCreator,
@Qualifier("shardingSphereDataSource") DataSource shardingSphereDataSource) {
this.properties = properties;
this.dataSourceCreator = dataSourceCreator;
this.shardingSphereDataSource = shardingSphereDataSource;
}
@Bean
public DynamicDataSourceProvider dynamicDataSourceProvider() {
return new AbstractDataSourceProvider(dataSourceCreator) {
@Override
public Map<String, DataSource> loadDataSources() {
Map<String, DataSource> dataSourceMap = new HashMap<>();
dataSourceMap.put(SHARDING_DATA_SOURCE_NAME, shardingSphereDataSource);
return dataSourceMap;
}
};
}
/**
 * Register dynamic-datasource as the primary DataSource.
 * When multiple DataSource beans exist in Spring, the primary one is injected automatically.
 * With it set as the primary data source, ShardingSphere's native configuration style is still supported.
 */
@Primary
@Bean
public DataSource dataSource(List<DynamicDataSourceProvider> providers) {
DynamicRoutingDataSource dataSource = new DynamicRoutingDataSource(providers);
dataSource.setPrimary(properties.getPrimary());
dataSource.setStrict(properties.getStrict());
dataSource.setStrategy(properties.getStrategy());
dataSource.setP6spy(properties.getP6spy());
dataSource.setSeata(properties.getSeata());
return dataSource;
}
}
\ No newline at end of file
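A minimal sketch (not part of the commit) of how the data source registered by this class is selected at runtime; it mirrors the @DS usage on RepositoryTImpl elsewhere in this commit, with HistoryQueryServiceSketch being a hypothetical class name.

```java
import com.baomidou.dynamic.datasource.annotation.DS;
import com.yeejoin.amos.patrol.config.DataSourceConfiguration;
import org.springframework.stereotype.Service;

// Hypothetical service, for illustration only: a method (or class) annotated with
// @DS("gits_sharding") runs against the ShardingSphere DataSource registered above,
// while unannotated code keeps using the primary "tzs" data source.
@Service
public class HistoryQueryServiceSketch {

    @DS(DataSourceConfiguration.SHARDING_DATA_SOURCE_NAME)
    public void touchShardedTables() {
        // JDBC/JPA work executed here is routed by dynamic-datasource to the sharding
        // data source, so the p_*_history_N physical tables are addressable.
    }
}
```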
package com.yeejoin.amos.patrol.config;
import org.springframework.beans.factory.ObjectProvider;
import org.springframework.boot.actuate.autoconfigure.jdbc.DataSourceHealthContributorAutoConfiguration;
import org.springframework.boot.actuate.health.AbstractHealthIndicator;
import org.springframework.boot.actuate.jdbc.DataSourceHealthIndicator;
import org.springframework.boot.jdbc.metadata.DataSourcePoolMetadataProvider;
import org.springframework.context.annotation.Configuration;
import org.springframework.util.StringUtils;
import javax.sql.DataSource;
import java.util.Map;
@Configuration
public class DataSourceHealthConfig extends DataSourceHealthContributorAutoConfiguration {
public DataSourceHealthConfig(Map<String, DataSource> dataSources, ObjectProvider<DataSourcePoolMetadataProvider> metadataProviders) {
super(dataSources, metadataProviders);
}
@Override
protected AbstractHealthIndicator createIndicator(DataSource source) {
DataSourceHealthIndicator indicator = (DataSourceHealthIndicator) super.createIndicator(source);
if (!StringUtils.hasText(indicator.getQuery())) {
indicator.setQuery("select 1");
}
return indicator;
}
}
\ No newline at end of file
package com.yeejoin.amos.patrol.config;

import com.baomidou.dynamic.datasource.annotation.DS;
import lombok.Getter;
import lombok.Setter;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;
import org.springframework.util.Assert;

import javax.annotation.PostConstruct;
import javax.annotation.Resource;
import javax.sql.DataSource;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.HashMap;
import java.util.Map;

/**
 * Creates the p_*_history_1..num history tables at startup by copying the DDL of the corresponding origin tables.
 * @author liran
 */
@Slf4j
@Setter
@Getter
@Component
//@DS(DataSourceConfiguration.SHARDING_DATA_SOURCE_NAME)
public class TableCreate {

    @Resource
    private DataSource dataSource;

    Map<String, Object> createdTables = new HashMap<>();

    @Value("${shardingsphere.create.tables.num:10}")
    private String num;

    private String PLAN_TASK = "p_plan_task";
    private String PLAN_TASK_HISTORY = "p_plan_task_history_";
    private String PLAN_TASK_DETAIL = "p_plan_task_detail";
    private String PLAN_TASK_DETAIL_HISTORY = "p_plan_task_detail_history_";
    private String P_CHECK = "p_check";
    private String P_CHECK_HISTORY = "p_check_history_";
    private String P_CHECK_INPUT = "p_check_input";
    private String P_CHECK_INPUT_HISTORY = "p_check_input_history_";
    private String P_CHECK_SHOT = "p_check_shot";
    private String P_CHECK_SHOT_HISTORY = "p_check_shot_history_";
    private String DB = "amos_tzs_biz.";

    @PostConstruct
    public void init() {
        for (int i = 1; i <= Integer.parseInt(num); i++) {
            createNeedTime(PLAN_TASK, DB, PLAN_TASK_HISTORY + i);
        }
        for (int i = 1; i <= Integer.parseInt(num); i++) {
            createNeedTime(PLAN_TASK_DETAIL, DB, PLAN_TASK_DETAIL_HISTORY + i);
        }
        for (int i = 1; i <= Integer.parseInt(num); i++) {
            createNeedTime(P_CHECK, DB, P_CHECK_HISTORY + i);
        }
        for (int i = 1; i <= Integer.parseInt(num); i++) {
            createNeedTime(P_CHECK_INPUT, DB, P_CHECK_INPUT_HISTORY + i);
        }
        for (int i = 1; i <= Integer.parseInt(num); i++) {
            createNeedTime(P_CHECK_SHOT, DB, P_CHECK_SHOT_HISTORY + i);
        }
    }

    private void createNeedTime(String table, String db, String create) {
        DataSource dataSource = this.dataSource;
        String sql = "SHOW CREATE TABLE " + table;
        String existSql = "select * from information_schema.tables where table_name ='" + table + "'; ";
        doCreate(dataSource, sql, existSql, create, db, table);
    }

    private void doCreate(DataSource dataSource, String sql, String existSql, String create, String db, String table) {
        String msg = " create table: " + create + " origin table: " + table + " db: " + db;
        Connection conn = null;
        Statement stmt = null;
        try {
            conn = dataSource.getConnection().getMetaData().getConnection();
            stmt = conn.createStatement();
            ResultSet resultSet = stmt.executeQuery(existSql);
            Assert.isTrue(resultSet.next(), msg + "origin table does not exist");

            ResultSet resTable = stmt.executeQuery(sql);
            Assert.isTrue(resTable.next(), msg + "origin table does not exist");
            String existTableName = resTable.getString(1);
            String createSqlOrigin = resTable.getString(2);
            // log.info(existTableName, createSqlOrigin);

            String existSqlNew = StringUtils.replaceOnce(existSql, existTableName, create);
            ResultSet executeQuery = stmt.executeQuery(existSqlNew);
            if (executeQuery.next()) {
                log.info("table exist :" + msg);
            } else {
                createSqlOrigin = createSqlOrigin.substring(0, createSqlOrigin.indexOf(";"));
                String creatsql = StringUtils.replace(createSqlOrigin, existTableName, create).replaceFirst(create, DB + create).replace("bigint(64)", "int8").replace("smallint(16)", "int2");
                if (0 == stmt.executeUpdate(creatsql)) {
                    log.info(msg + "success !");
                } else {
                    log.error(msg + "fail !");
                }
            }
        } catch (Exception e) {
            log.error("create table fail error : {} ", e.getMessage());
        } finally {
            if (stmt != null) {
                try {
                    stmt.close();
                } catch (SQLException e) {
                    log.error("SQLException", e);
                }
            }
            if (conn != null) {
                try {
                    conn.close();
                } catch (SQLException e) {
                    log.error("SQLException", e);
                }
            }
        }
    }
}
- eureka.client.serviceUrl.defaultZone=http://${spring.security.user.name}:${spring.security.user.password}@172.16.10.243:10001/eureka/
+ eureka.client.serviceUrl.defaultZone=http://${spring.security.user.name}:${spring.security.user.password}@172.16.10.210:10001/eureka/
eureka.client.registry-fetch-interval-seconds=5
spring.security.user.name=admin
spring.security.user.password=a1234560
...@@ -10,6 +10,7 @@ eureka.instance.lease-expiration-duration-in-seconds=10
eureka.instance.lease-renewal-interval-in-seconds=5
eureka.instance.metadata-map.management.context-path=${server.servlet.context-path}/actuator
eureka.instance.status-page-url-path=/actuator/info
eureka.instance.ip-address=172.16.3.32
ribbon.eureka.enabled = true
ribbon.ConnectTimeout = 5000
...@@ -20,14 +21,14 @@ ribbon.MaxAutoRetries = 1
xiy_amos_satety_business
spring.reactor.debug-agent.enabled=true
#DB properties:
- spring.datasource.url=jdbc:vastbase://172.16.10.243:5432/tzs_amos_tzs_biz_init?currentSchema=amos_tzs_biz&allowMultiQueries=true
+ #spring.datasource.url=jdbc:vastbase://172.16.10.210:5432/tzs_amos_tzs_biz_init?currentSchema=amos_tzs_biz&allowMultiQueries=true
- spring.datasource.username=admin
+ #spring.datasource.username=admin
- spring.datasource.password=Yeejoin@2023
+ #spring.datasource.password=Yeejoin@2023
- spring.datasource.driver-class-name = cn.com.vastbase.Driver
+ #spring.datasource.driver-class-name = cn.com.vastbase.Driver
- spring.datasource.hikari.maxLifetime = 1765000
+ #spring.datasource.hikari.maxLifetime = 1765000
- spring.datasource.hikari.maximum-pool-size = 10
+ #spring.datasource.hikari.maximum-pool-size = 10
- spring.datasource.testWhileIdle = true
+ #spring.datasource.testWhileIdle = true
- spring.datasource.validationQuery = SELECT 1
+ #spring.datasource.validationQuery = SELECT 1
security.password=a1234560
...@@ -36,10 +37,10 @@ security.productWeb=STUDIO_APP_WEB
security.productApp=STUDIO_APP_MOBILE
security.appKey=studio_normalapp_3056965
#redis configuration
spring.redis.database=1
- spring.redis.host=172.16.10.243
+ spring.redis.host=172.16.10.210
- spring.redis.port=6379
+ spring.redis.port=16379
spring.redis.password=yeejoin@2020
spring.redis.jedis.pool.max-active=200
spring.redis.jedis.pool.max-wait=-1
...@@ -47,12 +48,12 @@ spring.redis.jedis.pool.max-idle=10
spring.redis.jedis.pool.min-idle=0
spring.redis.timeout=1000
#Inspection plan scheduled jobs
jobs.cron = 0 0 22 * * ?
jobs.cron.static = 0 0 1 * * ?
jobs.cron.bak= -
#Mail configuration
#params.mailPush = false
#spring.mail.host:
#spring.mail.username:
...@@ -65,23 +66,23 @@ jobs.cron.bak= -
#spring.mail.properties.mail.smtp.starttls.required: true
#spring.mail.properties.mail.smtp.ssl.enable:true
#jpush push configuration
params.isPush = false
#Inspection message sync switch
emq.patrol.sync.switch=true
params.work.flow.normalProcessDefinitionKey=normalHazardManagement
params.work.flow.processDefinitionKey=hazardManagement
- params.work.flow.address=http://172.16.10.243:30040
+ params.work.flow.address=http://172.16.10.210:30040
params.spc.address=http://172.16.3.89:9001
#websocket
- params.remoteWebsocketUrl=http://172.16.10.243:8080/
+ params.remoteWebsocketUrl=http://172.16.10.210:8080/
#websocket send message url
- params.remoteWebSocketSendMsgUrl=http://172.16.10.243:10601/
+ params.remoteWebSocketSendMsgUrl=http://172.16.10.210:10601/
#File upload configuration
spring.http.multipart.maxFileSize = 80480000
spring.http.multipart.MaxRequestSize = 80480000
windows.img.path = D:\\
...@@ -90,17 +91,17 @@ linux.img.path = /
## emqx
emqx.clean-session=false
emqx.client-id=${spring.application.name}-${random.int[1024,65536]}
- emqx.broker=tcp://172.16.10.243:1883
+ emqx.broker=tcp://172.16.10.210:1883
emqx.client-user-name=super
emqx.client-password=123456
emqx.keepAliveInterval=1000
- file.url=http://172.16.10.243:9000/
+ file.url=http://172.16.10.210:9000/
##Some logic in the code conflicts and needs separate handling; this switch distinguishes airport from electric-power logic: true = airport logic, false = electric-power logic
logic=false
#Whether this is a center-level system: true = center-level system, false = station-side system
is.zxj=true
## \uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\u052E\uFFFD\uFFFD\uFFFD\u03F2\uFFFDID
...@@ -110,7 +111,7 @@ fire-rescue=1432549862557130753
action.auto_create_index:true
elasticsearch.username=elastic
elasticsearch.password=a123456
- spring.elasticsearch.rest.uris=http://172.16.10.243:9200
+ spring.elasticsearch.rest.uris=http://172.16.10.210:9200
## unit(h)
alertcall.es.synchrony.time=48
...@@ -124,70 +125,86 @@ spring.jpa.properties.hibernate.jdbc.batch_versioned_data=true
spring.jpa.properties.hibernate.order_inserts=true
spring.jpa.properties.hibernate.order_updates =true
##ds tzs
#spring.datasource.dynamic.datasource.tzs.url=jdbc:vastbase://36.46.137.116:5432/tzs_amos_tzs_biz_init?currentSchema=amos_tzs_biz&allowMultiQueries=true
#spring.datasource.dynamic.datasource.tzs.username=admin
#spring.datasource.dynamic.datasource.tzs.password=Yeejoin@2023
#spring.datasource.dynamic.datasource.tzs.driver-class-name=cn.com.vastbase.Driver
#spring.datasource.dynamic.primary=tzs
spring.datasource.dynamic.primary=tzs
#ds tzs
spring.datasource.dynamic.datasource.tzs.url=jdbc:vastbase://36.46.137.116:5432/tzs_amos_tzs_biz_init?currentSchema=amos_tzs_biz&allowMultiQueries=true
spring.datasource.dynamic.datasource.tzs.username=admin
spring.datasource.dynamic.datasource.tzs.password=Yeejoin@2023
spring.datasource.dynamic.datasource.tzs.driver-class-name=cn.com.vastbase.Driver
#spring.shardingsphere.mode.type=Standalone
#spring.shardingsphere.mode.repository.type=JDBC
#Whether to print SQL in the log, default: false
spring.shardingsphere.props.sql-show= true
spring.shardingsphere.datasource.names=ds0
spring.shardingsphere.props.max-connections-size-per-query=5

shardingsphere.create.tables.num=10
# ds0
spring.shardingsphere.datasource.ds0.type=com.zaxxer.hikari.HikariDataSource
spring.shardingsphere.datasource.ds0.driver-class-name=cn.com.vastbase.Driver
spring.shardingsphere.datasource.ds0.jdbc-url=jdbc:vastbase://36.46.137.116:5432/tzs_amos_tzs_biz_init?currentSchema=amos_tzs_biz&allowMultiQueries=true
spring.shardingsphere.datasource.ds0.username=admin
spring.shardingsphere.datasource.ds0.password=Yeejoin@2023
spring.shardingsphere.datasource.ds0.idle-timeout=600000
spring.shardingsphere.datasource.ds0.connection-timeout=30000
spring.shardingsphere.datasource.ds0.validation-timeout=3000
spring.shardingsphere.datasource.ds0.max-lifetime=58880
spring.shardingsphere.datasource.ds0.minimum-idle=10
spring.shardingsphere.datasource.ds0.maximum-pool-size=50

#Data source names; separate multiple data sources with commas

#The sharding columns are configured per table here; otherwise a query by user_id could not find the corresponding table. If every table were sharded by user_id this configuration would not be needed.
spring.shardingsphere.rules.sharding.tables.p_plan_task_history.actual-data-nodes = ds0.p_plan_task_history_$->{1..${shardingsphere.create.tables.num}}
spring.shardingsphere.rules.sharding.tables.p_plan_task_detail_history.actual-data-nodes = ds0.p_plan_task_detail_history_$->{1..${shardingsphere.create.tables.num}}
spring.shardingsphere.rules.sharding.tables.p_check_history.actual-data-nodes = ds0.p_check_history_$->{1..${shardingsphere.create.tables.num}}
spring.shardingsphere.rules.sharding.tables.p_check_input_history.actual-data-nodes = ds0.p_check_input_history_$->{1..${shardingsphere.create.tables.num}}
spring.shardingsphere.rules.sharding.tables.p_check_shot_history.actual-data-nodes = ds0.p_check_shot_history_$->{1..${shardingsphere.create.tables.num}}

spring.shardingsphere.sharding.default-data.source-name=ds0
spring.main.allow-bean-definition-overriding=true

# ----- table sharding start
#Sharding column is id; each history table group is split into ${shardingsphere.create.tables.num} physical tables, so the id is taken modulo that number
spring.shardingsphere.rules.sharding.tables.p_plan_task_history.table-strategy.standard.sharding-column=id
spring.shardingsphere.rules.sharding.tables.p_plan_task_history.table-strategy.standard.sharding-algorithm-name=task-inline

spring.shardingsphere.rules.sharding.tables.p_plan_task_detail_history.table-strategy.standard.sharding-column=id
spring.shardingsphere.rules.sharding.tables.p_plan_task_detail_history.table-strategy.standard.sharding-algorithm-name=task-detail-inline

spring.shardingsphere.rules.sharding.tables.p_check_history.table-strategy.standard.sharding-column=id
spring.shardingsphere.rules.sharding.tables.p_check_history.table-strategy.standard.sharding-algorithm-name=check-inline

spring.shardingsphere.rules.sharding.tables.p_check_input_history.table-strategy.standard.sharding-column=id
spring.shardingsphere.rules.sharding.tables.p_check_input_history.table-strategy.standard.sharding-algorithm-name=check-input-inline

spring.shardingsphere.rules.sharding.tables.p_check_shot_history.table-strategy.standard.sharding-column=id
spring.shardingsphere.rules.sharding.tables.p_check_shot_history.table-strategy.standard.sharding-algorithm-name=check-shot-inline

#Inline expression sharding algorithms
spring.shardingsphere.rules.sharding.sharding-algorithms.task-inline.type=INLINE
spring.shardingsphere.rules.sharding.sharding-algorithms.task-inline.props.algorithm-expression=p_plan_task_history_$->{id % ${shardingsphere.create.tables.num} + 1}

spring.shardingsphere.rules.sharding.sharding-algorithms.task-detail-inline.type=INLINE
spring.shardingsphere.rules.sharding.sharding-algorithms.task-detail-inline.props.algorithm-expression=p_plan_task_detail_history_$->{id % ${shardingsphere.create.tables.num} + 1}

spring.shardingsphere.rules.sharding.sharding-algorithms.check-inline.type=INLINE
spring.shardingsphere.rules.sharding.sharding-algorithms.check-inline.props.algorithm-expression=p_check_history_$->{id % ${shardingsphere.create.tables.num} + 1}

spring.shardingsphere.rules.sharding.sharding-algorithms.check-input-inline.type=INLINE
spring.shardingsphere.rules.sharding.sharding-algorithms.check-input-inline.props.algorithm-expression=p_check_input_history_$->{id % ${shardingsphere.create.tables.num} + 1}

spring.shardingsphere.rules.sharding.sharding-algorithms.check-shot-inline.type=INLINE
spring.shardingsphere.rules.sharding.sharding-algorithms.check-shot-inline.props.algorithm-expression=p_check_shot_history_$->{id % ${shardingsphere.create.tables.num} + 1}
# ----- table sharding end

logging.level.com.baomidou=debug
\ No newline at end of file
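The inline sharding expressions above map a row to a physical table via `id % num + 1`. A small, purely illustrative Java helper (not part of the commit) makes the arithmetic concrete:

```java
// Illustration only: reproduces the Groovy inline expression
// p_plan_task_history_$->{id % num + 1} in plain Java.
public final class ShardTableResolver {

    private ShardTableResolver() {
    }

    public static String resolve(String logicTablePrefix, long id, int tableCount) {
        // With shardingsphere.create.tables.num=10, ids map to suffixes 1..10.
        long suffix = id % tableCount + 1;
        return logicTablePrefix + suffix;
    }

    public static void main(String[] args) {
        // Prints "p_plan_task_history_8" for id 1017 and 10 tables (1017 % 10 + 1 = 8).
        System.out.println(resolve("p_plan_task_history_", 1017L, 10));
    }
}
```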
...@@ -1711,6 +1711,9 @@
no_risk_end = no_risk_end + #{noRiskEnd}
where org_code = #{orgCode} AND check_time = #{checkTime}
</update>
<update id="truncateTable">
TRUNCATE ${tableName};
</update>
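A side note on the statement above: MyBatis cannot bind a table name as a #{...} prepared-statement parameter, which is why the new truncateTable statement interpolates ${tableName}; the service code in this commit only ever passes the five fixed archive tables. A hypothetical guard (illustration only, not part of the commit) that keeps that contract explicit could look like this:

```java
import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

// Hypothetical helper: restricts ${tableName} interpolation to the fixed tables
// that the archival job in this commit actually truncates.
public class TruncateGuardSketch {

    private static final Set<String> ARCHIVABLE_TABLES = new HashSet<>(Arrays.asList(
            "p_plan_task", "p_plan_task_detail", "p_check", "p_check_input", "p_check_shot"));

    private final PlanTaskMapper planTaskMapper;

    public TruncateGuardSketch(PlanTaskMapper planTaskMapper) {
        this.planTaskMapper = planTaskMapper;
    }

    public void truncate(String tableName) {
        if (!ARCHIVABLE_TABLES.contains(tableName)) {
            throw new IllegalArgumentException("Refusing to truncate unexpected table: " + tableName);
        }
        planTaskMapper.truncateTable(tableName);
    }
}
```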
<select id="selectPlanTaskIdList" resultType="java.lang.String">
SELECT
......