Commit 1e5178ba authored by wujiang

Merge branch 'developer' of http://36.40.66.175:5000/moa/jxdj_zx/amos-boot-zx-biz into developer

# Conflicts:
#	amos-boot-system-jxiop/amos-boot-module-jxiop-analyse-biz/src/main/java/com/yeejoin/amos/boot/module/jxiop/biz/service/impl/TdengineTimeServiceImpl.java
parents bd0b5024 c47d6e3d
@@ -8,6 +8,7 @@ import java.text.SimpleDateFormat;
 import java.time.Duration;
 import java.time.LocalDate;
 import java.time.LocalDateTime;
+import java.time.ZoneId;
 import java.time.format.DateTimeFormatter;
 import java.util.*;
@@ -664,13 +665,6 @@ public class DateUtils {
 /*System.out.println(dateFormat(maxDateOfMonth(dateParse("2016-02", "yyyy-MM")), null));
 System.out.println(dateFormat(minDateOfMonth(dateParse("2016-03-31", null)), null));*/
-// System.out.println(dateFormat(new Date(), CHN_DATE_PATTERN_YEAR));
-// System.out.println(dateFormat(new Date(), CHN_DATE_PATTERN_MONTH));
-// System.out.println(getWeekOfYear(new Date()));
-// System.out.println(getQuarterStr(getMonth(dateParse("2021-5-11", null))));
-// System.out.println(getWeekBeginDate(dateParse("2021-10-11", null)));
-// System.out.println(getWeekEndDate(dateParse("2021-10-11", null)));
-System.out.println(secondsToTimeStr(3600));
 List<String> beforeCurrentMonth = getBeforeCurrentMonth(3, true);
 System.out.println(beforeCurrentMonth);
@@ -688,6 +682,53 @@ public class DateUtils {
 return name;
 }
+// Round the given time up to the next whole 10-minute mark, e.g. 17:18 -> 17:20
+public static String getNextWholeMinute(String currentTimeString) {
+    // Pattern of the incoming date-time string
+    DateTimeFormatter formatter = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss");
+    // Parse the string into a LocalDateTime
+    LocalDateTime currentTime = LocalDateTime.parse(currentTimeString, formatter);
+    int currentMinute = currentTime.getMinute();
+    int seconds = currentTime.getSecond();
+    int nanos = currentTime.getNano();
+    // Already on a whole 10-minute mark: return the input unchanged
+    if (currentMinute % 10 == 0 && seconds == 0 && nanos == 0) {
+        return currentTimeString;
+    }
+    // Otherwise round up to the next 10-minute mark
+    LocalDateTime localDateTime = currentTime.withSecond(0).withNano(0).plusMinutes(10 - currentMinute % 10);
+    return localDateTime.format(formatter);
+}
+// Round the given time down to the previous whole 10-minute mark, e.g. 17:18 -> 17:10
+public static String getBeforeWholeMinute(String currentTimeString) {
+    // Pattern of the incoming date-time string
+    DateTimeFormatter formatter = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss");
+    // Parse the string into a LocalDateTime
+    LocalDateTime currentTime = LocalDateTime.parse(currentTimeString, formatter);
+    int currentMinute = currentTime.getMinute();
+    int seconds = currentTime.getSecond();
+    int nanos = currentTime.getNano();
+    // Already on a whole 10-minute mark: return the input unchanged
+    if (currentMinute % 10 == 0 && seconds == 0 && nanos == 0) {
+        return currentTimeString;
+    }
+    // Otherwise round down to the previous 10-minute mark
+    LocalDateTime localDateTime = currentTime.withSecond(0).withNano(0).minusMinutes(currentMinute % 10);
+    return localDateTime.format(formatter);
+}
 /**
  * Get the date List of a given month
  *
...
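Note: the two helpers added above snap a "yyyy-MM-dd HH:mm:ss" timestamp to a whole 10-minute mark (up for getNextWholeMinute, down for getBeforeWholeMinute); a timestamp already on a mark is returned unchanged. A minimal usage sketch, assuming this module's DateUtils is on the classpath (the demo class name is hypothetical):

import com.yeejoin.amos.boot.biz.common.utils.DateUtils;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;

public class WholeMinuteDemo {
    public static void main(String[] args) {
        DateTimeFormatter f = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss");
        String t = LocalDateTime.of(2024, 1, 1, 17, 18, 30).format(f);
        // Round up and down to the nearest whole 10-minute mark
        System.out.println(DateUtils.getNextWholeMinute(t));   // 2024-01-01 17:20:00
        System.out.println(DateUtils.getBeforeWholeMinute(t)); // 2024-01-01 17:10:00
    }
}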
@@ -61,6 +61,7 @@ import org.springframework.stereotype.Component;
 import org.springframework.util.CollectionUtils;
 import org.springframework.util.ObjectUtils;
+import javax.annotation.PostConstruct;
 import javax.annotation.Resource;
 import java.text.SimpleDateFormat;
 import java.time.LocalDate;
@@ -176,6 +177,7 @@ public class TanYinDataAcquisitionServiceImpl implements TanYinDataAcquisitionSe
 */
 @Scheduled (cron = "${dataRequestScheduled.tanYin}")
 @Override
 public void customerInfoList() {
 try {
 String startDate = LocalDate.now().minusMonths(1).format(DateTimeFormatter.ofPattern("yyyy-MM-dd"));
@@ -221,6 +223,7 @@ public class TanYinDataAcquisitionServiceImpl implements TanYinDataAcquisitionSe
 @Scheduled (cron = "${dataRequestScheduled.tanYin}")
 @Async
 @Override
+@PostConstruct
 public void stationList() {
 long ts = System.currentTimeMillis();
 log.info("-------碳银同步电站开始: {} ------- ", ts);
...
 package com.yeejoin.amos.boot.module.jxiop.biz.controller;
+import cn.hutool.core.date.DateUtil;
 import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
+import com.yeejoin.amos.boot.biz.common.utils.DateUtils;
 import com.yeejoin.amos.boot.biz.common.utils.RedisUtils;
 import com.yeejoin.amos.boot.module.jxiop.biz.Enum.SmartAnalyseEnum;
 import com.yeejoin.amos.boot.module.jxiop.biz.dto.FullViewRecallDataDTO;
@@ -172,7 +174,11 @@ public class KafkaAnalyseController {
 List<String> addressInfo = idxBizFanHealthIndexMapper.getAddressInfo();
 String join = String.join(",", addressInfo);
-List<IndicatorData> indicatorData = indicatorDataMapper.selectByAddresses(join, "1668801435891929089");
+String startTime = DateUtils.convertDateToString(DateUtil.offsetDay(new Date(), -1),
+        DateUtils.DATE_TIME_PATTERN);
+String endTime = DateUtils.convertDateToString(DateUtils.getCurrentDayEndTime(new Date()),
+        DateUtils.DATE_TIME_PATTERN);
+List<IndicatorData> indicatorData = indicatorDataMapper.selectByAddresses(join, "1668801435891929089", startTime, endTime);
 return ResponseHelper.buildResponse(indicatorData);
 }
...
@@ -47,4 +47,7 @@ public class FanHealthIndexDto implements Serializable {
 private String orgCode;
 private List<String> gatewayIds;
 private String warningPeriod;
+private String tableName;
+private Long startDateTs;
+private Long endDateTs;
 }
@@ -46,5 +46,8 @@ public class PvHealthIndexDto {
 private String sortOne;
 private String sortsString;
 private String orgCode;
+private String tableName;
+private Long startDateTs;
+private Long endDateTs;
 private List<String> gatewayIds;
 }
@@ -43,7 +43,7 @@ import static com.yeejoin.amos.boot.module.jxiop.biz.kafka.Constant.*;
  * @create 2022/11/1 10:06
  **/
 @Slf4j
-//@Service
+@Service
 public class KafkaConsumerService {
 @Autowired
@@ -478,9 +478,13 @@ public class KafkaConsumerService {
 private void buildZXZExecData(List<ConsumerRecord<String, String>> consumerRecords,
 Map<String, Set<String>> gatewayPoints,
 Map<String, List<IdxBizFanPointProcessVariableClassification>> zxzIds, String xgxPvConsumer) {
+String startTime = DateUtils.convertDateToString(DateUtil.offsetDay(new Date(), -1),
+        DateUtils.DATE_TIME_PATTERN);
+String endTime = DateUtils.convertDateToString(DateUtils.getCurrentDayEndTime(new Date()),
+        DateUtils.DATE_TIME_PATTERN);
 for (String gatewayId : gatewayPoints.keySet()) {
 String join = String.join(",", gatewayPoints.get(gatewayId));
-List<IndicatorData> indicatorData = indicatorDataMapper.selectByAddresses(join, gatewayId);
+List<IndicatorData> indicatorData = indicatorDataMapper.selectByAddresses(join, gatewayId, startTime, endTime);
 JsonReadOptions options = JsonReadOptions.builderFromString(JSON.toJSONString(indicatorData))
 .columnTypes(new Function<String, ColumnType>() {
@@ -499,9 +503,13 @@ public class KafkaConsumerService {
 private void buildExecData(List<ConsumerRecord<String, String>> consumerRecords,
 Map<String, Set<String>> gatewayPoints, String xgxPvConsumer) {
+String startTime = DateUtils.convertDateToString(DateUtil.offsetDay(new Date(), -1),
+        DateUtils.DATE_TIME_PATTERN);
+String endTime = DateUtils.convertDateToString(DateUtils.getCurrentDayEndTime(new Date()),
+        DateUtils.DATE_TIME_PATTERN);
 for (String gatewayId : gatewayPoints.keySet()) {
 String join = String.join(",", gatewayPoints.get(gatewayId));
-List<IndicatorData> indicatorData = indicatorDataMapper.selectByAddresses(join, gatewayId);
+List<IndicatorData> indicatorData = indicatorDataMapper.selectByAddresses(join, gatewayId, startTime, endTime);
 JsonReadOptions options = JsonReadOptions.builderFromString(JSON.toJSONString(indicatorData))
 .columnTypes(new Function<String, ColumnType>() {
@@ -816,9 +824,13 @@ public class KafkaConsumerService {
 private void buildZXZPvExecData(List<ConsumerRecord<String, String>> consumerRecords,
 Map<String, Set<String>> gatewayPoints,
 Map<String, List<IdxBizPvPointProcessVariableClassification>> zxzIds, String xgxPvConsumer) {
+String startTime = DateUtils.convertDateToString(DateUtil.offsetDay(new Date(), -1),
+        DateUtils.DATE_TIME_PATTERN);
+String endTime = DateUtils.convertDateToString(DateUtils.getCurrentDayEndTime(new Date()),
+        DateUtils.DATE_TIME_PATTERN);
 for (String gatewayId : gatewayPoints.keySet()) {
 String join = String.join(",", gatewayPoints.get(gatewayId));
-List<IndicatorData> indicatorData = indicatorDataMapper.selectByAddresses(join, gatewayId);
+List<IndicatorData> indicatorData = indicatorDataMapper.selectByAddresses(join, gatewayId, startTime, endTime);
 JsonReadOptions options = JsonReadOptions.builderFromString(JSON.toJSONString(indicatorData))
 .columnTypes(new Function<String, ColumnType>() {
...
@@ -2999,6 +2999,7 @@ public class CommonServiceImpl {
 if (500 == newList.size() || i == pvHealthIndices.size() - 1) { // the buffer list reached the batch size (or this is the last element): run the batch operation
 // batch insert
 pvHealthIndexMapper.saveBatchHealthIndexList(newList, "pv_health_index_moment", analysisType);
+// pvHealthIndexMapper.saveBatchHealthIndexListNew(newList, "pv_health_index_moment_new", analysisType);
 idxFanHealthIndexMapper.saveBatchHealthIndexLatestInfoPv(newList);
 newList.clear(); // clear the buffer list after each batch so it can take the next chunk of data
 }
...
@@ -112,7 +112,7 @@ public class IdxBizFanHealthIndexServiceImpl extends BaseService<IdxBizFanHealth
 endTime = endTime.concat(" 23:59:59");
 } else if (startTime.length() == 13) {
 startTime = startTime.concat(":00:00");
-endTime = endTime.concat(":59:59");
+endTime = endTime.concat(":50:59");
 } else if (startTime.length() == 16) {
 startTime = startTime.concat(":00");
 endTime = endTime.concat(":59");
...
@@ -89,7 +89,7 @@ public interface FanHealthIndexDayMapper extends BaseMapper<FanHealthIndexDay> {
 );
 @Select("<script>"+
-"SELECT distinct d.`health_index` AS healthIndex,d.rec_date as recDate, d.`health_index` AS `value`, d.anomaly, substr(d.analysis_time,1,10) as analysisTime,d.station,d.equipment_name AS equipmentName, d.point_name as pointName, ( CASE d.HEALTH_LEVEL WHEN '危险' THEN 3 WHEN '警告' THEN 2 WHEN '注意' THEN 1 ELSE 0 END ) AS status" +
+"SELECT distinct d.`health_index` AS healthIndex,d.ts,d.rec_date as recDate, d.`health_index` AS `value`, d.anomaly, substr(d.analysis_time,1,10) as analysisTime,d.station,d.equipment_name AS equipmentName, d.point_name as pointName, ( CASE d.HEALTH_LEVEL WHEN '危险' THEN 3 WHEN '警告' THEN 2 WHEN '注意' THEN 1 ELSE 0 END ) AS status" +
 " FROM analysis_data.fan_health_index_day d" +
 " INNER JOIN (SELECT last(ts) as maxTs " +
 "<if test='area!= null '> ,area </if> " +
...
@@ -8,6 +8,7 @@ import com.yeejoin.amos.boot.module.jxiop.biz.tdengine.FanHealthIndexDay;
 import org.apache.ibatis.annotations.Param;
 import org.apache.ibatis.annotations.Select;
+import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
@@ -107,4 +108,5 @@ public interface FanHealthIndexMapper extends BaseMapper<FanHealthIndex> {
 @Param("tableName") String tableName,
 @Param("analysisObjectType") String analysisObjectType);
+int saveBatchHealthIndexListNew(@Param("list") List<FanHealthIndex> list, @Param("tableName") String tableName, @Param("analysisType") String analysisType);
 }
@@ -111,7 +111,7 @@ public interface PvHealthIndexDayMapper extends BaseMapper<PvHealthIndexDay> {
 @Select("<script>"+
-"SELECT distinct d.`health_index` AS healthIndex,d.rec_date as recDate, d.`health_index` AS `value`, d.anomaly, substr(d.analysis_time,1,10) as analysisTime,d.station,d.equipment_name AS equipmentName, d.point_name as pointName, d.index_address as indexAddress, ( CASE d.HEALTH_LEVEL WHEN '危险' THEN 3 WHEN '警告' THEN 2 WHEN '注意' THEN 1 ELSE 0 END ) AS status" +
+"SELECT distinct d.`health_index` AS healthIndex,d.ts,d.rec_date as recDate, d.`health_index` AS `value`, d.anomaly, substr(d.analysis_time,1,10) as analysisTime,d.station,d.equipment_name AS equipmentName, d.point_name as pointName, d.index_address as indexAddress, ( CASE d.HEALTH_LEVEL WHEN '危险' THEN 3 WHEN '警告' THEN 2 WHEN '注意' THEN 1 ELSE 0 END ) AS status" +
 " FROM analysis_data.pv_health_index_day d" +
 " INNER JOIN (SELECT last(ts) as maxTs " +
 "<if test='area!= null '> ,area </if> " +
...
@@ -82,7 +82,7 @@ public interface PvHealthIndexHourMapper extends BaseMapper<PvHealthIndexHour> {
 @Select("<script>"+
-"SELECT distinct d.`health_index` AS healthIndex,d.rec_date as recDate, d.`health_index` AS `value`, d.anomaly, substr(d.analysis_time,1,10) as analysisTime,d.station,d.equipment_name AS equipmentName, d.point_name as pointName, d.index_address as indexAddress, ( CASE d.HEALTH_LEVEL WHEN '危险' THEN 3 WHEN '警告' THEN 2 WHEN '注意' THEN 1 ELSE 0 END ) AS status" +
+"SELECT distinct d.`health_index` AS healthIndex,d.ts,d.rec_date as recDate, d.`health_index` AS `value`, d.anomaly, substr(d.analysis_time,1,10) as analysisTime,d.station,d.equipment_name AS equipmentName, d.point_name as pointName, d.index_address as indexAddress, ( CASE d.HEALTH_LEVEL WHEN '危险' THEN 3 WHEN '警告' THEN 2 WHEN '注意' THEN 1 ELSE 0 END ) AS status" +
 " FROM analysis_data.pv_health_index_hour d" +
 " INNER JOIN (SELECT last(ts) as maxTs " +
 "<if test='area!= null '> ,area </if> " +
...
@@ -30,6 +30,7 @@ public interface PvHealthIndexMapper extends BaseMapper<PvHealthIndex> {
 List<PvHealthIndex> selectData (@Param("healthLevel")String healthLevel, @Param("area")String area, @Param("equipmentName")String equipmentName, @Param("subarray")String subarray, @Param("analysisType")String analysisType, @Param("analysisObjType")String analysisObjType, @Param("station")String station, @Param("pointName")String pointName, @Param("indexAddress")String indexAddress, @Param("startTimeTop") String startTimeTop, @Param("endTimeTop")String endTimeTop);
 int saveBatchHealthIndexList(@Param("list") List<PvHealthIndex> list, @Param("tableName") String tableName, @Param("analysisType") String analysisType);
+int saveBatchHealthIndexListNew(@Param("list") List<PvHealthIndex> list, @Param("tableName") String tableName, @Param("analysisType") String analysisType);
 // int saveBatchHealthIndexLatestInfo(@Param("list") List<PvHealthIndex> list, @Param("tableName") String tableName);
@@ -61,6 +62,8 @@ public interface PvHealthIndexMapper extends BaseMapper<PvHealthIndex> {
 List<PvHealthIndex> getInfoByPage(@Param("dto") PvHealthIndexDto dto);
+long getTsByRecDate(@Param("tableName")String tableName,@Param("recDate")String recDate,@Param("sort")String sort);
 Integer getInfoByPageTotal(@Param("dto") PvHealthIndexDto dto);
 List<PvHealthIndex> getInfoList(@Param("startTime") String startTime,
...
@@ -79,7 +79,7 @@ public interface PvHealthIndexMomentMapper extends BaseMapper<PvHealthIndexMomen
 @Param("orgCode") String orgCode);
 @Select("<script>"+
-"SELECT distinct d.`health_index` AS healthIndex,d.rec_date as recDate, d.`health_index` AS `value`, d.anomaly, substr(d.analysis_time,1,10) as analysisTime,d.station,d.equipment_name AS equipmentName, d.point_name as pointName, d.index_address as indexAddress, ( CASE d.HEALTH_LEVEL WHEN '危险' THEN 3 WHEN '警告' THEN 2 WHEN '注意' THEN 1 ELSE 0 END ) AS status" +
+"SELECT distinct d.`health_index` AS healthIndex,d.ts,d.rec_date as recDate, d.`health_index` AS `value`, d.anomaly, substr(d.analysis_time,1,10) as analysisTime,d.station,d.equipment_name AS equipmentName, d.point_name as pointName, d.index_address as indexAddress, ( CASE d.HEALTH_LEVEL WHEN '危险' THEN 3 WHEN '警告' THEN 2 WHEN '注意' THEN 1 ELSE 0 END ) AS status" +
 " FROM analysis_data.pv_health_index_moment d" +
 " INNER JOIN (SELECT last(ts) as maxTs " +
 "<if test='area!= null '> ,area </if> " +
...
@@ -34,8 +34,8 @@ public interface IndicatorDataMapper extends BaseMapper<IndicatorData> {
 List<IndicatorData> selectDataById (@Param("id")String id);
-@Select("select `id`, `value` from iot_data.indicator_data where `address` in (${addresses}) and gateway_id = #{gatewayId}")
-List<IndicatorData> selectByAddresses(@Param("addresses") String addresses, @Param("gatewayId") String gatewayId);
+@Select("select `id`, `value` from iot_data.indicator_data where `address` in (${addresses}) and gateway_id = #{gatewayId} and ts >= #{startTime} and ts <= #{endTime}")
+List<IndicatorData> selectByAddresses(@Param("addresses") String addresses, @Param("gatewayId") String gatewayId, @Param("startTime") String startTime, @Param("endTime") String endTime);
 /**
  * Query measurement point values by point name
...
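Note: selectByAddresses now requires an explicit ts window, so callers must bound the scan themselves. A minimal sketch of the call pattern this commit uses in KafkaAnalyseController and KafkaConsumerService (the wrapper class and method names are hypothetical; imports for this module's IndicatorDataMapper and IndicatorData are omitted):

import cn.hutool.core.date.DateUtil;
import com.yeejoin.amos.boot.biz.common.utils.DateUtils;
import java.util.Date;
import java.util.List;

public class BoundedIndicatorQueryExample {
    // Restricts the iot_data.indicator_data scan to roughly the last day instead of the whole table.
    static List<IndicatorData> queryLastDay(IndicatorDataMapper mapper, String addresses, String gatewayId) {
        String startTime = DateUtils.convertDateToString(DateUtil.offsetDay(new Date(), -1),
                DateUtils.DATE_TIME_PATTERN);
        String endTime = DateUtils.convertDateToString(DateUtils.getCurrentDayEndTime(new Date()),
                DateUtils.DATE_TIME_PATTERN);
        return mapper.selectByAddresses(addresses, gatewayId, startTime, endTime);
    }
}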
@@ -26,14 +26,15 @@ spring.db6.datasource.password=Yeejoin@2020
 spring.db6.datasource.driver-class-name=com.kingbase8.Driver
 ## eureka properties:
-eureka.instance.hostname=10.20.1.160
+eureka.instance.hostname=47.92.234.253
 eureka.client.serviceUrl.defaultZone=http://admin:a1234560@${eureka.instance.hostname}:10001/eureka/
 ## redis properties:
 spring.redis.database=1
-spring.redis.host=10.20.0.169
+spring.redis.host=10.20.1.210
 spring.redis.port=6379
 spring.redis.password=yeejoin@2020
+openHealth=false
 spring.cache.type=GENERIC
 j2cache.open-spring-cache=true
 j2cache.cache-clean-mode=passive
@@ -78,13 +79,13 @@ emqx.client-password=public
 tdengine-server:
 driver-class-name: com.taosdata.jdbc.rs.RestfulDriver
-jdbc-url: jdbc:TAOS-RS://10.20.0.169:6041/iot_data_1?user=root&password=taosdata&timezone=GMT%2b8&allowMultiQueries=true
+jdbc-url: jdbc:TAOS-RS://10.20.0.203:6041/iot_data_1?user=root&password=taosdata&timezone=GMT%2b8&allowMultiQueries=true
 username: root
 password: taosdata
 #spring.db3.datasource.type: com.alibaba.druid.pool.DruidDataSource
-spring.db3.datasource.url=jdbc:TAOS-RS://10.20.0.169:6041/iot_data?user=root&password=taosdata&timezone=GMT%2b8&allowMultiQueries=true
+spring.db3.datasource.url=jdbc:TAOS-RS://10.20.0.203:6041/iot_data?user=root&password=taosdata&timezone=GMT%2b8&allowMultiQueries=true
 spring.db3.datasource.username=root
 spring.db3.datasource.password=taosdata
 spring.db3.datasource.driver-class-name: com.taosdata.jdbc.rs.RestfulDriver
@@ -92,7 +93,7 @@ spring.db3.datasource.driver-class-name: com.taosdata.jdbc.rs.RestfulDriver
 # TDengine database dedicated to analysis results: analysis_data
 #spring.db4.datasource.type: com.alibaba.druid.pool.DruidDataSource
-spring.db4.datasource.url=jdbc:TAOS-RS://10.20.0.169:6041/analysis_data?user=root&password=taosdata&timezone=GMT%2b8&allowMultiQueries=true
+spring.db4.datasource.url=jdbc:TAOS-RS://10.20.0.203:6041/analysis_data?user=root&password=taosdata&timezone=GMT%2b8&allowMultiQueries=true
 spring.db4.datasource.username=root
 spring.db4.datasource.password=taosdata
 spring.db4.datasource.driver-class-name: com.taosdata.jdbc.rs.RestfulDriver
@@ -149,7 +150,7 @@ amos.secret.key=qaz
 #eureka.instance.ip-address=172.16.3.122
-spring.activemq.broker-url=tcp://10.20.0.169:61616
+spring.activemq.broker-url=tcp://10.20.1.210:61616
 spring.activemq.user=admin
 spring.activemq.password=admin
 spring.jms.pub-sub-domain=false
@@ -165,16 +166,16 @@ pictureUrl=upload/jxiop/syz/
 #kafka
-spring.kafka.bootstrap-servers=10.20.0.169:9092
+spring.kafka.bootstrap-servers=10.20.0.223:9092,10.20.0.133:9200
 spring.kafka.producer.retries=1
-spring.kafka.producer.bootstrap-servers=10.20.0.169:9092
+spring.kafka.producer.bootstrap-servers=10.20.0.223:9092,10.20.0.133:9200
 spring.kafka.producer.batch-size=16384
 spring.kafka.producer.buffer-memory=33554432
 spring.kafka.producer.acks=1
 spring.kafka.producer.key-serializer=org.apache.kafka.common.serialization.StringSerializer
 spring.kafka.producer.value-serializer=org.apache.kafka.common.serialization.StringSerializer
 spring.kafka.consumer.group-id=consumerGroup
-spring.kafka.consumer.bootstrap-servers=10.20.0.169:9092
+spring.kafka.consumer.bootstrap-servers=10.20.0.223:9092,10.20.0.133:9200
 spring.kafka.consumer.enable-auto-commit=false
 spring.kafka.consumer.auto-offset-reset=earliest
 spring.kafka.consumer.key-deserializer=org.apache.kafka.common.serialization.StringDeserializer
@@ -182,21 +183,22 @@ spring.kafka.consumer.value-deserializer=org.apache.kafka.common.serialization.S
 spring.kafka.listener.ack-mode=manual_immediate
 spring.kafka.listener.type=batch
 # records per poll && thread count
-spring.kafka.consumer.max-poll-records=30
+spring.kafka.consumer.max-poll-records=50
-#spring.kafka.consumer.fetch-max-wait= 10000
+spring.kafka.consumer.fetch-max-wait= 10000
 # how many months to look back from the current time (history offset in months)
 last.month.num = 12
 # correlation analysis endpoint
-base.url.XGX=http://139.9.171.247:8052/intelligent-analysis/correlation
+base.url.XGX=http://10.20.1.29:8052/intelligent-analysis/correlation
 # working-condition-division endpoint
-base.url.GKHF=http://139.9.171.247:8052/intelligent-analysis/working-condition-division
+base.url.GKHF=http://10.20.1.29:8052/intelligent-analysis/working-condition-division
 # central-value endpoint
-base.url.ZXZ=http://139.9.171.247:8052/intelligent-analysis/central-value
+base.url.ZXZ=http://10.20.1.29:8052/intelligent-analysis/central-value
 # index-analysis endpoint
-base.url.zsfx:http://139.9.171.247:8052/intelligent-analysis/index-analysis
+base.url.zsfx:http://10.20.1.29:8052/intelligent-analysis/index-analysis
 forecast.url=
-logic=
\ No newline at end of file
+analyse.cycle.offset=5
+logic=false
\ No newline at end of file
@@ -2,7 +2,7 @@ spring.application.name=AMOS-JXIOP-ANALYSE-CZ
 server.servlet.context-path=/jxiop-analyse
 server.port=33400
 server.uri-encoding=UTF-8
-spring.profiles.active=dev1
+spring.profiles.active=kingbase8
 spring.jackson.time-zone=GMT+8
 spring.jackson.date-format=yyyy-MM-dd HH:mm:ss
 logging.config=classpath:logback-${spring.profiles.active}.xml
@@ -75,7 +75,7 @@ station.section=10
 gl.sum.column=日发电量,月发电量,年发电量
 gl.avg.column=有功功率,日利用小时,瞬时风速
-spring.elasticsearch.rest.uris=http://10.20.0.169:9200
+spring.elasticsearch.rest.uris=http://10.20.0.223:9200
 spring.elasticsearch.rest.connection-timeout=30000
 spring.elasticsearch.rest.username=elastic
 spring.elasticsearch.rest.password=Yeejoin@2020
...
@@ -171,8 +171,8 @@
 <where>
 <if test="dto.analysisObjType!= null and dto.analysisObjType!= ''">analysis_obj_type = #{dto.analysisObjType}</if>
 <if test="dto.analysisType!= null and dto.analysisType!= ''">and analysis_type = #{dto.analysisType}</if>
-<if test="dto.endDate!= null and dto.endDate!= '' "> and rec_date &lt;= #{dto.endDate} </if>
-<if test="dto.startDate!= null and dto.startDate!= ''"> and rec_date &gt;= #{dto.startDate} </if>
+<if test="dto.endDate!= null and dto.endDate!= '' "> and ts &lt;= #{dto.endDate} </if>
+<if test="dto.startDate!= null and dto.startDate!= ''"> and ts &gt;= #{dto.startDate} </if>
 <if test="dto.area!= null and dto.area!= ''"> AND area = #{dto.area} </if>
 <if test="dto.number!= null and dto.number!= ''"> AND `number` = #{dto.number} </if>
 <if test="dto.pointName!= null and dto.pointName!= ''">AND point_name = #{dto.pointName} </if>
@@ -200,8 +200,8 @@
 <where>
 <if test="dto.analysisObjType!= null and dto.analysisObjType!= ''">analysis_obj_type = #{dto.analysisObjType}</if>
 <if test="dto.analysisType!= null and dto.analysisType!= ''">and analysis_type = #{dto.analysisType}</if>
-<if test="dto.endDate!= null and dto.endDate!= '' "> and rec_date &lt;= #{dto.endDate} </if>
-<if test="dto.startDate!= null and dto.startDate!= ''"> and rec_date &gt;= #{dto.startDate} </if>
+<if test="dto.endDate!= null and dto.endDate!= '' "> and ts &lt;= #{dto.endDate} </if>
+<if test="dto.startDate!= null and dto.startDate!= ''"> and ts &gt;= #{dto.startDate} </if>
 <if test="dto.area!= null and dto.area!= ''"> AND area = #{dto.area} </if>
 <if test="dto.number!= null and dto.number!= ''"> AND `number` = #{dto.number} </if>
 <if test="dto.pointName!= null and dto.pointName!= ''">AND point_name = #{dto.pointName} </if>
@@ -554,4 +554,36 @@
 analysis_obj_type = #{analysisObjectType}
 and ts > #{startTime}
 </select>
+<insert id="saveBatchHealthIndexListNew" >
+insert into
+${tableName}
+using fan_health_index_data_new TAGS (#{analysisType})
+values
+<foreach collection="list" separator="," item="item" index="index">
+(
+now,
+#{item.recDate, jdbcType=VARCHAR},
+#{item.analysisObjType, jdbcType=VARCHAR},
+#{item.analysisObjSeq, jdbcType=VARCHAR},
+#{item.weight, jdbcType=FLOAT},
+#{item.healthIndex, jdbcType=FLOAT},
+#{item.healthLevel, jdbcType=VARCHAR},
+#{item.analysisStartTime, jdbcType=VARCHAR},
+#{item.analysisEndTime, jdbcType=VARCHAR},
+#{item.area, jdbcType=VARCHAR},
+#{item.station, jdbcType=VARCHAR},
+#{item.subSystem, jdbcType=VARCHAR},
+#{item.number, jdbcType=VARCHAR},
+#{item.equipmentName, jdbcType=VARCHAR},
+#{item.gatewayId, jdbcType=VARCHAR},
+#{item.indexAddress, jdbcType=VARCHAR},
+#{item.anomaly, jdbcType=FLOAT},
+#{item.pointName, jdbcType=VARCHAR},
+#{item.analysisTime, jdbcType=VARCHAR},
+#{item.kks, jdbcType=VARCHAR},
+#{item.orgCode, jdbcType=VARCHAR}
+)
+</foreach>
+</insert>
 </mapper>
@@ -199,12 +199,12 @@
 </select>
 <select id="getInfoByPage" resultType="com.yeejoin.amos.boot.module.jxiop.biz.tdengine.PvHealthIndex">
-SELECT * FROM pv_health_index_data
+SELECT * FROM ${dto.tableName}
 <where>
 <if test="dto.analysisObjType!= null and dto.analysisObjType!= ''">analysis_obj_type = #{dto.analysisObjType}</if>
 <if test="dto.analysisType!= null and dto.analysisType!= ''">and analysis_type = #{dto.analysisType}</if>
-<if test="dto.endDate!= null and dto.endDate!= ''"> and rec_date &lt;= #{dto.endDate} </if>
-<if test="dto.startDate!= null and dto.startDate!= ''"> and rec_date &gt;= #{dto.startDate} </if>
+<if test="dto.endDateTs!= null and dto.endDateTs!= ''"> and ts &lt;= #{dto.endDateTs} </if>
+<if test="dto.startDateTs!= null and dto.startDateTs!= ''"> and ts &gt;= #{dto.startDateTs} </if>
 <if test="dto.area!= null and dto.area!= ''"> AND area = #{dto.area} </if>
 <if test="dto.subarray!= null and dto.subarray!= ''"> AND subarray = #{dto.subarray} </if>
 <if test="dto.pointName!= null and dto.pointName!= ''">AND point_name = #{dto.pointName} </if>
@@ -226,13 +226,18 @@
 limit #{dto.current}, #{dto.size}
 </select>
+<select id="getTsByRecDate" resultType="long">
+SELECT ts FROM analysis_data.${tableName}
+where rec_date = #{recDate} order by ts ${sort} limit 1 ;
+</select>
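Note: getTsByRecDate returns the first or last ts for a given rec_date (the ${sort} fragment is interpolated, so callers pass "asc" or "desc"). A hedged sketch of how the new tableName/startDateTs/endDateTs fields on PvHealthIndexDto might be filled before calling getInfoByPage; the service-side wiring is not part of this diff, and Lombok-style setters are assumed:

import java.util.List;

public class PvHealthIndexPagingExample {
    // Resolve a rec_date range to ts bounds, then page on ts (hypothetical helper).
    static List<PvHealthIndex> pageByRecDate(PvHealthIndexMapper mapper, String tableName,
                                             String startRecDate, String endRecDate, PvHealthIndexDto dto) {
        dto.setTableName(tableName);
        dto.setStartDateTs(mapper.getTsByRecDate(tableName, startRecDate, "asc"));  // earliest ts on the start date
        dto.setEndDateTs(mapper.getTsByRecDate(tableName, endRecDate, "desc"));     // latest ts on the end date
        return mapper.getInfoByPage(dto);
    }
}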
<select id="getInfoByPageTotal" resultType="java.lang.Integer"> <select id="getInfoByPageTotal" resultType="java.lang.Integer">
SELECT count(1) FROM pv_health_index_data SELECT count(1) FROM pv_health_index_data
<where> <where>
<if test="dto.analysisObjType!= null and dto.analysisObjType!= ''">analysis_obj_type = #{dto.analysisObjType}</if> <if test="dto.analysisObjType!= null and dto.analysisObjType!= ''">analysis_obj_type = #{dto.analysisObjType}</if>
<if test="dto.analysisType!= null and dto.analysisType!= ''">and analysis_type = #{dto.analysisType}</if> <if test="dto.analysisType!= null and dto.analysisType!= ''">and analysis_type = #{dto.analysisType}</if>
<if test="dto.endDate!= null and dto.endDate!= ''"> and rec_date &lt;= #{dto.endDate} </if> <if test="dto.endDateTs!= null and dto.endDateTs!= ''"> and ts &lt;= #{dto.endDateTs} </if>
<if test="dto.startDate!= null and dto.startDate!= ''"> and rec_date &gt;= #{dto.startDate} </if> <if test="dto.startDateTs!= null and dto.startDateTs!= ''"> and ts &gt;= #{dto.startDateTs} </if>
<if test="dto.area!= null and dto.area!= ''"> AND area = #{dto.area} </if> <if test="dto.area!= null and dto.area!= ''"> AND area = #{dto.area} </if>
<if test="dto.subarray!= null and dto.subarray!= ''"> AND subarray = #{dto.subarray} </if> <if test="dto.subarray!= null and dto.subarray!= ''"> AND subarray = #{dto.subarray} </if>
<if test="dto.pointName!= null and dto.pointName!= ''">AND point_name = #{dto.pointName} </if> <if test="dto.pointName!= null and dto.pointName!= ''">AND point_name = #{dto.pointName} </if>
...@@ -241,7 +246,7 @@ ...@@ -241,7 +246,7 @@
<if test="dto.equipmentName!= null and dto.equipmentName!= ''">AND equipment_name = #{dto.equipmentName} <if test="dto.equipmentName!= null and dto.equipmentName!= ''">AND equipment_name = #{dto.equipmentName}
</if> </if>
<if test="dto.orgCode != null and dto.orgCode != ''"> <if test="dto.orgCode != null and dto.orgCode != ''">
and org_code like #{dto.orgCode} and org_code like #{dto.orgCode}
</if> </if>
</where> </where>
...@@ -318,4 +323,35 @@ ...@@ -318,4 +323,35 @@
analysis_obj_type = #{analysisObjectType} analysis_obj_type = #{analysisObjectType}
and ts > #{startTime} and ts > #{startTime}
</select> </select>
<insert id="saveBatchHealthIndexListNew">
insert into analysis_data.${tableName}
using analysis_data.pv_health_index_data_new TAGS (#{analysisType},#{recDate})
values
<foreach collection="list" separator="," item="item" index="index">
(
now,
#{item.analysisObjType, jdbcType=VARCHAR},
#{item.analysisObjSeq, jdbcType=VARCHAR},
#{item.weight, jdbcType=FLOAT},
#{item.healthIndex, jdbcType=FLOAT},
#{item.healthLevel, jdbcType=VARCHAR},
#{item.analysisStartTime, jdbcType=VARCHAR},
#{item.analysisEndTime, jdbcType=VARCHAR},
#{item.area, jdbcType=VARCHAR},
#{item.station, jdbcType=VARCHAR},
#{item.subarray, jdbcType=VARCHAR},
#{item.manufacturer, jdbcType=VARCHAR},
#{item.deviceType, jdbcType=VARCHAR},
#{item.equipmentName, jdbcType=VARCHAR},
#{item.gatewayId, jdbcType=VARCHAR},
#{item.indexAddress, jdbcType=VARCHAR},
#{item.anomaly, jdbcType=FLOAT},
#{item.pointName, jdbcType=VARCHAR},
#{item.orgCode, jdbcType=VARCHAR},
#{item.analysisTime, jdbcType=VARCHAR},
#{item.kks, jdbcType=VARCHAR}
)
</foreach>
</insert>
</mapper> </mapper>
@@ -56,4 +56,10 @@ public class StationInfoDto {
 @ApiModelProperty(value = "风险等级")
 private String riskLevel;
+@ApiModelProperty(value = "系统名称默认值")
+private String equipmentNameDefault;
+@ApiModelProperty(value = "设备状态默认值")
+private String subSystemDefault;
+@ApiModelProperty(value = "变量状态默认值")
+private String indexAddressDefault;
 }
@@ -72,5 +72,6 @@ public interface StationBasicMapper extends BaseMapper<StationBasic> {
 StationBasicDto getStationInfoByCode(@Param("stationCode")String stationCode);
 List<StationBasicDto> getStationsByAreaCode(@Param("areaCode")String stationCode);
+List<StationBasicDto> getStationBasicList();
 }
@@ -213,4 +213,12 @@
 is_delete = 0
 ORDER BY sequence_nbr ASC
 </select>
+<select id="getStationBasicList" resultType="com.yeejoin.amos.boot.module.jxiop.api.dto.StationBasicDto">
+SELECT
+sequence_nbr AS sequenceNbr,
+station_type stationType
+FROM
+station_basic
+</select>
 </mapper>
 package com.yeejoin.amos.boot.module.jxiop.biz.service.impl;
 import com.alibaba.fastjson.JSON;
+import com.alibaba.fastjson.JSONArray;
 import com.alibaba.fastjson.JSONObject;
 import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
 import com.fasterxml.jackson.databind.ObjectMapper;
+import com.yeejoin.amos.boot.biz.common.utils.RedisUtils;
 import com.yeejoin.amos.boot.module.jxiop.api.dto.StationCoordinateDto;
 import com.yeejoin.amos.boot.module.jxiop.api.dto.StationInfoDto;
 import com.yeejoin.amos.boot.module.jxiop.api.dto.StationRecordInfo;
@@ -63,6 +65,8 @@ public class StationBasicServiceImpl extends BaseService<StationBasicDto, Statio
 @Autowired
 MapRegionServiceImpl mapRegionServiceImpl;
 private final String CZLX = "CZLX";
+@Autowired
+private RedisUtils redisUtils;
 /**
  * Paged query
@@ -279,6 +283,7 @@ public class StationBasicServiceImpl extends BaseService<StationBasicDto, Statio
 }
 public List<StationInfoDto> getStationList(String areaCode, String type) {
+JSONArray stationSelectDefault = (JSONArray) redisUtils.get("STATION_FIRST_SELECT");
 // station info list, returned by the map API
 List<StationInfoDto> stationInfoDtoList = new LinkedList<>();
 // station info list
@@ -338,6 +343,19 @@ public class StationBasicServiceImpl extends BaseService<StationBasicDto, Statio
 stationInfoDto.setTitlePos(doubleList);
 stationInfoDto.setIndicatorData(indicatorList);
 stationInfoDto.setRiskLevel(stationRecordInfo.getRiskLevel());
+// apply the default selection values for this station
+if (!CollectionUtils.isEmpty(stationSelectDefault)) {
+    for (Object o : stationSelectDefault) {
+        if (o instanceof JSONObject) {
+            JSONObject jsonObject = (JSONObject) o;
+            if (stationRecordInfo.getStationId().equals(jsonObject.getLong("stationBasicId"))) {
+                stationInfoDto.setEquipmentNameDefault(jsonObject.getString("equipmentNameDefault"));
+                stationInfoDto.setSubSystemDefault(jsonObject.getString("subSystemDefault"));
+                stationInfoDto.setIndexAddressDefault(jsonObject.getString("indexAddressDefault"));
+            }
+        }
+    }
+}
 stationInfoDtoList.add(stationInfoDto);
 });
...
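Note: getStationList now reads a "STATION_FIRST_SELECT" JSONArray from Redis. The writer of that key is not part of this diff; based on the reader above, the cached payload presumably has the shape sketched below (all values are placeholders):

import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;

public class StationFirstSelectShape {
    // Builds one entry in the shape the reader expects: stationBasicId plus the three default fields.
    static JSONArray examplePayload() {
        JSONObject item = new JSONObject();
        item.put("stationBasicId", 1234567890123456789L); // matched against StationRecordInfo.getStationId()
        item.put("equipmentNameDefault", "example-equipment");
        item.put("subSystemDefault", "example-subsystem");
        item.put("indexAddressDefault", "example-address");
        JSONArray payload = new JSONArray();
        payload.add(item);
        return payload;
    }
}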
@@ -26,21 +26,7 @@
 FROM
 sjgl_zsj_zsbtz
 WHERE
-FSB IN (
-SELECT
-DBID
-FROM
-sjgl_zsj_zsbtz
-WHERE
-MACHGENRE = (
-SELECT
-DATAID
-FROM
-tpri_dmp_databook
-WHERE
-MACHGENRE = #{DATAID} and WERKS = #{WERKS}
-)
-)
+MACHGENRE = #{DATAID} and WERKS = #{WERKS}
 </select>
 <select id="getStationInfoMapByStationGFWerks" resultType="map">
...