Commit b6147d66 authored by zhangsen

Fix calculation issues; code commit

parent 048ede6f
@@ -258,14 +258,17 @@ public class BigScreenAnalyseController extends BaseController {
     public ResponseModel<Map<String, Object>> getHealthInfoByArea(@RequestParam(required = false) String areaCode) {
         Map<String, Object> resultMap = new HashMap<>();
         List<Map<String, Object>> healthListInfo = idxBizFanHealthIndexMapper.getHealthInfoByStation(areaCode);
-        Map<String, BigDecimal> collect = healthListInfo.stream().collect(Collectors.toMap(t -> t.get("station").toString(), t -> new BigDecimal(t.get("healthIndex").toString())));
-        List<StationBasic> stationBasics = stationBasicMapper.selectList(new LambdaQueryWrapper<StationBasic>().like(StationBasic::getAreaName, areaCode));
-        List<String> list = stationBasics.stream().map(StationBasic::getStationName).collect(Collectors.toList());
-        List<Object> seriesData = new ArrayList<>();
-        list.forEach(item -> seriesData.add(collect.getOrDefault(item, new BigDecimal("100"))));
+        // Map<String, BigDecimal> collect = healthListInfo.stream().collect(Collectors.toMap(t -> t.get("station").toString(), t -> new BigDecimal(t.get("healthIndex").toString())));
+        // List<StationBasic> stationBasics = stationBasicMapper.selectList(new LambdaQueryWrapper<StationBasic>().like(StationBasic::getAreaName, areaCode));
+        // List<String> list = stationBasics.stream().map(StationBasic::getStationName).collect(Collectors.toList());
+        // List<Object> seriesData = new ArrayList<>();
+        // list.forEach(item -> seriesData.add(collect.getOrDefault(item, new BigDecimal("100"))));
+        List<String> list = new ArrayList<>();
+        List<Object> seriesData = new ArrayList<>();
+        healthListInfo.forEach(item -> {
+            list.add(item.get("station").toString());
+            seriesData.add(new BigDecimal(item.get("healthIndex").toString()));
+        });
         resultMap.put("axisData", list);
         resultMap.put("seriesData", seriesData);
         return ResponseHelper.buildResponse(resultMap);
......
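The hunk above stops joining against the station list and instead builds the chart axes directly from the health-index query rows. For illustration only, the same mapping can be written with streams; the class and method names below are invented for this sketch, and it assumes each row from getHealthInfoByStation exposes "station" and "healthIndex" keys, as the diff suggests.

import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

public class HealthChartDataSketch {

    // Builds ECharts-style axis/series lists straight from the query rows,
    // mirroring the new controller logic (no station-list lookup needed).
    static Map<String, Object> toChartData(List<Map<String, Object>> rows) {
        Map<String, Object> result = new HashMap<>();
        result.put("axisData", rows.stream()
                .map(r -> r.get("station").toString())
                .collect(Collectors.toList()));
        result.put("seriesData", rows.stream()
                .map(r -> new BigDecimal(r.get("healthIndex").toString()))
                .collect(Collectors.toList()));
        return result;
    }

    public static void main(String[] args) {
        Map<String, Object> row = new HashMap<>();
        row.put("station", "StationA");
        row.put("healthIndex", "98.5");
        List<Map<String, Object>> rows = new ArrayList<>();
        rows.add(row);
        System.out.println(toChartData(rows)); // e.g. {axisData=[StationA], seriesData=[98.5]}
    }
}

Either form keeps axisData and seriesData index-aligned, which is what the chart consumer needs.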
@@ -88,6 +88,8 @@ public class KafkaAnalyseController {
     @ApiOperation(httpMethod = "POST", value = "中心值 - 风电 - 新", notes = "中心值 - 风电 - 新")
     public ResponseModel<Object> getFanConditionVariablesZXZ() {
         if (redisUtils.hasKey(kafkaTopicConsumerZXZFan)) {
+            // redisUtils.del(kafkaTopicConsumerZXZFan);
+            // redisUtils.getExpire(kafkaTopicConsumerZXZFan);
             return ResponseHelper.buildResponse("计算中");
         }
         fanConditionVariablesMessage.getFanConditionVariablesZXZ();
......
@@ -157,11 +157,7 @@ public class KafkaConsumerService {
                     minRow = minRow > values.size() ? values.size() : minRow;
                 }
                 values.setName("processVariable" + index);
-                if (!dataTable.isEmpty() && dataTable.rowCount() < values.size()) {
-                    dataTable.addColumns(values.inRange(0, dataTable.rowCount()));
-                } else {
-                    dataTable.addColumns(values);
-                }
+                dataTable = getDataTable(dataTable, values);
                 data1.put("processVariable" + index + "Id", processVariable.getSequenceNbr());
@@ -171,6 +167,7 @@ public class KafkaConsumerService {
                 IntervalValues.add(processVariable.getIntervalValue2());
                 IntervalValues.add(processVariable.getIntervalValue3());
                 IntervalValues.add(processVariable.getIntervalValue4());
+                IntervalValues.add(processVariable.getIntervalValue5());
                 data2.put("processVariable" + index, IntervalValues);
                 index++;
@@ -178,15 +175,11 @@ public class KafkaConsumerService {
                 Selection selection = table.stringColumn("id").isEqualTo(analysisVariable.getIndexAddress() + "_" + analysisVariable.getGatewayId());
                 DoubleColumn values = table.where(selection).doubleColumn("value");
                 values.setName("analysisVariable");
-                if (!dataTable.isEmpty() && dataTable.rowCount() < values.size()) {
-                    dataTable.addColumns(values.inRange(0, dataTable.rowCount()));
-                } else {
-                    dataTable.addColumns(values);
-                }
+                dataTable = getDataTable(dataTable, values);
                 data1.put("analysisVariableId", analysisVariable.getSequenceNbr());
                 // 获取相同长度的数据
-                dataTable = dataTable.inRange(0, minRow);
+                // dataTable = dataTable.inRange(0, minRow);
                 List<String> list = dataTable.columnNames();
                 for (String column : list) {
                     data1.put(column, dataTable.doubleColumn(column).asDoubleArray());
@@ -201,7 +194,7 @@ public class KafkaConsumerService {
                 idxBizFanPointVarCentralValueMapper.delete(new QueryWrapper<IdxBizFanPointVarCentralValue>().eq("ANALYSIS_POINT_ID", analysisVariableId));
                 JSONObject jsonObject = JSON.parseObject(response);
-                int length = jsonObject.getJSONArray("stdDev").size();
+                int length = jsonObject.getJSONArray("stdDev").size() > 64 ? 64 : jsonObject.getJSONArray("stdDev").size();
                 List<IdxBizFanPointVarCentralValue> insertList = new ArrayList<>();
                 for (int i = 0; i < length; i++) {
                     IdxBizFanPointVarCentralValue idxBizFanPointVarCentralValue = new IdxBizFanPointVarCentralValue();
...@@ -236,6 +229,19 @@ public class KafkaConsumerService { ...@@ -236,6 +229,19 @@ public class KafkaConsumerService {
} }
} }
private Table getDataTable(Table dataTable, DoubleColumn values) {
if (!dataTable.isEmpty() && dataTable.rowCount() < values.size()) {
dataTable.addColumns(values.inRange(0, dataTable.rowCount()));
} else if (!dataTable.isEmpty() && dataTable.rowCount() > values.size()){
dataTable = dataTable.inRange(0, values.size());
dataTable.addColumns(values);
} else {
dataTable.addColumns(values);
}
return dataTable;
}
private void execPvCorrelation(List<ConsumerRecord<String, String>> consumerRecords, Table table) { private void execPvCorrelation(List<ConsumerRecord<String, String>> consumerRecords, Table table) {
consumerRecords.parallelStream().forEach(record -> { consumerRecords.parallelStream().forEach(record -> {
Optional<?> kafkaMessage = Optional.ofNullable(record.value()); Optional<?> kafkaMessage = Optional.ofNullable(record.value());
@@ -788,11 +794,7 @@ public class KafkaConsumerService {
                     minRow = minRow > values.size() ? values.size() : minRow;
                 }
                 values.setName("processVariable" + index);
-                if (!dataTable.isEmpty() && dataTable.rowCount() < values.size()) {
-                    dataTable.addColumns(values.inRange(0, dataTable.rowCount()));
-                } else {
-                    dataTable.addColumns(values);
-                }
+                dataTable = getDataTable(dataTable, values);
                 data1.put("processVariable" + index + "Id", processVariable.getSequenceNbr());
                 // 构建工况区间数组
@@ -801,6 +803,7 @@ public class KafkaConsumerService {
                 IntervalValues.add(processVariable.getIntervalValue2());
                 IntervalValues.add(processVariable.getIntervalValue3());
                 IntervalValues.add(processVariable.getIntervalValue4());
+                IntervalValues.add(processVariable.getIntervalValue5());
                 data2.put("processVariable" + index, IntervalValues);
                 index++;
@@ -808,15 +811,12 @@ public class KafkaConsumerService {
                 Selection selection = table.stringColumn("id").isEqualTo(analysisVariable.getIndexAddress() + "_" + analysisVariable.getGatewayId());
                 DoubleColumn values = table.where(selection).doubleColumn("value");
                 values.setName("analysisVariable");
-                if (!dataTable.isEmpty() && dataTable.rowCount() < values.size()) {
-                    dataTable.addColumns(values.inRange(0, dataTable.rowCount()));
-                } else {
-                    dataTable.addColumns(values);
-                }
+                dataTable = getDataTable(dataTable, values);
                 data1.put("analysisVariableId", analysisVariable.getSequenceNbr());
                 // 获取相同长度的数据
-                dataTable = dataTable.inRange(0, minRow);
+                // dataTable = dataTable.inRange(0, minRow);
                 List<String> list = dataTable.columnNames();
                 for (String column : list) {
                     data1.put(column, dataTable.doubleColumn(column).asDoubleArray());
@@ -831,7 +831,7 @@ public class KafkaConsumerService {
                 idxBizPvPointVarCentralValueMapper.delete(new QueryWrapper<IdxBizPvPointVarCentralValue>().eq("ANALYSIS_POINT_ID", analysisVariable.getSequenceNbr()));
                 JSONObject jsonObject = JSON.parseObject(response);
-                int length = jsonObject.getJSONArray("stdDev").size();
+                int length = jsonObject.getJSONArray("stdDev").size() > 64 ? 64 : jsonObject.getJSONArray("stdDev").size();
                 List<IdxBizPvPointVarCentralValue> insertList = new ArrayList<>();
                 for (int i = 0; i < length; i++) {
                     IdxBizPvPointVarCentralValue idxBizPvPointVarCentralValue = new IdxBizPvPointVarCentralValue();
......
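The repeated edits in this file route every column insertion through the new getDataTable helper, which trims either the Tablesaw table or the incoming column so both end up with the same row count before addColumns is called, presumably to avoid the length mismatches behind the calculation issues named in the commit message; the earlier dataTable.inRange(0, minRow) step then becomes redundant and is commented out. A minimal, self-contained sketch of that idea follows; the class and method names are illustrative (not from the repository), and it assumes the same Tablesaw Table/DoubleColumn API used in the diff.

import tech.tablesaw.api.DoubleColumn;
import tech.tablesaw.api.Table;

public class ColumnAlignSketch {

    // Align lengths before adding: truncate the column if it is longer than the
    // table, or shrink the table if it is longer than the column, then add.
    static Table addAligned(Table dataTable, DoubleColumn values) {
        if (!dataTable.isEmpty() && dataTable.rowCount() < values.size()) {
            dataTable.addColumns(values.inRange(0, dataTable.rowCount()));
        } else if (!dataTable.isEmpty() && dataTable.rowCount() > values.size()) {
            dataTable = dataTable.inRange(0, values.size());
            dataTable.addColumns(values);
        } else {
            dataTable.addColumns(values);
        }
        return dataTable;
    }

    public static void main(String[] args) {
        Table t = Table.create("demo");
        t = addAligned(t, DoubleColumn.create("a", new double[] {1, 2, 3, 4}));
        t = addAligned(t, DoubleColumn.create("b", new double[] {10, 20}));   // table shrinks to 2 rows
        t = addAligned(t, DoubleColumn.create("c", new double[] {7, 8, 9}));  // column truncated to 2 values
        System.out.println(t.print());                                        // 2 rows, 3 columns
    }
}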
@@ -1454,7 +1454,7 @@ public class CommonServiceImpl {
     }
 
-    @Scheduled(cron = "0 0/5 * * * ?")
+    @Scheduled(cron = "0/5 * * * * ?")
     public void healthWarningMinuteByFan() {
         Date time = new Date();
         List<IdxBizFanPointProcessVariableClassificationDto> data = idxBizFanPointProcessVariableClassificationMapper.getInfluxDBData();
......
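For context on the @Scheduled change above: assuming Spring's six-field cron format (second, minute, hour, day of month, month, day of week), the old expression fires at second 0 of every fifth minute, while the new one fires every five seconds, so healthWarningMinuteByFan now runs far more frequently. A small illustrative snippet (the surrounding class is invented for the example):

import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;

@Component // assumes @EnableScheduling is declared on a configuration class elsewhere
public class CronComparisonSketch {

    // Old schedule: second 0 of minutes 0, 5, 10, ... -> once every 5 minutes
    @Scheduled(cron = "0 0/5 * * * ?")
    public void everyFiveMinutes() {
        System.out.println("runs every 5 minutes");
    }

    // New schedule: seconds 0, 5, 10, ... of every minute -> once every 5 seconds
    @Scheduled(cron = "0/5 * * * * ?")
    public void everyFiveSeconds() {
        System.out.println("runs every 5 seconds");
    }
}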
@@ -11,18 +11,22 @@
        IFNULL( AVG( HEALTH_INDEX ), 100 ) AS avgHealthIndex
        FROM
        idx_biz_fan_health_index
-       <where>ANALYSIS_TYPE = '按天'
-       AND
-       DATE_FORMAT( REC_DATE, "%Y-%m-%d" ) = CURRENT_DATE
+       <where>
+           ANALYSIS_TYPE = '按天'
+           AND DATE_FORMAT( REC_DATE, "%Y-%m-%d" ) = CURRENT_DATE
            <if test="areaCode != null and areaCode != ''">
                AND ARAE like concat('%', #{areaCode}, '%')
+               AND ANALYSIS_OBJ_TYPE = '片区'
            </if>
            <if test="stationCode != null and stationCode != ''">
                AND STATION = #{stationCode}
+               AND ANALYSIS_OBJ_TYPE = '场站'
            </if>
-           <if test="stationCode == null or stationCode == ''">
-               AND ( STATION IS NULL OR STATION = '' )
-           </if>
+           <if test="(stationCode == null or stationCode == '') and (areaCode == null or areaCode == '')">
+               AND ANALYSIS_OBJ_TYPE = '片区'
+           </if>
        </where>
        UNION ALL
        (
@@ -35,12 +39,14 @@
            AND DATE_FORMAT( REC_DATE, "%Y-%m-%d" ) = CURRENT_DATE
            <if test="areaCode != null and areaCode != ''">
                AND ARAE like concat('%', #{areaCode}, '%')
+               AND ANALYSIS_OBJ_TYPE = '片区'
            </if>
            <if test="stationCode != null and stationCode != ''">
                AND STATION = #{stationCode}
+               AND ANALYSIS_OBJ_TYPE = '场站'
            </if>
-           <if test="stationCode == null or stationCode == ''">
-               AND ( STATION IS NULL OR STATION = '' )
+           <if test="(stationCode == null or stationCode == '') and (areaCode == null or areaCode == '')">
+               AND ANALYSIS_OBJ_TYPE = '片区'
            </if>
        </where>
        )
@@ -72,12 +78,14 @@
            ANALYSIS_TYPE = '按天'
            <if test="areaCode != null and areaCode != ''">
                AND ARAE like concat('%', #{areaCode}, '%')
+               AND ANALYSIS_OBJ_TYPE = '片区'
            </if>
            <if test="stationCode != null and stationCode != ''">
                AND STATION = #{stationCode}
+               AND ANALYSIS_OBJ_TYPE = '场站'
            </if>
-           <if test="stationCode == null or stationCode == ''">
-               AND ( STATION IS NULL OR STATION = '' )
+           <if test="(stationCode == null or stationCode == '') and (areaCode == null or areaCode == '')">
+               AND ANALYSIS_OBJ_TYPE = '片区'
            </if>
        </where>
        UNION ALL
@@ -91,12 +99,14 @@
            ANALYSIS_TYPE = '按天'
            <if test="areaCode != null and areaCode != ''">
                AND ARAE like concat('%', #{areaCode}, '%')
+               AND ANALYSIS_OBJ_TYPE = '片区'
            </if>
            <if test="stationCode != null and stationCode != ''">
                AND STATION = #{stationCode}
+               AND ANALYSIS_OBJ_TYPE = '场站'
            </if>
-           <if test="stationCode == null or stationCode == ''">
-               AND ( STATION IS NULL OR STATION = '' )
+           <if test="(stationCode == null or stationCode == '') and (areaCode == null or areaCode == '')">
+               AND ANALYSIS_OBJ_TYPE = '片区'
            </if>
        </where>
        )
@@ -128,7 +138,7 @@
        FROM
        idx_biz_fan_health_index
        WHERE
-       ( STATION IS NULL OR STATION = '' )
+       ANALYSIS_OBJ_TYPE = '片区'
        AND ANALYSIS_TYPE = '按天'
        AND DATE_FORMAT( REC_DATE, "%Y-%m-%d" ) = CURRENT_DATE
        GROUP BY
@@ -140,7 +150,7 @@
        FROM
        idx_biz_pv_health_index
        WHERE
-       ( STATION IS NULL OR STATION = '' )
+       ANALYSIS_OBJ_TYPE = '片区'
        AND ANALYSIS_TYPE = '按天'
        AND DATE_FORMAT( REC_DATE, "%Y-%m-%d" ) = CURRENT_DATE
        GROUP BY
@@ -185,8 +195,7 @@
        FROM
        idx_biz_fan_health_index
        <where>
-           STATION IS NOT NULL
-           AND STATION != ''
+           ANALYSIS_OBJ_TYPE = '场站'
            AND ANALYSIS_TYPE = '按天'
            AND DATE_FORMAT( REC_DATE, "%Y-%m-%d" ) = CURRENT_DATE
            <if test="areaCode != null and areaCode != ''">
@@ -202,8 +211,7 @@
        FROM
        idx_biz_pv_health_index
        <where>
-           STATION IS NOT NULL
-           AND STATION != ''
+           ANALYSIS_OBJ_TYPE = '场站'
            AND ANALYSIS_TYPE = '按天'
            AND DATE_FORMAT( REC_DATE, "%Y-%m-%d" ) = CURRENT_DATE
            <if test="areaCode != null and areaCode != ''">
......
@@ -491,9 +491,9 @@ public class MonitorFanIdxController extends BaseController {
     public ResponseModel getMajorAnalogQuantityByPage(@RequestParam(required = false) String stationId) {
         StationBasic stationBasic = stationBasicMapper.selectById(stationId);
         String gatewayId = stationBasic.getBoosterGatewayId();
-        if ("夏造风电站".equals(stationBasic.getStationName())) {
+        if (stationBasic.getStationName().contains("夏造")) {
             monitorFanIndicator.getMajorAnalogQuantityByPage(gatewayId, stationBasic.getSequenceNbr().toString());
-        } else if ("泰和前进光伏站".equals(stationBasic.getStationName())) {
+        } else if (stationBasic.getStationName().contains("泰和")) {
             monitorFanIndicator.getMajorAnalogQuantityByPageByTHGF(gatewayId, stationBasic.getSequenceNbr().toString());
         }
         return CommonResponseUtil.success();
......
@@ -1013,7 +1013,7 @@ public class MonitorFanIndicatorImpl implements IMonitorFanIndicator {
     public List<Map<String, String>> getBoosterStationInfo(String stationId) {
         StationBasic stationBasic = getOneByStationNumber(stationId);
         List<Map<String, String>> boosterStationInfo = new ArrayList<>();
-        if ("夏造风电站".equals(stationBasic.getStationName())) {
+        if (stationBasic.getStationName().contains("夏造风电站")) {
             boosterStationInfo = monitorFanIndicatorregionMapper.getBoosterStationInfo2(stationBasic.getBoosterGatewayId());
         } else {
             boosterStationInfo = monitorFanIndicatorregionMapper.getBoosterStationInfoNormal(stationBasic.getBoosterGatewayId());
......