Commit 8e86c1fc authored by zhangsen

Handle calculation issues; commit code

parent eb3a4bfa
......@@ -58,6 +58,17 @@
<version>2.4</version>
<classifier>jdk15</classifier>
</dependency>
<dependency>
<groupId>tech.tablesaw</groupId>
<artifactId>tablesaw-core</artifactId>
<version>0.43.1</version>
</dependency>
<dependency>
<groupId>tech.tablesaw</groupId>
<artifactId>tablesaw-json</artifactId>
<version>0.43.1</version>
</dependency>
</dependencies>
<build>
......
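The two new Tablesaw artifacts (tablesaw-core, tablesaw-json) bring in an in-memory, columnar data-frame library for the analysis work in this module. A minimal sketch of the kind of descriptive statistics it enables; the column name and values here are illustrative, not taken from this commit:

import tech.tablesaw.api.DoubleColumn;
import tech.tablesaw.api.Table;

public class TablesawSketch {
    public static void main(String[] args) {
        // Illustrative sensor readings; real values would come from iot_data.indicator_data.
        DoubleColumn value = DoubleColumn.create("value", new double[]{1.2, 1.4, 1.1, 1.5});
        Table sample = Table.create("indicator_sample", value);

        // Mean and standard deviation are the kind of aggregates a central-value calculation needs.
        System.out.println(sample.name() + " mean=" + value.mean() + " std=" + value.standardDeviation());
    }
}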
package com.yeejoin.amos.boot.module.jxiop.biz.ESDto;
import io.github.classgraph.json.Id;
//import io.github.classgraph.json.Id;
import lombok.Data;
import lombok.experimental.Accessors;
import nonapi.io.github.classgraph.json.Id;
import org.springframework.data.elasticsearch.annotations.DateFormat;
import org.springframework.data.elasticsearch.annotations.Document;
import org.springframework.data.elasticsearch.annotations.Field;
......
......@@ -15,6 +15,7 @@ import org.springframework.core.io.support.PathMatchingResourcePatternResolver;
import org.springframework.jdbc.datasource.DataSourceTransactionManager;
import javax.sql.DataSource;
import java.sql.SQLException;
/**
* Secondary (slave) data source configuration
......@@ -49,6 +50,18 @@ public class TdEngineConfig {
datasource.setUsername(username);
datasource.setPassword(password);
datasource.setDriverClassName(driverClassName);
datasource.setInitialSize(100); // initial pool size
datasource.setMaxActive(200); // maximum active connections
datasource.setMaxWait(60000); // maximum wait time (ms)
datasource.setMinIdle(5); // minimum idle connections
datasource.setValidationQuery("SELECT 1"); // validation query
try {
datasource.setFilters("stat");
} catch (SQLException throwables) {
throwables.printStackTrace();
}
datasource.setQueryTimeout(30); // query timeout (s)
datasource.setConnectionProperties("useUnicode=true;characterEncoding=UTF-8"); // connection properties
return datasource;
}
......
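The Druid pool settings above are hard-coded. A hedged sketch of externalizing them through properties, in the style of the other configuration keys added in this commit; the td-engine.pool.* property names and the class name are illustrative, not part of the codebase:

import com.alibaba.druid.pool.DruidDataSource;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

// Sketch only: the same pool sizes, read from hypothetical td-engine.pool.* properties.
@Configuration
public class TdEnginePoolPropertiesSketch {

    @Value("${td-engine.pool.initial-size:100}")
    private int initialSize;

    @Value("${td-engine.pool.max-active:200}")
    private int maxActive;

    @Value("${td-engine.pool.max-wait-ms:60000}")
    private long maxWaitMs;

    @Bean
    public DruidDataSource tdPooledDataSourceSketch() {
        DruidDataSource datasource = new DruidDataSource();
        datasource.setInitialSize(initialSize);
        datasource.setMaxActive(maxActive);
        datasource.setMaxWait(maxWaitMs);
        datasource.setValidationQuery("SELECT 1");
        return datasource;
    }
}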
package com.yeejoin.amos.boot.module.jxiop.biz.controller;
import com.yeejoin.amos.boot.biz.common.utils.RedisUtils;
import com.yeejoin.amos.boot.module.jxiop.biz.entity.IndicatorData;
import com.yeejoin.amos.boot.module.jxiop.biz.kafka.FanConditionVariablesMessage;
import com.yeejoin.amos.boot.module.jxiop.biz.mapper2.IdxBizFanHealthIndexMapper;
import com.yeejoin.amos.boot.module.jxiop.biz.tdmapper.IndicatorDataMapper;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
......@@ -14,6 +18,8 @@ import org.typroject.tyboot.core.restful.doc.TycloudOperation;
import org.typroject.tyboot.core.restful.utils.ResponseHelper;
import org.typroject.tyboot.core.restful.utils.ResponseModel;
import java.util.List;
import static com.yeejoin.amos.boot.module.jxiop.biz.kafka.Constant.*;
@RestController
......@@ -36,7 +42,7 @@ public class KafkaAnalyseController {
}
fanConditionVariablesMessage.getFanConditionVariables();
redisUtils.set(kafkaTopicConsumer, RequestContext.getTraceId(), 300);
return ResponseHelper.buildResponse("计算中");
return ResponseHelper.buildResponse("开始计算");
}
@TycloudOperation(ApiLevel = UserType.AGENCY, needAuth = false)
......@@ -48,7 +54,7 @@ public class KafkaAnalyseController {
}
fanConditionVariablesMessage.getPvConditionVariables();
redisUtils.set(kafkaTopicConsumerPv, RequestContext.getTraceId(), 300);
return ResponseHelper.buildResponse("计算中");
return ResponseHelper.buildResponse("开始计算");
}
......@@ -61,7 +67,7 @@ public class KafkaAnalyseController {
}
fanConditionVariablesMessage.getFanConditionVariablesGKHF();
redisUtils.set(kafkaTopicConsumerGKHFFan, RequestContext.getTraceId(), 300);
return ResponseHelper.buildResponse("计算中");
return ResponseHelper.buildResponse("开始计算");
}
@TycloudOperation(ApiLevel = UserType.AGENCY, needAuth = false)
......@@ -73,6 +79,61 @@ public class KafkaAnalyseController {
}
fanConditionVariablesMessage.getPvConditionVariablesPvGKFX();
redisUtils.set(kafkaTopicConsumerGKHFPv, RequestContext.getTraceId(), 300);
return ResponseHelper.buildResponse("计算中");
return ResponseHelper.buildResponse("开始计算");
}
@TycloudOperation(ApiLevel = UserType.AGENCY, needAuth = false)
@PostMapping(value = "/getFanConditionVariablesZXZ")
@ApiOperation(httpMethod = "POST", value = "中心值 - 风电 - 新", notes = "中心值 - 风电 - 新")
public ResponseModel<Object> getFanConditionVariablesZXZ() {
if (redisUtils.hasKey(kafkaTopicConsumerZXZFan)) {
return ResponseHelper.buildResponse("计算中");
}
fanConditionVariablesMessage.getFanConditionVariablesZXZ();
redisUtils.set(kafkaTopicConsumerZXZFan, RequestContext.getTraceId(), 300);
return ResponseHelper.buildResponse("开始计算");
}
@TycloudOperation(ApiLevel = UserType.AGENCY, needAuth = false)
@PostMapping(value = "/getPvConditionVariablesZXZ")
@ApiOperation(httpMethod = "POST", value = "中心值 - 光伏 - 新", notes = "中心值 - 光伏 - 新")
public ResponseModel<Object> getPvConditionVariablesZXZ() {
if (redisUtils.hasKey(kafkaTopicConsumerZXZPv)) {
return ResponseHelper.buildResponse("计算中");
}
fanConditionVariablesMessage.getPvConditionVariablesZXZ();
redisUtils.set(kafkaTopicConsumerZXZPv, RequestContext.getTraceId(), 300);
return ResponseHelper.buildResponse("开始计算");
}
@Autowired
IndicatorDataMapper indicatorDataMapper;
@Autowired
IdxBizFanHealthIndexMapper idxBizFanHealthIndexMapper;
@TycloudOperation(ApiLevel = UserType.AGENCY, needAuth = false)
@GetMapping(value = "/getAddressInfo")
@ApiOperation(httpMethod = "GET", value = "getAddressInfo", notes = "getAddressInfo")
public ResponseModel<List<IndicatorData>> getAddressInfo() {
List<String> addressInfo = idxBizFanHealthIndexMapper.getAddressInfo();
String join = String.join(",", addressInfo);
List<IndicatorData> indicatorData = indicatorDataMapper.selectByAddresses(join, "1668801435891929089");
return ResponseHelper.buildResponse(indicatorData);
}
}
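Every endpoint in this controller repeats the same guard: if the topic name already exists as a Redis key, the request is answered with "计算中"; otherwise the async job is kicked off and the trace id is stored under that key for 300 seconds. A hedged sketch of that guard extracted into a private helper on this controller (the helper name is hypothetical; all other identifiers come from the class above):

// Hypothetical helper: the Kafka topic name doubles as a 300-second "already running" marker in Redis.
private ResponseModel<Object> startOnce(String topicKey, Runnable job) {
    if (redisUtils.hasKey(topicKey)) {
        return ResponseHelper.buildResponse("计算中");
    }
    job.run();
    redisUtils.set(topicKey, RequestContext.getTraceId(), 300);
    return ResponseHelper.buildResponse("开始计算");
}

// Usage, e.g. for the wind-turbine central-value endpoint:
// return startOnce(kafkaTopicConsumerZXZFan, fanConditionVariablesMessage::getFanConditionVariablesZXZ);

Note that hasKey followed by set is not atomic, so two overlapping requests can still both start a job; a set-if-absent style operation would close that window.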
......@@ -102,7 +102,7 @@ public class IdxBizPvPointProcessVariableClassification{
*
*/
@TableField("POINT_ID")
private Integer pointId;
private String pointId;
/**
* Area
......
......@@ -11,17 +11,6 @@ import java.util.Date;
* @date 2023-09-02 11:02
*/
public interface Constant {
String INSERT = "INSERT";
String UPDATE = "UPDATE";
String DATA = "data";
String TOPIC = "topic";
String TABLE = "table";
String TYPE = "type";
String DB_TYPE = "dbType";
String BODY = "body";
String DATA_TYPE = "datatype";
String STATE = "state";
// Wind turbine correlation consumer topic
String kafkaTopicConsumer = "FanConditionVariables";
......@@ -34,16 +23,10 @@ public interface Constant {
// PV working-condition interval division topic
String kafkaTopicConsumerGKHFPv = "PvConditionVariablesGKHF";
@Value("${last.month.num:12}")
public Integer lastMonthNum = 12;
String startTime = DateUtils.convertDateToString(DateUtil.offsetMonth(new Date(), -lastMonthNum), DateUtils.DATE_TIME_PATTERN);
String endTime = DateUtils.convertDateToString(DateUtils.getCurrentDayEndTime(new Date()), DateUtils.DATE_TIME_PATTERN);
// Correlation analysis endpoint
String baseUrlXGX = "http://139.9.171.247:8052/intelligent-analysis/correlation";
// Wind turbine central-value calculation topic
String kafkaTopicConsumerZXZFan = "FanConditionVariablesGZXZ";
// PV central-value calculation topic
String kafkaTopicConsumerZXZPv = "PvConditionVariablesZXZ";
// Working-condition division endpoint
String baseUrlGKHF = "http://139.9.171.247:8052/intelligent-analysis/working-condition-division";
}
package com.yeejoin.amos.boot.module.jxiop.biz.kafka;
import com.alibaba.fastjson.JSON;
import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.yeejoin.amos.boot.module.jxiop.biz.entity.IdxBizFanPointProcessVariableClassification;
import com.yeejoin.amos.boot.module.jxiop.biz.entity.IdxBizFanPointVarCorrelation;
......@@ -12,6 +13,7 @@ import com.yeejoin.amos.boot.module.jxiop.biz.mapper2.IdxBizPvPointProcessVariab
import com.yeejoin.amos.boot.module.jxiop.biz.mapper2.IdxBizPvPointVarCorrelationMapper;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.scheduling.annotation.Async;
import org.springframework.stereotype.Service;
import java.util.List;
......@@ -36,29 +38,53 @@ public class FanConditionVariablesMessage {
private IdxBizPvPointProcessVariableClassificationMapper classificationMapperPv;
@Autowired
private IdxBizFanPointProcessVariableClassificationMapper idxBizFanPointProcessVariableClassificationMapper;
@Autowired
private IdxBizPvPointProcessVariableClassificationMapper idxBizPvPointProcessVariableClassificationMapper;
@Autowired
private KafkaProducerService kafkaProducerService;
// Correlation analysis - wind turbine entry point
@Async
public void getFanConditionVariables() {
List<IdxBizFanPointVarCorrelation> pointVarCorrelationsList = pointVarCorrelationMapper.selectList(null);
List<IdxBizFanPointVarCorrelation> pointVarCorrelationsList = pointVarCorrelationMapper.selectList(new LambdaQueryWrapper<IdxBizFanPointVarCorrelation>());
pointVarCorrelationsList.forEach(item -> kafkaProducerService.sendMessageAsync(kafkaTopicConsumer, JSON.toJSONString(item)));
}
// Correlation analysis - PV entry point
@Async
public void getPvConditionVariables() {
List<IdxBizPvPointVarCorrelation> pointVarCorrelationsList = pointVarCorrelationMapperPv.selectList(null);
pointVarCorrelationsList.forEach(item -> kafkaProducerService.sendMessageAsync(kafkaTopicConsumerPv, JSON.toJSONString(item)));
}
// Working-condition division - wind turbine - new
@Async
public void getFanConditionVariablesGKHF() {
List<IdxBizFanPointProcessVariableClassification> variableClassificationList = classificationMapperFan.selectList(new QueryWrapper<IdxBizFanPointProcessVariableClassification>().isNotNull("SEQUENCE_NBR").eq("TAG_CODE", "工况变量"));
variableClassificationList.forEach(item -> kafkaProducerService.sendMessageAsync(kafkaTopicConsumerGKHFFan, JSON.toJSONString(item)));
}
// Working-condition division - PV - new
@Async
public void getPvConditionVariablesPvGKFX() {
List<IdxBizPvPointProcessVariableClassification> variableClassificationList = classificationMapperPv.selectList(new QueryWrapper<IdxBizPvPointProcessVariableClassification>().isNotNull("SEQUENCE_NBR").eq("TAG_CODE", "工况变量"));
variableClassificationList.forEach(item -> kafkaProducerService.sendMessageAsync(kafkaTopicConsumerGKHFPv, JSON.toJSONString(item)));
}
// Central value - wind turbine - new
@Async
public void getFanConditionVariablesZXZ() {
List<IdxBizFanPointProcessVariableClassification> fenxiList = idxBizFanPointProcessVariableClassificationMapper.selectList(new QueryWrapper<IdxBizFanPointProcessVariableClassification>().isNotNull("SEQUENCE_NBR").eq("TAG_CODE", "分析变量"));
fenxiList.forEach(item -> kafkaProducerService.sendMessageAsync(kafkaTopicConsumerZXZFan, JSON.toJSONString(item)));
}
// Central value - PV - new
@Async
public void getPvConditionVariablesZXZ() {
List<IdxBizPvPointProcessVariableClassification> fenxiList = idxBizPvPointProcessVariableClassificationMapper.selectList(new QueryWrapper<IdxBizPvPointProcessVariableClassification>().isNotNull("SEQUENCE_NBR").eq("TAG_CODE", "分析变量"));
fenxiList.forEach(item -> kafkaProducerService.sendMessageAsync(kafkaTopicConsumerZXZPv, JSON.toJSONString(item)));
}
}
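These @Async producers publish one JSON-serialized entity per Kafka record; the matching consumers are not part of this diff. A minimal hypothetical listener sketch for the wind-turbine correlation topic, assuming the batch/manual-ack listener settings added to the properties file below; class and method names are illustrative:

import java.util.List;

import com.alibaba.fastjson.JSON;
import com.yeejoin.amos.boot.module.jxiop.biz.entity.IdxBizFanPointVarCorrelation;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.kafka.support.Acknowledgment;
import org.springframework.stereotype.Service;

// Hypothetical consumer sketch; the real consumer lives elsewhere in the module.
@Service
public class FanConditionVariablesListenerSketch {

    @KafkaListener(topics = "FanConditionVariables", groupId = "consumerGroup")
    public void onFanCorrelationBatch(List<ConsumerRecord<String, String>> records, Acknowledgment ack) {
        for (ConsumerRecord<String, String> record : records) {
            // Each record carries one IdxBizFanPointVarCorrelation serialized with fastjson.
            IdxBizFanPointVarCorrelation item = JSON.parseObject(record.value(), IdxBizFanPointVarCorrelation.class);
            // ... call the correlation algorithm (base.url.XGX) for this point, then persist the result ...
        }
        ack.acknowledge(); // manual_immediate ack mode, as configured below
    }
}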
package com.yeejoin.amos.boot.module.jxiop.biz.kafka;
import cn.hutool.core.date.DateUtil;
import com.yeejoin.amos.boot.biz.common.utils.DateUtils;
import org.springframework.beans.factory.annotation.Value;
import java.util.Date;
public class KafkaConstant {
@Value("${last.month.num:12}")
public Integer lastMonthNum = 12;
String startTime = DateUtils.convertDateToString(DateUtil.offsetMonth(new Date(), -lastMonthNum), DateUtils.DATE_TIME_PATTERN);
String endTime = DateUtils.convertDateToString(DateUtils.getCurrentDayEndTime(new Date()), DateUtils.DATE_TIME_PATTERN);
// Correlation analysis endpoint
@Value("${base.url.XGX:http://139.9.171.247:8052/intelligent-analysis/correlation}")
public static final String baseUrlXGX = "http://139.9.171.247:8052/intelligent-analysis/correlation";
// Working-condition division endpoint
@Value("${base.url.GKHF:http://139.9.171.247:8052/intelligent-analysis/working-condition-division}")
public static final String baseUrlGKHF = "http://139.9.171.247:8052/intelligent-analysis/working-condition-division";
@Value("${spring.kafka.consumer.max-poll-records:30}")
public static final Integer threadNum = 30;
}
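Note that Spring does not inject @Value into static final fields, and KafkaConstant is not declared as a Spring bean, so the annotations above are effectively inert and the hard-coded right-hand sides are what the code sees. A hedged sketch of a variant that would actually pick up the base.url.* and last.month.num properties added in this commit (the class name is hypothetical):

import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;

// Sketch only: instance fields on a Spring-managed bean can be injected, unlike static final constants.
@Component
public class KafkaAnalysisProperties {

    @Value("${last.month.num:12}")
    private Integer lastMonthNum;

    @Value("${base.url.XGX:http://139.9.171.247:8052/intelligent-analysis/correlation}")
    private String baseUrlXGX;

    @Value("${base.url.GKHF:http://139.9.171.247:8052/intelligent-analysis/working-condition-division}")
    private String baseUrlGKHF;

    @Value("${base.url.ZXZ:http://139.9.171.247:8052/intelligent-analysis/central-value}")
    private String baseUrlZXZ;

    public Integer getLastMonthNum() { return lastMonthNum; }
    public String getBaseUrlXGX() { return baseUrlXGX; }
    public String getBaseUrlGKHF() { return baseUrlGKHF; }
    public String getBaseUrlZXZ() { return baseUrlZXZ; }
}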
......@@ -91,4 +91,7 @@ public interface IdxBizFanHealthIndexMapper extends BaseMapper<IdxBizFanHealthIn
List<IdxBizFanHealthLevel> getHealthLevelInfoList();
List<String> getAddressInfo();
}
......@@ -23,4 +23,11 @@ public interface IndicatorDataMapper extends BaseMapper<IndicatorData> {
List<IndicatorData> selectDataByAddressAndtimeNew(@Param("address")String address,@Param("startTime") String startTime, @Param("endTime")String endTime, @Param("gatewayId")String gatewayId, @Param("gatewayId1")String gatewayId1, @Param("address1")String address1);
@Select("select `value` from iot_data.indicator_data where id =#{id}")
List<IndicatorData> selectDataById (@Param("id")String id);
@Select("select `id`, `value` from iot_data.indicator_data where `address` in (${addresses}) and gateway_id = #{gatewayId}")
List<IndicatorData> selectByAddresses(@Param("addresses") String addresses, @Param("gatewayId") String gatewayId);
}
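selectByAddresses splices the caller-supplied string directly into the IN clause via ${addresses}, so the joined value must already be a list of valid SQL literals and must never contain untrusted input. The controller above joins raw address strings with plain commas; a hedged sketch of quoting them first, assuming the addresses are plain strings (class and method names are hypothetical):

import java.util.List;
import java.util.stream.Collectors;

// Sketch: wrap each address in single quotes so ${addresses} expands to in ('a1','a2',...).
public final class AddressJoinSketch {

    public static String toInClause(List<String> addresses) {
        return addresses.stream()
                .map(a -> "'" + a.replace("'", "''") + "'")
                .collect(Collectors.joining(","));
    }
}

Usage would replace the plain String.join in getAddressInfo, e.g. indicatorDataMapper.selectByAddresses(AddressJoinSketch.toInClause(addressInfo), "1668801435891929089").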
......@@ -142,27 +142,33 @@ pictureUrl=upload/jxiop/syz/
#kafka
spring.kafka.bootstrap-servers=172.16.10.215:9092
spring.kafka.bootstrap-servers=139.9.173.44:9092
spring.kafka.producer.retries=1
spring.kafka.producer.bootstrap-servers=172.16.10.215:9092
spring.kafka.producer.bootstrap-servers=139.9.173.44:9092
spring.kafka.producer.batch-size=16384
spring.kafka.producer.buffer-memory=33554432
spring.kafka.producer.acks=1
spring.kafka.producer.key-serializer=org.apache.kafka.common.serialization.StringSerializer
spring.kafka.producer.value-serializer=org.apache.kafka.common.serialization.StringSerializer
spring.kafka.consumer.group-id=consumerGroup
spring.kafka.consumer.bootstrap-servers=172.16.10.215:9092
spring.kafka.consumer.bootstrap-servers=139.9.173.44:9092
spring.kafka.consumer.enable-auto-commit=false
spring.kafka.consumer.auto-offset-reset=earliest
spring.kafka.consumer.key-deserializer=org.apache.kafka.common.serialization.StringDeserializer
spring.kafka.consumer.value-deserializer=org.apache.kafka.common.serialization.StringDeserializer
spring.kafka.listener.ack-mode=manual_immediate
spring.kafka.listener.type=batch
spring.kafka.consumer.max-poll-records=100
spring.kafka.consumer.fetch-max-wait= 10000
# records per poll && worker thread count
spring.kafka.consumer.max-poll-records=30
#spring.kafka.consumer.fetch-max-wait= 10000
# number of months to look back from the current time (history window)
last.month.num = 12
# correlation algorithm endpoint
base.url.XGX=http://139.9.171.247:8052/intelligent-analysis/correlation
# working-condition division algorithm endpoint
base.url.GKHF=http://139.9.171.247:8052/intelligent-analysis/working-condition-division
# central-value algorithm endpoint
base.url.ZXZ=http://139.9.171.247:8052/intelligent-analysis/central-value
......@@ -767,4 +767,8 @@
STATUS IS NOT NULL
OR STATUS != ''
</select>
<select id="getAddressInfo" resultType="java.lang.String">
select index_address from wl_equipment_specific_index where gateway_id = '1668801435891929089' and data_type = 'analog' limit 100
</select>
</mapper>