Commit 854b02af authored by 李成龙

Merge remote-tracking branch 'origin/developer' into developer

parents e2a2d1bd 70fd8fd0
package com.yeejoin.amos.kgd.config;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.*;
import java.net.Socket;
/**
* @Author: xl
* @Description:
* @Date: 2023/10/10 16:21
*/
public class ClientHandler implements Runnable {
private static final Logger log = LoggerFactory.getLogger(ClientHandler.class);
private Socket socket;
public ClientHandler(Socket socket) {
this.socket = socket;
}
@Override
public void run() {
try {
log.info("Received message from client: ");
// 获取输入流和输出流
InputStream inputStream = socket.getInputStream();
// 处理客户端请求,逻辑写到这⬇⬇⬇⬇⬇⬇⬇
log.info("Received message from client: ");
// Close the connection
socket.close();
} catch (IOException e) {
e.printStackTrace();
}
}
public static void main(String[] args) throws Exception {
requestInfoToSocketServer();
}
private static void requestInfoToSocketServer() {
try {
Socket socket = new Socket("127.0.0.1", 7777);
PrintWriter out = new PrintWriter(socket.getOutputStream(), true);
out.write("我是客户端");
out.flush();
socket.shutdownOutput();
// Read the server's reply
BufferedReader in = new BufferedReader(new InputStreamReader(socket.getInputStream()));
log.info("接收到服务端的回复:" + in.readLine());
} catch (Exception e) {
log.info("Socket传输数据异常!" + e.getMessage());
}
}
}
\ No newline at end of file
package com.yeejoin.amos.kgd.config;
import org.springframework.context.annotation.Configuration;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;
import javax.annotation.PostConstruct;
import java.io.IOException;
import java.net.ServerSocket;
import java.net.Socket;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
/**
* @Author: xl
* @Description:
* @Date: 2023/10/9 18:45
*/
@Configuration
@Component
public class SocketConfig {
private static final Logger log = LoggerFactory.getLogger(SocketConfig.class);
@Value("${socket.port}")
private Integer port;
private static final ThreadPoolExecutor threadpool = new ThreadPoolExecutor(15, 15,
10L, TimeUnit.SECONDS, new LinkedBlockingQueue<Runnable>());
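// Fixed pool of 15 handler threads backed by an unbounded queue: extra client connections wait in the
// queue until a handler thread becomes free.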
@PostConstruct
public void init() {
socketStart();
}
public void socketStart() {
// Start the socket server on a separate thread so it does not block application startup
new Thread(this::socketServer).start();
}
private void socketServer() {
ServerSocket ss = null;
try {
ss = new ServerSocket(port);
log.info("socket端口在: 【{}】中开启并持续监听=====>", port);
while (true) {
Socket socket = ss.accept();
// Hand each accepted connection off to the worker pool
log.info("接收到客户端socket: {}", socket.getRemoteSocketAddress());
threadpool.execute(new ClientHandler(socket));
}
} catch (IOException e) {
throw new RuntimeException(e);
}
}
}
......@@ -269,6 +269,48 @@ public class UnitInfoController extends BaseController {
@ApiOperation(httpMethod = "POST", value = "单位注册", notes = "单位注册")
public ResponseModel<UnitRegisterDto> save(@RequestBody UnitRegisterDto model) {
try {
// Reject registrations with a duplicate company name
LambdaQueryWrapper<UnitInfo> qudg=new LambdaQueryWrapper<>();
qudg.eq(UnitInfo::getName,model.getUnitInfoDto().getName());
qudg.eq(UnitInfo::getIsDelete,0);
List<UnitInfo> unitInfo= unitInfoMapper.selectList(qudg);
if(unitInfo!=null&&!unitInfo.isEmpty()){
throw new BadRequest("公司名称重复");
}
// Reject registrations with a duplicate unified social credit code
LambdaQueryWrapper<CommerceInfo> queryWrapper1 = new LambdaQueryWrapper<CommerceInfo>();
queryWrapper1.eq(CommerceInfo::getCreditCode, model.getCommerceInfoDto().getCreditCode());
queryWrapper1.eq(CommerceInfo::getIsDelete,0);
List<CommerceInfo> commerceInfo= commerceInfoMapper.selectList(queryWrapper1);
if(commerceInfo!=null&&!commerceInfo.isEmpty()){
throw new BadRequest("统一信用代码重复");
}
// Reject duplicate administrator login names
LambdaQueryWrapper<UnitInfo> qudg1=new LambdaQueryWrapper<>();
qudg1.eq(UnitInfo::getAdminLoginName,model.getUnitInfoDto().getAdminLoginName());
qudg1.eq(UnitInfo::getIsDelete,0);
List<UnitInfo> unitInfo1= unitInfoMapper.selectList(qudg1);
if(unitInfo1!=null&&!unitInfo1.isEmpty()){
throw new BadRequest("管理员账户名重复,请更换");
}
LambdaQueryWrapper<PublicAgencyUser> qud2=new LambdaQueryWrapper<>();
qud2.eq(PublicAgencyUser::getAmosUserName,model.getUnitInfoDto().getAdminLoginName());
List<PublicAgencyUser> publicAgencyUse= publicAgencyUserMapper.selectList(qud2);
if(publicAgencyUse!=null&&!publicAgencyUse.isEmpty()){
throw new BadRequest("管理员账户名重复,请更换");
}
RequestContext.setAppKey("AMOS_STUDIO");
RequestContext.setProduct("AMOS_STUDIO_WEB");
RequestContext.setToken(requestContext.getToken());
......
......@@ -171,7 +171,7 @@ public class UnitInfoServiceImpl extends BaseService<UnitInfoDto,UnitInfo,UnitIn
regUnitInfo.setManagementUnit("经销商");
try {
// 1. Call the platform to create the unit and the user account
this.createCompanyAndUser(regUnitInfo);
// this.createCompanyAndUser(regUnitInfo);
// 2. Insert into the unit table
// regUnitInfo = this.createWithModel(regUnitInfo);
regUnitInfo = this.createWithModelnew(regUnitInfo);
......@@ -187,44 +187,44 @@ public class UnitInfoServiceImpl extends BaseService<UnitInfoDto,UnitInfo,UnitIn
// Insert the basic personnel information records
PersonnelBusiness re=new PersonnelBusiness();
PublicAgencyUser publicAgencyUser=new PublicAgencyUser();
publicAgencyUser.setAmosUserId(regUnitInfo.getAdminUserId());
publicAgencyUser.setAmosUserName(regUnitInfo.getAdminLoginName());
publicAgencyUser.setRealName(regUnitInfo.getAdminLoginName());
publicAgencyUser.setRole("["+regUnitInfo.getRoleId()+"]");
publicAgencyUser.setEmergencyTelephone(regUnitInfo.getAdminPhone());
publicAgencyUser.setLockStatus("LOCK");
publicAgencyUserMapper.insert(publicAgencyUser);
re.setAmosDealerId(regUnitInfo.getAmosCompanySeq());
re.setAmosUnitId(regUnitInfo.getAmosCompanySeq());
re.setAmosUnitName(regUnitInfo.getName());
re.setAmosUnitOrgCode(regUnitInfo.getAmosCompanyCode());
re.setFoundationId(publicAgencyUser.getSequenceNbr());
re.setUserType("2");
personnelBusinessMapper.insert(re);
// PublicAgencyUser publicAgencyUser=new PublicAgencyUser();
// publicAgencyUser.setAmosUserId(regUnitInfo.getAdminUserId());
// publicAgencyUser.setAmosUserName(regUnitInfo.getAdminLoginName());
// publicAgencyUser.setRealName(regUnitInfo.getAdminLoginName());
// publicAgencyUser.setRole("["+regUnitInfo.getRoleId()+"]");
// publicAgencyUser.setEmergencyTelephone(regUnitInfo.getAdminPhone());
// publicAgencyUser.setLockStatus("LOCK");
// publicAgencyUserMapper.insert(publicAgencyUser);
//
// PersonnelBusiness re=new PersonnelBusiness();
// re.setAmosDealerId(regUnitInfo.getAmosCompanySeq());
// re.setAmosUnitId(regUnitInfo.getAmosCompanySeq());
// re.setAmosUnitName(regUnitInfo.getName());
// re.setAmosUnitOrgCode(regUnitInfo.getAmosCompanyCode());
// re.setFoundationId(publicAgencyUser.getSequenceNbr());
// re.setUserType("2");
// personnelBusinessMapper.insert(re);
model.setCommerceInfoDto(commerceInfo);
model.setUnitInfoDto(regUnitInfo);
} catch (Exception e) {
log.error(e.getMessage(), e);
// Roll back on failure: delete the company information that was already created
if (!ObjectUtils.isEmpty(regUnitInfo.getAmosCompanySeq())) {
FeignClientResult<CompanyModel> feignClientResult = Privilege.companyClient
.seleteOne(regUnitInfo.getAmosCompanySeq());
if (feignClientResult != null) {
Privilege.companyClient.deleteCompany(regUnitInfo.getAmosCompanySeq().toString());
}
}
// Roll back on failure: delete the administrator account that was already created
if (StringUtils.isNotEmpty(regUnitInfo.getAdminUserId())) {
FeignClientResult<AgencyUserModel> feignClientResult = Privilege.agencyUserClient
.queryByUserId(regUnitInfo.getAdminUserId());
if (feignClientResult != null) {
Privilege.agencyUserClient.multDeleteUser(regUnitInfo.getAdminUserId());
}
}
// if (!ObjectUtils.isEmpty(regUnitInfo.getAmosCompanySeq())) {
// FeignClientResult<CompanyModel> feignClientResult = Privilege.companyClient
// .seleteOne(regUnitInfo.getAmosCompanySeq());
// if (feignClientResult != null) {
// Privilege.companyClient.deleteCompany(regUnitInfo.getAmosCompanySeq().toString());
// }
// }
// // 失败后回滚:删除已经创建的管理员账号
// if (StringUtils.isNotEmpty(regUnitInfo.getAdminUserId())) {
// FeignClientResult<AgencyUserModel> feignClientResult = Privilege.agencyUserClient
// .queryByUserId(regUnitInfo.getAdminUserId());
// if (feignClientResult != null) {
// Privilege.agencyUserClient.multDeleteUser(regUnitInfo.getAdminUserId());
// }
// }
throw new RuntimeException(e.getMessage());
}
return model;
......@@ -298,6 +298,89 @@ public class UnitInfoServiceImpl extends BaseService<UnitInfoDto,UnitInfo,UnitIn
return result;
}
private void createCompanyAndUsernew(UnitInfo regUnitInfo) {
CompanyModel companyInfo = new CompanyModel();
FeignClientResult<AgencyUserModel> userResult = null;
try {
//FeignClientResult<List<RoleModel>> roleListResult = Privilege.roleClient.queryRoleList(null, null);
// List<RoleModel> allRoleList = roleListResult.getResult();
List<RoleModel> userRoleList = new ArrayList<>();
List<Long> roleIds = new ArrayList<>();
// 1. Create the company
companyInfo.setAddress(regUnitInfo.getRegisterPcd());
companyInfo.setAgencyCode("JXIOP");
companyInfo.setParentId(Long.parseLong(regUnitInfo.getManagementUnitId()));
companyInfo.setLevel("station");
companyInfo.setCompanyName(regUnitInfo.getName());
// companyInfo.setCompanyCode(regUnitInfo.getUnitType());
companyInfo.setContact(regUnitInfo.getHeadName());
companyInfo.setCompanyType(regUnitInfo.getUnitType());
companyInfo.setLandlinePhone(regUnitInfo.getHeadPhone());
FeignClientResult<CompanyModel> companyResult = Privilege.companyClient.create(companyInfo);
if (companyResult == null || companyResult.getStatus() != 200) {
throw new BadRequest("单位注册失败!" + (companyResult == null ? "" : companyResult.getDevMessage()));
}
String adminUserName = regUnitInfo.getAdminUserName();
String loginName = regUnitInfo.getAdminLoginName();
String pwd = regUnitInfo.getAdminLoginPwd();
String adminTel = regUnitInfo.getAdminPhone();
// 2. Create the platform user
companyInfo = companyResult.getResult();
AgencyUserModel agencyUserModel = new AgencyUserModel();
agencyUserModel.setUserName(loginName);
agencyUserModel.setRealName(adminUserName);
agencyUserModel.setLockStatus("UNLOCK");
agencyUserModel.setPassword(pwd);
agencyUserModel.setRePassword(pwd);
agencyUserModel.setAgencyCode("JXIOP");
agencyUserModel.setMobile(adminTel);
List<String> split = Arrays.asList(StringUtils.split(appCodes, ','));
Map<Long, List<Long>> roleSeqMap = new HashMap<>();
Map<Long, List<RoleModel>> orgRoles = new HashMap<>();
// userRoleList = allRoleList.stream().filter(r -> r.getSequenceNbr().toString().equals(regUnitInfo.getRoleId()))
// .collect(Collectors.toList());
// userRoleList.forEach(r -> {
// if (!roleIds.contains(r.getSequenceNbr())) {
// roleIds.add(r.getSequenceNbr());
// }
// });
// roleIds.add(Long.valueOf(regUnitInfo.getRoleId()));
roleIds.add(userGroupId);
roleSeqMap.put(companyInfo.getSequenceNbr(), roleIds);
orgRoles.put(companyInfo.getSequenceNbr(), userRoleList);
agencyUserModel.setOrgRoles(orgRoles);
agencyUserModel.setOrgRoleSeqs(roleSeqMap);
userResult = Privilege.agencyUserClient.create(agencyUserModel);
if (userResult == null || userResult.getStatus() != 200) {
throw new BadRequest("单位注册失败!" + (userResult == null ? "" : userResult.getDevMessage()));
}
regUnitInfo.setAdminUserId(userResult.getResult().getUserId());
regUnitInfo.setAmosCompanySeq(companyInfo.getSequenceNbr());
regUnitInfo.setAmosCompanyCode(companyInfo.getOrgCode());
List<String> userId = new ArrayList<>();
userId.add(userResult.getResult().getUserId());
// Add the newly created user to the user group
Privilege.groupUserClient.create(userGroupId, userId);
} catch (Exception e) {
// Delete the company information that was already created
if (companyInfo != null && companyInfo.getSequenceNbr() != null) {
Privilege.companyClient.deleteCompany(companyInfo.getSequenceNbr() + "");
}
if (userResult != null && userResult.getResult() != null
&& StringUtils.isNotEmpty(userResult.getResult().getUserId())) {
Privilege.agencyUserClient.multDeleteUser(userResult.getResult().getUserId());
}
log.error(e.getMessage(), e);
throw new RuntimeException(e.getMessage());
}
}
private void createCompanyAndUser(UnitInfoDto regUnitInfo) {
CompanyModel companyInfo = new CompanyModel();
FeignClientResult<AgencyUserModel> userResult = null;
......@@ -435,9 +518,10 @@ public class UnitInfoServiceImpl extends BaseService<UnitInfoDto,UnitInfo,UnitIn
public String powerStationExamine(long pageId, String nodeCode, String stationId, String taskId, String planInstanceId, Map<String, Object> kv) {
// 2. Update the audit record table
UnitInfo unitInfo=null;
try{
DealerReview dealerReview= dealerReviewMapper.selectOne(new QueryWrapper<DealerReview>().eq("unit_info_id", stationId));
UnitInfo unitInfo= this.getById(stationId);
unitInfo= this.getById(stationId);
DealerReviewEnum nodeByCode = DealerReviewEnum.getNodeByCode(nodeCode);
String approvalStatue="";
if (DealerReviewEnum.经销商管理员审核.getCode().equals(nodeCode)) {
......@@ -458,8 +542,29 @@ public class UnitInfoServiceImpl extends BaseService<UnitInfoDto,UnitInfo,UnitIn
// 1. Update the dealer status
unitInfo.setAuditStatus(2);
unitInfo.setBlacklist(0);
Privilege.agencyUserClient.unlockUsers(unitInfo.getAdminUserId());
approvalStatue="任务明细:"+DealerReviewEnum.经销商管理员审核.getName()+"审核通过";
this.createCompanyAndUsernew(unitInfo);
PublicAgencyUser publicAgencyUser=new PublicAgencyUser();
publicAgencyUser.setAmosUserId(unitInfo.getAdminUserId());
publicAgencyUser.setAmosUserName(unitInfo.getAdminLoginName());
publicAgencyUser.setRealName(unitInfo.getAdminLoginName());
publicAgencyUser.setRole("["+unitInfo.getRoleId()+"]");
publicAgencyUser.setEmergencyTelephone(unitInfo.getAdminPhone());
publicAgencyUser.setLockStatus("UNLOCK");
publicAgencyUserMapper.insert(publicAgencyUser);
PersonnelBusiness re=new PersonnelBusiness();
re.setAmosDealerId(unitInfo.getAmosCompanySeq());
re.setAmosUnitId(unitInfo.getAmosCompanySeq());
re.setAmosUnitName(unitInfo.getName());
re.setAmosUnitOrgCode(unitInfo.getAmosCompanyCode());
re.setFoundationId(publicAgencyUser.getSequenceNbr());
re.setUserType("2");
personnelBusinessMapper.insert(re);
// Privilege.agencyUserClient.unlockUsers(unitInfo.getAdminUserId());
approvalStatue="任务明细:"+DealerReviewEnum.经销商管理员审核.getName()+"审核通过";
}
}
// 2. Update the workflow status
......@@ -479,6 +584,26 @@ public class UnitInfoServiceImpl extends BaseService<UnitInfoDto,UnitInfo,UnitIn
}
this.saveOrUpdate(unitInfo);
}catch (Exception e){
if (!ObjectUtils.isEmpty(unitInfo.getAmosCompanySeq())) {
FeignClientResult<CompanyModel> feignClientResult = Privilege.companyClient
.seleteOne(unitInfo.getAmosCompanySeq());
if (feignClientResult != null) {
Privilege.companyClient.deleteCompany(unitInfo.getAmosCompanySeq().toString());
}
}
// Roll back on failure: delete the administrator account that was already created
if (StringUtils.isNotEmpty(unitInfo.getAdminUserId())) {
FeignClientResult<AgencyUserModel> feignClientResult = Privilege.agencyUserClient
.queryByUserId(unitInfo.getAdminUserId());
if (feignClientResult != null) {
Privilege.agencyUserClient.multDeleteUser(unitInfo.getAdminUserId());
}
}
throw new BaseException("获取工作流节点失败!","400","获取工作流节点失败!");
}
......
......@@ -47,6 +47,17 @@
<artifactId>taos-jdbcdriver</artifactId>
<version>3.2.4</version>
</dependency>
<!-- Kafka dependency -->
<dependency>
<groupId>org.springframework.kafka</groupId>
<artifactId>spring-kafka</artifactId>
</dependency>
<dependency>
<groupId>net.sf.json-lib</groupId>
<artifactId>json-lib</artifactId>
<version>2.4</version>
<classifier>jdk15</classifier>
</dependency>
</dependencies>
<build>
......
package com.yeejoin.amos.boot.module.jxiop.biz.controller;
import cn.hutool.core.date.DateUtil;
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.yeejoin.amos.boot.biz.common.controller.BaseController;
import com.yeejoin.amos.boot.biz.common.utils.DateUtils;
......@@ -67,6 +68,30 @@ public class AnalyseController extends BaseController {
}
return ResponseHelper.buildResponse(commonServiceImpl.getFanConditionVariablesByTimeAnalyseThread(startTime, endTime));
}
@TycloudOperation(ApiLevel = UserType.AGENCY, needAuth = false)
@PostMapping(value = "/getFanConditionVariablesByTimeAnalyseNew")
@ApiOperation(httpMethod = "POST", value = "相关性分析 - 风机 - 新", notes = "相关性分析 - 风机 - 新")
public ResponseModel<Object> getFanConditionVariablesByTimeAnalyseNew() throws InterruptedException {
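// chuli() queues the work items, drains them on a worker pool and blocks this request for a fixed
// wait window before returning (see CommonServiceImpl.chuli()).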
commonServiceImpl.chuli();
return ResponseHelper.buildResponse(null);
}
@TycloudOperation(ApiLevel = UserType.AGENCY, needAuth = false)
@ApiOperation(httpMethod = "GET", value = "相关性分析 - 光伏 - 新", notes = "相关性分析 - 光伏 - 新")
@GetMapping(value = "/getPvConditionVariablesByTimeAnalyseNew")
public ResponseModel<String> getPvConditionVariablesByTimeAnalyseNew(@RequestParam(required = false) String startTime, @RequestParam(required = false) String endTime) throws InterruptedException {
if (StringUtils.isEmpty(startTime) && StringUtils.isEmpty(endTime) ){
startTime = DateUtils.convertDateToString(DateUtil.beginOfYear(new Date()), DateUtils.DATE_TIME_PATTERN);
endTime = DateUtils.convertDateToString(DateUtils.getCurrentDayEndTime(new Date()), DateUtils.DATE_TIME_PATTERN);
}
commonServiceImpl.chuliPv(startTime, endTime);
return ResponseHelper.buildResponse(commonServiceImpl.getPvConditionVariablesByTimeAnalyseThread(startTime, endTime));
}
@TycloudOperation(ApiLevel = UserType.AGENCY, needAuth = false)
@ApiOperation(httpMethod = "GET", value = "相关性分析-风机", notes = "相关性分析-风机")
@GetMapping(value = "/getPvConditionVariablesByTimeAnalyse")
......
package com.yeejoin.amos.boot.module.jxiop.biz.controller;
import com.yeejoin.amos.boot.biz.common.utils.RedisUtils;
import com.yeejoin.amos.boot.module.jxiop.biz.kafka.FanConditionVariablesMessage;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import org.typroject.tyboot.core.foundation.context.RequestContext;
import org.typroject.tyboot.core.foundation.enumeration.UserType;
import org.typroject.tyboot.core.restful.doc.TycloudOperation;
import org.typroject.tyboot.core.restful.utils.ResponseHelper;
import org.typroject.tyboot.core.restful.utils.ResponseModel;
import static com.yeejoin.amos.boot.module.jxiop.biz.kafka.Constant.*;
@RestController
@Api(tags = "智能分析")
@RequestMapping(value = "/kafkaAnalyse")
public class KafkaAnalyseController {
@Autowired
FanConditionVariablesMessage fanConditionVariablesMessage;
@Autowired
RedisUtils redisUtils;
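// Each endpoint below uses a Redis key as a simple re-entry guard: if the key already exists a computation
// is still running, otherwise the work items are published to Kafka and the key is set with a 300-second
// TTL (the consumers keep extending it while they are processing).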
@TycloudOperation(ApiLevel = UserType.AGENCY, needAuth = false)
@PostMapping(value = "/getFanConditionVariables")
@ApiOperation(httpMethod = "POST", value = "计算相关性分析 - 风机 - 新", notes = "计算相关性分析 - 风机 - 新")
public ResponseModel<Object> getFanConditionVariables() {
if (redisUtils.hasKey(kafkaTopicConsumer)) {
return ResponseHelper.buildResponse("计算中");
}
fanConditionVariablesMessage.getFanConditionVariables();
redisUtils.set(kafkaTopicConsumer, RequestContext.getTraceId(), 300);
return ResponseHelper.buildResponse("计算中");
}
@TycloudOperation(ApiLevel = UserType.AGENCY, needAuth = false)
@PostMapping(value = "/getPvConditionVariables")
@ApiOperation(httpMethod = "POST", value = "计算相关性分析 - 光伏 - 新", notes = "计算相关性分析 - 光伏 - 新")
public ResponseModel<Object> getPvConditionVariables() {
if (redisUtils.hasKey(kafkaTopicConsumerPv)) {
return ResponseHelper.buildResponse("计算中");
}
fanConditionVariablesMessage.getPvConditionVariables();
redisUtils.set(kafkaTopicConsumerPv, RequestContext.getTraceId(), 300);
return ResponseHelper.buildResponse("计算中");
}
@TycloudOperation(ApiLevel = UserType.AGENCY, needAuth = false)
@PostMapping(value = "/getFanConditionVariablesGKHF")
@ApiOperation(httpMethod = "POST", value = "工况划分 - 风电 - 新", notes = "工况划分 - 风电 - 新")
public ResponseModel<Object> getFanConditionVariablesGKHF() {
if (redisUtils.hasKey(kafkaTopicConsumerGKHFFan)) {
return ResponseHelper.buildResponse("计算中");
}
fanConditionVariablesMessage.getFanConditionVariablesGKHF();
redisUtils.set(kafkaTopicConsumerGKHFFan, RequestContext.getTraceId(), 300);
return ResponseHelper.buildResponse("计算中");
}
@TycloudOperation(ApiLevel = UserType.AGENCY, needAuth = false)
@PostMapping(value = "/getPvConditionVariablesPvGKHF")
@ApiOperation(httpMethod = "POST", value = "工况划分 - 光伏 - 新", notes = "工况划分 - 光伏 - 新")
public ResponseModel<Object> getPvConditionVariablesPvGKFX() {
if (redisUtils.hasKey(kafkaTopicConsumerGKHFPv)) {
return ResponseHelper.buildResponse("计算中");
}
fanConditionVariablesMessage.getPvConditionVariablesPvGKFX();
redisUtils.set(kafkaTopicConsumerGKHFPv, RequestContext.getTraceId(), 300);
return ResponseHelper.buildResponse("计算中");
}
}
package com.yeejoin.amos.boot.module.jxiop.biz.kafka;
import cn.hutool.core.date.DateUtil;
import com.yeejoin.amos.boot.biz.common.utils.DateUtils;
import java.util.Date;
/**
* @author LiuLin
* @date 2023年09月02日 11:02
*/
public interface Constant {
String INSERT = "INSERT";
String UPDATE = "UPDATE";
String DATA = "data";
String TOPIC = "topic";
String TABLE = "table";
String TYPE = "type";
String DB_TYPE = "dbType";
String BODY = "body";
String DATA_TYPE = "datatype";
String STATE = "state";
// Wind-turbine correlation consumer topic
String kafkaTopicConsumer = "FanConditionVariables";
// PV correlation consumer topic
String kafkaTopicConsumerPv = "PvConditionVariables";
// Wind-turbine working-condition interval division topic
String kafkaTopicConsumerGKHFFan = "FanConditionVariablesGKHF";
// PV working-condition interval division topic
String kafkaTopicConsumerGKHFPv = "PvConditionVariablesGKHF";
// Fields declared in an interface are implicitly static final, so Spring's @Value cannot inject into them;
// the look-back window is fixed at 12 months and startTime/endTime below are computed once when the interface is initialized.
Integer lastMonthNum = 12;
String startTime = DateUtils.convertDateToString(DateUtil.offsetMonth(new Date(), -lastMonthNum), DateUtils.DATE_TIME_PATTERN);
String endTime = DateUtils.convertDateToString(DateUtils.getCurrentDayEndTime(new Date()), DateUtils.DATE_TIME_PATTERN);
// Correlation analysis service
String baseUrlXGX = "http://139.9.171.247:8052/intelligent-analysis/correlation";
// Working-condition division service
String baseUrlGKHF = "http://139.9.171.247:8052/intelligent-analysis/working-condition-division";
}
package com.yeejoin.amos.boot.module.jxiop.biz.kafka;
import com.alibaba.fastjson.JSON;
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.yeejoin.amos.boot.module.jxiop.biz.entity.IdxBizFanPointProcessVariableClassification;
import com.yeejoin.amos.boot.module.jxiop.biz.entity.IdxBizFanPointVarCorrelation;
import com.yeejoin.amos.boot.module.jxiop.biz.entity.IdxBizPvPointProcessVariableClassification;
import com.yeejoin.amos.boot.module.jxiop.biz.entity.IdxBizPvPointVarCorrelation;
import com.yeejoin.amos.boot.module.jxiop.biz.mapper2.IdxBizFanPointProcessVariableClassificationMapper;
import com.yeejoin.amos.boot.module.jxiop.biz.mapper2.IdxBizFanPointVarCorrelationMapper;
import com.yeejoin.amos.boot.module.jxiop.biz.mapper2.IdxBizPvPointProcessVariableClassificationMapper;
import com.yeejoin.amos.boot.module.jxiop.biz.mapper2.IdxBizPvPointVarCorrelationMapper;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import java.util.List;
import static com.yeejoin.amos.boot.module.jxiop.biz.kafka.Constant.*;
@Service
@Slf4j
public class FanConditionVariablesMessage {
@Autowired
private IdxBizFanPointVarCorrelationMapper pointVarCorrelationMapper;
@Autowired
private IdxBizPvPointVarCorrelationMapper pointVarCorrelationMapperPv;
@Autowired
private IdxBizFanPointProcessVariableClassificationMapper classificationMapperFan;
@Autowired
private IdxBizPvPointProcessVariableClassificationMapper classificationMapperPv;
@Autowired
private KafkaProducerService kafkaProducerService;
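// Each entry point below reads the relevant rows from the database and publishes one Kafka message per row;
// the matching listeners in KafkaConsumerService consume them and perform the actual computation.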
// Correlation analysis - wind turbine entry point
public void getFanConditionVariables() {
List<IdxBizFanPointVarCorrelation> pointVarCorrelationsList = pointVarCorrelationMapper.selectList(null);
pointVarCorrelationsList.forEach(item -> kafkaProducerService.sendMessageAsync(kafkaTopicConsumer, JSON.toJSONString(item)));
}
// Correlation analysis - PV entry point
public void getPvConditionVariables() {
List<IdxBizPvPointVarCorrelation> pointVarCorrelationsList = pointVarCorrelationMapperPv.selectList(null);
pointVarCorrelationsList.forEach(item -> kafkaProducerService.sendMessageAsync(kafkaTopicConsumerPv, JSON.toJSONString(item)));
}
// Working-condition division - wind turbines - new
public void getFanConditionVariablesGKHF() {
List<IdxBizFanPointProcessVariableClassification> variableClassificationList = classificationMapperFan.selectList(new QueryWrapper<IdxBizFanPointProcessVariableClassification>().isNotNull("SEQUENCE_NBR").eq("TAG_CODE", "工况变量"));
variableClassificationList.forEach(item -> kafkaProducerService.sendMessageAsync(kafkaTopicConsumerGKHFFan, JSON.toJSONString(item)));
}
// Working-condition division - PV - new
public void getPvConditionVariablesPvGKFX() {
List<IdxBizPvPointProcessVariableClassification> variableClassificationList = classificationMapperPv.selectList(new QueryWrapper<IdxBizPvPointProcessVariableClassification>().isNotNull("SEQUENCE_NBR").eq("TAG_CODE", "工况变量"));
variableClassificationList.forEach(item -> kafkaProducerService.sendMessageAsync(kafkaTopicConsumerGKHFPv, JSON.toJSONString(item)));
}
}
package com.yeejoin.amos.boot.module.jxiop.biz.kafka;
import cn.hutool.http.HttpUtil;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.yeejoin.amos.boot.biz.common.utils.RedisUtils;
import com.yeejoin.amos.boot.module.jxiop.biz.entity.*;
import com.yeejoin.amos.boot.module.jxiop.biz.mapper2.IdxBizFanPointProcessVariableClassificationMapper;
import com.yeejoin.amos.boot.module.jxiop.biz.service.impl.IdxBizFanPointProcessVariableClassificationServiceImpl;
import com.yeejoin.amos.boot.module.jxiop.biz.service.impl.IdxBizFanPointVarCorrelationServiceImpl;
import com.yeejoin.amos.boot.module.jxiop.biz.service.impl.IdxBizPvPointProcessVariableClassificationServiceImpl;
import com.yeejoin.amos.boot.module.jxiop.biz.service.impl.IdxBizPvPointVarCorrelationServiceImpl;
import com.yeejoin.amos.boot.module.jxiop.biz.tdmapper.IndicatorDataMapper;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.kafka.support.Acknowledgment;
import org.springframework.stereotype.Service;
import java.util.*;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.stream.Collectors;
import static com.yeejoin.amos.boot.module.jxiop.biz.kafka.Constant.*;
/**
* Kafka consumer service
*
* @author litw
* @create 2022/11/1 10:06
**/
@Slf4j
@Service
public class KafkaConsumerService {
@Autowired
private IndicatorDataMapper indicatorDataMapper;
@Autowired
RedisUtils redisUtils;
@Autowired
private IdxBizFanPointVarCorrelationServiceImpl idxBizFanPointVarCorrelationService;
@Autowired
private IdxBizPvPointVarCorrelationServiceImpl idxBizPvPointVarCorrelationService;
@Autowired
IdxBizFanPointProcessVariableClassificationServiceImpl idxBizFanPointProcessVariableClassificationService;
@Autowired
IdxBizPvPointProcessVariableClassificationServiceImpl idxBizPvPointProcessVariableClassificationService;
ExecutorService service = Executors.newFixedThreadPool(30);
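// Each listener below acknowledges its batch immediately (at-most-once delivery) and then processes the
// records in parallel on this 30-thread pool, joining on the resulting CompletableFutures.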
/**
* Batch-consume Kafka messages [wind farm correlation]
*
* @param consumerRecords messages
* @param ack ack
*/
@KafkaListener(id = "xgxFanConsumer", groupId = "consumerGroup", topics = kafkaTopicConsumer)
public void listenXGXFan(List<ConsumerRecord<String, String>> consumerRecords, Acknowledgment ack) {
ack.acknowledge();
try {
CompletableFuture[] completableFutures = consumerRecords.stream().map(m -> CompletableFuture.supplyAsync(() -> consumerRecords(m), service)).toArray(CompletableFuture[]::new);
CompletableFuture.allOf(completableFutures).join();
} finally {
}
}
/**
* Process one wind-turbine correlation message
* @param consumerRecord
* @return
*/
boolean consumerRecords(ConsumerRecord<String, String> consumerRecord) {
try {
Optional<?> kafkaMessage = Optional.ofNullable(consumerRecord.value());
if (kafkaMessage.isPresent()) {
IdxBizFanPointVarCorrelation fanPointVarCorrelation = JSON.parseObject(kafkaMessage.get().toString(), IdxBizFanPointVarCorrelation.class);
List<IndicatorData> tdengineData1 = indicatorDataMapper.selectDataByAddressAndtimeNew(fanPointVarCorrelation.getAnalysisIndexAddress(), startTime, endTime, fanPointVarCorrelation.getAnalysisGatewayId(), fanPointVarCorrelation.getProcessGatewayId(), fanPointVarCorrelation.getProcessIndexAddress());
List<Double> data1 = new ArrayList<>();
List<Double> data2 = new ArrayList<>();
tdengineData1.forEach(item -> {
if (item.getAddress().equals(fanPointVarCorrelation.getAnalysisIndexAddress()) && item.getGatewayId().equals(fanPointVarCorrelation.getAnalysisGatewayId())) {
data1.add(Double.parseDouble(item.getValue()));
} else {
data2.add(Double.parseDouble(item.getValue()));
}
});
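// Align the two series: drop the oldest samples from the longer list so both inputs sent to the
// correlation service have the same length.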
if (data1.size() < data2.size()) {
Integer a = data2.size() - data1.size();
for (int i = 0; i < a; i++) {
data2.remove(0);
}
} else if (data2.size() < data1.size()) {
Integer a = data1.size() - data2.size();
for (int i = 0; i < a; i++) {
data1.remove(0);
}
}
HashMap<String, Object> resultMap = new HashMap<>();
resultMap.put("data1", data1);
resultMap.put("data2", data2);
String response = HttpUtil.createPost(baseUrlXGX).body(JSON.toJSONString(resultMap)).execute().body();
if (response.contains("correlation") && !response.contains("warning")) {
com.alibaba.fastjson.JSONObject jsonObject = JSON.parseObject(response);
fanPointVarCorrelation.setCorrelationCoefficient(jsonObject.getDoubleValue("correlation"));
log.info("------------------------------------------风机相关性::计算成功,待更新表数据----------------------------------------");
} else {
fanPointVarCorrelation.setCorrelationCoefficient(0.0);
}
fanPointVarCorrelation.setRecDate(new Date());
idxBizFanPointVarCorrelationService.saveOrUpdate(fanPointVarCorrelation);
log.info("表数据已更新");
log.info("----------------------------风机相关性--------------分析变量与工况变量相关性分析算法结束----------------------------------------");
log.info("kafka消费zhTestGroup消息{}", consumerRecord);
}
} catch (Exception e) {
log.error("kafka失败,当前失败的批次: data:{}, {}", consumerRecord, e);
} finally {
redisUtils.expire(kafkaTopicConsumer, 600);
}
return true;
}
/**
* Batch-consume Kafka messages [PV correlation]
*
* @param consumerRecords messages
* @param ack ack
*/
@KafkaListener(id = "xgxPvConsumer", groupId = "consumerGroup", topics = kafkaTopicConsumerPv)
public void listenXGXPv(List<ConsumerRecord<String, String>> consumerRecords, Acknowledgment ack) {
ack.acknowledge();
try {
CompletableFuture[] completableFutures = consumerRecords.stream().map(m -> CompletableFuture.supplyAsync(() -> consumerRecordsPv(m), service)).toArray(CompletableFuture[]::new);
CompletableFuture.allOf(completableFutures).join();
} finally {
}
}
/**
* Process one PV correlation message
* @param consumerRecord
* @return
*/
boolean consumerRecordsPv(ConsumerRecord<String, String> consumerRecord) {
try {
Optional<?> kafkaMessage = Optional.ofNullable(consumerRecord.value());
if (kafkaMessage.isPresent()) {
IdxBizPvPointVarCorrelation pvPointVarCorrelation = JSON.parseObject(kafkaMessage.get().toString(), IdxBizPvPointVarCorrelation.class);
List<IndicatorData> tdengineData1 = indicatorDataMapper.selectDataByAddressAndtimeNew(pvPointVarCorrelation.getAnalysisIndexAddress().toString(), startTime, endTime, pvPointVarCorrelation.getAnalysisGatewayId(), pvPointVarCorrelation.getProcessGatewayId(), pvPointVarCorrelation.getProcessIndexAddress());
List<Double> data1 = new ArrayList<>();
List<Double> data2 = new ArrayList<>();
tdengineData1.forEach(item -> {
if (item.getAddress().equals(pvPointVarCorrelation.getAnalysisIndexAddress()) && item.getGatewayId().equals(pvPointVarCorrelation.getAnalysisGatewayId())) {
data1.add(Double.parseDouble(item.getValue()));
} else {
data2.add(Double.parseDouble(item.getValue()));
}
});
if (data1.size() < data2.size()) {
Integer a = data2.size() - data1.size();
for (int i = 0; i < a; i++) {
data2.remove(0);
}
} else if (data2.size() < data1.size()) {
Integer a = data1.size() - data2.size();
for (int i = 0; i < a; i++) {
data1.remove(0);
}
}
HashMap<String, Object> resultMap = new HashMap<>();
resultMap.put("data1", data1);
resultMap.put("data2", data2);
String response = HttpUtil.createPost(baseUrlXGX).body(JSON.toJSONString(resultMap)).execute().body();
if (response.contains("correlation") && !response.contains("warning")) {
com.alibaba.fastjson.JSONObject jsonObject = JSON.parseObject(response);
pvPointVarCorrelation.setCorrelationCoefficient(jsonObject.getDoubleValue("correlation"));
log.info("------------------------------------------光伏相关性::计算成功,待更新表数据----------------------------------------");
} else {
pvPointVarCorrelation.setCorrelationCoefficient(0.0);
}
pvPointVarCorrelation.setRecDate(new Date());
idxBizPvPointVarCorrelationService.saveOrUpdate(pvPointVarCorrelation);
log.info("表数据已更新");
log.info("kafka消费zhTestGroup消息{}", consumerRecord);
}
} catch (Exception e) {
log.error("kafka失败,当前失败的批次: data:{}, {}", consumerRecord, e);
} finally {
redisUtils.expire(kafkaTopicConsumerPv, 600);
}
return true;
}
/**
* Batch-consume Kafka messages [wind-turbine working-condition division]
*
* @param consumerRecords messages
* @param ack ack
*/
@KafkaListener(id = "GKHFFanConsumer", groupId = "consumerGroup", topics = kafkaTopicConsumerGKHFFan)
public void listenGKHFFan(List<ConsumerRecord<String, String>> consumerRecords, Acknowledgment ack) {
ack.acknowledge();
try {
CompletableFuture[] completableFutures = consumerRecords.stream().map(m -> CompletableFuture.supplyAsync(() -> consumerRecordsGKFXFan(m), service)).toArray(CompletableFuture[]::new);
CompletableFuture.allOf(completableFutures).join();
} finally {
}
}
/**
* Process one wind-turbine working-condition division message
* @param consumerRecord
* @return
*/
boolean consumerRecordsGKFXFan(ConsumerRecord<String, String> consumerRecord) {
try {
Optional<?> kafkaMessage = Optional.ofNullable(consumerRecord.value());
if (kafkaMessage.isPresent()) {
IdxBizFanPointProcessVariableClassification fanPointVarCorrelation = JSON.parseObject(kafkaMessage.get().toString(), IdxBizFanPointProcessVariableClassification.class);
List<IndicatorData> tdengineData1 = indicatorDataMapper.selectDataByAddressAndtime(fanPointVarCorrelation.getIndexAddress(), startTime, endTime, fanPointVarCorrelation.getGatewayId());
HashMap<String, Object> resultMap = new HashMap<>();
resultMap.put("processVariable", tdengineData1.stream().map(t -> Double.parseDouble(t.getValue())).collect(Collectors.toList()));
resultMap.put("processVariableId", fanPointVarCorrelation.getSequenceNbr());
String response = HttpUtil.createPost(baseUrlGKHF).body(JSON.toJSONString(resultMap)).execute().body();
if (response.contains("intervalValue1") && response.contains("processVariableId")) {
com.alibaba.fastjson.JSONObject jsonObject = JSON.parseObject(response);
fanPointVarCorrelation.setIntervalValue5(jsonObject.getDoubleValue("intervalValue5"));
fanPointVarCorrelation.setIntervalValue4(jsonObject.getDoubleValue("intervalValue4"));
fanPointVarCorrelation.setIntervalValue3(jsonObject.getDoubleValue("intervalValue3"));
fanPointVarCorrelation.setIntervalValue2(jsonObject.getDoubleValue("intervalValue2"));
fanPointVarCorrelation.setIntervalValue1(jsonObject.getDoubleValue("intervalValue1"));
log.info("------------------------------------------光伏相关性::计算成功,待更新表数据----------------------------------------");
} else {
fanPointVarCorrelation.setIntervalValue5(0.0);
fanPointVarCorrelation.setIntervalValue4(0.0);
fanPointVarCorrelation.setIntervalValue3(0.0);
fanPointVarCorrelation.setIntervalValue2(0.0);
fanPointVarCorrelation.setIntervalValue1(0.0);
}
fanPointVarCorrelation.setRecDate(new Date());
idxBizFanPointProcessVariableClassificationService.saveOrUpdate(fanPointVarCorrelation);
log.info("表数据已更新");
}
} catch (Exception e) {
log.error("kafka失败,当前失败的批次: data:{}, {}", consumerRecord, e);
} finally {
redisUtils.expire(kafkaTopicConsumerGKHFFan, 600);
}
return true;
}
/**
* Batch-consume Kafka messages [PV working-condition division]
*
* @param consumerRecords messages
* @param ack ack
*/
@KafkaListener(id = "GKHFPvConsumer", groupId = "consumerGroup", topics = kafkaTopicConsumerGKHFPv)
public void listenGKHFPv(List<ConsumerRecord<String, String>> consumerRecords, Acknowledgment ack) {
ack.acknowledge();
try {
CompletableFuture[] completableFutures = consumerRecords.stream().map(m -> CompletableFuture.supplyAsync(() -> consumerRecordsGKFXPv(m), service)).toArray(CompletableFuture[]::new);
CompletableFuture.allOf(completableFutures).join();
} finally {
}
}
/**
* Process one PV working-condition division message
* @param consumerRecord
* @return
*/
boolean consumerRecordsGKFXPv(ConsumerRecord<String, String> consumerRecord) {
try {
Optional<?> kafkaMessage = Optional.ofNullable(consumerRecord.value());
if (kafkaMessage.isPresent()) {
IdxBizPvPointProcessVariableClassification pvPointVarCorrelation = JSON.parseObject(kafkaMessage.get().toString(), IdxBizPvPointProcessVariableClassification.class);
List<IndicatorData> tdengineData1 = indicatorDataMapper.selectDataByAddressAndtime(pvPointVarCorrelation.getIndexAddress(), startTime, endTime, pvPointVarCorrelation.getGatewayId());
HashMap<String, Object> resultMap = new HashMap<>();
resultMap.put("processVariable", tdengineData1.stream().map(t -> Double.parseDouble(t.getValue())).collect(Collectors.toList()));
resultMap.put("processVariableId", pvPointVarCorrelation.getSequenceNbr());
String response = HttpUtil.createPost(baseUrlGKHF).body(JSON.toJSONString(resultMap)).execute().body();
if (response.contains("intervalValue1") && response.contains("processVariableId")) {
com.alibaba.fastjson.JSONObject jsonObject = JSON.parseObject(response);
pvPointVarCorrelation.setIntervalValue5(jsonObject.getDoubleValue("intervalValue5"));
pvPointVarCorrelation.setIntervalValue4(jsonObject.getDoubleValue("intervalValue4"));
pvPointVarCorrelation.setIntervalValue3(jsonObject.getDoubleValue("intervalValue3"));
pvPointVarCorrelation.setIntervalValue2(jsonObject.getDoubleValue("intervalValue2"));
pvPointVarCorrelation.setIntervalValue1(jsonObject.getDoubleValue("intervalValue1"));
log.info("------------------------------------------光伏相关性::计算成功,待更新表数据----------------------------------------");
} else {
pvPointVarCorrelation.setIntervalValue5(0.0);
pvPointVarCorrelation.setIntervalValue4(0.0);
pvPointVarCorrelation.setIntervalValue3(0.0);
pvPointVarCorrelation.setIntervalValue2(0.0);
pvPointVarCorrelation.setIntervalValue1(0.0);
}
pvPointVarCorrelation.setRecDate(new Date());
idxBizPvPointProcessVariableClassificationService.saveOrUpdate(pvPointVarCorrelation);
log.info("表数据已更新");
}
} catch (Exception e) {
log.error("kafka失败,当前失败的批次: data:{}, {}", consumerRecord, e);
} finally {
redisUtils.expire(kafkaTopicConsumerGKHFPv, 600);
}
return true;
}
}
package com.yeejoin.amos.boot.module.jxiop.biz.kafka;
import lombok.extern.slf4j.Slf4j;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.support.SendResult;
import org.springframework.stereotype.Service;
import org.springframework.util.concurrent.ListenableFuture;
import org.springframework.util.concurrent.ListenableFutureCallback;
import javax.annotation.Resource;
/**
* Kafka producer service
*
* @author litw
* @create 2022/11/1 10:06
**/
@Slf4j
@Service
public class KafkaProducerService {
@Resource
private KafkaTemplate<String, String> kafkaTemplate;
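// KafkaTemplate for String keys/values, injected from the Spring context; the serializers and broker
// address are expected to come from the spring.kafka.* properties added elsewhere in this change.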
/**
* Send a message asynchronously.
* @param topic   topic name
* @param message message payload
*/
public void sendMessageAsync(String topic, String message) {
ListenableFuture<SendResult<String, String>> future = kafkaTemplate.send(topic, message);
// Attach a callback so send failures are logged
future.addCallback(new ListenableFutureCallback<SendResult<String, String>>() {
@Override
public void onFailure(Throwable throwable) {
log.error("发送消息(异步) failure! topic : {}, message: {}", topic, message);
}
@Override
public void onSuccess(SendResult<String, String> stringStringSendResult) {
// log.info("发送消息(异步) success! topic: {}, message: {}", topic, message);
}
});
}
}
......@@ -23,6 +23,7 @@ import org.slf4j.LoggerFactory;
import org.springframework.beans.BeanUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.elasticsearch.core.ElasticsearchRestTemplate;
import org.springframework.data.elasticsearch.core.SearchHit;
import org.springframework.data.elasticsearch.core.SearchHits;
......@@ -35,10 +36,11 @@ import org.springframework.util.ObjectUtils;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.*;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.*;
import java.util.stream.Collectors;
@Service
@Service("commonServiceImpl")
@Configuration
public class CommonServiceImpl {
private static final HashMap<String, Object> cacheExecInfo = new HashMap<>();
// UTC time format
......@@ -52,6 +54,9 @@ public class CommonServiceImpl {
Integer zxzsleepTime;
@Value("${base.url:http://139.9.173.44:30009/maas/maas/processes/api/}")
String baseUrl;
String baseUrlXGX = "http://139.9.173.44:8052/intelligent-analysis/correlation";
//---------------- Working-condition variable division request configuration ------------------------
@Value("${gkblhffan.url:74435221-796d-43c0-ae72-319792b8f89e}")
String gkqjhfurlfan;
......@@ -1104,7 +1109,7 @@ public class CommonServiceImpl {
}
@Scheduled(cron = "0 0/10 * * * ?")
private void healthWarningMinuteByGF() {
public void healthWarningMinuteByGF() {
Date time = new Date();
List<IdxBizPvPointProcessVariableClassificationDto> data = idxBizPvPointProcessVariableClassificationMapper.getInfluxDBData();
Map<String, List<IdxBizPvPointProcessVariableClassificationDto>> maps = data.stream().collect(Collectors.groupingBy(IdxBizPvPointProcessVariableClassificationDto::getGatewayId));
......@@ -1277,4 +1282,172 @@ public class CommonServiceImpl {
resultMap.put("title", indicatorData.getUnit());
return resultMap;
}
private static final BlockingQueue<IdxBizFanPointProcessVariableClassification> fifo = new LinkedBlockingQueue<>(5000);
private static int threadNumber = Runtime.getRuntime().availableProcessors() * 2;
public void chuli() throws InterruptedException {
this.getFanConditionVariablesByTimeAnalyseNew(null, null);
// Build a worker pool via Executors
ExecutorService service = Executors.newCachedThreadPool();
for (int i = 0; i < threadNumber; i++) {
Consumer consumer = new Consumer(fifo, this);
service.execute(consumer);
}
// Give the consumer threads a fixed window to drain the queue
Thread.sleep(200000);
// Shut down the executor
service.shutdown();
}
public void info(String gatewayId, String startTime, String endTime, IdxBizFanPointProcessVariableClassification idxBizFanPointProcessVariableClassification){
logger.info("异步执行,gatewayId:{},分析Id:{}", gatewayId, idxBizFanPointProcessVariableClassification.getSequenceNbr());
List<IdxBizFanPointVarCorrelation> gongkuangList = idxBizFanPointVarCorrelationMapper.selectList(new QueryWrapper<IdxBizFanPointVarCorrelation>().eq("ANALYSIS_GATEWAY_ID", idxBizFanPointProcessVariableClassification.getGatewayId()).eq("ANALYSIS_POINT_ID", idxBizFanPointProcessVariableClassification.getSequenceNbr()));
if (gongkuangList.size() > 0) {
foreachHandlerConditionVariabAnalyseFanNew(gatewayId, gongkuangList, startTime, endTime, idxBizFanPointProcessVariableClassification);
}
}
// Iterate over the data and assemble the wind-turbine correlation requests
public void foreachHandlerConditionVariabAnalyseFanNew(String tableName, List<IdxBizFanPointVarCorrelation> list, String startTime, String endTime, IdxBizFanPointProcessVariableClassification idxBizFanPointProcessVariableClassification) {
List<IndicatorData> returnList = indicatorDataMapper.selectDataByAddressAndtime(idxBizFanPointProcessVariableClassification.getIndexAddress(), startTime, endTime,tableName);
List<Double> data1 = returnList.stream().map(t -> Double.parseDouble(t.getValue())).collect(Collectors.toList());
list.forEach(idxBizFanPointVarCorrelation -> {
logger.info("---------------------------------风机相关性-----------开始查询influxdb--------------------------------");
List<IndicatorData> returnList1 = indicatorDataMapper.selectDataByAddressAndtime(idxBizFanPointVarCorrelation.getProcessIndexAddress(), startTime, endTime,tableName);
List<Double> data2 = returnList1.stream().map(t -> Double.parseDouble(t.getValue())).collect(Collectors.toList());
HashMap<String, Object> map1 = new HashMap<>();
map1.put("data1", data1);
map1.put("data2", data2);
if (!map1.isEmpty()) {
logger.info("------------------------------风机相关性------------分析变量与工况变量相关性分析算法开始----------------------------------------");
logger.info("------------------------------风机相关性------------分析变量与工况变量相关性分析算参数---------------------------------------" + JSON.toJSONString(map1));
String response = HttpUtil.createPost(baseUrlXGX).body(JSON.toJSONString(map1)).execute().body();
if (response.contains("correlation")) {
JSONObject jsonObject = JSONObject.parseObject(response);
idxBizFanPointVarCorrelation.setCorrelationCoefficient(jsonObject.getDoubleValue("correlation"));
logger.info("------------------------------------------风机相关性::相关性更新业务表成功----------------------------------------");
} else {
idxBizFanPointVarCorrelation.setCorrelationCoefficient(0.0);
}
idxBizFanPointVarCorrelation.setRecDate(new Date());
idxBizFanPointVarCorrelationMapper.updateById(idxBizFanPointVarCorrelation);
logger.info("----------------------------风机相关性--------------分析变量与工况变量相关性分析算法结束----------------------------------------");
} else {
idxBizFanPointVarCorrelation.setCorrelationCoefficient(0.0);
idxBizFanPointVarCorrelation.setRecDate(new Date());
idxBizFanPointVarCorrelationMapper.updateById(idxBizFanPointVarCorrelation);
}
});
}
// Correlation analysis - wind turbine entry point
public void getFanConditionVariablesByTimeAnalyseNew(String startTime, String endTime) {
try {
HashMap<String, List<IdxBizFanPointProcessVariableClassification>> idxBizFanPointProcessVariableClassificationHashMap = getIdxBizFanPointProcessVariableClassificationListOfAnaLyse();
idxBizFanPointProcessVariableClassificationHashMap.keySet().forEach(s -> {
List<IdxBizFanPointProcessVariableClassification> list = idxBizFanPointProcessVariableClassificationHashMap.get(s);
fifo.addAll(list);
});
cacheExecInfo.remove(SmartAnalyseEnum.FAN_XGX.getKey());
} catch (Exception exception) {
cacheExecInfo.remove(SmartAnalyseEnum.PV_XGX.getKey());
}
}
/**
* The methods below implement the correlation analysis for PV stations
*/
private static final BlockingQueue<IdxBizPvPointProcessVariableClassification> fifoPv = new LinkedBlockingQueue<>(5000);
public void chuliPv(String startTime, String endTime) throws InterruptedException {
this.getPvConditionVariablesByTimeAnalyseNew();
// Build a worker pool via Executors
ExecutorService service = Executors.newCachedThreadPool();
for (int i = 0; i < threadNumber; i++) {
ConsumerPv consumer = new ConsumerPv(fifoPv, this, startTime, endTime);
service.execute(consumer);
}
// Give the consumer threads a fixed window to drain the queue
Thread.sleep(200000);
// Shut down the executor
service.shutdown();
}
public void infoPv(String gatewayId, String startTime, String endTime, IdxBizPvPointProcessVariableClassification idxBizPvPointProcessVariableClassification){
logger.info("异步执行,gatewayId:{},分析Id:{}", gatewayId, idxBizPvPointProcessVariableClassification.getSequenceNbr());
List<IdxBizPvPointVarCorrelation> gongkuangList = idxBizPvPointVarCorrelationMapper.selectList(new QueryWrapper<IdxBizPvPointVarCorrelation>().eq("ANALYSIS_GATEWAY_ID", idxBizPvPointProcessVariableClassification.getGatewayId()).eq("ANALYSIS_POINT_ID", idxBizPvPointProcessVariableClassification.getSequenceNbr()));
if (gongkuangList.size() > 0) {
foreachHandlerConditionVariabAnalysePvNew(gatewayId, gongkuangList, startTime, endTime, idxBizPvPointProcessVariableClassification);
}
}
// Iterate over the data and assemble the PV correlation requests
public void foreachHandlerConditionVariabAnalysePvNew(String tableName, List<IdxBizPvPointVarCorrelation> list, String startTime, String endTime, IdxBizPvPointProcessVariableClassification idxBizPvPointProcessVariableClassification) {
List<IndicatorData> returnList = indicatorDataMapper.selectDataByAddressAndtime(idxBizPvPointProcessVariableClassification.getIndexAddress(), startTime, endTime,tableName);
List<Double> data1 = returnList.stream().map(t -> Double.parseDouble(t.getValue())).collect(Collectors.toList());
list.forEach(idxBizPvPointVarCorrelation -> {
logger.info("---------------------------------风机相关性-----------开始查询influxdb--------------------------------");
List<IndicatorData> returnList1 = indicatorDataMapper.selectDataByAddressAndtime(idxBizPvPointVarCorrelation.getProcessIndexAddress(), startTime, endTime,tableName);
List<Double> data2 = returnList1.stream().map(t -> Double.parseDouble(t.getValue())).collect(Collectors.toList());
HashMap<String, Object> map1 = new HashMap<>();
map1.put("data1", data1);
map1.put("data2", data2);
if (!map1.isEmpty()) {
logger.info("------------------------------风机相关性------------分析变量与工况变量相关性分析算法开始----------------------------------------");
logger.info("------------------------------风机相关性------------分析变量与工况变量相关性分析算参数---------------------------------------" + JSON.toJSONString(map1));
String response = HttpUtil.createPost(baseUrlXGX).body(JSON.toJSONString(map1)).execute().body();
if (response.contains("correlation")) {
JSONObject jsonObject = JSONObject.parseObject(response);
idxBizPvPointVarCorrelation.setCorrelationCoefficient(jsonObject.getDoubleValue("correlation"));
logger.info("------------------------------------------风机相关性::相关性更新业务表成功----------------------------------------");
} else {
idxBizPvPointVarCorrelation.setCorrelationCoefficient(0.0);
}
idxBizPvPointVarCorrelation.setRecDate(new Date());
idxBizPvPointVarCorrelationMapper.updateById(idxBizPvPointVarCorrelation);
logger.info("----------------------------风机相关性--------------分析变量与工况变量相关性分析算法结束----------------------------------------");
} else {
idxBizPvPointVarCorrelation.setCorrelationCoefficient(0.0);
idxBizPvPointVarCorrelation.setRecDate(new Date());
idxBizPvPointVarCorrelationMapper.updateById(idxBizPvPointVarCorrelation);
}
});
}
// Correlation analysis - PV entry point
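// Note: this entry point currently loads the wind-turbine classification list and fills the
// wind-turbine queue (fifo); the PV consumers started by chuliPv() read from fifoPv.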
public void getPvConditionVariablesByTimeAnalyseNew() {
try {
HashMap<String, List<IdxBizFanPointProcessVariableClassification>> idxBizFanPointProcessVariableClassificationHashMap = getIdxBizFanPointProcessVariableClassificationListOfAnaLyse();
idxBizFanPointProcessVariableClassificationHashMap.keySet().forEach(s -> {
List<IdxBizFanPointProcessVariableClassification> list = idxBizFanPointProcessVariableClassificationHashMap.get(s);
fifo.addAll(list);
});
cacheExecInfo.remove(SmartAnalyseEnum.FAN_XGX.getKey());
} catch (Exception exception) {
cacheExecInfo.remove(SmartAnalyseEnum.PV_XGX.getKey());
}
}
}
package com.yeejoin.amos.boot.module.jxiop.biz.service.impl;
import cn.hutool.core.date.DateUtil;
import com.yeejoin.amos.boot.biz.common.utils.DateUtils;
import com.yeejoin.amos.boot.module.jxiop.biz.entity.IdxBizFanPointProcessVariableClassification;
import org.apache.commons.lang3.ObjectUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import org.springframework.stereotype.Service;
import java.util.Date;
import java.util.Random;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;
/**
* Consumer thread
*
* @author jackyuj
*/
public class Consumer implements Runnable {
private BlockingQueue<IdxBizFanPointProcessVariableClassification> queue = new LinkedBlockingQueue<>(5000);
private static final int DEFAULT_RANGE_FOR_SLEEP = 1000;
// Constructor
public Consumer(BlockingQueue<IdxBizFanPointProcessVariableClassification> queue, CommonServiceImpl commonService) {
this.queue = queue;
this.commonService = commonService;
}
public final String startTime = DateUtils.convertDateToString(DateUtil.beginOfYear(new Date()), DateUtils.DATE_TIME_PATTERN);
public final String endTime = DateUtils.convertDateToString(DateUtils.getCurrentDayEndTime(new Date()), DateUtils.DATE_TIME_PATTERN);
// Set through the constructor; this class is created manually rather than managed by Spring
CommonServiceImpl commonService;
public void run() {
Random r = new Random();
boolean isRunning = true;
try {
while (isRunning) {
IdxBizFanPointProcessVariableClassification poll = queue.poll(); // non-blocking: returns the queue head, or null immediately when the queue is empty
if (ObjectUtils.isNotEmpty(poll)) {
commonService.info(poll.getGatewayId(), startTime, endTime, poll);
Thread.sleep(r.nextInt(DEFAULT_RANGE_FOR_SLEEP));
} else {
// Queue is empty: assume the producer has finished and let this consumer thread exit.
isRunning = false;
}
}
} catch (Exception e) {
e.printStackTrace();
Thread.currentThread().interrupt();
} finally {
System.out.println("退出消费者线程!");
}
}
}
\ No newline at end of file
package com.yeejoin.amos.boot.module.jxiop.biz.service.impl;
import cn.hutool.core.date.DateUtil;
import com.yeejoin.amos.boot.biz.common.utils.DateUtils;
import com.yeejoin.amos.boot.module.jxiop.biz.entity.IdxBizPvPointProcessVariableClassification;
import org.apache.commons.lang3.ObjectUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import org.springframework.stereotype.Service;
import java.util.Date;
import java.util.Random;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;
/**
* Consumer thread
*
* @author jackyuj
*/
public class ConsumerPv implements Runnable {
private BlockingQueue<IdxBizPvPointProcessVariableClassification> queue = new LinkedBlockingQueue<>(5000);
private static final int DEFAULT_RANGE_FOR_SLEEP = 1000;
String startTime1 = DateUtils.convertDateToString(DateUtil.beginOfYear(new Date()), DateUtils.DATE_TIME_PATTERN);
String endTime1 = DateUtils.convertDateToString(DateUtils.getCurrentDayEndTime(new Date()), DateUtils.DATE_TIME_PATTERN);
// Constructor
public ConsumerPv(BlockingQueue<IdxBizPvPointProcessVariableClassification> queue, CommonServiceImpl commonService, String startTime, String endTime) {
this.queue = queue;
this.commonService = commonService;
this.startTime1 = startTime;
this.endTime1 = endTime;
}
// Set through the constructor; this class is created manually rather than managed by Spring
CommonServiceImpl commonService;
public void run() {
Random r = new Random();
boolean isRunning = true;
try {
while (isRunning) {
IdxBizPvPointProcessVariableClassification poll = queue.poll(); // non-blocking: returns the queue head, or null immediately when the queue is empty
if (ObjectUtils.isNotEmpty(poll)) {
commonService.infoPv(poll.getGatewayId(), startTime1, endTime1, poll);
Thread.sleep(r.nextInt(DEFAULT_RANGE_FOR_SLEEP));
} else {
// Queue is empty: assume the producer has finished and let this consumer thread exit.
isRunning = false;
}
}
} catch (Exception e) {
e.printStackTrace();
Thread.currentThread().interrupt();
} finally {
System.out.println("退出消费者线程!");
}
}
}
\ No newline at end of file
......@@ -18,4 +18,9 @@ public interface IndicatorDataMapper extends BaseMapper<IndicatorData> {
@Select("select `unit` from iot_data.indicator_data where address=#{address} and gateway_id =#{gatewayId} limit 1")
IndicatorData selectUnitByAddressAndGatewayId(@Param("address")String address, @Param("gatewayId")String gatewayId);
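// Fetches both the analysis series and the process series in one time-bounded query, matching the two
// (gateway, address) pairs; used by the Kafka correlation consumers.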
@Select("select `value`, created_time as createdTime, `value_f` as valueF, `address`, gateway_id as gatewayId from iot_data.indicator_data where created_time >= #{startTime} and created_time <= #{endTime} and ((gateway_id =#{gatewayId} and address=#{address}) or (gateway_id =#{gatewayId1} and address=#{address1})) ")
List<IndicatorData> selectDataByAddressAndtimeNew(@Param("address")String address,@Param("startTime") String startTime, @Param("endTime")String endTime, @Param("gatewayId")String gatewayId, @Param("gatewayId1")String gatewayId1, @Param("address1")String address1);
}
## DB properties:
## db1-production database
spring.db1.datasource.type: com.alibaba.druid.pool.DruidDataSource
spring.db1.datasource.url=jdbc:mysql://39.98.224.23:3306/production?allowMultiQueries=true&serverTimezone=GMT%2B8&characterEncoding=utf8
spring.db1.datasource.url=jdbc:mysql://139.9.173.44:3306/production?allowMultiQueries=true&serverTimezone=GMT%2B8&characterEncoding=utf8
spring.db1.datasource.username=root
spring.db1.datasource.password=Yeejoin@2020
spring.db1.datasource.driver-class-name: com.mysql.cj.jdbc.Driver
## db2-sync_data
spring.db2.datasource.type: com.alibaba.druid.pool.DruidDataSource
spring.db2.datasource.url=jdbc:mysql://36.40.66.175:3306/yeeamos_amos_idx_biz?allowMultiQueries=true&serverTimezone=GMT%2B8&characterEncoding=utf8
spring.db2.datasource.url=jdbc:mysql://139.9.173.44:3306/amos_idx_biz?allowMultiQueries=true&serverTimezone=GMT%2B8&characterEncoding=utf8
spring.db2.datasource.username=root
spring.db2.datasource.password=Yeejoin@2020
spring.db2.datasource.driver-class-name: com.mysql.cj.jdbc.Driver
......@@ -53,10 +53,10 @@ lettuce.timeout=10000
emqx.clean-session=true
emqx.client-id=${spring.application.name}-${random.int[1024,65536]}
emqx.broker=tcp://172.16.10.253:1883
emqx.broker=tcp://172.16.10.220:1883
emqx.user-name=admin
emqx.password=public
mqtt.scene.host=mqtt://172.16.10.253:8083/mqtt
mqtt.scene.host=mqtt://172.16.10.220:8083/mqtt
mqtt.client.product.id=mqtt
mqtt.topic=topic_mqtt
spring.mqtt.completionTimeout=3000
......@@ -96,7 +96,7 @@ spring.db3.datasource.driver-class-name: com.taosdata.jdbc.rs.RestfulDriver
#spring.influx.bufferLimit=20000
spring.influx.url=http://172.16.10.253:8086
spring.influx.url=http://139.9.173.44:8086
spring.influx.password=Yeejoin@2020
spring.influx.user=root
spring.influx.database=iot_platform
......@@ -126,7 +126,7 @@ amos.secret.key=qaz
#eureka.instance.ip-address=172.16.3.122
spring.activemq.broker-url=tcp://39.98.223.23:61616
spring.activemq.broker-url=tcp://172.16.10.220:61616
spring.activemq.user=admin
spring.activemq.password=admin
spring.jms.pub-sub-domain=false
......@@ -136,3 +136,33 @@ myqueue=amos.privilege.v1.JXIOP.AQSC_FDGL.userBusiness
fan.statuts.stattuspath=upload/jxiop/device_status
pictureUrl=upload/jxiop/syz/
#kafka
spring.kafka.bootstrap-servers=139.9.173.44:9092
spring.kafka.producer.retries=1
spring.kafka.producer.bootstrap-servers=139.9.173.44:9092
spring.kafka.producer.batch-size=16384
spring.kafka.producer.buffer-memory=33554432
spring.kafka.producer.acks=1
spring.kafka.producer.key-serializer=org.apache.kafka.common.serialization.StringSerializer
spring.kafka.producer.value-serializer=org.apache.kafka.common.serialization.StringSerializer
spring.kafka.consumer.group-id=consumerGroup
spring.kafka.consumer.bootstrap-servers=139.9.173.44:9092
spring.kafka.consumer.enable-auto-commit=false
spring.kafka.consumer.auto-offset-reset=earliest
spring.kafka.consumer.key-deserializer=org.apache.kafka.common.serialization.StringDeserializer
spring.kafka.consumer.value-deserializer=org.apache.kafka.common.serialization.StringDeserializer
spring.kafka.listener.ack-mode=manual_immediate
spring.kafka.listener.type=batch
spring.kafka.consumer.max-poll-records=100
spring.kafka.consumer.fetch-max-wait= 10000
# number of months to offset back from the current time (how far into history to look, in months)
last.month.num = 12
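The consumer settings above (batch listener, manual_immediate ack mode, group consumerGroup) imply a listener along these lines. This is a hedged sketch, not code from this commit; the topic name "indicator-data" is an assumed placeholder.

import java.util.List;

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.kafka.support.Acknowledgment;
import org.springframework.stereotype.Component;

@Component
public class IndicatorBatchListener {

    // The topic name is an assumption for illustration; group-id matches spring.kafka.consumer.group-id.
    @KafkaListener(topics = "indicator-data", groupId = "consumerGroup")
    public void onBatch(List<ConsumerRecord<String, String>> records, Acknowledgment ack) {
        for (ConsumerRecord<String, String> record : records) {
            // process each record; at most max-poll-records=100 arrive per batch
            System.out.println(record.key() + " -> " + record.value());
        }
        // manual_immediate ack mode: commit the offsets only after the batch was processed
        ack.acknowledge();
    }
}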
......@@ -75,10 +75,10 @@ station.section=10
gl.sum.column=日发电量,月发电量,年发电量
gl.avg.column=有功功率,日利用小时,瞬时风速
spring.elasticsearch.rest.uris=http://39.98.224.23:9200
spring.elasticsearch.rest.uris=http://139.9.173.44:9200
spring.elasticsearch.rest.connection-timeout=30000
spring.elasticsearch.rest.username=elastic
spring.elasticsearch.rest.password=123456
spring.elasticsearch.rest.password=Yeejoin@2020
spring.elasticsearch.rest.read-timeout=30000
healthValue_Warn=39
......
......@@ -154,4 +154,10 @@ public class StationBasic extends BaseEntity {
    // map offset
@TableField(value = "title_pos",typeHandler = FastjsonTypeHandler.class)
private List<BigDecimal> titlePos;
/**
     * color of the assigned (red/yellow/green) code
*/
@TableField("qrcode_color")
private String qrcodeColor;
}
......@@ -24,5 +24,8 @@ public interface StationPlanMapper extends BaseMapper<StationPlan> {
//
// @Select("SELECT monthly ,sum(value) value from station_plan where year= #{year} GROUP BY monthly ")
// List<StationPlan> getStationPlanbyids(@Param("year") String year);
@Select("select value from station_plan where station_basic_id=#{stationBasicId} and year= #{year} and monthly= #{monthly}")
Double getPlanGenByStationIdAndMonth(@Param("stationBasicId") String stationId, @Param("year") String year, @Param("monthly") String monthly);
}
......@@ -35,12 +35,12 @@
<groupId>org.influxdb</groupId>
<artifactId>influxdb-java</artifactId>
</dependency>
<dependency>
<groupId>com.yeejoin</groupId>
<artifactId>amos-component-influxdb</artifactId>
<version>1.9.0-SNAPSHOT</version>
<scope>compile</scope>
</dependency>
<!-- <dependency>-->
<!-- <groupId>com.yeejoin</groupId>-->
<!-- <artifactId>amos-component-influxdb</artifactId>-->
<!-- <version>1.9.0-SNAPSHOT</version>-->
<!-- <scope>compile</scope>-->
<!-- </dependency>-->
<dependency>
<groupId>com.taosdata.jdbc</groupId>
<artifactId>taos-jdbcdriver</artifactId>
......
......@@ -19,6 +19,7 @@ import com.yeejoin.amos.boot.module.jxiop.biz.constants.CommonConstans;
import com.yeejoin.amos.boot.module.jxiop.biz.dto.*;
import com.yeejoin.amos.boot.module.jxiop.biz.service.impl.CommonServiceImpl;
import com.yeejoin.amos.boot.module.jxiop.biz.service.impl.MonitorFanIndicatorImpl;
import com.yeejoin.amos.boot.module.jxiop.biz.service.impl.MonitoringServiceImpl;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.apache.commons.io.IOUtils;
......@@ -77,6 +78,8 @@ public class MonitorFanIdxController extends BaseController {
@Autowired
EmqKeeper emqKeeper;
@Autowired
MonitoringServiceImpl monitoringService;
@Autowired
private ElasticsearchRestTemplate elasticsearchTemplate;
@Value("classpath:/json/overview.json")
private Resource overview;
......@@ -105,6 +108,24 @@ public class MonitorFanIdxController extends BaseController {
}
@TycloudOperation(needAuth = false, ApiLevel = UserType.AGENCY)
@ApiOperation(value = "大屏三维地图信息弹框")
@GetMapping("/getBasicInfonew")
    public ResponseModel<Map<String, Object>> getBasicInfonew(@RequestParam(value = "stationId") String stationId) {
        StationBasic stationBasic = stationBasicMapper.selectById(stationId);
        List<Map<String, String>> list = new ArrayList<>();
        list.add(monitoringService.getStationfs(stationBasic));   // wind speed / irradiance
        list.add(monitoringService.getStationrl(stationBasic));   // installed capacity
        Map<String, Object> result = new HashMap<>();
        result.put("color", stationBasic.getQrcodeColor());
        result.put("date", list);
        return ResponseHelper.buildResponse(result);
    }
// @TycloudOperation(needAuth = false, ApiLevel = UserType.AGENCY)
// @ApiOperation(value = "风机布置图 - 风机状态列表")
// @GetMapping("/getFanStatusList")
......@@ -226,7 +247,7 @@ public class MonitorFanIdxController extends BaseController {
queryCondtion.put(CommonConstans.QueryStringEquipmentIndexName, Arrays.asList("30秒平均风速"));
queryCondtion.put(CommonConstans.QueryStringGateWayId, Arrays.asList(gatewayId));
List<ESEquipments> result2 = commonServiceImpl.getListDataByCondtions(queryCondtion, null, ESEquipments.class);
columnMap.put(column, String.format("%.2f", result2.get(0).getValueF()));
columnMap.put(column, String.format("%.2f", commonServiceImpl.getAvagerByEquipmentIndxName(result2, "30秒平均风速")));
}
......@@ -1026,7 +1047,7 @@ public class MonitorFanIdxController extends BaseController {
Double totalAnnual = (powerOfAnnualFD.get() + powerOfAnnualGF.get());
HashMap<String, String> stringHashMap13 = new HashMap<>();
stringHashMap13.put("title", String.format(CommonConstans.Twodecimalplaces, (totalAnnual * CommonConstans.carbonDioxide * CommonConstans.kgToT)));
stringHashMap13.put("unit", "二氧化碳减排量(ft)");
stringHashMap13.put("unit", "二氧化碳减排量(t)");
list3.add(stringHashMap13);
HashMap<String, String> stringHashMap14 = new HashMap<>();
stringHashMap14.put("title", String.format(CommonConstans.Twodecimalplaces, (totalAnnual * CommonConstans.standardCoal * CommonConstans.kgToT)));
......
......@@ -95,6 +95,12 @@ public class CommonServiceImpl {
return Double.valueOf(String.format("%.2f", totalvalue));
}
    public Double getAvagerByEquipmentIndxName(List<ESEquipments> equipments, String indexName) {
        // average() yields an empty OptionalDouble when no record matches, so fall back to 0.0 instead of calling getAsDouble()
        return equipments.stream()
                .filter(esEquipments -> esEquipments.getEquipmentIndexName().equals(indexName))
                .filter(esEquipments -> esEquipments.getValueF() != null)
                .mapToDouble(ESEquipments::getValueF)
                .average()
                .orElse(0.00);
    }
// public Double getNumByIndicatior(String gatewayId,String indicator){
// String sql = "SELECT * FROM indicators_"+gatewayId+" where equipmentIndexName=~/"+indicator+"$/";
// Double totalvalue = 0.0;
......
......@@ -81,6 +81,49 @@ public class MonitoringServiceImpl {
return ObjectUtils.isEmpty(installCapacity) ? 0.00 : installCapacity;
}
/***
     * Wind speed / irradiance of a station (wind farm: 30-second average wind speed; PV station: total irradiation)
*
* */
    public Map<String, String> getStationfs(StationBasic stationBasic) {
        String value = "0";
        Map<String, String> map = new HashMap<>();
        Map<String, List<String>> queryCondtion = new HashMap<>();
        if ("FDZ".equals(stationBasic.getStationType())) {
            // wind farm: average 30-second wind speed reported by the fan gateway
            String gatewayId = stationBasic.getFanGatewayId();
            queryCondtion.put(CommonConstans.QueryStringEquipmentIndexName, Arrays.asList("30秒平均风速"));
            queryCondtion.put(CommonConstans.QueryStringGateWayId, Arrays.asList(gatewayId));
            List<ESEquipments> result2 = commonServiceImpl.getListDataByCondtions(queryCondtion, null, ESEquipments.class);
            value = String.format("%.2f", commonServiceImpl.getAvagerByEquipmentIndxName(result2, "30秒平均风速"));
            map.put("name", "风速/辐照度");
            map.put("value", value + "m/s");
        } else {
            // PV station: total irradiation reported by the booster-station gateway
            queryCondtion.put(CommonConstans.QueryStringEquipmentIndexName, Arrays.asList("WTX-801_25_WTX-801_总辐射"));
            queryCondtion.put(CommonConstans.QueryStringGateWayId, Arrays.asList(stationBasic.getBoosterGatewayId()));
            List<ESEquipments> result1 = commonServiceImpl.getListDataByCondtions(queryCondtion, null, ESEquipments.class);
            value = String.format("%.2f", commonServiceImpl.getSumByEquipmentIndxName(result1, "WTX-801_25_WTX-801_总辐射"));
            map.put("name", "风速/辐照度");
            map.put("value", value + "W/㎡");
        }
        return map;
    }
/***
     * Installed capacity of a station
* */
    public Map<String, String> getStationrl(StationBasic stationBasic) {
        Double installCapacity = this.SjglZsjZsbtzServiceImpl.getStationCapactityByStationWerks(stationBasic.getStationNumber());
        String value = ObjectUtils.isEmpty(installCapacity) ? "0.00" : String.format("%.2f", installCapacity);
        Map<String, String> map = new HashMap<>();
        map.put("name", "容量");
        map.put("value", value + "MW");
        return map;
    }
/**
     * Query station details by province name
*
......@@ -916,19 +959,19 @@ public class MonitoringServiceImpl {
Double totalAnnual = (powerOfAnnualFD.get() + powerOfAnnualGF.get());
HashMap<String, String> stringHashMap13 = new HashMap<>();
stringHashMap13.put("title", String.format(CommonConstans.Twodecimalplaces, (totalAnnual * CommonConstans.carbonDioxide*CommonConstans.kgToT)));
stringHashMap13.put("unit", "二氧化碳减排量(t)");
stringHashMap13.put("unit", "二氧化碳减排量(t)");
list3.add(stringHashMap13);
HashMap<String, String> stringHashMap14 = new HashMap<>();
stringHashMap14.put("title", String.format(CommonConstans.Twodecimalplaces, (totalAnnual * CommonConstans.standardCoal*CommonConstans.kgToT)));
stringHashMap14.put("unit", "节约标准煤(t)");
stringHashMap14.put("unit", "节约标准煤(t)");
list3.add(stringHashMap14);
HashMap<String, String> stringHashMap15 = new HashMap<>();
stringHashMap15.put("title", String.format(CommonConstans.Twodecimalplaces, (totalAnnual * CommonConstans.toner * CommonConstans.kgToT)));
stringHashMap15.put("unit", "碳粉尘减排量(t)");
stringHashMap15.put("unit", "碳粉尘减排量(t)");
list3.add(stringHashMap15);
HashMap<String, String> stringHashMap16 = new HashMap<>();
stringHashMap16.put("title", String.format(CommonConstans.Twodecimalplaces, (totalAnnual * CommonConstans.sulfurDioxide * CommonConstans.kgToT)));
stringHashMap16.put("unit", "二氧化硫减排量(t)");
stringHashMap16.put("unit", "二氧化硫减排量(t)");
list3.add(stringHashMap16);
page3.setRecords(list3);
try {
......
......@@ -177,9 +177,7 @@ public class PowerGenerationImpl {
}
public List<ESMoonPowerGeneration> getESMoonPowerGeneration(StationCacheInfoDto stationCacheInfoDto,List<IndicatorData> indexDto,String daty,String day,String year,Double flags){
List<ESMoonPowerGeneration> list=new ArrayList<>();
SimpleDateFormat myFmt2=new SimpleDateFormat("yyyy-MM-dd");
Date now=new Date();
String datynew= myFmt2.format(now);
if(indexDto!=null&&!indexDto.isEmpty()){
for (IndicatorData dto : indexDto) {
double value=dto.getValueF()!=null?(double)dto.getValueF():0.0;
......@@ -187,7 +185,7 @@ public class PowerGenerationImpl {
value=value*flags;
}
ESMoonPowerGeneration moonPowerGeneration=new ESMoonPowerGeneration(
datynew+"_"+dto.getAddress(),
daty+"_"+dto.getAddress(),
stationCacheInfoDto.getStationId(),
stationCacheInfoDto.getStationName(),
stationCacheInfoDto.getStationType(),
......@@ -209,9 +207,7 @@ public class PowerGenerationImpl {
}
public List<ESYearPowerGeneration> getESYearPowerGeneration(StationCacheInfoDto stationCacheInfoDto,List<IndicatorData> indexDto,String daty,String day,Double flags){
List<ESYearPowerGeneration> list=new ArrayList<>();
SimpleDateFormat myFmt2=new SimpleDateFormat("yyyy-MM-dd");
Date now=new Date();
String datynew= myFmt2.format(now);
if(indexDto!=null&&!indexDto.isEmpty()){
for (IndicatorData dto : indexDto) {
double value=dto.getValueF()!=null?(double)dto.getValueF():0.0;
......@@ -219,7 +215,7 @@ public class PowerGenerationImpl {
value=value*flags;
}
ESYearPowerGeneration yearPowerGeneration=new ESYearPowerGeneration(
datynew+"_"+dto.getAddress(),
daty+"_"+dto.getAddress(),
stationCacheInfoDto.getStationId(),
stationCacheInfoDto.getStationName(),
stationCacheInfoDto.getStationType(),
......
package com.yeejoin.amos.boot.module.jxiop.biz.emqx;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.baomidou.mybatisplus.core.conditions.update.LambdaUpdateWrapper;
import com.baomidou.mybatisplus.core.toolkit.ObjectUtils;
import com.yeejoin.amos.boot.biz.common.entity.BaseEntity;
import com.yeejoin.amos.boot.module.jxiop.api.entity.PersonBasic;
import com.yeejoin.amos.boot.module.jxiop.api.mapper.PersonBasicMapper;
import com.yeejoin.amos.boot.module.jxiop.api.mapper.StationBasicMapper;
import com.yeejoin.amos.boot.module.jxiop.biz.service.impl.PersonBasicServiceImpl;
import com.yeejoin.amos.component.robot.AmosRequestContext;
import lombok.extern.slf4j.Slf4j;
import org.eclipse.paho.client.mqttv3.MqttException;
import org.eclipse.paho.client.mqttv3.MqttMessage;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.scheduling.annotation.EnableScheduling;
import org.springframework.stereotype.Component;
import org.typroject.tyboot.component.emq.EmqKeeper;
import org.typroject.tyboot.component.emq.EmqxListener;
import org.typroject.tyboot.core.foundation.context.RequestContext;
import javax.annotation.PostConstruct;
import java.util.List;
import java.util.Map;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;
@Component
@EnableScheduling
@Slf4j
public class PersonYardMessage extends EmqxListener {
@Autowired
private AmosRequestContext amosAuth;
@Autowired
protected EmqKeeper emqKeeper;
......
......@@ -3,15 +3,11 @@ package com.yeejoin.amos.boot.module.jxiop.biz.emqx;
import com.alibaba.fastjson.JSONArray;
import com.yeejoin.amos.boot.module.jxiop.api.mapper.StationBasicMapper;
import lombok.extern.slf4j.Slf4j;
import com.yeejoin.amos.component.robot.AmosRequestContext;
import org.eclipse.paho.client.mqttv3.MqttMessage;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.scheduling.annotation.EnableScheduling;
import org.springframework.stereotype.Component;
import org.typroject.tyboot.component.emq.EmqKeeper;
import org.typroject.tyboot.component.emq.EmqxListener;
import org.typroject.tyboot.core.foundation.context.RequestContext;
import javax.annotation.PostConstruct;
import java.util.List;
......@@ -20,12 +16,9 @@ import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;
@Component
@EnableScheduling
@Slf4j
public class StationYardMessage extends EmqxListener {
@Autowired
private AmosRequestContext amosAuth;
@Autowired
protected EmqKeeper emqKeeper;
......@@ -34,7 +27,9 @@ public class StationYardMessage extends EmqxListener {
private StationBasicMapper stationBasicMapper;
    // Topic on which Jiangxi Power Construction (江西电建) receives red/yellow/green status codes
    /**
     * Topic on which Jiangxi Power Construction (江西电建) receives red/yellow/green status codes
     */
private static final String JXIOP_STATION_YARD = "jxIop/station/yard";
private static final BlockingQueue<List<Map<String, String>>> blockingQueue = new LinkedBlockingQueue<List<Map<String, String>>>();
......
......@@ -5,6 +5,8 @@ import lombok.experimental.Accessors;
import java.util.Date;
import com.baomidou.mybatisplus.annotation.TableField;
@Data
@Accessors(chain = true)
public class EquipAlarmEventDto {
......@@ -14,5 +16,12 @@ public class EquipAlarmEventDto {
private String alarmDesc;
private String equipName;
private long sort;
/**
     * Category: inverter (逆变器), box transformer (箱变), or combiner box (汇流箱)
*/
@TableField("frontModule")
private String frontModule;
@TableField("value")
private String value;
}
......@@ -29,4 +29,6 @@ public class StationCacheInfoDto implements Serializable {
private String boosterGatewayId;
    // area code
private String areaCode;
//
private String address;
}
......@@ -4,16 +4,15 @@ import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import com.yeejoin.amos.boot.module.jxiop.biz.dto.AlarmEventDto;
import com.yeejoin.amos.boot.module.jxiop.biz.entity.AlarmEvent;
import java.util.Date;
import java.util.List;
public interface AlarmEventMapper extends BaseMapper<AlarmEvent> {
String getLastDataBySort(String gatewayId);
String getLastDataBySort(String gatewayId);
List<AlarmEventDto> getAlarmEventList(String gatewayId, String stationId, long current, long size, Date time);
List <AlarmEventDto> getAlarmEventList(String gatewayId,String stationId);
List<String> getOldAlarmsBySort(String gatewayId, String sort);
List<String> getOldAlarmsBySort(String gatewayId, String sort);
}
package com.yeejoin.amos.boot.module.jxiop.biz.mapper2;
import java.util.Date;
import java.util.List;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import com.yeejoin.amos.boot.module.jxiop.biz.dto.EquipAlarmEventDto;
import com.yeejoin.amos.boot.module.jxiop.biz.entity.EquipAlarmEvent;
import java.util.List;
public interface EquipAlarmEventMapper extends BaseMapper<EquipAlarmEvent> {
String getLastDataBySort(String gatewayId);
String getLastDataBySort(String gatewayId);
List<EquipAlarmEvent> getOldDataBySort(String sort, String gatewayId);
List<EquipAlarmEvent> getOldDataBySort(String sort,String gatewayId);
List<EquipAlarmEventDto> getAlarmEventList(String gatewayId, String equipIndex, String frontModule, long current,
long size,Date time);
}
......@@ -18,6 +18,7 @@ import org.typroject.tyboot.core.rdbms.service.BaseService;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Calendar;
import java.util.List;
@Service
......@@ -29,7 +30,7 @@ public class EquipAlarmEventServiceImpl extends BaseService<EquipAlarmEventDto,
StationBasicMapper stationBasicMapper;
public ResultsData getEventByEquipIndex(String gatewayId, int current, int size, String equipIndex, String frontModule) {
List<EquipAlarmEvent> equipAlarmEvents = new ArrayList<>();
List<EquipAlarmEventDto> equipAlarmEvents = new ArrayList<>();
LambdaQueryWrapper<EquipAlarmEvent> queryWrapper = new LambdaQueryWrapper<>();
if(equipIndex!=null)
{
......@@ -40,11 +41,15 @@ public class EquipAlarmEventServiceImpl extends BaseService<EquipAlarmEventDto,
{
queryWrapper.eq(EquipAlarmEvent::getFrontModule, frontModule);
}
queryWrapper.orderByDesc(EquipAlarmEvent::getCreatedTime);
Calendar calendar = Calendar.getInstance();
calendar.add(Calendar.DAY_OF_MONTH, -7);
queryWrapper.gt(EquipAlarmEvent::getCreatedTime, calendar.getTime());
int count = equipAlarmEventMapper.selectCount(queryWrapper);
IPage<EquipAlarmEvent> p = new Page<>(current, size);
IPage<EquipAlarmEvent> page = equipAlarmEventMapper.selectPage(p, queryWrapper);
// equipAlarmEvents = equipAlarmEventMapper.selectList(queryWrapper);
equipAlarmEvents=page.getRecords();
equipAlarmEvents= equipAlarmEventMapper.getAlarmEventList(gatewayId, equipIndex, frontModule, (p.getCurrent()-1)*p.getSize(), p.getSize(),calendar.getTime());
// equipAlarmEvents = equipAlarmEventMapper.selectList(queryWrapper);
// equipAlarmEvents=page.getRecords();
equipAlarmEvents.forEach(i->{
if("true".equals(i))
{
......@@ -54,7 +59,7 @@ public class EquipAlarmEventServiceImpl extends BaseService<EquipAlarmEventDto,
i.setValue("分");
}
});
DataGridMock DataGridMock = new DataGridMock(current, equipAlarmEvents.size(), false, current, equipAlarmEvents.subList((current - 1) * size, current * size));
DataGridMock DataGridMock = new DataGridMock(current, count, false, current, equipAlarmEvents);
ColModel colModelEventMovement = new ColModel("equipName", "equipName", "设备名", "设备名", "dataGrid", "equipName");
ColModel colModelStationName = new ColModel("alarmDesc", "alarmDesc", "事件描述", "事件描述", "dataGrid", "alarmDesc");
ColModel colModelEventDesc = new ColModel("createdTime", "createdTime", "告警时间", "告警时间", "dataGrid", "createdTime");
......
......@@ -974,6 +974,7 @@ public class MonitorFanIndicatorImpl implements IMonitorFanIndicator {
queryCondtion.put(CommonConstans.QueryStringEquipmentIndexName, Arrays.asList("风向角", "有功功率", "瞬时风速", "日发电量"));
List<ESEquipments> allList = commonServiceImpl.getListDataByCondtions(queryCondtion, null, ESEquipments.class);
List<ESEquipments> windSqlList = new ArrayList<>();
List<ESEquipments> windSqlSortList = new ArrayList<>();
List<ESEquipments> powerSqlList = new ArrayList<>();
List<ESEquipments> windSpeedSqlList = new ArrayList<>();
List<ESEquipments> electricitySqlList = new ArrayList<>();
......@@ -996,18 +997,19 @@ public class MonitorFanIndicatorImpl implements IMonitorFanIndicator {
});
Map<String, String> powerSqlMap = powerSqlList.stream().collect(Collectors.toMap(ESEquipments::getEquipmentNumber, ESEquipments::getValue));
Map<String, String> windSpeedSqlMap = windSpeedSqlList.stream().collect(Collectors.toMap(ESEquipments::getEquipmentNumber, ESEquipments::getValue));
windSqlSortList = windSqlList.stream().sorted(Comparator.comparing(ESEquipments::getEquipmentNumber,Comparator.comparing(Integer::parseInt))).collect(Collectors.toList());
Map<String, String> electricitySqlMap = electricitySqlList.stream().collect(Collectors.toMap(ESEquipments::getEquipmentNumber, ESEquipments::getValue));
ArrayList<Map<String, String>> resultList = new ArrayList<>();
windSqlList.forEach(item -> {
for (int i = 0; i < windSqlSortList.size(); i++) {
ESEquipments esEquipments = windSqlSortList.get(i);
HashMap<String, String> stringStringHashMap = new HashMap<>();
stringStringHashMap.put("equipmentNumber", item.getEquipmentNumber());
stringStringHashMap.put("wind", ObjectUtils.isEmpty(item.getValue()) ? "0.0" : item.getValue());
stringStringHashMap.put("power", powerSqlMap.get(item.getEquipmentNumber()));
stringStringHashMap.put("windSpeed", windSpeedSqlMap.get(item.getEquipmentNumber()));
stringStringHashMap.put("electricity", String.format(CommonConstans.Fourdecimalplaces, ObjectUtils.isEmpty(electricitySqlMap.get(item.getEquipmentNumber())) ? 0.0000 : Double.valueOf(electricitySqlMap.get(item.getEquipmentNumber()))));
stringStringHashMap.put("equipmentNumber", esEquipments.getEquipmentNumber());
stringStringHashMap.put("wind", ObjectUtils.isEmpty(esEquipments.getValue()) ? "0.0" : esEquipments.getValue());
stringStringHashMap.put("power", powerSqlMap.get(esEquipments.getEquipmentNumber()));
stringStringHashMap.put("windSpeed", windSpeedSqlMap.get(esEquipments.getEquipmentNumber()));
stringStringHashMap.put("electricity", String.format(CommonConstans.Fourdecimalplaces, ObjectUtils.isEmpty(electricitySqlMap.get(esEquipments.getEquipmentNumber())) ? 0.0000 : Double.valueOf(electricitySqlMap.get(esEquipments.getEquipmentNumber()))));
resultList.add(stringStringHashMap);
});
}
        // build the data-grid payload for the platform
DataGridMock DataGridMock = new DataGridMock(current, resultList.size(), false, current, resultList);
ColModel colModelEquipmentNumber = new ColModel("equipmentNumber", "equipmentNumber", "风机编码", "风机编码", "dataGrid", "equipmentNumber");
......@@ -1021,23 +1023,33 @@ public class MonitorFanIndicatorImpl implements IMonitorFanIndicator {
}
public ResultsData getAlarmEventList(int current, int size, String stationId) {
List<AlarmEventDto> alarmEventList = alarmEventMapper.getAlarmEventList(null, stationId);
public ResultsData getAlarmEventList(int current, int size, String stationId) {
LambdaQueryWrapper<AlarmEvent> queryWrapper = new LambdaQueryWrapper<>();
if(stationId!=null)
{
queryWrapper.eq(AlarmEvent::getStationId, stationId);
}
Calendar calendar = Calendar.getInstance();
calendar.add(Calendar.DAY_OF_MONTH, -7);
queryWrapper.gt(AlarmEvent::getCreatedTime, calendar.getTime());
int count = alarmEventMapper.selectCount(queryWrapper);
List<AlarmEventDto> alarmEventList = alarmEventMapper.getAlarmEventList(null, stationId,(current-1)*size,size,calendar.getTime());
// List<AlarmEventDto> sorrtedAlarmEventList = alarmEventList.stream().sorted(Comparator.comparing(AlarmEventDto::getCreatedTime).reversed()).collect(Collectors.toList());
ArrayList<Map<String, String>> resultList = new ArrayList<>();
alarmEventList.forEach(item -> {
HashMap<String, String> stringStringHashMap = new HashMap<>();
stringStringHashMap.put("stationName", item.getStationName());
stringStringHashMap.put("eventDesc", item.getEventDesc());
stringStringHashMap.put("alarmGroupName", item.getAlarmGroupName());
stringStringHashMap.put("eventTime", item.getEventTime());
stringStringHashMap.put("eventMovement", item.getEventMovement());
resultList.add(stringStringHashMap);
});
// ArrayList<Map<String, String>> resultList = new ArrayList<>();
// alarmEventList.forEach(item -> {
// HashMap<String, String> stringStringHashMap = new HashMap<>();
// stringStringHashMap.put("stationName", item.getStationName());
// stringStringHashMap.put("eventDesc", item.getEventDesc());
// stringStringHashMap.put("alarmGroupName", item.getAlarmGroupName());
// stringStringHashMap.put("eventTime", item.getEventTime());
// stringStringHashMap.put("eventMovement", item.getEventMovement());
// resultList.add(stringStringHashMap);
// });
        // build the data-grid payload for the platform
DataGridMock DataGridMock = new DataGridMock(current, resultList.size(), false, current, resultList);
DataGridMock DataGridMock = new DataGridMock(current, count, false, current, alarmEventList);
ColModel colModelStationName = new ColModel("stationName", "stationName", "名称", "名称", "dataGrid", "stationName");
ColModel colModelEventDesc = new ColModel("eventDesc", "eventDesc", "事件描述", "事件描述", "dataGrid", "eventDesc");
ColModel colModelAlarmGroupName = new ColModel("alarmGroupName", "alarmGroupName", "事件告警组", "事件告警组", "dataGrid", "alarmGroupName");
......@@ -1995,12 +2007,12 @@ public class MonitorFanIndicatorImpl implements IMonitorFanIndicator {
HashMap<String, Object> map6 = new HashMap<>();
map6.put("count", gzNum);
map6.put("equipmentIndexName", "故障停机");
map6.put("color", "#00AAFF");
map6.put("color", "#F91414");
resultList.add(map6);
HashMap<String, Object> map7 = new HashMap<>();
map7.put("count", djNum);
map7.put("equipmentIndexName", "待机");
map7.put("color", "#F91414");
map7.put("color", "#00AAFF");
resultList.add(map7);
HashMap<String, Object> map8 = new HashMap<>();
map8.put("count", txNum);
......@@ -2064,7 +2076,7 @@ public class MonitorFanIndicatorImpl implements IMonitorFanIndicator {
return deaviationRateDtoPage;
}
@Scheduled(cron = "0 */1 * * * ?")
@Scheduled(cron = "0 */5 * * * ?")
public void addNbqAlarmEvent() {
LambdaQueryWrapper<StationBasic> wrapper = new LambdaQueryWrapper<>();
......@@ -2075,7 +2087,7 @@ public class MonitorFanIndicatorImpl implements IMonitorFanIndicator {
Map<String, String> shouldCondtion = new HashMap<>();
queryCondtion.put(CommonConstans.QueryStringGateWayId, Arrays.asList(stationBasic.getFanGatewayId()));
queryCondtion.put(CommonConstans.QueryStringEquipmentIndexName, Arrays.asList("待机", "停机", "告警运行", "限额运行", "降额运行", "故障停机", "通讯故障", "运行"));
queryCondtion.put(CommonConstans.QueryStringValue, Arrays.asList("true"));
queryCondtion.put(CommonConstans.QueryStringValueKeyword, Arrays.asList("true"));
/**
             * Inverter (逆变器)
......@@ -2085,7 +2097,7 @@ public class MonitorFanIndicatorImpl implements IMonitorFanIndicator {
             * Combiner box (汇流箱)
*/
queryCondtion.remove(CommonConstans.QueryStringEquipmentIndexName);
queryCondtion.remove(CommonConstans.QueryStringValue);
queryCondtion.remove(CommonConstans.QueryStringValueKeyword);
queryCondtion.put(CommonConstans.QueryStringDataType, Arrays.asList("state"));
shouldCondtion.put(CommonConstans.QueryStringFrontMoudle, "汇流箱");
List<ESEquipments> indicatorsDtoListHLX = commonServiceImpl.getListDataByCondtions(queryCondtion, shouldCondtion, ESEquipments.class);
......@@ -2095,28 +2107,30 @@ public class MonitorFanIndicatorImpl implements IMonitorFanIndicator {
shouldCondtion.put(CommonConstans.QueryStringFrontMoudle, "箱变");
            List<ESEquipments> indicatorsDtoListXB = commonServiceImpl.getListDataByCondtions(queryCondtion, shouldCondtion, ESEquipments.class);
indicatorsDtoList.addAll(indicatorsDtoListHLX);
indicatorsDtoList.addAll(indicatorsDtoListXB);
indicatorsDtoList.addAll(indicatorsDtoListXB);
List<EquipAlarmEvent> newEquipAlarmEvents = new ArrayList<>();
List<EquipAlarmEvent> newEquipAlarmEvent = new ArrayList<>();
long time = new Date().getTime();
for (ESEquipments esEquipments : indicatorsDtoList) {
EquipAlarmEvent equipAlarmEvent = new EquipAlarmEvent();
equipAlarmEvent.setEquipIndex(esEquipments.getEquipmentNumber());
equipAlarmEvent.setEquipName(esEquipments.getEquipmentSpecificName());
equipAlarmEvent.setAlarmDesc(AlarmDesc.getCode(esEquipments.getEquipmentIndexName()));
equipAlarmEvent.setAlarmDesc(StringUtils.isEmpty(equipAlarmEvent.getAlarmDesc()) ? esEquipments.getEquipmentIndexName() : equipAlarmEvent.getAlarmDesc());
equipAlarmEvent.setCreatedTime(esEquipments.getCreatedTime());
equipAlarmEvent.setGatewayId(stationBasic.getFanGatewayId());
equipAlarmEvent.setSort(time);
equipAlarmEvent.setFrontModule(esEquipments.getFrontModule());
equipAlarmEvent.setValue(esEquipments.getValue());
newEquipAlarmEvents.add(equipAlarmEvent);
newEquipAlarmEvent.add(equipAlarmEvent);
if (CollectionUtils.isNotEmpty(indicatorsDtoList)){
for (ESEquipments esEquipments : indicatorsDtoList) {
EquipAlarmEvent equipAlarmEvent = new EquipAlarmEvent();
equipAlarmEvent.setEquipIndex(esEquipments.getEquipmentNumber());
equipAlarmEvent.setEquipName(esEquipments.getEquipmentSpecificName());
equipAlarmEvent.setAlarmDesc(AlarmDesc.getCode(esEquipments.getEquipmentIndexName()));
equipAlarmEvent.setAlarmDesc(StringUtils.isEmpty(equipAlarmEvent.getAlarmDesc()) ? esEquipments.getEquipmentIndexName() : equipAlarmEvent.getAlarmDesc());
equipAlarmEvent.setCreatedTime(esEquipments.getCreatedTime());
equipAlarmEvent.setGatewayId(stationBasic.getFanGatewayId());
equipAlarmEvent.setSort(time);
equipAlarmEvent.setFrontModule(esEquipments.getFrontModule());
equipAlarmEvent.setValue(esEquipments.getValue());
newEquipAlarmEvents.add(equipAlarmEvent);
newEquipAlarmEvent.add(equipAlarmEvent);
}
}
String lastSort = equipAlarmEventMapper.getLastDataBySort(stationBasic.getFanGatewayId());
if (null != lastSort) {
List<EquipAlarmEvent> oldEquipAlarmEvents = equipAlarmEventMapper.getOldDataBySort(lastSort, stationBasic.getFanGatewayId());
......
......@@ -2,6 +2,7 @@ package com.yeejoin.amos.boot.module.jxiop.biz.service.impl;
import cn.hutool.core.date.DateUnit;
import cn.hutool.core.date.DateUtil;
import com.alibaba.druid.sql.visitor.functions.If;
import com.alibaba.fastjson.JSON;
import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
......@@ -69,11 +70,12 @@ public class MonitoringServiceImpl {
@Autowired
EmqKeeper emqKeeper;
// @Autowired
// @Autowired
// InfluxdbUtil influxdbUtil;
@Autowired
IndicatorDataMapper indicatorDataMapper;
/**
     * Get the installed capacity of a station by its station number
*
......@@ -257,7 +259,7 @@ public class MonitoringServiceImpl {
List<ESEquipments> result = commonServiceImpl.getListDataByCondtions(queryCondtion, null, ESEquipments.class, null);
List<ESEquipments> result1 = commonServiceImpl.getListDataByCondtions(queryCondtion1, null, ESEquipments.class);
completionOfPowerIndicatorsDto.setWindSpeedOrIrradiance(String.format(CommonConstans.Twodecimalplaces, commonServiceImpl.getSumByEquipmentIndxName(result1, "WTX-801_25_WTX-801_总辐射")));
completionOfPowerIndicatorsDto.setActivePower(String.format(CommonConstans.Twodecimalplaces, commonServiceImpl.getSumByEquipmentIndxName(result1, "南瑞光差保护_313P")*CommonConstans.kwToMv));
completionOfPowerIndicatorsDto.setActivePower(String.format(CommonConstans.Twodecimalplaces, commonServiceImpl.getSumByEquipmentIndxName(result1, "南瑞光差保护_313P") * CommonConstans.kwToMv));
completionOfPowerIndicatorsDto.setDailyPower(String.format(CommonConstans.Fourdecimalplaces, commonServiceImpl.getSumByEquipmentIndxName(result, CommonConstans.taiHeGenIndicatorDay)));
completionOfPowerIndicatorsDto.setMonthlyPower(String.format(CommonConstans.Fourdecimalplaces, commonServiceImpl.getSumByEquipmentIndxName(result, CommonConstans.taiHeGenIndicatorMonth)));
completionOfPowerIndicatorsDto.setAnnualPower(String.format(CommonConstans.Fourdecimalplaces, commonServiceImpl.getSumByEquipmentIndxName(result, CommonConstans.taiHeGenIndicatorYear)));
......@@ -691,6 +693,12 @@ public class MonitoringServiceImpl {
public Page<HashMap<String, String>> getDetailsOnPowergeneration(String areaName) {
Page<HashMap<String, String>> hashMapPage = new Page<>(1, 99);
List<HashMap<String, String>> hashMapList = new ArrayList<>();
List<StationCacheInfoDto> stationCacheInfoDtoList = new ArrayList<>();
stationCacheInfoDtoList = commonServiceImpl.getListStationCacheInfoDto();
if (!ObjectUtils.isEmpty(areaName)) {
stationCacheInfoDtoList = stationCacheInfoDtoList.stream().filter(stationCacheInfoDto -> stationCacheInfoDto.getBelongArea().equals(areaName)).collect(Collectors.toList());
}
List<StationBasic> stationBasicList = stationBasicMapper.selectList(new QueryWrapper<StationBasic>().isNotNull("fan_gateway_id"));
        // daily generation
AtomicReference<Double> dailyPower = new AtomicReference<>(0.0);
......@@ -698,11 +706,11 @@ public class MonitoringServiceImpl {
AtomicReference<Double> monthlyPower = new AtomicReference<>(0.0);
        // annual generation
AtomicReference<Double> annualPower = new AtomicReference<>(0.0);
stationBasicList.forEach(stationBasic -> {
if ("FDZ".equals(stationBasic.getStationType())) {
stationCacheInfoDtoList.forEach( stationCacheInfoDto->{
if ("FDZ".equals(stationCacheInfoDto.getStationType())) {
Map<String, List<String>> queryCondtion = new HashMap<>();
queryCondtion.put(CommonConstans.QueryStringEquipmentIndexName, Arrays.asList("日发电量", "月发电量", "年发电量"));
queryCondtion.put(CommonConstans.QueryStringGateWayId, Arrays.asList(stationBasic.getFanGatewayId()));
queryCondtion.put(CommonConstans.QueryStringGateWayId, Arrays.asList(stationCacheInfoDto.getFanGatewayId()));
List<ESEquipments> result = commonServiceImpl.getListDataByCondtions(queryCondtion, null, ESEquipments.class);
dailyPower.updateAndGet(v -> v + keepFourdecimalPlaces(commonServiceImpl.getSumByEquipmentIndxName(result, "日发电量")));
monthlyPower.updateAndGet(v -> v + keepFourdecimalPlaces(commonServiceImpl.getSumByEquipmentIndxName(result, "月发电量")));
......@@ -710,7 +718,7 @@ public class MonitoringServiceImpl {
} else {
Map<String, List<String>> queryCondtion = new HashMap<>();
queryCondtion.put(CommonConstans.QueryStringEquipmentIndexName, CommonConstans.taiHeGenIndicator);
queryCondtion.put(CommonConstans.QueryStringGateWayId, Arrays.asList(stationBasic.getBoosterGatewayId()));
queryCondtion.put(CommonConstans.QueryStringGateWayId, Arrays.asList(stationCacheInfoDto.getBoosterGatewayId()));
List<ESEquipments> result = commonServiceImpl.getListDataByCondtions(queryCondtion, null, ESEquipments.class);
dailyPower.updateAndGet(v -> v + keepFourdecimalPlaces(commonServiceImpl.getSumByEquipmentIndxName(result, CommonConstans.taiHeGenIndicatorDay)));
monthlyPower.updateAndGet(v -> v + keepFourdecimalPlaces(commonServiceImpl.getSumByEquipmentIndxName(result, CommonConstans.taiHeGenIndicatorMonth)));
......@@ -789,37 +797,36 @@ public class MonitoringServiceImpl {
public Page<HashMap<String, String>> getPowerGenerationTrendsOfCompletionTopFive(String areaName) {
Page<HashMap<String, String>> hashMapPage = new Page<>(1, 5);
List<HashMap<String, String>> mapList = new ArrayList<>();
List<StationBasic> stationBasicListAll = new ArrayList<>();
List<StationCacheInfoDto> stationCacheInfoDtoList = new ArrayList<>();
stationCacheInfoDtoList = commonServiceImpl.getListStationCacheInfoDto();
if (!ObjectUtils.isEmpty(areaName)) {
stationBasicListAll = getListOfStationBasicByAreaName(areaName);
} else {
stationBasicListAll = stationBasicMapper.selectList(new QueryWrapper<StationBasic>().isNotNull("fan_gateway_id"));
stationCacheInfoDtoList = stationCacheInfoDtoList.stream().filter(stationCacheInfoDto -> stationCacheInfoDto.getBelongArea().equals(areaName)).collect(Collectors.toList());
}
HashMap<String, List<String>> hashMap = new HashMap<>();
List<String> xList = new ArrayList<>();
List<String> yList = new ArrayList<>();
AtomicReference<Double> total = new AtomicReference<>(0.00);
        // monthly generation
String finalIndicator = "月发电量";
stationBasicListAll.forEach(stationBasic -> {
stationCacheInfoDtoList.forEach(stationCacheInfoDto -> {
List<Map<String, Object>> mapListData = new ArrayList<>();
if (!stationBasic.getStationType().equals("FDZ")) {
if (!stationCacheInfoDto.getStationType().equals("FDZ")) {
Map<String, List<String>> queryCondtion = new HashMap<>();
queryCondtion.put(CommonConstans.QueryStringEquipmentIndexName, Arrays.asList(CommonConstans.taiHeGenIndicatorMonth));
queryCondtion.put(CommonConstans.QueryStringGateWayId, Arrays.asList(stationBasic.getBoosterGatewayId()));
queryCondtion.put(CommonConstans.QueryStringGateWayId, Arrays.asList(stationCacheInfoDto.getBoosterGatewayId()));
List<ESEquipments> result = commonServiceImpl.getListDataByCondtions(queryCondtion, null, ESEquipments.class);
total.set(commonServiceImpl.getSumByEquipmentIndxName(result, CommonConstans.taiHeGenIndicatorMonth));
} else {
Map<String, List<String>> queryCondtion = new HashMap<>();
queryCondtion.put(CommonConstans.QueryStringEquipmentIndexName, Arrays.asList(finalIndicator));
queryCondtion.put(CommonConstans.QueryStringGateWayId, Arrays.asList(stationBasic.getFanGatewayId()));
queryCondtion.put(CommonConstans.QueryStringGateWayId, Arrays.asList(stationCacheInfoDto.getFanGatewayId()));
List<ESEquipments> result = commonServiceImpl.getListDataByCondtions(queryCondtion, null, ESEquipments.class);
total.set(commonServiceImpl.getSumByEquipmentIndxName(result, finalIndicator));
}
stationBasic.setAddress(String.format(CommonConstans.Twodecimalplaces, total.get() % 100));
stationCacheInfoDto.setAddress(String.format(CommonConstans.Twodecimalplaces, (total.get())*100/getPlanGenByStationIdAndMonth(stationCacheInfoDto.getStationId())));
});
List<StationBasic> sorted = stationBasicListAll.stream().sorted(Comparator.comparing(StationBasic::getAddress, Comparator.comparingDouble(Double::parseDouble)).reversed()).collect(Collectors.toList());
List<StationCacheInfoDto> sorted = stationCacheInfoDtoList.stream().sorted(Comparator.comparing(StationCacheInfoDto::getAddress, Comparator.comparingDouble(Double::parseDouble)).reversed()).collect(Collectors.toList());
sorted.forEach(stationBasic -> {
HashMap<String, String> hashMap1 = new HashMap<>();
hashMap1.put("stationName", stationBasic.getStationName());
......@@ -1307,19 +1314,19 @@ public class MonitoringServiceImpl {
queryCondtion1.put(CommonConstans.QueryStringGateWayId, Arrays.asList(stationBasic.getBoosterGatewayId()));
queryCondtion1.put(CommonConstans.QueryStringEquipmentIndexName, Arrays.asList("南瑞光差保护_313P"));
List<ESEquipments> esEquipmentsList = commonServiceImpl.getListDataByCondtions(queryCondtion1, null, ESEquipments.class);
total.updateAndGet(v -> v + commonServiceImpl.getSumByEquipmentIndxName(esEquipmentsList, "南瑞光差保护_313P"));
total.updateAndGet(v -> v + commonServiceImpl.getSumByEquipmentIndxName(esEquipmentsList, "南瑞光差保护_313P")*CommonConstans.kwToMv);
}
});
        // active power unit conversion
hashMap.put("title", String.format(CommonConstans.Twodecimalplaces, total.get() / 1000));
hashMap.put("title", String.format(CommonConstans.Twodecimalplaces, total.get()));
return hashMap;
}
public Double keepFourdecimalPlaces(Double param) {
return Double.valueOf(String.format("%.4f", param));
}
public Double getPlanGenByStationIdAndMonth(String stationId){
String [] dates = DateUtil.today().split("-");
return stationPlanMapper.getPlanGenByStationIdAndMonth(stationId,dates[0], String.valueOf(Integer.valueOf(dates[1])));
}
}
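The completion-rate calculation earlier in this class divides the monthly total by getPlanGenByStationIdAndMonth(...); a hedged, defensive variant of that step (same helper and StationCacheInfoDto as above, names unchanged) would guard against a missing or zero plan row, for example:

// Sketch only: avoid a NullPointerException or division by zero when no plan exists for the current month.
Double plan = getPlanGenByStationIdAndMonth(stationCacheInfoDto.getStationId());
double completionRate = (plan == null || plan == 0.0) ? 0.0 : total.get() * 100 / plan;
stationCacheInfoDto.setAddress(String.format(CommonConstans.Twodecimalplaces, completionRate));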
......@@ -15,14 +15,15 @@
from
fault_alarm_event
<where>
<if test="gatewayId != null and gatewayId != ''">
gateway_id = #{gatewayId}
</if>
<if test="gatewayId != null and gatewayId != ''">
gateway_id = #{gatewayId}
</if>
<if test="stationId != null and stationId != ''">
station_id = #{stationId}
</if>
AND #{time} &lt;= event_time
</where>
order by event_time desc
ORDER BY event_time DESC LIMIT #{current}, #{size}
</select>
......
......@@ -2,6 +2,30 @@
<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN" "http://mybatis.org/dtd/mybatis-3-mapper.dtd">
<mapper namespace="com.yeejoin.amos.boot.module.jxiop.biz.mapper2.EquipAlarmEventMapper">
<select id="getAlarmEventList" resultType="com.yeejoin.amos.boot.module.jxiop.biz.dto.EquipAlarmEventDto">
SELECT
*
FROM
equip_alarm_event a
JOIN ( SELECT sequence_nbr FROM equip_alarm_event
<where>
<if test="gatewayId != null and gatewayId != ''">
AND gateway_id = #{gatewayId}
</if>
<if test="equipIndex!= null and equipIndex != ''">
AND equip_index = #{equipIndex}
</if>
<if test="frontModule!= null and frontModule != ''">
AND front_module = #{frontModule}
</if>
AND #{time} &lt;= created_time
</where>
ORDER BY created_time DESC LIMIT #{current}, #{size} ) b ON a.sequence_nbr = b.sequence_nbr
</select>
<select id="getLastDataBySort" resultType="java.lang.String">
select
sort
......
......@@ -53,4 +53,6 @@ spring.servlet.multipart.maxRequestSize=100MB
spring.main.allow-bean-definition-overriding=true
spring.http.encoding.charset=utf-8
spring.http.encoding.enabled=true
spring.http.encoding.force=true
\ No newline at end of file
spring.http.encoding.force=true
socket.port=7777
\ No newline at end of file