Commit def1b30d authored by chenzhao

Merge remote-tracking branch 'origin/developer' into developer

parents b2df1ac7 840a1a2c
......@@ -99,9 +99,9 @@ public class GoodWeRequestUtil {
}
}
}
if (jsonArray.size() == 0) {
jsonArray.add(jsonObject);
}
// if (jsonArray.size() == 0) {
// jsonArray.add(jsonObject);
// }
}
return jsonArray;
}
......
......@@ -13,9 +13,10 @@ public class GoodWeConstant {
public static final HashMap<String, String> stationStaus = new HashMap<String, String>() {
{
put("-1", "离线");
put("0", "待机");
put("1", "正常");
put("2", "停机");
put("0", "在线");
put("1", "在线");
put("2", "报警");
put("-2", "报警");
}
};
public static final HashMap<String, String> warningStaus = new HashMap<String, String>() {
......@@ -31,6 +32,27 @@ public class GoodWeConstant {
}
};
public static final HashMap<String, String> types = new HashMap<String, String>() {
{
put("0", "家庭户用");
put("1", "工商业屋顶");
put("2", "地面电站");
put("3", "扶贫电站");
put("4", "储能电站");
}
};
public static final HashMap<String, List<String>> errorCodeMap = new HashMap<String, List<String>>() {
{
put("E0", Arrays.asList("漏电流自检异常",
......
......@@ -85,16 +85,16 @@ public class HouseholdTestController {
@PostMapping(value = "/goodweDemo")
@ApiOperation(httpMethod = "POST", value = "固德威示例", notes = "固德威示例")
public void goodweDemo() throws IOException {
// goodWeDataAcquisitionService.stationList();
goodWeDataAcquisitionService.stationList();
// goodWeDataAcquisitionService.stationDetail();
// goodWeDataAcquisitionService.inverAlramInfo();
// goodWeDataAcquisitionService.inverAlramInfo();
// goodWeDataAcquisitionService.inverterList();
// goodWeDataAcquisitionService.inverterDetail();
// goodWeDataAcquisitionService.stationMonthGen();
// goodWeDataAcquisitionService.stationYearGen();
// goodWeDataAcquisitionService.inverterDetail();
goodWeDataAcquisitionService.inverterMonthGen();
goodWeDataAcquisitionService.inverterYearGen();
// goodWeDataAcquisitionService.inverterMonthGen();
// goodWeDataAcquisitionService.inverterYearGen();
}
/**
......@@ -111,8 +111,8 @@ public class HouseholdTestController {
// goLangDataAcquisitionService.collectorList();
// goLangDataAcquisitionService.inverterList();
// goLangDataAcquisitionService.collectorDetail();
goLangDataAcquisitionService.inverterDetail();
// goLangDataAcquisitionService.inverAlramInfo();
// goLangDataAcquisitionService.inverterDetail();
// goLangDataAcquisitionService.inverAlramInfo();
}
......@@ -125,13 +125,13 @@ public class HouseholdTestController {
@PostMapping(value = "/sofarnew")
@ApiOperation(httpMethod = "POST", value = "首航", notes = "首航")
public void sofarnew() throws IOException {
sofarDataAcquisitionService.stationList();
// sofarDataAcquisitionService.stationList();
// goLangDataAcquisitionService.stationDetail();
// goLangDataAcquisitionService.collectorList();
// goLangDataAcquisitionService.inverterList();
// goLangDataAcquisitionService.collectorDetail();
// goLangDataAcquisitionService.inverterDetail();
// goLangDataAcquisitionService.inverAlramInfo();
goLangDataAcquisitionService.inverAlramInfo();
}
/**
......
......@@ -35,5 +35,5 @@ public interface GoodWeDataAcquisitionService {
/**
* @description Persist collector alarm list data to the database
*/
void inverAlramInfo();
void inverAlramInfo(String stationid);
}
......@@ -109,19 +109,22 @@ public class GoodWeDataAcquisitionServiceImpl implements GoodWeDataAcquisitionSe
jpStation.setCapacity(goodWeStationList.getCapacity());
jpStation.setName(goodWeStationList.getStationname());
jpStation.setPrice(0.42);
jpStation.setAddress(goodWeStationList.getAddress());
jpStation.setAddress(goodWeStationList.getLocation());
jpStation.setLatitude(goodWeStationList.getLatitude());
jpStation.setLongitude(goodWeStationList.getLongitude());
jpStation.setUserName(goodWeStationList.getOwner_name());
jpStation.setUserName(goodWeStationList.getOwner_name()==null?goodWeStationList.getStationname():goodWeStationList.getOwner_name());
jpStation.setUserPhone(goodWeStationList.getOwner_phone());
jpStation.setStationContact(goodWeStationList.getOwner_name());
jpStation.setModuleCount(0);
jpStation.setRealTimePower(goodWeStationList.getPac() * GoodWeConstant.wToKw);
jpStation.setState(GoodWeConstant.stationStaus.get(goodWeStationList.getStatus()));
jpStation.setRealTimePower(goodWeStationList.getPac()!=null?goodWeStationList.getPac() * GoodWeConstant.wToKw:null);
jpStation.setState(goodWeStationList.getStatus()!=null?GoodWeConstant.stationStaus.get(goodWeStationList.getStatus()+""):null);
jpStation.setDayGenerate(goodWeStationList.getEday());
jpStation.setDayIncome(goodWeStationList.getEday_income());
jpStation.setAccumulatedPower(goodWeStationList.getEtotal());
jpStation.setCumulativeIncome(goodWeStationList.getEtotal_income());
jpStation.setType(goodWeStationList.getPowerstation_type());
jpStation.setRecDate(new Date());
if (ObjectUtils.isEmpty(jpStation.getSequenceNbr())) {
jpStation.setCreateTime(new Date(goodWeStationList.getCreatedTime()));
if (ObjectUtils.isNotEmpty(goodWeStationList.getTurnon_time())) {
......@@ -133,8 +136,7 @@ public class GoodWeDataAcquisitionServiceImpl implements GoodWeDataAcquisitionSe
jpStationMapper.updateById(jpStation);
}
this.inverAlramInfo(goodWeStationList.getPowerstation_id());
//Add TD station regional company / dealer binding table
// TdJpStation tdJpStation = tdJpStationMapper.selectOne(new QueryWrapper<TdJpStation>().
// eq("third_code",PVProducerInfoEnum.GDW.getCode()).
......@@ -287,6 +289,7 @@ public class GoodWeDataAcquisitionServiceImpl implements GoodWeDataAcquisitionSe
}
@Override
@Scheduled(cron = "${dataRequstScheduled.GoodWe}")
public void stationDetail() {
List<String> stationIds = goodWeStationMonitorListMapper.getStationIds();
stationIds.forEach(stationId -> {
......@@ -316,6 +319,7 @@ public class GoodWeDataAcquisitionServiceImpl implements GoodWeDataAcquisitionSe
}
@Override
@Scheduled(cron = "${dataRequstScheduled.GoodWe}")
public void stationMonthGen() {
List<String> stationIds = goodWeStationMonitorListMapper.getStationIds();
stationIds.forEach(stationId -> {
......@@ -340,6 +344,7 @@ public class GoodWeDataAcquisitionServiceImpl implements GoodWeDataAcquisitionSe
}
@Override
@Scheduled(cron = "${dataRequstScheduled.GoodWe}")
public void stationYearGen() {
List<String> stationIds = goodWeStationMonitorListMapper.getStationIds();
stationIds.forEach(stationId -> {
......@@ -375,6 +380,7 @@ public class GoodWeDataAcquisitionServiceImpl implements GoodWeDataAcquisitionSe
}
@Override
@Scheduled(cron = "${dataRequstScheduled.GoodWe}")
public void inverterList() {
List<String> stationIds = goodWeStationMonitorListMapper.getStationIds();
stationIds.stream().forEach(stationId -> {
......@@ -413,6 +419,7 @@ public class GoodWeDataAcquisitionServiceImpl implements GoodWeDataAcquisitionSe
}
@Override
@Scheduled(cron = "${dataRequstScheduled.GoodWe}")
public void inverterDetail() {
List<String> goodweSnList = jpInverterMapper.getGoodWeSnCodes();
List<List<String>> splitList = Lists.partition(goodweSnList, 50);
......@@ -686,6 +693,7 @@ public class GoodWeDataAcquisitionServiceImpl implements GoodWeDataAcquisitionSe
}
@Override
@Scheduled(cron = "${dataRequstScheduled.GoodWe}")
public void inverterMonthGen() {
List<String> sns =jpInverterMapper.getGoodWeSnCodes() ;
String currentMonth = DateUtil.format(new Date(), "yyyyMM");
......@@ -710,6 +718,7 @@ public class GoodWeDataAcquisitionServiceImpl implements GoodWeDataAcquisitionSe
}
@Override
@Scheduled(cron = "${dataRequstScheduled.GoodWe}")
public void inverterYearGen() {
String currentYear = DateUtil.format(new Date(), "yyyy");
List<String> sns =jpInverterMapper.getGoodWeSnCodes() ;
......@@ -734,14 +743,15 @@ public class GoodWeDataAcquisitionServiceImpl implements GoodWeDataAcquisitionSe
}
@Override
public void inverAlramInfo() {
public void inverAlramInfo(String stationid) {
HashMap<String, Object> requestInfo = new HashMap<>();
String today = DateUtil.today();
requestInfo.put("page_index", 1);
requestInfo.put("page_size", 1000);
requestInfo.put("starttime", today + " 00:00:00");
requestInfo.put("endtime", today + " 23:59:59");
requestInfo.put("status", 2);
requestInfo.put("stationid", stationid);
// requestInfo.put("status", 2);
String requstParam = JSON.toJSONString(requestInfo);
List<GoodWeAlarmDto> alarmList = goodWeRequestUtil.getResPonse(GoodWeConstant.alarmListUrl, GoodWeConstant.requestPost, requstParam, GoodWeConstant.resovleRule_data_list, GoodWeAlarmDto.class);
alarmList.forEach(goodWeAlarmDto -> {
......
......@@ -74,6 +74,6 @@ dataRequstScheduled.huawei=0 0/50 * * * *
dataRequstScheduled.keshida=0 0/50 * * * *
dataRequstScheduled.Sunlight=0 0/50 * * * *
dataRequstScheduled.GoodWe=0 0/50 * * * *
dataRequstScheduled.GoodWe=0 0/3 * * * *
dataRequstScheduled.Sofar=0 0/3 * * * *
\ No newline at end of file
dataRequstScheduled.Sofar=0 0/50 * * * *
\ No newline at end of file
......@@ -33,6 +33,9 @@ public class PowerStationDto extends BaseDto {
@ApiModelProperty(value = "服务代理商")
private String serviceAgent;
@ApiModelProperty(value = "区域公司")
private String regionalCompaniesName;
@ApiModelProperty(value = "电站类型")
private String powerStationType;
......
......@@ -19,5 +19,6 @@ public interface PowerStationMapper extends BaseMapper<PowerStation> {
@UserEmpower(field ={"regional_companies_code"} ,dealerField={"a.developer_code","a.regional_companies_code","a.developer_user_id"} ,fieldConditions ={"in","in","in"}, relationship="and")
List<PowerStationDto> queryPage(@Param("powerStationCode") String powerStationCode,
@Param("ownersName")String ownersName,
@Param("serviceAgent")String serviceAgent);
@Param("serviceAgent")String serviceAgent,
@Param("regionalCompaniesName")String regionalCompaniesName);
}
......@@ -7,6 +7,7 @@
b.initiate_status,
b.contract_lock_id,
hygf_peasant_household.regional_companies_code,
hygf_peasant_household.regional_companies_name,
hygf_peasant_household.developer_code,
hygf_peasant_household.developer_user_id
from hygf_power_station LEFT JOIN ( select peasant_household_id,initiate_status, contract_lock_id from hygf_household_contract where hygf_household_contract.status !='已作废'
......@@ -21,7 +22,10 @@
and hygf_power_station.owners_name like concat(concat("%",#{ownersName}),"%")
</if>
<if test="serviceAgent!=null and serviceAgent!=''">
and hygf_power_station.service_agent=#{serviceAgent}
and hygf_power_station.service_agent like concat(concat("%",#{serviceAgent}),"%")
</if>
<if test="regionalCompaniesName!=null and regionalCompaniesName!=''">
and hygf_peasant_household.regional_companies_name like concat(concat("%",#{regionalCompaniesName}),"%")
</if>
ORDER BY hygf_power_station.rec_date desc
) a
......
......@@ -100,12 +100,14 @@ public class PowerStationController extends BaseController {
public ResponseModel<Page<PowerStationDto>> queryForPage(@RequestParam(value = "current") int current,
@RequestParam(value = "size") int size,
@RequestParam(value = "powerStationCode",required = false)String powerStationCode,
@RequestParam(value = "ownersName",required = false)String ownersName) {
@RequestParam(value = "ownersName",required = false)String ownersName,
@RequestParam(value = "serviceAgent",required = false)String serviceAgent,
@RequestParam(value = "regionalCompaniesName",required = false)String regionalCompaniesName) {
Page<PowerStationDto> page = new Page<PowerStationDto>();
page.setCurrent(current);
page.setSize(size);
AgencyUserModel userInfo = getUserInfo();
return ResponseHelper.buildResponse(powerStationServiceImpl.queryForPowerStationUserRoles(page,powerStationCode,ownersName,userInfo));
return ResponseHelper.buildResponse(powerStationServiceImpl.queryForPowerStationUserRoles(page,powerStationCode,ownersName,userInfo,serviceAgent,regionalCompaniesName));
}
/**
......
......@@ -25,6 +25,7 @@ import org.apache.poi.ss.formula.functions.T;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.web.bind.annotation.RequestParam;
import org.typroject.tyboot.component.emq.EmqKeeper;
import org.typroject.tyboot.core.foundation.exception.BaseException;
import org.typroject.tyboot.core.rdbms.annotation.Condition;
......@@ -45,7 +46,8 @@ import java.util.stream.Collectors;
*/
@Service
@Slf4j
public class PowerStationServiceImpl extends BaseService<PowerStationDto, PowerStation, PowerStationMapper> implements IPowerStationService {
public class PowerStationServiceImpl extends BaseService<PowerStationDto, PowerStation, PowerStationMapper>
implements IPowerStationService {
@Autowired
IdxFeginService idxFeginService;
......@@ -59,9 +61,9 @@ public class PowerStationServiceImpl extends BaseService<PowerStationDto, PowerS
@Autowired
PeasantHouseholdServiceImpl peasantHouseholdService;
private static final String IDX_REQUEST_STATE="200";
private static final String VERIFY_RESULT_YES="0";
private static final String VERIFY_RESULT_NO="1";
private static final String IDX_REQUEST_STATE = "200";
private static final String VERIFY_RESULT_YES = "0";
private static final String VERIFY_RESULT_NO = "1";
@Autowired
WorkflowFeignClient workflowFeignClient;
......@@ -84,8 +86,8 @@ public class PowerStationServiceImpl extends BaseService<PowerStationDto, PowerS
@Autowired
WorkflowImpl workflow;
public Page<PowerStationDto> queryForPowerStationUserRoles(Page<PowerStationDto> page, String powerStationCode, String ownersName, AgencyUserModel userInfo){
String serviceAgent =null;
public Page<PowerStationDto> queryForPowerStationUserRoles(Page<PowerStationDto> page, String powerStationCode,
String ownersName, AgencyUserModel userInfo, String serviceAgent, String regionalCompaniesName) {
// Map<Long, List<RoleModel>> orgRoles = userInfo.getOrgRoles();
// Collection<List<RoleModel>> roleModels = orgRoles.values();
// if(roleModels !=null){
......@@ -100,25 +102,25 @@ public class PowerStationServiceImpl extends BaseService<PowerStationDto, PowerS
// }
// }
//Get the dealer unit the current user belongs to
// Get the dealer unit the current user belongs to
// UserUnitInformationDto userUnitInformationDto=personnelBusinessMapper.getUserUnitInformationDto(userInfo.getUserId());
//
// if(userUnitInformationDto!=null&&userUnitInformationDto.getAmosDealerName()!=null){
// serviceAgent=userUnitInformationDto.getAmosDealerName();
// }
//return this.queryForPowerStationPage(page,powerStationCode,ownersName,serviceAgent);
return this.queryPage((int) page.getCurrent(), (int) page.getSize(),powerStationCode,ownersName,serviceAgent);
// return
// this.queryForPowerStationPage(page,powerStationCode,ownersName,serviceAgent);
return this.queryPage((int) page.getCurrent(), (int) page.getSize(), powerStationCode, ownersName,
serviceAgent,regionalCompaniesName);
}
//Query power station review records
public Page<PowerStationDto> queryPage(int current, int size,
String powerStationCode,
String ownersName,String serviceAgent) {
// Query power station review records
public Page<PowerStationDto> queryPage(int current, int size, String powerStationCode, String ownersName,
String serviceAgent,String regionalCompaniesName) {
PageHelper.startPage(current, size);
List<PowerStationDto> list= powerStationMapper.queryPage(powerStationCode,ownersName,serviceAgent);
List<PowerStationDto> list = powerStationMapper.queryPage(powerStationCode, ownersName, serviceAgent,regionalCompaniesName);
PageInfo<PowerStationDto> pages = new PageInfo(list);
com.baomidou.mybatisplus.extension.plugins.pagination.Page<PowerStationDto> pagenew = new com.baomidou.mybatisplus.extension.plugins.pagination.Page<PowerStationDto>();
......@@ -130,28 +132,27 @@ public class PowerStationServiceImpl extends BaseService<PowerStationDto, PowerS
return pagenew;
}
/**
* Paged query
*/
public Page<PowerStationDto> queryForPowerStationPage(Page<PowerStationDto> page,@Condition(Operator.like) String powerStationCode,@Condition(Operator.like) String ownersName,String serviceAgent) {
return this.queryForPage(page, "rec_date", false,powerStationCode,ownersName,serviceAgent);
public Page<PowerStationDto> queryForPowerStationPage(Page<PowerStationDto> page,
@Condition(Operator.like) String powerStationCode, @Condition(Operator.like) String ownersName,
String serviceAgent) {
return this.queryForPage(page, "rec_date", false, powerStationCode, ownersName, serviceAgent);
}
/**
* List query example
*/
public List<PowerStationDto> queryForPowerStationList() {
return this.queryForList("" , false);
return this.queryForList("", false);
}
@Override
@Transactional
public boolean savePowerStation(PowerStation powerStation, boolean flag,String name,String meg) {
try{
//Process node code
public boolean savePowerStation(PowerStation powerStation, boolean flag, String name, String meg) {
try {
// Process node code
// if (flag) {
// String flowTaskIdnext = this.getTaskNoAuth(powerStation.getProcessInstanceId());
// WorkDto workDto=this.getNodeInfoCode(flowTaskIdnext);
......@@ -162,50 +163,60 @@ public class PowerStationServiceImpl extends BaseService<PowerStationDto, PowerS
powerStation.setRecDate(new Date());
Boolean fl= this.saveOrUpdate(powerStation);
PowerStationNodeEnum powerStationNodeEnum= PowerStationNodeEnum.getNodeByCode(powerStation.getNextProcessNode());
if (flag){
ToDoTasks toDoTasks=null;
if(PowerStationNodeEnum.经销商上传图纸.getCode().equals(powerStationNodeEnum.getCode())||PowerStationNodeEnum.经销商审核.getCode().equals(powerStationNodeEnum.getCode())){
//Get the dealer orgcode
PeasantHousehold peasantHouseholdd= peasantHouseholdService.getById(powerStation.getPeasantHouseholdId());
toDoTasks= new ToDoTasks ( TaskTypeStationEnum.电站审核.getCode(), powerStation.getSequenceNbr(), "用户"+name+"电站勘察待"+powerStationNodeEnum.getName(),peasantHouseholdd.getDeveloperCode());
if(PowerStationNodeEnum.经销商审核.getCode().equals(powerStationNodeEnum.getCode())){
toDoTasksServiceImpl.addToDoTasksByUserId(peasantHouseholdd.getDeveloperUserId(),toDoTasks,meg);
}else{
toDoTasksServiceImpl.addToDoTasksByRole(powerStation.getNodeRole(),toDoTasks,meg);
Boolean fl = this.saveOrUpdate(powerStation);
PowerStationNodeEnum powerStationNodeEnum = PowerStationNodeEnum
.getNodeByCode(powerStation.getNextProcessNode());
if (flag) {
ToDoTasks toDoTasks = null;
if (PowerStationNodeEnum.经销商上传图纸.getCode().equals(powerStationNodeEnum.getCode())
|| PowerStationNodeEnum.经销商审核.getCode().equals(powerStationNodeEnum.getCode())) {
// Get the dealer orgcode
PeasantHousehold peasantHouseholdd = peasantHouseholdService
.getById(powerStation.getPeasantHouseholdId());
toDoTasks = new ToDoTasks(TaskTypeStationEnum.电站审核.getCode(), powerStation.getSequenceNbr(),
"用户" + name + "电站勘察待" + powerStationNodeEnum.getName(),
peasantHouseholdd.getDeveloperCode());
if (PowerStationNodeEnum.经销商审核.getCode().equals(powerStationNodeEnum.getCode())) {
toDoTasksServiceImpl.addToDoTasksByUserId(peasantHouseholdd.getDeveloperUserId(), toDoTasks,
meg);
} else {
toDoTasksServiceImpl.addToDoTasksByRole(powerStation.getNodeRole(), toDoTasks, meg);
}
}else{
toDoTasks= new ToDoTasks ( TaskTypeStationEnum.电站审核.getCode(), powerStation.getSequenceNbr(), "用户"+name+"电站勘察待"+powerStationNodeEnum.getName(),null);
toDoTasksServiceImpl.addToDoTasksByRole(powerStation.getNodeRole(),toDoTasks,meg);
} else {
toDoTasks = new ToDoTasks(TaskTypeStationEnum.电站审核.getCode(), powerStation.getSequenceNbr(),
"用户" + name + "电站勘察待" + powerStationNodeEnum.getName(), null);
toDoTasksServiceImpl.addToDoTasksByRole(powerStation.getNodeRole(), toDoTasks, meg);
}
}else{
} else {
LambdaQueryWrapper<ToDoTasks> wrapper = new LambdaQueryWrapper<>();
wrapper.eq(ToDoTasks::getType, TaskTypeStationEnum.电站审核.getCode());
wrapper.eq(ToDoTasks::getState, "待办");
wrapper.eq(ToDoTasks::getBusinessId, powerStation.getSequenceNbr());
ToDoTasks doTasks= toDoTasksMapper.selectOne(wrapper);
if(doTasks!=null){
ToDoTasks doTasks = toDoTasksMapper.selectOne(wrapper);
if (doTasks != null) {
doTasks.setState("已办");
doTasks.setCompleteTime(new Date());
toDoTasksMapper.updateById(doTasks);
emqKeeper.getMqttClient().publish("TASK_MESSAGE" ,JSON.toJSONString(doTasks).getBytes(), 2 ,false);
emqKeeper.getMqttClient().publish("TASK_MESSAGE", JSON.toJSONString(doTasks).getBytes(), 2, false);
UserMessage userMessage= new UserMessage( doTasks.getType(), doTasks.getBusinessId(), doTasks.getAmosUserId(), new Date(), doTasks.getTaskName()+"已完成."+meg, doTasks.getAmosOrgCode());
UserMessage userMessage = new UserMessage(doTasks.getType(), doTasks.getBusinessId(),
doTasks.getAmosUserId(), new Date(), doTasks.getTaskName() + "已完成." + meg,
doTasks.getAmosOrgCode());
userMessageMapper.insert(userMessage);
emqKeeper.getMqttClient().publish("MY_MESSAGE" , JSON.toJSONString(userMessage).getBytes(), 2 ,false);
emqKeeper.getMqttClient().publish("MY_MESSAGE", JSON.toJSONString(userMessage).getBytes(), 2,
false);
}
}
return fl;
}catch (Exception e){
} catch (Exception e) {
e.printStackTrace();
throw new BaseException("获取工作流节点失败!","400","获取工作流节点失败!");
throw new BaseException("获取工作流节点失败!", "400", "获取工作流节点失败!");
}
}
......@@ -219,18 +230,20 @@ public class PowerStationServiceImpl extends BaseService<PowerStationDto, PowerS
@Override
@Transactional
public String powerStationExamine(long pageId, String nodeCode, String stationId, String taskId, String planInstanceId, Map<String, Object> kv) {
String meg="";
public String powerStationExamine(long pageId, String nodeCode, String stationId, String taskId,
String planInstanceId, Map<String, Object> kv) {
String meg = "";
// 1. Persist business-related data
PowerStation powerStation = this.baseMapper.selectById(stationId);
PowerStationNodeEnum nodeByCode = PowerStationNodeEnum.getNodeByCode(nodeCode);
String result = String.valueOf(kv.get("approvalStatus"));
boolean flag = true;
if (PowerStationNodeEnum.设计上传图纸.getCode().equals(nodeCode)||PowerStationNodeEnum.经销商上传图纸.getCode().equals(nodeCode)) {
if (PowerStationNodeEnum.设计上传图纸.getCode().equals(nodeCode)
|| PowerStationNodeEnum.经销商上传图纸.getCode().equals(nodeCode)) {
powerStation.setProcessStatus(PowerStationProcessStateEnum.进行中.getName());
this.updateSeve(nodeCode,powerStation.getPeasantHouseholdId(),kv);
this.updateSeve(nodeCode, powerStation.getPeasantHouseholdId(), kv);
} else {
if (VERIFY_RESULT_NO.equals(result)) {
......@@ -266,12 +279,13 @@ public class PowerStationServiceImpl extends BaseService<PowerStationDto, PowerS
break;
}
}
meg="任务明细:"+nodeByCode+(VERIFY_RESULT_YES.equals(result)?"通过":"不通过");
meg = "任务明细:" + nodeByCode + (VERIFY_RESULT_YES.equals(result) ? "通过" : "不通过");
// 2. Update process status
String code = null;
try{
try {
// 3. Execute the workflow
// FeignClientResult<String> submit = idxFeginService.submit(pageId, taskId, planInstanceId, null, null, null, kv);
// FeignClientResult<String> submit = idxFeginService.submit(pageId, taskId,
// planInstanceId, null, null, null, kv);
// if (IDX_REQUEST_STATE.equals(String.valueOf(submit.getStatus()))) {
// code = submit.getResult();
// log.info("流程执行成功:{}", code);
......@@ -283,64 +297,63 @@ public class PowerStationServiceImpl extends BaseService<PowerStationDto, PowerS
//
// }
//Execute the workflow
BasicGridAcceptance basicGridAcceptance=new BasicGridAcceptance();
// Execute the workflow
BasicGridAcceptance basicGridAcceptance = new BasicGridAcceptance();
StandardDto standardDto = new StandardDto();
if (PowerStationNodeEnum.设计上传图纸.getCode().equals(nodeCode)||PowerStationNodeEnum.经销商上传图纸.getCode().equals(nodeCode)) {
if (PowerStationNodeEnum.设计上传图纸.getCode().equals(nodeCode)
|| PowerStationNodeEnum.经销商上传图纸.getCode().equals(nodeCode)) {
standardDto.setComment(kv.get("approvalInfo")!=null?String.valueOf(kv.get("approvalInfo")):"");
standardDto.setComment(kv.get("approvalInfo") != null ? String.valueOf(kv.get("approvalInfo")) : "");
standardDto.setResult("0");
standardDto.setTaskId(powerStation.getFlowTaskId());
VariableDto variable = new VariableDto();
variable.setApprovalStatus("0");
variable.setComment(kv.get("approvalInfo")!=null?String.valueOf(kv.get("approvalInfo")):"");
variable.setComment(kv.get("approvalInfo") != null ? String.valueOf(kv.get("approvalInfo")) : "");
variable.setOperationTime(String.valueOf(kv.get("approveDate")));
variable.setOperator("");
standardDto.setVariable(variable);
}else{
standardDto.setComment(kv.get("approvalInfo")!=null?String.valueOf(kv.get("approvalInfo")):"");
} else {
standardDto.setComment(kv.get("approvalInfo") != null ? String.valueOf(kv.get("approvalInfo")) : "");
standardDto.setResult(String.valueOf(kv.get("approvalStatus")));
standardDto.setTaskId(powerStation.getFlowTaskId());
VariableDto variable = new VariableDto();
variable.setApprovalStatus(String.valueOf(kv.get("approvalStatus")));
variable.setComment(kv.get("approvalInfo")!=null?String.valueOf(kv.get("approvalInfo")):"");
variable.setComment(kv.get("approvalInfo") != null ? String.valueOf(kv.get("approvalInfo")) : "");
variable.setOperationTime(String.valueOf(kv.get("approveDate")));
variable.setOperator(String.valueOf(kv.get("approveName")));
standardDto.setVariable(variable);
}
BasicGridAcceptance workBasicGridAcceptance = workflow.standard(basicGridAcceptance, standardDto, requestContext.getUserId());
BasicGridAcceptance workBasicGridAcceptance = workflow.standard(basicGridAcceptance, standardDto,
requestContext.getUserId());
powerStation.setFlowTaskId(basicGridAcceptance.getNextTaskId());
powerStation.setNodeRole(basicGridAcceptance.getNextExecutorIds());
powerStation.setNodeRouting(basicGridAcceptance.getNextNodeKey()!=null?PowerStationEnum.getNodeByKey(basicGridAcceptance.getNextNodeKey()):"");
powerStation.setNodeRouting(basicGridAcceptance.getNextNodeKey() != null
? PowerStationEnum.getNodeByKey(basicGridAcceptance.getNextNodeKey())
: "");
powerStation.setNextProcessNode(basicGridAcceptance.getNextNodeKey());
powerStation.setPromoter(basicGridAcceptance.getPromoter());
powerStation.setNextExecuteUserIds(basicGridAcceptance.getNextExecuteUserIds());
powerStation.setNextNodeName(basicGridAcceptance.getNextNodeName());
powerStationService.savePowerStation(powerStation, flag,powerStation.getOwnersName(),meg);
if(!flag){
//Update the peasant household status
powerStationService.savePowerStation(powerStation, flag, powerStation.getOwnersName(), meg);
if (!flag) {
// Update the peasant household status
String peasantHouseholdId = powerStation.getPeasantHouseholdId();
PeasantHousehold peasantHousehold = peasantHouseholdService.getBaseMapper().selectById(Long.valueOf(peasantHouseholdId));
PeasantHousehold peasantHousehold = peasantHouseholdService.getBaseMapper()
.selectById(Long.valueOf(peasantHouseholdId));
peasantHousehold.setSurveyOrNot(3);
peasantHousehold.setConstructionState(ArrivalStateeEnum.勘察完成.getCode());
peasantHouseholdService.saveOrUpdate(peasantHousehold);
}
//}
}catch (Exception e){
// }
} catch (Exception e) {
e.printStackTrace();
throw new BaseException("获取工作流节点失败!","400","获取工作流节点失败!");
throw new BaseException("获取工作流节点失败!", "400", "获取工作流节点失败!");
}
return code;
// String meg="";
// // 1. Persist business-related data
// PowerStation powerStation = this.baseMapper.selectById(stationId);
......@@ -419,68 +432,67 @@ public class PowerStationServiceImpl extends BaseService<PowerStationDto, PowerS
// return code;
}
public WorkDto getNodeInfoCode(String flowTaskId){
WorkDto workDto=null;
try{
FeignClientResult<JSONObject> jSONObject= workflowFeignClient.getNodeInfo(flowTaskId);
if(IDX_REQUEST_STATE.equals(String.valueOf(jSONObject.getStatus()))){
JSONObject js=jSONObject.getResult();
if(js==null){
throw new BaseException("获取工作流节点失败!","400","获取工作流节点失败!");
public WorkDto getNodeInfoCode(String flowTaskId) {
WorkDto workDto = null;
try {
FeignClientResult<JSONObject> jSONObject = workflowFeignClient.getNodeInfo(flowTaskId);
if (IDX_REQUEST_STATE.equals(String.valueOf(jSONObject.getStatus()))) {
JSONObject js = jSONObject.getResult();
if (js == null) {
throw new BaseException("获取工作流节点失败!", "400", "获取工作流节点失败!");
}
LinkedHashMap taskInfo= js.get("taskInfo")!=null?(LinkedHashMap)js.get("taskInfo"):null;
String nextProcessNode=taskInfo!=null?taskInfo.get("taskDefinitionKey").toString():null;
List<LinkedHashMap> executor= js.get("executor")!=null?( List<LinkedHashMap>)js.get("executor"):null;
String nodeRole=null;
if(!executor.isEmpty()){
List<String> idList = executor.stream().map(e->e.get("groupId").toString()).collect(Collectors.toList());
nodeRole=StringUtils.join(idList,",");
LinkedHashMap taskInfo = js.get("taskInfo") != null ? (LinkedHashMap) js.get("taskInfo") : null;
String nextProcessNode = taskInfo != null ? taskInfo.get("taskDefinitionKey").toString() : null;
List<LinkedHashMap> executor = js.get("executor") != null ? (List<LinkedHashMap>) js.get("executor")
: null;
String nodeRole = null;
if (!executor.isEmpty()) {
List<String> idList = executor.stream().map(e -> e.get("groupId").toString())
.collect(Collectors.toList());
nodeRole = StringUtils.join(idList, ",");
}
LinkedHashMap extensionInfo= js.get("extensionInfo")!=null?(LinkedHashMap)js.get("extensionInfo"):null;
String nodeRouting=extensionInfo!=null?extensionInfo.get("nodeRole").toString():null;
workDto=new WorkDto(nodeRouting, nodeRole, nextProcessNode);
LinkedHashMap extensionInfo = js.get("extensionInfo") != null ? (LinkedHashMap) js.get("extensionInfo")
: null;
String nodeRouting = extensionInfo != null ? extensionInfo.get("nodeRole").toString() : null;
workDto = new WorkDto(nodeRouting, nodeRole, nextProcessNode);
}
return workDto;
}catch (Exception e){
} catch (Exception e) {
e.printStackTrace();
throw new BaseException("获取工作流节点失败!","400","获取工作流节点失败!");
throw new BaseException("获取工作流节点失败!", "400", "获取工作流节点失败!");
}
}
}
public String getTaskNoAuth(String processInstanceId){
String flowTaskId=null;
try{
JSONObject jSONObject= workflowFeignClient.getTaskNoAuth(processInstanceId);
if(IDX_REQUEST_STATE.equals(String.valueOf(jSONObject.get("code")))){
LinkedHashMap jsd= jSONObject.get("data")!=null?(LinkedHashMap)jSONObject.get("data"):null;
flowTaskId=jsd!=null?jsd.get("id").toString():null;
public String getTaskNoAuth(String processInstanceId) {
String flowTaskId = null;
try {
JSONObject jSONObject = workflowFeignClient.getTaskNoAuth(processInstanceId);
if (IDX_REQUEST_STATE.equals(String.valueOf(jSONObject.get("code")))) {
LinkedHashMap jsd = jSONObject.get("data") != null ? (LinkedHashMap) jSONObject.get("data") : null;
flowTaskId = jsd != null ? jsd.get("id").toString() : null;
}
if(flowTaskId==null){
throw new BaseException("获取工作流节点失败!","400","获取工作流节点失败!");
if (flowTaskId == null) {
throw new BaseException("获取工作流节点失败!", "400", "获取工作流节点失败!");
}
return flowTaskId;
}catch (Exception e){
} catch (Exception e) {
e.printStackTrace();
throw new BaseException("获取工作流节点失败!","400","获取工作流节点失败!");
throw new BaseException("获取工作流节点失败!", "400", "获取工作流节点失败!");
}
}
// Fill in design information
public void updateSeve(String nodeCode,String peasantHouseholdId, Map<String, Object> kv ){
public void updateSeve(String nodeCode, String peasantHouseholdId, Map<String, Object> kv) {
LambdaQueryWrapper<DesignInformation> wrapper = new LambdaQueryWrapper<>();
wrapper.eq(DesignInformation::getPeasantHouseholdId, peasantHouseholdId);
DesignInformation designInformation=designInformationMapper.selectOne(wrapper);
if(designInformation!=null){
DesignInformation designInformationnew=this.mapToBean(kv,DesignInformation.class);
DesignInformation designInformation = designInformationMapper.selectOne(wrapper);
if (designInformation != null) {
DesignInformation designInformationnew = this.mapToBean(kv, DesignInformation.class);
if (PowerStationNodeEnum.经销商上传图纸.getCode().equals(nodeCode)) {
designInformation.setPeasantHouseholdId(peasantHouseholdId);
......@@ -489,7 +501,7 @@ public void updateSeve(String nodeCode,String peasantHouseholdId, Map<String, Ob
designInformation.setConnectionLine(designInformationnew.getConnectionLine());
designInformation.setOnceLine(designInformationnew.getOnceLine());
designInformationMapper.updateById(designInformation);
}else{
} else {
// designInformationnew.setTypicalDiagram(designInformationnew.getTypicalDiagram());
designInformationnew.setPeasantHouseholdId(peasantHouseholdId);
designInformationnew.setComponentLayout(designInformation.getComponentLayout());
......@@ -504,32 +516,30 @@ public void updateSeve(String nodeCode,String peasantHouseholdId, Map<String, Ob
designInformationnew.setCable(designInformation.getCable());
designInformationMapper.updateById(designInformationnew);
}
}else{
DesignInformation designInformationnew=this.mapToBean(kv,DesignInformation.class);
designInformationnew.setTypicalDiagram((List<Object>)kv.get("typicalDiagram")) ;
} else {
DesignInformation designInformationnew = this.mapToBean(kv, DesignInformation.class);
designInformationnew.setTypicalDiagram((List<Object>) kv.get("typicalDiagram"));
designInformationnew.setPeasantHouseholdId(peasantHouseholdId);
designInformationMapper.insert(designInformationnew);
}
}
}
public <T> T mapToBean(Map<String, Object> map, Class<T> clazz) {
ObjectMapper objectMapper= new ObjectMapper();
ObjectMapper objectMapper = new ObjectMapper();
T bean =null;
T bean = null;
try {
bean = clazz.newInstance();
bean = objectMapper.convertValue(map,clazz);
bean = objectMapper.convertValue(map, clazz);
}catch (Exception e){
throw new BaseException(" 数据转化异常!","400","数据转化异常!");
} catch (Exception e) {
throw new BaseException(" 数据转化异常!", "400", "数据转化异常!");
}
return bean;
}
// private CollectionToList(Collection<? extends E> c){
// Object[] objects = c.toArray();
// }
......
......@@ -2,6 +2,7 @@ package com.yeejoin.amos.boot.module.jxiop.biz.controller;
import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
import com.yeejoin.amos.boot.biz.common.utils.RedisUtils;
import com.yeejoin.amos.boot.module.jxiop.biz.Enum.SmartAnalyseEnum;
import com.yeejoin.amos.boot.module.jxiop.biz.dto.FullViewRecallDataDTO;
import com.yeejoin.amos.boot.module.jxiop.biz.dto.FullViewRecallInfoDTO;
import com.yeejoin.amos.boot.module.jxiop.biz.entity.IdxBizFanHealthLevel;
......@@ -52,6 +53,44 @@ public class KafkaAnalyseController {
IdxBizFanHealthIndexMapper idxBizFanHealthIndexMapper;
@TycloudOperation(ApiLevel = UserType.AGENCY, needAuth = false)
@ApiOperation(httpMethod = "GET", value = "获取执行结果", notes = "获取执行结果")
@GetMapping(value = "/isRun")
public ResponseModel<String> isRun(@RequestParam(required = true) String key) {
String result = "非法key值";
if (kafkaTopicConsumer.equals(key) || kafkaTopicConsumerPv.equals(key) || kafkaTopicConsumerGKHFFan.equals(key)
|| kafkaTopicConsumerGKHFPv.equals(key) || kafkaTopicConsumerZXZFan.equals(key)
|| kafkaTopicConsumerZXZPv.equals(key)) {
if (redisUtils.hasKey(key)) {
result = "正在计算中";
} else {
result = "未计算";
}
}
return ResponseHelper.buildResponse(result);
}
@TycloudOperation(ApiLevel = UserType.AGENCY, needAuth = false)
@ApiOperation(httpMethod = "GET", value = "获取执行结果", notes = "获取执行结果")
@GetMapping(value = "/start")
public ResponseModel<Object> start(@RequestParam(required = true) String key) {
String result = "非法key值";
if (kafkaTopicConsumer.equals(key)) {
return getFanConditionVariables();
} else if (kafkaTopicConsumerPv.equals(key)) {
return getPvConditionVariables();
} else if (kafkaTopicConsumerGKHFFan.equals(key)) {
return getFanConditionVariablesGKHF();
} else if (kafkaTopicConsumerGKHFPv.equals(key)) {
return getPvConditionVariablesPvGKFX();
} else if (kafkaTopicConsumerZXZFan.equals(key)) {
return getFanConditionVariablesZXZ();
} else if (kafkaTopicConsumerZXZPv.equals(key)) {
return getPvConditionVariablesZXZ();
}
return ResponseHelper.buildResponse(result);
}
@TycloudOperation(ApiLevel = UserType.AGENCY, needAuth = false)
@PostMapping(value = "/getFanConditionVariables")
@ApiOperation(httpMethod = "POST", value = "计算相关性分析 - 风机 - 新", notes = "计算相关性分析 - 风机 - 新")
public ResponseModel<Object> getFanConditionVariables() {
......@@ -140,28 +179,35 @@ public class KafkaAnalyseController {
@TycloudOperation(needAuth = false, ApiLevel = UserType.AGENCY)
@ApiOperation(value = "全景诊断回溯")
@GetMapping("/getFullViewRecall")
public ResponseModel<List<FullViewRecallInfoDTO>> getFullViewRecall(@RequestParam(required = false, value = "analysisType") String analysisType) {
public ResponseModel<List<FullViewRecallInfoDTO>> getFullViewRecall(
@RequestParam(required = false, value = "analysisType") String analysisType) {
List<String> gatewayIds = this.getGatewayIds();
List<Map<String, Object>> stationIndexInfo = idxBizFanHealthIndexMapper.getStationIndexInfoByParam(analysisType, gatewayIds);
Map<String, Double> stationHealthIndexMap = stationIndexInfo.stream().collect(Collectors.toMap(t -> t.get("station").toString(), t -> Double.parseDouble(t.get("healthIndex").toString())));
List<Map<String, Object>> equipmentIndexInfo = idxBizFanHealthIndexMapper.getEquipmentIndexInfoByParam(analysisType, gatewayIds);
Map<String, Double> equipmentIndexInfoMap = equipmentIndexInfo.stream().collect(Collectors.toMap(t -> t.get("equipmentName").toString(), t -> Double.parseDouble(t.get("healthIndex").toString())));
List<Map<String, Object>> subSystemIndexInfo = idxBizFanHealthIndexMapper.getSubSystemIndexInfoByParam(analysisType, gatewayIds);
Map<String, Double> subSystemIndexInfoMap = subSystemIndexInfo.stream().collect(Collectors.toMap(t -> t.get("subSystem").toString(), t -> Double.parseDouble(t.get("healthIndex").toString())));
List<Map<String, Object>> pointNameIndexInfo = idxBizFanHealthIndexMapper.getPointNameIndexInfoByParam(analysisType, gatewayIds);
Map<String, Double> pointNameIndexInfoMap = pointNameIndexInfo.stream().collect(Collectors.toMap(t -> t.get("gatewayIndexAddress").toString(), t -> Double.parseDouble(t.get("healthIndex").toString())));
List<Map<String, Object>> stationIndexInfo = idxBizFanHealthIndexMapper.getStationIndexInfoByParam(analysisType,
gatewayIds);
Map<String, Double> stationHealthIndexMap = stationIndexInfo.stream().collect(Collectors
.toMap(t -> t.get("station").toString(), t -> Double.parseDouble(t.get("healthIndex").toString())));
List<Map<String, Object>> equipmentIndexInfo = idxBizFanHealthIndexMapper
.getEquipmentIndexInfoByParam(analysisType, gatewayIds);
Map<String, Double> equipmentIndexInfoMap = equipmentIndexInfo.stream().collect(Collectors.toMap(
t -> t.get("equipmentName").toString(), t -> Double.parseDouble(t.get("healthIndex").toString())));
List<Map<String, Object>> subSystemIndexInfo = idxBizFanHealthIndexMapper
.getSubSystemIndexInfoByParam(analysisType, gatewayIds);
Map<String, Double> subSystemIndexInfoMap = subSystemIndexInfo.stream().collect(Collectors
.toMap(t -> t.get("subSystem").toString(), t -> Double.parseDouble(t.get("healthIndex").toString())));
List<Map<String, Object>> pointNameIndexInfo = idxBizFanHealthIndexMapper
.getPointNameIndexInfoByParam(analysisType, gatewayIds);
Map<String, Double> pointNameIndexInfoMap = pointNameIndexInfo.stream()
.collect(Collectors.toMap(t -> t.get("gatewayIndexAddress").toString(),
t -> Double.parseDouble(t.get("healthIndex").toString())));
List<IdxBizFanHealthLevel> healthLevelInfoList = idxBizFanHealthIndexMapper.getHealthLevelInfoList(gatewayIds);
List<FullViewRecallDataDTO> list = idxBizFanHealthIndexMapper.getFullViewRecall(gatewayIds);
Map<String, Map<String, Map<String, Map<String, Map<String, List<FullViewRecallDataDTO>>>>>> resultMap = list.stream()
Map<String, Map<String, Map<String, Map<String, Map<String, List<FullViewRecallDataDTO>>>>>> resultMap = list
.stream()
.collect(Collectors.groupingBy(FullViewRecallDataDTO::getArea,
Collectors.groupingBy(FullViewRecallDataDTO::getStation,
Collectors.groupingBy(FullViewRecallDataDTO::getEquipmentName,
......@@ -172,7 +218,8 @@ public class KafkaAnalyseController {
int stationInt = 1;
int equipmentInt = 1;
int subSystemInt = 1;
Double healthScoreInfo = idxBizFanHealthIndexMapper.getHealthScoreInfoByParam(null, null, analysisType).doubleValue();
Double healthScoreInfo = idxBizFanHealthIndexMapper.getHealthScoreInfoByParam(null, null, analysisType)
.doubleValue();
healthScoreInfo = Double.parseDouble(df.format(healthScoreInfo));
LambdaQueryWrapper<IdxBizFanHealthLevel> query = new LambdaQueryWrapper<>();
......@@ -193,11 +240,13 @@ public class KafkaAnalyseController {
allMapDto.setChildren(new ArrayList<>());
allMapDto.setParentKey("0");
for (Map.Entry<String, Map<String, Map<String, Map<String, Map<String, List<FullViewRecallDataDTO>>>>>> areaMap : resultMap.entrySet()) {
for (Map.Entry<String, Map<String, Map<String, Map<String, Map<String, List<FullViewRecallDataDTO>>>>>> areaMap : resultMap
.entrySet()) {
Double areaLowScore = null;
Double areaHighScore = null;
Double areaHealthScoreInfo = idxBizFanHealthIndexMapper.getHealthScoreInfoByParam(areaMap.getKey(), null, analysisType).doubleValue();
Double areaHealthScoreInfo = idxBizFanHealthIndexMapper
.getHealthScoreInfoByParam(areaMap.getKey(), null, analysisType).doubleValue();
areaHealthScoreInfo = Double.parseDouble(df.format(areaHealthScoreInfo));
LambdaQueryWrapper<IdxBizFanHealthLevel> areaQuery = new LambdaQueryWrapper<>();
areaQuery.isNull(IdxBizFanHealthLevel::getStatus);
......@@ -215,7 +264,8 @@ public class KafkaAnalyseController {
allMapDto.getChildren().add(areaMapDto);
areaInt++;
List<FullViewRecallInfoDTO> areaMapList = new ArrayList<>();
for (Map.Entry<String, Map<String, Map<String, Map<String, List<FullViewRecallDataDTO>>>>> stationMap : areaMap.getValue().entrySet()) {
for (Map.Entry<String, Map<String, Map<String, Map<String, List<FullViewRecallDataDTO>>>>> stationMap : areaMap
.getValue().entrySet()) {
Double stationLowScore = null;
Double stationHighScore = null;
if (areaLowScore == null && areaHighScore == null) {
......@@ -257,8 +307,8 @@ public class KafkaAnalyseController {
stationDto.setParentKey(areaMapDto.getKey());
areaMapDto.getChildren().add(stationDto);
stationInt++;
for (Map.Entry<String, Map<String, Map<String, List<FullViewRecallDataDTO>>>> equipmentMap : stationMap.getValue().entrySet()) {
for (Map.Entry<String, Map<String, Map<String, List<FullViewRecallDataDTO>>>> equipmentMap : stationMap
.getValue().entrySet()) {
if (stationLowScore == null && stationHighScore == null) {
stationLowScore = equipmentIndexInfoMap.getOrDefault(equipmentMap.getKey(), 100.0);
......@@ -276,27 +326,32 @@ public class KafkaAnalyseController {
equipmentMapDto.setKey(stationDto.getKey() + "-" + equipmentInt);
equipmentMapDto.setName(equipmentMap.getKey());
equipmentMapDto.setScoreRange("");
IdxBizFanHealthLevel levelInfo = getHealthLevelByScore(healthLevelInfoList, stationMap.getKey(), "设备", equipmentIndexInfoMap.getOrDefault(equipmentMap.getKey(), 100.0));
IdxBizFanHealthLevel levelInfo = getHealthLevelByScore(healthLevelInfoList, stationMap.getKey(),
"设备", equipmentIndexInfoMap.getOrDefault(equipmentMap.getKey(), 100.0));
equipmentMapDto.setStatus(levelInfo.getHealthLevel());
equipmentMapDto.setScore(equipmentIndexInfoMap.getOrDefault(equipmentMap.getKey(), 100.0));
equipmentMapDto.setParentKey(stationDto.getKey());
stationDto.getChildren().add(equipmentMapDto);
equipmentInt++;
for (Map.Entry<String, Map<String, List<FullViewRecallDataDTO>>> subSystemMap : equipmentMap.getValue().entrySet()) {
for (Map.Entry<String, Map<String, List<FullViewRecallDataDTO>>> subSystemMap : equipmentMap
.getValue().entrySet()) {
FullViewRecallInfoDTO subSystemMapDto = new FullViewRecallInfoDTO();
subSystemMapDto.setKey(equipmentMapDto.getKey() + "-" + subSystemInt);
subSystemMapDto.setName(subSystemMap.getKey());
subSystemMapDto.setScoreRange("");
// subSystemMapDto.setStatus(null);
IdxBizFanHealthLevel levelInfoZxt = getHealthLevelByScore(healthLevelInfoList, stationMap.getKey(), "子系统", subSystemIndexInfoMap.getOrDefault(subSystemMap.getKey(), 100.0));
IdxBizFanHealthLevel levelInfoZxt = getHealthLevelByScore(healthLevelInfoList,
stationMap.getKey(), "子系统",
subSystemIndexInfoMap.getOrDefault(subSystemMap.getKey(), 100.0));
subSystemMapDto.setStatus(levelInfoZxt.getHealthLevel());
subSystemMapDto.setScore(subSystemIndexInfoMap.getOrDefault(subSystemMap.getKey(), 100.0));
subSystemMapDto.setParentKey(equipmentMapDto.getKey());
equipmentMapDto.getChildren().add(subSystemMapDto);
subSystemInt++;
for (Map.Entry<String, List<FullViewRecallDataDTO>> pointNameMap : subSystemMap.getValue().entrySet()) {
for (Map.Entry<String, List<FullViewRecallDataDTO>> pointNameMap : subSystemMap.getValue()
.entrySet()) {
FullViewRecallInfoDTO pointNameMapDto = new FullViewRecallInfoDTO();
pointNameMapDto.setKey(subSystemMapDto.getKey() + "-" + pointNameInt);
pointNameMapDto.setName(pointNameMap.getKey());
......@@ -305,10 +360,15 @@ public class KafkaAnalyseController {
pointNameMapDto.setScoreRange("");
IdxBizFanHealthLevel levelInfoBL = getHealthLevelByScore(healthLevelInfoList, stationMap.getKey(), "测点", pointNameIndexInfoMap.getOrDefault(fullViewRecallDataDTO.getStation() + "_" + fullViewRecallDataDTO.getIndexAddress(), 100.0));
IdxBizFanHealthLevel levelInfoBL = getHealthLevelByScore(healthLevelInfoList,
stationMap.getKey(), "测点",
pointNameIndexInfoMap.getOrDefault(fullViewRecallDataDTO.getStation() + "_"
+ fullViewRecallDataDTO.getIndexAddress(), 100.0));
pointNameMapDto.setStatus(levelInfoBL.getHealthLevel());
pointNameMapDto.setScore(pointNameIndexInfoMap.getOrDefault(fullViewRecallDataDTO.getStation() + "_" + fullViewRecallDataDTO.getIndexAddress(), 100.0));
pointNameMapDto.setScore(pointNameIndexInfoMap.getOrDefault(
fullViewRecallDataDTO.getStation() + "_" + fullViewRecallDataDTO.getIndexAddress(),
100.0));
pointNameMapDto.setParentKey(subSystemMapDto.getKey());
subSystemMapDto.getChildren().add(pointNameMapDto);
......@@ -327,13 +387,14 @@ public class KafkaAnalyseController {
return ResponseHelper.buildResponse(resultList);
}
private IdxBizFanHealthLevel getHealthLevelByScore(List<IdxBizFanHealthLevel> healthLevelInfoList, String station, String type, Double score) {
private IdxBizFanHealthLevel getHealthLevelByScore(List<IdxBizFanHealthLevel> healthLevelInfoList, String station,
String type, Double score) {
IdxBizFanHealthLevel resultDto = new IdxBizFanHealthLevel();
String stationType = "风电站";
List<IdxBizFanHealthLevel> collect = healthLevelInfoList.stream().filter(item -> item.getAnalysisObjType().contains(station)).collect(Collectors.toList());
List<IdxBizFanHealthLevel> collect = healthLevelInfoList.stream()
.filter(item -> item.getAnalysisObjType().contains(station)).collect(Collectors.toList());
for (IdxBizFanHealthLevel item : collect) {
if (item.getAnalysisObjType().contains("子阵")) {
stationType = "光伏站";
......@@ -341,23 +402,28 @@ public class KafkaAnalyseController {
}
}
for (IdxBizFanHealthLevel item : collect) {
if (type.equals("设备") && stationType.equals("风电站") && item.getAnalysisObjType().contains(type) && score >= item.getGroupLowerLimit() && score <= item.getGroupUpperLimit()) {
if (type.equals("设备") && stationType.equals("风电站") && item.getAnalysisObjType().contains(type)
&& score >= item.getGroupLowerLimit() && score <= item.getGroupUpperLimit()) {
resultDto = item;
break;
}
if (type.equals("子系统") && stationType.equals("风电站") && item.getAnalysisObjType().contains(type) && score >= item.getGroupLowerLimit() && score <= item.getGroupUpperLimit()) {
if (type.equals("子系统") && stationType.equals("风电站") && item.getAnalysisObjType().contains(type)
&& score >= item.getGroupLowerLimit() && score <= item.getGroupUpperLimit()) {
resultDto = item;
break;
}
if (type.equals("测点") && item.getAnalysisObjType().contains(type) && score >= item.getGroupLowerLimit() && score <= item.getGroupUpperLimit()) {
if (type.equals("测点") && item.getAnalysisObjType().contains(type) && score >= item.getGroupLowerLimit()
&& score <= item.getGroupUpperLimit()) {
resultDto = item;
break;
}
if (type.equals("设备") && stationType.equals("光伏站") && item.getAnalysisObjType().contains("子阵") && score >= item.getGroupLowerLimit() && score <= item.getGroupUpperLimit()) {
if (type.equals("设备") && stationType.equals("光伏站") && item.getAnalysisObjType().contains("子阵")
&& score >= item.getGroupLowerLimit() && score <= item.getGroupUpperLimit()) {
resultDto = item;
break;
}
if (type.equals("子系统") && stationType.equals("光伏站") && item.getAnalysisObjType().contains("设备") && score >= item.getGroupLowerLimit() && score <= item.getGroupUpperLimit()) {
if (type.equals("子系统") && stationType.equals("光伏站") && item.getAnalysisObjType().contains("设备")
&& score >= item.getGroupLowerLimit() && score <= item.getGroupUpperLimit()) {
resultDto = item;
break;
}
......@@ -373,22 +439,28 @@ public class KafkaAnalyseController {
return permissions;
}
private List<FullViewRecallInfoDTO> getFullViewRecallResultByCurrentUser(List<FullViewRecallInfoDTO> fullViewRecallInfoDTOS) {
private List<FullViewRecallInfoDTO> getFullViewRecallResultByCurrentUser(
List<FullViewRecallInfoDTO> fullViewRecallInfoDTOS) {
List<FullViewRecallInfoDTO> result = new ArrayList<>();
if (fullViewRecallInfoDTOS.size() > 0) {
String rootNodeName = permissionService.getCurrentUserPersmissions();
if (rootNodeName.equals("all")) {
return fullViewRecallInfoDTOS;
} else {
List<String> stringList = Arrays.asList(rootNodeName,rootNodeName.replace("片区","区域"),rootNodeName.replace("区域","片区"),rootNodeName.replace("电站","电场"),rootNodeName.replace("电场","电站"));
List<FullViewRecallInfoDTO> fullViewRecallInfoDTOS1 = fullViewRecallInfoDTOS.get(0).getChildren().stream().filter(item -> stringList.contains(item.getName())).collect(Collectors.toList());
List<String> stringList = Arrays.asList(rootNodeName, rootNodeName.replace("片区", "区域"),
rootNodeName.replace("区域", "片区"), rootNodeName.replace("电站", "电场"),
rootNodeName.replace("电场", "电站"));
List<FullViewRecallInfoDTO> fullViewRecallInfoDTOS1 = fullViewRecallInfoDTOS.get(0).getChildren()
.stream().filter(item -> stringList.contains(item.getName())).collect(Collectors.toList());
if (fullViewRecallInfoDTOS1.size() > 0) {
return fullViewRecallInfoDTOS1;
} else {
List<FullViewRecallInfoDTO> fullViewRecallInfoDTOS2 = fullViewRecallInfoDTOS.get(0).getChildren();
for (FullViewRecallInfoDTO fullViewRecallInfoDTO : fullViewRecallInfoDTOS2) {
List<FullViewRecallInfoDTO> fullViewRecallInfoDTOS3=fullViewRecallInfoDTO.getChildren().stream().filter(item -> stringList.contains(item.getName())).collect(Collectors.toList());
if (fullViewRecallInfoDTOS3.size()>0){
List<FullViewRecallInfoDTO> fullViewRecallInfoDTOS3 = fullViewRecallInfoDTO.getChildren()
.stream().filter(item -> stringList.contains(item.getName()))
.collect(Collectors.toList());
if (fullViewRecallInfoDTOS3.size() > 0) {
return fullViewRecallInfoDTOS3;
}
}
......
......@@ -13,20 +13,21 @@ import java.util.Date;
public interface Constant {
// Wind power correlation consumer
String kafkaTopicConsumer = "FanConditionVariables";
String kafkaTopicConsumer = "FAN_XGX";
// PV correlation consumer
String kafkaTopicConsumerPv = "PvConditionVariables";
String kafkaTopicConsumerPv = "PV_XGX";
// Wind power working-condition interval division
String kafkaTopicConsumerGKHFFan = "FanConditionVariablesGKHF";
String kafkaTopicConsumerGKHFFan = "FAN_QJHF";
// PV working-condition interval division
String kafkaTopicConsumerGKHFPv = "PvConditionVariablesGKHF";
String kafkaTopicConsumerGKHFPv = "PV_QJHF";
// Wind power central-value calculation
String kafkaTopicConsumerZXZFan = "FanConditionVariablesGZXZ";
String kafkaTopicConsumerZXZFan = "FAN_ZXZ";
// PV central-value calculation
String kafkaTopicConsumerZXZPv = "PvConditionVariablesZXZ";
String kafkaTopicConsumerZXZPv = "PV_ZXZ";
}
......@@ -53,14 +53,12 @@ public class KafkaConsumerService {
@Autowired
private IdxBizFanPointVarCorrelationServiceImpl idxBizFanPointVarCorrelationService;
@Autowired
private IdxBizPvPointVarCorrelationServiceImpl idxBizPvPointVarCorrelationService;
@Autowired
IdxBizFanPointProcessVariableClassificationServiceImpl idxBizFanPointProcessVariableClassificationService;
@Autowired
IdxBizPvPointProcessVariableClassificationServiceImpl idxBizPvPointProcessVariableClassificationService;
......@@ -77,20 +75,19 @@ public class KafkaConsumerService {
IdxBizPvPointVarCentralValueMapper idxBizPvPointVarCentralValueMapper;
// Correlation
@Value("${base.url.XGX:http://139.9.171.247:8052/intelligent-analysis/correlation}")
@Value("${base.url.XGX:http://10.20.1.29:8052/intelligent-analysis/correlation}")
private String baseUrlXGX;
// Working-condition division
@Value("${base.url.GKHF:http://139.9.171.247:8052/intelligent-analysis/working-condition-division}")
@Value("${base.url.GKHF:http://10.20.1.29:8052/intelligent-analysis/working-condition-division}")
private String baseUrlGKHF;
@Value("${base.url.ZXZ:http://172.16.3.29:8052/intelligent-analysis/central-value}")
@Value("${base.url.ZXZ:http://10.20.1.29:8052/intelligent-analysis/central-value}")
private String zxzJsUrlFanBySF;
@Value("${spring.kafka.consumer.max-poll-records:30}")
private Integer threadNum = 30;
@Value("${last.month.num:12}")
private Integer lastMonthNum;
......@@ -99,17 +96,17 @@ public class KafkaConsumerService {
BlockingQueue<PointData> queue = new LinkedBlockingQueue<>();
public KafkaConsumerService() {
for (int i = 0 ; i < threadNum; i++) {
for (int i = 0; i < threadNum; i++) {
service.execute(new Runnable() {
@Override
public void run() {
while(true) {
while (true) {
try {
PointData pointsData = queue.take();
List<ConsumerRecord<String, String>> consumerRecords = pointsData.getConsumerRecords();
Table table = pointsData.getTable();
if ("xgxFanConsumer".equals(pointsData.getOperatorType())){
if ("xgxFanConsumer".equals(pointsData.getOperatorType())) {
execFanCorrelation(consumerRecords, table);
} else if ("xgxPvConsumer".equals(pointsData.getOperatorType())) {
execPvCorrelation(consumerRecords, table);
......@@ -122,6 +119,7 @@ public class KafkaConsumerService {
} else if ("ZXZPvConsumer".equals(pointsData.getOperatorType())) {
consumerRecordsZXZPv(consumerRecords, pointsData);
}
Thread.sleep(1000);
} catch (Exception e) {
e.printStackTrace();
}
......@@ -132,16 +130,17 @@ public class KafkaConsumerService {
}
}
private void consumerRecordsZXZFan(List<ConsumerRecord<String, String>> consumerRecords, PointData pointsData ) {
private void consumerRecordsZXZFan(List<ConsumerRecord<String, String>> consumerRecords, PointData pointsData) {
redisUtils.expire(kafkaTopicConsumerZXZFan, 600);
Table table = pointsData.getTable();
Map<String, List<IdxBizFanPointProcessVariableClassification>> zxzIds = pointsData.getZxzIds();
for (String id : zxzIds.keySet()) {
List<IdxBizFanPointProcessVariableClassification> variableClassificationList = zxzIds.get(id);
String analysisVariableId = id;
List<IdxBizFanPointProcessVariableClassification> processVariableList = variableClassificationList.stream().filter(v -> !id.equals(v.getSequenceNbr().toString())).collect(Collectors.toList());
IdxBizFanPointProcessVariableClassification analysisVariable = variableClassificationList.stream().filter(v -> id.equals(v.getSequenceNbr().toString())).findFirst().get();
List<IdxBizFanPointProcessVariableClassification> processVariableList = variableClassificationList.stream()
.filter(v -> !id.equals(v.getSequenceNbr().toString())).collect(Collectors.toList());
IdxBizFanPointProcessVariableClassification analysisVariable = variableClassificationList.stream()
.filter(v -> id.equals(v.getSequenceNbr().toString())).findFirst().get();
// if (analysisVariable.getEquipmentName().equals("W2500#14风机") && analysisVariable.getPointName().equals("桨距角")) {
// log.info("抓包数据:{}", JSON.toJSONString(analysisVariable));
......@@ -153,7 +152,8 @@ public class KafkaConsumerService {
Table dataTable = Table.create();
int minRow = 0;
for (IdxBizFanPointProcessVariableClassification processVariable : processVariableList) {
Selection selection = table.stringColumn("id").isEqualTo(processVariable.getIndexAddress() + "_" + processVariable.getGatewayId());
Selection selection = table.stringColumn("id")
.isEqualTo(processVariable.getIndexAddress() + "_" + processVariable.getGatewayId());
DoubleColumn values = table.where(selection).doubleColumn("value");
// Get the minimum data length
if (index == 1) {
......@@ -177,7 +177,8 @@ public class KafkaConsumerService {
index++;
}
Selection selection = table.stringColumn("id").isEqualTo(analysisVariable.getIndexAddress() + "_" + analysisVariable.getGatewayId());
Selection selection = table.stringColumn("id")
.isEqualTo(analysisVariable.getIndexAddress() + "_" + analysisVariable.getGatewayId());
DoubleColumn values = table.where(selection).doubleColumn("value");
values.setName("analysisVariable");
dataTable = getDataTable(dataTable, values);
......@@ -190,25 +191,39 @@ public class KafkaConsumerService {
data1.put(column, dataTable.doubleColumn(column).asDoubleArray());
}
Map<String,Object> requestMap = new HashMap<>();
Map<String, Object> requestMap = new HashMap<>();
requestMap.put("data1", data1);
requestMap.put("data2", data2);
String response = HttpUtil.createPost(zxzJsUrlFanBySF).body(JSON.toJSONString(requestMap)).execute().body();
if (response.contains("stdDev")) {
idxBizFanPointVarCentralValueMapper.delete(new QueryWrapper<IdxBizFanPointVarCentralValue>().eq("ANALYSIS_POINT_ID", analysisVariableId));
idxBizFanPointVarCentralValueMapper.delete(
new QueryWrapper<IdxBizFanPointVarCentralValue>().eq("ANALYSIS_POINT_ID", analysisVariableId));
JSONObject jsonObject = JSON.parseObject(response);
int length = jsonObject.getJSONArray("stdDev").size() > 64 ? 64 : jsonObject.getJSONArray("stdDev").size();
int length = jsonObject.getJSONArray("stdDev").size() > 64 ? 64
: jsonObject.getJSONArray("stdDev").size();
List<IdxBizFanPointVarCentralValue> insertList = new ArrayList<>();
for (int i = 0; i < length; i++) {
IdxBizFanPointVarCentralValue idxBizFanPointVarCentralValue = new IdxBizFanPointVarCentralValue();
idxBizFanPointVarCentralValue.setProcess1Min(ObjectUtils.isNull(jsonObject.getJSONArray("process1Min").get(i)) ? null : jsonObject.getJSONArray("process1Min").getDoubleValue(i));
idxBizFanPointVarCentralValue.setProcess2Min(ObjectUtils.isNull(jsonObject.getJSONArray("process2Min").get(i)) ? null : jsonObject.getJSONArray("process2Min").getDoubleValue(i));
idxBizFanPointVarCentralValue.setProcess3Min(ObjectUtils.isNull(jsonObject.getJSONArray("process3Min").get(i)) ? null : jsonObject.getJSONArray("process3Min").getDoubleValue(i));
idxBizFanPointVarCentralValue.setProcess1Max(ObjectUtils.isNull(jsonObject.getJSONArray("process1Max").get(i)) ? null : jsonObject.getJSONArray("process1Max").getDoubleValue(i));
idxBizFanPointVarCentralValue.setPorcess2Max(ObjectUtils.isNull(jsonObject.getJSONArray("process2Max").get(i)) ? null : jsonObject.getJSONArray("process2Max").getDoubleValue(i));
idxBizFanPointVarCentralValue.setProcess3Max(ObjectUtils.isNull(jsonObject.getJSONArray("process3Max").get(i)) ? null : jsonObject.getJSONArray("process3Max").getDoubleValue(i));
idxBizFanPointVarCentralValue
.setProcess1Min(ObjectUtils.isNull(jsonObject.getJSONArray("process1Min").get(i)) ? null
: jsonObject.getJSONArray("process1Min").getDoubleValue(i));
idxBizFanPointVarCentralValue
.setProcess2Min(ObjectUtils.isNull(jsonObject.getJSONArray("process2Min").get(i)) ? null
: jsonObject.getJSONArray("process2Min").getDoubleValue(i));
idxBizFanPointVarCentralValue
.setProcess3Min(ObjectUtils.isNull(jsonObject.getJSONArray("process3Min").get(i)) ? null
: jsonObject.getJSONArray("process3Min").getDoubleValue(i));
idxBizFanPointVarCentralValue
.setProcess1Max(ObjectUtils.isNull(jsonObject.getJSONArray("process1Max").get(i)) ? null
: jsonObject.getJSONArray("process1Max").getDoubleValue(i));
idxBizFanPointVarCentralValue
.setPorcess2Max(ObjectUtils.isNull(jsonObject.getJSONArray("process2Max").get(i)) ? null
: jsonObject.getJSONArray("process2Max").getDoubleValue(i));
idxBizFanPointVarCentralValue
.setProcess3Max(ObjectUtils.isNull(jsonObject.getJSONArray("process3Max").get(i)) ? null
: jsonObject.getJSONArray("process3Max").getDoubleValue(i));
idxBizFanPointVarCentralValue.setAnalysisPointId(analysisVariableId);
idxBizFanPointVarCentralValue.setAnalysisPointName(analysisVariable.getPointName());
idxBizFanPointVarCentralValue.setProcessPoint1Id(data1.get("processVariable1Id").toString());
......@@ -217,8 +232,12 @@ public class KafkaConsumerService {
idxBizFanPointVarCentralValue.setProcessPoint2Name(processVariableList.get(1).getPointName());
idxBizFanPointVarCentralValue.setProcessPoint3Id(data1.get("processVariable3Id").toString());
idxBizFanPointVarCentralValue.setProcessPoint3Name(processVariableList.get(2).getPointName());
idxBizFanPointVarCentralValue.setAnalysisStdDev(ObjectUtils.isNull(jsonObject.getJSONArray("stdDev").get(i)) ? null : jsonObject.getJSONArray("stdDev").getDoubleValue(i));
idxBizFanPointVarCentralValue.setAnalysisCenterValue(ObjectUtils.isNull(jsonObject.getJSONArray("centerValue").get(i)) ? null : jsonObject.getJSONArray("centerValue").getDoubleValue(i));
idxBizFanPointVarCentralValue
.setAnalysisStdDev(ObjectUtils.isNull(jsonObject.getJSONArray("stdDev").get(i)) ? null
: jsonObject.getJSONArray("stdDev").getDoubleValue(i));
idxBizFanPointVarCentralValue.setAnalysisCenterValue(
ObjectUtils.isNull(jsonObject.getJSONArray("centerValue").get(i)) ? null
: jsonObject.getJSONArray("centerValue").getDoubleValue(i));
idxBizFanPointVarCentralValue.setArae(analysisVariable.getArae());
idxBizFanPointVarCentralValue.setStation(analysisVariable.getStation());
idxBizFanPointVarCentralValue.setSubSystem(analysisVariable.getSubSystem());
......@@ -230,15 +249,14 @@ public class KafkaConsumerService {
idxBizFanPointVarCentralValueService.saveBatch(insertList);
}
}
redisUtils.expire(kafkaTopicConsumerZXZFan, 600);
}
//redisUtils.del(kafkaTopicConsumerZXZFan);
}
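// Aligns a new value column with the accumulating table: whichever side is longer is truncated so
// every column in dataTable ends up with the same (shortest) row count.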
private Table getDataTable(Table dataTable, DoubleColumn values) {
if (!dataTable.isEmpty() && dataTable.rowCount() < values.size()) {
dataTable.addColumns(values.inRange(0, dataTable.rowCount()));
} else if (!dataTable.isEmpty() && dataTable.rowCount() > values.size()){
} else if (!dataTable.isEmpty() && dataTable.rowCount() > values.size()) {
dataTable = dataTable.inRange(0, values.size());
dataTable.addColumns(values);
} else {
......@@ -248,13 +266,17 @@ public class KafkaConsumerService {
}
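// PV correlation: for each Kafka record, parse the IdxBizPvPointVarCorrelation, read the analysis and
// process series from the Table by "indexAddress_gatewayId", trim both to the common length, post them
// to the correlation service, store the returned coefficient (0.0 on warning) and saveOrUpdate the row.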
private void execPvCorrelation(List<ConsumerRecord<String, String>> consumerRecords, Table table) {
redisUtils.expire(kafkaTopicConsumerPv, 600);
consumerRecords.parallelStream().forEach(record -> {
Optional<?> kafkaMessage = Optional.ofNullable(record.value());
if (kafkaMessage.isPresent()) {
IdxBizPvPointVarCorrelation pvPointVarCorrelation = JSON.parseObject(kafkaMessage.get().toString(), IdxBizPvPointVarCorrelation.class);
Selection selection = table.stringColumn("id").isEqualTo(pvPointVarCorrelation.getAnalysisIndexAddress() + "_" + pvPointVarCorrelation.getAnalysisGatewayId());
IdxBizPvPointVarCorrelation pvPointVarCorrelation = JSON.parseObject(kafkaMessage.get().toString(),
IdxBizPvPointVarCorrelation.class);
Selection selection = table.stringColumn("id").isEqualTo(pvPointVarCorrelation.getAnalysisIndexAddress()
+ "_" + pvPointVarCorrelation.getAnalysisGatewayId());
double[] data1 = table.where(selection).doubleColumn("value").asDoubleArray();
selection = table.stringColumn("id").isEqualTo(pvPointVarCorrelation.getProcessIndexAddress() + "_" + pvPointVarCorrelation.getProcessGatewayId());
selection = table.stringColumn("id").isEqualTo(pvPointVarCorrelation.getProcessIndexAddress() + "_"
+ pvPointVarCorrelation.getProcessGatewayId());
double[] data2 = table.where(selection).doubleColumn("value").asDoubleArray();
int shortestLength = Math.min(data1.length, data2.length);
......@@ -268,7 +290,8 @@ public class KafkaConsumerService {
if (response.contains("correlation") && !response.contains("warning")) {
com.alibaba.fastjson.JSONObject jsonObject = JSON.parseObject(response);
pvPointVarCorrelation.setCorrelationCoefficient(jsonObject.getDoubleValue("correlation"));
log.info("------------------------------------------光伏相关性::计算成功,待更新表数据----------------------------------------");
log.info(
"------------------------------------------光伏相关性::计算成功,待更新表数据----------------------------------------");
} else {
pvPointVarCorrelation.setCorrelationCoefficient(0.0);
}
......@@ -277,17 +300,22 @@ public class KafkaConsumerService {
idxBizPvPointVarCorrelationService.saveOrUpdate(pvPointVarCorrelation);
}
});
redisUtils.expire(kafkaTopicConsumerPv, 600);
//redisUtils.del(kafkaTopicConsumerPv);
}
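// Wind turbine counterpart of execPvCorrelation: identical flow against IdxBizFanPointVarCorrelation.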
private void execFanCorrelation(List<ConsumerRecord<String, String>> consumerRecords, Table table) {
redisUtils.expire(kafkaTopicConsumer, 600);
consumerRecords.parallelStream().forEach(record -> {
Optional<?> kafkaMessage = Optional.ofNullable(record.value());
if (kafkaMessage.isPresent()) {
IdxBizFanPointVarCorrelation fanPointVarCorrelation = JSON.parseObject(kafkaMessage.get().toString(), IdxBizFanPointVarCorrelation.class);
Selection selection = table.stringColumn("id").isEqualTo(fanPointVarCorrelation.getAnalysisIndexAddress() + "_" + fanPointVarCorrelation.getAnalysisGatewayId());
IdxBizFanPointVarCorrelation fanPointVarCorrelation = JSON.parseObject(kafkaMessage.get().toString(),
IdxBizFanPointVarCorrelation.class);
Selection selection = table.stringColumn("id")
.isEqualTo(fanPointVarCorrelation.getAnalysisIndexAddress() + "_"
+ fanPointVarCorrelation.getAnalysisGatewayId());
double[] data1 = table.where(selection).doubleColumn("value").asDoubleArray();
selection = table.stringColumn("id").isEqualTo(fanPointVarCorrelation.getProcessIndexAddress() + "_" + fanPointVarCorrelation.getProcessGatewayId());
selection = table.stringColumn("id").isEqualTo(fanPointVarCorrelation.getProcessIndexAddress() + "_"
+ fanPointVarCorrelation.getProcessGatewayId());
double[] data2 = table.where(selection).doubleColumn("value").asDoubleArray();
int shortestLength = Math.min(data1.length, data2.length);
......@@ -301,7 +329,8 @@ public class KafkaConsumerService {
if (response.contains("correlation") && !response.contains("warning")) {
com.alibaba.fastjson.JSONObject jsonObject = JSON.parseObject(response);
fanPointVarCorrelation.setCorrelationCoefficient(jsonObject.getDoubleValue("correlation"));
log.info("------------------------------------------风机相关性::计算成功,待更新表数据----------------------------------------");
log.info(
"------------------------------------------风机相关性::计算成功,待更新表数据----------------------------------------");
} else {
fanPointVarCorrelation.setCorrelationCoefficient(0.0);
}
......@@ -310,7 +339,7 @@ public class KafkaConsumerService {
idxBizFanPointVarCorrelationService.saveOrUpdate(fanPointVarCorrelation);
}
});
redisUtils.expire(kafkaTopicConsumer, 600);
//redisUtils.del(kafkaTopicConsumer);
}
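// Helper for aligning two series to a common length before correlation is computed.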
public static double[] subset(double[] array, int length) {
......@@ -332,26 +361,28 @@ public class KafkaConsumerService {
private Map<String, List<IdxBizPvPointProcessVariableClassification>> zxzPvIds;
public PointData(List<ConsumerRecord<String, String>> consumerRecords, Table table, String operatorType) {
this.consumerRecords = consumerRecords;
this.table = table;
this.operatorType = operatorType;
}
public PointData(List<ConsumerRecord<String, String>> consumerRecords, Table table, String operatorType, Map<String, List<IdxBizFanPointProcessVariableClassification>> zxzIds) {
public PointData(List<ConsumerRecord<String, String>> consumerRecords, Table table, String operatorType,
Map<String, List<IdxBizFanPointProcessVariableClassification>> zxzIds) {
this.consumerRecords = consumerRecords;
this.table = table;
this.operatorType = operatorType;
this.zxzIds = zxzIds;
}
public PointData(List<ConsumerRecord<String, String>> consumerRecords, Table table, String operatorType, Map<String, List<IdxBizPvPointProcessVariableClassification>> zxzPvIds, String notString) {
public PointData(List<ConsumerRecord<String, String>> consumerRecords, Table table, String operatorType,
Map<String, List<IdxBizPvPointProcessVariableClassification>> zxzPvIds, String notString) {
this.consumerRecords = consumerRecords;
this.table = table;
this.operatorType = operatorType;
this.zxzPvIds = zxzPvIds;
}
public Map<String, List<IdxBizPvPointProcessVariableClassification>> getZxzIdsPv() {
return zxzPvIds;
}
......@@ -372,6 +403,7 @@ public class KafkaConsumerService {
return table;
}
}
/**
* Batch-consume Kafka messages [wind farm correlation]
*
......@@ -386,7 +418,8 @@ public class KafkaConsumerService {
consumerRecords.stream().forEach(record -> {
Optional<?> kafkaMessage = Optional.ofNullable(record.value());
if (kafkaMessage.isPresent()) {
IdxBizFanPointVarCorrelation fanPointVarCorrelation = JSON.parseObject(kafkaMessage.get().toString(), IdxBizFanPointVarCorrelation.class);
IdxBizFanPointVarCorrelation fanPointVarCorrelation = JSON
.parseObject(kafkaMessage.get().toString(), IdxBizFanPointVarCorrelation.class);
Set<String> idSet = null;
if (gatewayPoints.containsKey(fanPointVarCorrelation.getAnalysisGatewayId())) {
idSet = gatewayPoints.get(fanPointVarCorrelation.getAnalysisGatewayId());
......@@ -419,7 +452,8 @@ public class KafkaConsumerService {
consumerRecords.stream().forEach(record -> {
Optional<?> kafkaMessage = Optional.ofNullable(record.value());
if (kafkaMessage.isPresent()) {
IdxBizPvPointVarCorrelation pvPointVarCorrelation = JSON.parseObject(kafkaMessage.get().toString(), IdxBizPvPointVarCorrelation.class);
IdxBizPvPointVarCorrelation pvPointVarCorrelation = JSON.parseObject(kafkaMessage.get().toString(),
IdxBizPvPointVarCorrelation.class);
Set<String> idSet = null;
if (gatewayPoints.containsKey(pvPointVarCorrelation.getAnalysisGatewayId())) {
idSet = gatewayPoints.get(pvPointVarCorrelation.getAnalysisGatewayId());
......@@ -437,8 +471,9 @@ public class KafkaConsumerService {
}
}
private void buildZXZExecData(List<ConsumerRecord<String, String>> consumerRecords, Map<String, Set<String>> gatewayPoints, Map<String, List<IdxBizFanPointProcessVariableClassification>> zxzIds, String xgxPvConsumer) {
private void buildZXZExecData(List<ConsumerRecord<String, String>> consumerRecords,
Map<String, Set<String>> gatewayPoints,
Map<String, List<IdxBizFanPointProcessVariableClassification>> zxzIds, String xgxPvConsumer) {
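// For each gateway, load the raw indicator values for all referenced point addresses from TDengine
// (indicatorDataMapper.selectByAddresses); these presumably feed the PointData consumed by the
// central-value step.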
for (String gatewayId : gatewayPoints.keySet()) {
String join = String.join(",", gatewayPoints.get(gatewayId));
List<IndicatorData> indicatorData = indicatorDataMapper.selectByAddresses(join, gatewayId);
......@@ -458,7 +493,8 @@ public class KafkaConsumerService {
}
}
private void buildExecData(List<ConsumerRecord<String, String>> consumerRecords, Map<String, Set<String>> gatewayPoints, String xgxPvConsumer) {
private void buildExecData(List<ConsumerRecord<String, String>> consumerRecords,
Map<String, Set<String>> gatewayPoints, String xgxPvConsumer) {
for (String gatewayId : gatewayPoints.keySet()) {
String join = String.join(",", gatewayPoints.get(gatewayId));
List<IndicatorData> indicatorData = indicatorDataMapper.selectByAddresses(join, gatewayId);
......@@ -478,7 +514,6 @@ public class KafkaConsumerService {
}
}
/**
* Batch-consume Kafka messages [wind turbine working-condition classification]
*
......@@ -493,7 +528,8 @@ public class KafkaConsumerService {
consumerRecords.stream().forEach(record -> {
Optional<?> kafkaMessage = Optional.ofNullable(record.value());
if (kafkaMessage.isPresent()) {
IdxBizFanPointProcessVariableClassification fanPointProcessVariable = JSON.parseObject(kafkaMessage.get().toString(), IdxBizFanPointProcessVariableClassification.class);
IdxBizFanPointProcessVariableClassification fanPointProcessVariable = JSON.parseObject(
kafkaMessage.get().toString(), IdxBizFanPointProcessVariableClassification.class);
Set<String> idSet = null;
if (gatewayPoints.containsKey(fanPointProcessVariable.getGatewayId())) {
idSet = gatewayPoints.get(fanPointProcessVariable.getGatewayId());
......@@ -510,26 +546,30 @@ public class KafkaConsumerService {
}
}
/**
* Wind turbine working-condition classification handler
*
* @param consumerRecords
* @param table
* @return
*/
boolean consumerRecordsGKFXFan(List<ConsumerRecord<String, String>> consumerRecords, Table table) {
redisUtils.expire(kafkaTopicConsumerGKHFFan, 600);
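// For each record: look up the point's series by "indexAddress_gatewayId", post {processVariable,
// processVariableId} to the working-condition service (baseUrlGKHF), and write back intervalValue1..5
// (defaults of 0.0 when the service returns no intervals).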
try {
consumerRecords.parallelStream().forEach(record -> {
Optional<?> kafkaMessage = Optional.ofNullable(record.value());
if (kafkaMessage.isPresent()) {
IdxBizFanPointProcessVariableClassification fanPointVarCorrelation = JSON.parseObject(kafkaMessage.get().toString(), IdxBizFanPointProcessVariableClassification.class);
IdxBizFanPointProcessVariableClassification fanPointVarCorrelation = JSON.parseObject(
kafkaMessage.get().toString(), IdxBizFanPointProcessVariableClassification.class);
HashMap<String, Object> resultMap = new HashMap<>();
Selection selection = table.stringColumn("id").isEqualTo(fanPointVarCorrelation.getIndexAddress() + "_" + fanPointVarCorrelation.getGatewayId());
Selection selection = table.stringColumn("id").isEqualTo(
fanPointVarCorrelation.getIndexAddress() + "_" + fanPointVarCorrelation.getGatewayId());
double[] data1 = table.where(selection).doubleColumn("value").asDoubleArray();
resultMap.put("processVariable", data1);
resultMap.put("processVariableId", fanPointVarCorrelation.getSequenceNbr());
String response = HttpUtil.createPost(baseUrlGKHF).body(JSON.toJSONString(resultMap)).execute().body();
String response = HttpUtil.createPost(baseUrlGKHF).body(JSON.toJSONString(resultMap)).execute()
.body();
if (response.contains("intervalValue1") && response.contains("processVariableId")) {
com.alibaba.fastjson.JSONObject jsonObject = JSON.parseObject(response);
......@@ -539,7 +579,8 @@ public class KafkaConsumerService {
fanPointVarCorrelation.setIntervalValue3(jsonObject.getDoubleValue("intervalValue3"));
fanPointVarCorrelation.setIntervalValue2(jsonObject.getDoubleValue("intervalValue2"));
fanPointVarCorrelation.setIntervalValue1(jsonObject.getDoubleValue("intervalValue1"));
log.info("------------------------------------------光伏相关性::计算成功,待更新表数据----------------------------------------");
log.info(
"------------------------------------------风电工况划分::计算成功,待更新表数据----------------------------------------");
} else {
fanPointVarCorrelation.setIntervalValue5(0.0);
fanPointVarCorrelation.setIntervalValue4(0.0);
......@@ -555,13 +596,11 @@ public class KafkaConsumerService {
} catch (Exception e) {
log.error("kafka失败,当前失败的批次");
} finally {
redisUtils.expire(kafkaTopicConsumerGKHFFan, 600);
/////redisUtils.del(kafkaTopicConsumerGKHFFan);
}
return true;
}
/**
* Batch-consume Kafka messages [PV working-condition classification]
*
......@@ -577,7 +616,8 @@ public class KafkaConsumerService {
consumerRecords.stream().forEach(record -> {
Optional<?> kafkaMessage = Optional.ofNullable(record.value());
if (kafkaMessage.isPresent()) {
IdxBizPvPointProcessVariableClassification pvPointProcessVariable = JSON.parseObject(kafkaMessage.get().toString(), IdxBizPvPointProcessVariableClassification.class);
IdxBizPvPointProcessVariableClassification pvPointProcessVariable = JSON.parseObject(
kafkaMessage.get().toString(), IdxBizPvPointProcessVariableClassification.class);
Set<String> idSet = null;
if (gatewayPoints.containsKey(pvPointProcessVariable.getGatewayId())) {
idSet = gatewayPoints.get(pvPointProcessVariable.getGatewayId());
......@@ -594,26 +634,29 @@ public class KafkaConsumerService {
}
}
/**
* PV (photovoltaic) working-condition classification handler
*
* @param consumerRecords
* @param table
* @return
*/
boolean consumerRecordsGKFXPv(List<ConsumerRecord<String, String>> consumerRecords, Table table) {
redisUtils.expire(kafkaTopicConsumerGKHFPv, 600);
try {
consumerRecords.parallelStream().forEach(record -> {
Optional<?> kafkaMessage = Optional.ofNullable(record.value());
if (kafkaMessage.isPresent()) {
IdxBizPvPointProcessVariableClassification pvPointVarCorrelation = JSON.parseObject(kafkaMessage.get().toString(), IdxBizPvPointProcessVariableClassification.class);
IdxBizPvPointProcessVariableClassification pvPointVarCorrelation = JSON.parseObject(
kafkaMessage.get().toString(), IdxBizPvPointProcessVariableClassification.class);
HashMap<String, Object> resultMap = new HashMap<>();
Selection selection = table.stringColumn("id").isEqualTo(pvPointVarCorrelation.getIndexAddress() + "_" + pvPointVarCorrelation.getGatewayId());
Selection selection = table.stringColumn("id").isEqualTo(
pvPointVarCorrelation.getIndexAddress() + "_" + pvPointVarCorrelation.getGatewayId());
double[] data1 = table.where(selection).doubleColumn("value").asDoubleArray();
resultMap.put("processVariable", data1);
resultMap.put("processVariableId", pvPointVarCorrelation.getSequenceNbr());
String response = HttpUtil.createPost(baseUrlGKHF).body(JSON.toJSONString(resultMap)).execute().body();
String response = HttpUtil.createPost(baseUrlGKHF).body(JSON.toJSONString(resultMap)).execute()
.body();
if (response.contains("intervalValue1") && response.contains("processVariableId")) {
com.alibaba.fastjson.JSONObject jsonObject = JSON.parseObject(response);
......@@ -623,7 +666,8 @@ public class KafkaConsumerService {
pvPointVarCorrelation.setIntervalValue3(jsonObject.getDoubleValue("intervalValue3"));
pvPointVarCorrelation.setIntervalValue2(jsonObject.getDoubleValue("intervalValue2"));
pvPointVarCorrelation.setIntervalValue1(jsonObject.getDoubleValue("intervalValue1"));
log.info("------------------------------------------光伏相关性::计算成功,待更新表数据----------------------------------------");
log.info(
"------------------------------------------光伏工况划分::计算成功,待更新表数据----------------------------------------");
} else {
pvPointVarCorrelation.setIntervalValue5(0.0);
pvPointVarCorrelation.setIntervalValue4(0.0);
......@@ -639,19 +683,11 @@ public class KafkaConsumerService {
} catch (Exception e) {
log.error("kafka失败,当前失败的批次");
} finally {
redisUtils.expire(kafkaTopicConsumerGKHFPv, 600);
////redisUtils.del(kafkaTopicConsumerGKHFPv);
}
return true;
}
/**
* Batch-consume Kafka messages [wind turbine central value]
*
......@@ -667,15 +703,25 @@ public class KafkaConsumerService {
consumerRecords.stream().forEach(record -> {
Optional<?> kafkaMessage = Optional.ofNullable(record.value());
if (kafkaMessage.isPresent()) {
IdxBizFanPointProcessVariableClassification fanPointProcessVariable = JSON.parseObject(kafkaMessage.get().toString(), IdxBizFanPointProcessVariableClassification.class);
List<IdxBizFanPointVarCorrelation> gongkuangList = idxBizFanPointVarCorrelationService.list(new QueryWrapper<IdxBizFanPointVarCorrelation>().eq("ANALYSIS_GATEWAY_ID", fanPointProcessVariable.getGatewayId()).eq("ANALYSIS_POINT_ID", fanPointProcessVariable.getSequenceNbr()).orderByDesc("CORRELATION_COEFFICIENT").last("limit 3"));
List<String> processPointIds = gongkuangList.stream().map(idxBizFanPointVarCorrelation -> idxBizFanPointVarCorrelation.getProcessPointId().toString()).collect(Collectors.toList());
List<IdxBizFanPointProcessVariableClassification> idxBizFanPointProcessVariableClassificationList = idxBizFanPointProcessVariableClassificationService.list(new QueryWrapper<IdxBizFanPointProcessVariableClassification>().in("SEQUENCE_NBR", processPointIds));
IdxBizFanPointProcessVariableClassification fanPointProcessVariable = JSON.parseObject(
kafkaMessage.get().toString(), IdxBizFanPointProcessVariableClassification.class);
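// Pick the three process points with the highest correlation coefficient for this analysis point;
// together with the analysis point itself they form the variable group used for the central value.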
List<IdxBizFanPointVarCorrelation> gongkuangList = idxBizFanPointVarCorrelationService
.list(new QueryWrapper<IdxBizFanPointVarCorrelation>()
.eq("ANALYSIS_GATEWAY_ID", fanPointProcessVariable.getGatewayId())
.eq("ANALYSIS_POINT_ID", fanPointProcessVariable.getSequenceNbr())
.orderByDesc("CORRELATION_COEFFICIENT").last("limit 3"));
List<String> processPointIds = gongkuangList.stream().map(
idxBizFanPointVarCorrelation -> idxBizFanPointVarCorrelation.getProcessPointId().toString())
.collect(Collectors.toList());
List<IdxBizFanPointProcessVariableClassification> idxBizFanPointProcessVariableClassificationList = idxBizFanPointProcessVariableClassificationService
.list(new QueryWrapper<IdxBizFanPointProcessVariableClassification>().in("SEQUENCE_NBR",
processPointIds));
idxBizFanPointProcessVariableClassificationList.add(fanPointProcessVariable);
zxzIds.put(fanPointProcessVariable.getSequenceNbr(), idxBizFanPointProcessVariableClassificationList);
zxzIds.put(fanPointProcessVariable.getSequenceNbr(),
idxBizFanPointProcessVariableClassificationList);
idxBizFanPointProcessVariableClassificationList.forEach(item -> {
Set<String> idSet = null;
......@@ -696,13 +742,6 @@ public class KafkaConsumerService {
}
}
/**
* Batch-consume Kafka messages [PV central value]
*
......@@ -718,11 +757,20 @@ public class KafkaConsumerService {
consumerRecords.stream().forEach(record -> {
Optional<?> kafkaMessage = Optional.ofNullable(record.value());
if (kafkaMessage.isPresent()) {
IdxBizPvPointProcessVariableClassification pvPointProcessVariable = JSON.parseObject(kafkaMessage.get().toString(), IdxBizPvPointProcessVariableClassification.class);
List<IdxBizPvPointVarCorrelation> gongkuangList = idxBizPvPointVarCorrelationService.list(new QueryWrapper<IdxBizPvPointVarCorrelation>().eq("ANALYSIS_GATEWAY_ID", pvPointProcessVariable.getGatewayId()).eq("ANALYSIS_POINT_ID", pvPointProcessVariable.getSequenceNbr()).orderByDesc("CORRELATION_COEFFICIENT").last("limit 3"));
List<String> processPointIds = gongkuangList.stream().map(idxBizFanPointVarCorrelation -> idxBizFanPointVarCorrelation.getProcessPointId().toString()).collect(Collectors.toList());
List<IdxBizPvPointProcessVariableClassification> idxBizPvPointProcessVariableClassificationList = idxBizPvPointProcessVariableClassificationService.list(new QueryWrapper<IdxBizPvPointProcessVariableClassification>().in("SEQUENCE_NBR", processPointIds));
IdxBizPvPointProcessVariableClassification pvPointProcessVariable = JSON.parseObject(
kafkaMessage.get().toString(), IdxBizPvPointProcessVariableClassification.class);
List<IdxBizPvPointVarCorrelation> gongkuangList = idxBizPvPointVarCorrelationService
.list(new QueryWrapper<IdxBizPvPointVarCorrelation>()
.eq("ANALYSIS_GATEWAY_ID", pvPointProcessVariable.getGatewayId())
.eq("ANALYSIS_POINT_ID", pvPointProcessVariable.getSequenceNbr())
.orderByDesc("CORRELATION_COEFFICIENT").last("limit 3"));
List<String> processPointIds = gongkuangList.stream().map(
idxBizFanPointVarCorrelation -> idxBizFanPointVarCorrelation.getProcessPointId().toString())
.collect(Collectors.toList());
List<IdxBizPvPointProcessVariableClassification> idxBizPvPointProcessVariableClassificationList = idxBizPvPointProcessVariableClassificationService
.list(new QueryWrapper<IdxBizPvPointProcessVariableClassification>().in("SEQUENCE_NBR",
processPointIds));
idxBizPvPointProcessVariableClassificationList.add(pvPointProcessVariable);
......@@ -747,8 +795,9 @@ public class KafkaConsumerService {
}
}
private void buildZXZPvExecData(List<ConsumerRecord<String, String>> consumerRecords, Map<String, Set<String>> gatewayPoints, Map<String, List<IdxBizPvPointProcessVariableClassification>> zxzIds, String xgxPvConsumer) {
private void buildZXZPvExecData(List<ConsumerRecord<String, String>> consumerRecords,
Map<String, Set<String>> gatewayPoints,
Map<String, List<IdxBizPvPointProcessVariableClassification>> zxzIds, String xgxPvConsumer) {
for (String gatewayId : gatewayPoints.keySet()) {
String join = String.join(",", gatewayPoints.get(gatewayId));
List<IndicatorData> indicatorData = indicatorDataMapper.selectByAddresses(join, gatewayId);
......@@ -768,21 +817,24 @@ public class KafkaConsumerService {
}
}
/**
* Central value - PV
*
* @param consumerRecords
* @param pointsData
*/
private void consumerRecordsZXZPv(List<ConsumerRecord<String, String>> consumerRecords, PointData pointsData ) {
private void consumerRecordsZXZPv(List<ConsumerRecord<String, String>> consumerRecords, PointData pointsData) {
redisUtils.expire(kafkaTopicConsumerZXZPv, 600);
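// PV counterpart of consumerRecordsZXZFan: builds aligned columns for the analysis point and its
// process points, calls the same central-value service, and persists IdxBizPvPointVarCentralValue rows.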
Table table = pointsData.getTable();
Map<String, List<IdxBizPvPointProcessVariableClassification>> zxzIds = pointsData.getZxzIdsPv();
for (String id : zxzIds.keySet()) {
List<IdxBizPvPointProcessVariableClassification> variableClassificationList = zxzIds.get(id);
String analysisVariableId = id;
List<IdxBizPvPointProcessVariableClassification> processVariableList = variableClassificationList.stream().filter(v -> !id.equals(v.getSequenceNbr().toString())).collect(Collectors.toList());
IdxBizPvPointProcessVariableClassification analysisVariable = variableClassificationList.stream().filter(v -> id.equals(v.getSequenceNbr().toString())).findFirst().get();
List<IdxBizPvPointProcessVariableClassification> processVariableList = variableClassificationList.stream()
.filter(v -> !id.equals(v.getSequenceNbr().toString())).collect(Collectors.toList());
IdxBizPvPointProcessVariableClassification analysisVariable = variableClassificationList.stream()
.filter(v -> id.equals(v.getSequenceNbr().toString())).findFirst().get();
Map<String, Object> data1 = new HashMap<>();
Map<String, Object> data2 = new HashMap<>();
......@@ -790,7 +842,8 @@ public class KafkaConsumerService {
Table dataTable = Table.create();
int minRow = 0;
for (IdxBizPvPointProcessVariableClassification processVariable : processVariableList) {
Selection selection = table.stringColumn("id").isEqualTo(processVariable.getIndexAddress() + "_" + processVariable.getGatewayId());
Selection selection = table.stringColumn("id")
.isEqualTo(processVariable.getIndexAddress() + "_" + processVariable.getGatewayId());
DoubleColumn values = table.where(selection).doubleColumn("value");
// Get the minimum data length
if (index == 1) {
......@@ -813,7 +866,8 @@ public class KafkaConsumerService {
index++;
}
Selection selection = table.stringColumn("id").isEqualTo(analysisVariable.getIndexAddress() + "_" + analysisVariable.getGatewayId());
Selection selection = table.stringColumn("id")
.isEqualTo(analysisVariable.getIndexAddress() + "_" + analysisVariable.getGatewayId());
DoubleColumn values = table.where(selection).doubleColumn("value");
values.setName("analysisVariable");
dataTable = getDataTable(dataTable, values);
......@@ -827,25 +881,39 @@ public class KafkaConsumerService {
data1.put(column, dataTable.doubleColumn(column).asDoubleArray());
}
Map<String,Object> requestMap = new HashMap<>();
Map<String, Object> requestMap = new HashMap<>();
requestMap.put("data1", data1);
requestMap.put("data2", data2);
String response = HttpUtil.createPost(zxzJsUrlFanBySF).body(JSON.toJSONString(requestMap)).execute().body();
if (response.contains("stdDev")) {
idxBizPvPointVarCentralValueMapper.delete(new QueryWrapper<IdxBizPvPointVarCentralValue>().eq("ANALYSIS_POINT_ID", analysisVariable.getSequenceNbr()));
idxBizPvPointVarCentralValueMapper.delete(new QueryWrapper<IdxBizPvPointVarCentralValue>()
.eq("ANALYSIS_POINT_ID", analysisVariable.getSequenceNbr()));
JSONObject jsonObject = JSON.parseObject(response);
int length = jsonObject.getJSONArray("stdDev").size() > 64 ? 64 : jsonObject.getJSONArray("stdDev").size();
int length = jsonObject.getJSONArray("stdDev").size() > 64 ? 64
: jsonObject.getJSONArray("stdDev").size();
List<IdxBizPvPointVarCentralValue> insertList = new ArrayList<>();
for (int i = 0; i < length; i++) {
IdxBizPvPointVarCentralValue idxBizPvPointVarCentralValue = new IdxBizPvPointVarCentralValue();
idxBizPvPointVarCentralValue.setProcess1Min(ObjectUtils.isNull(jsonObject.getJSONArray("process1Min").get(i)) ? null : jsonObject.getJSONArray("process1Min").getDoubleValue(i));
idxBizPvPointVarCentralValue.setProcess2Min(ObjectUtils.isNull(jsonObject.getJSONArray("process2Min").get(i)) ? null : jsonObject.getJSONArray("process2Min").getDoubleValue(i));
idxBizPvPointVarCentralValue.setProcess3Min(ObjectUtils.isNull(jsonObject.getJSONArray("process3Min").get(i)) ? null : jsonObject.getJSONArray("process3Min").getDoubleValue(i));
idxBizPvPointVarCentralValue.setProcess1Max(ObjectUtils.isNull(jsonObject.getJSONArray("process1Max").get(i)) ? null : jsonObject.getJSONArray("process1Max").getDoubleValue(i));
idxBizPvPointVarCentralValue.setProcess2Max(ObjectUtils.isNull(jsonObject.getJSONArray("process2Max").get(i)) ? null : jsonObject.getJSONArray("process2Max").getDoubleValue(i));
idxBizPvPointVarCentralValue.setProcess3Max(ObjectUtils.isNull(jsonObject.getJSONArray("process3Max").get(i)) ? null : jsonObject.getJSONArray("process3Max").getDoubleValue(i));
idxBizPvPointVarCentralValue
.setProcess1Min(ObjectUtils.isNull(jsonObject.getJSONArray("process1Min").get(i)) ? null
: jsonObject.getJSONArray("process1Min").getDoubleValue(i));
idxBizPvPointVarCentralValue
.setProcess2Min(ObjectUtils.isNull(jsonObject.getJSONArray("process2Min").get(i)) ? null
: jsonObject.getJSONArray("process2Min").getDoubleValue(i));
idxBizPvPointVarCentralValue
.setProcess3Min(ObjectUtils.isNull(jsonObject.getJSONArray("process3Min").get(i)) ? null
: jsonObject.getJSONArray("process3Min").getDoubleValue(i));
idxBizPvPointVarCentralValue
.setProcess1Max(ObjectUtils.isNull(jsonObject.getJSONArray("process1Max").get(i)) ? null
: jsonObject.getJSONArray("process1Max").getDoubleValue(i));
idxBizPvPointVarCentralValue
.setProcess2Max(ObjectUtils.isNull(jsonObject.getJSONArray("process2Max").get(i)) ? null
: jsonObject.getJSONArray("process2Max").getDoubleValue(i));
idxBizPvPointVarCentralValue
.setProcess3Max(ObjectUtils.isNull(jsonObject.getJSONArray("process3Max").get(i)) ? null
: jsonObject.getJSONArray("process3Max").getDoubleValue(i));
idxBizPvPointVarCentralValue.setAnalysisPointId(jsonObject.getString("analysisVariableId"));
idxBizPvPointVarCentralValue.setAnalysisPointIdName(analysisVariable.getPointName());
idxBizPvPointVarCentralValue.setProcessPoint1Id(jsonObject.getString("processVariable1Id"));
......@@ -854,8 +922,12 @@ public class KafkaConsumerService {
idxBizPvPointVarCentralValue.setProcessPoint2IdName(processVariableList.get(1).getPointName());
idxBizPvPointVarCentralValue.setProcessPoint3Id(jsonObject.getString("processVariable3Id"));
idxBizPvPointVarCentralValue.setProcessPoint3IdName(processVariableList.get(2).getPointName());
idxBizPvPointVarCentralValue.setAnalysisStdDev(ObjectUtils.isNull(jsonObject.getJSONArray("stdDev").get(i)) ? null : jsonObject.getJSONArray("stdDev").getDoubleValue(i));
idxBizPvPointVarCentralValue.setAnalysisCenterValue(ObjectUtils.isNull(jsonObject.getJSONArray("centerValue").get(i)) ? null : jsonObject.getJSONArray("centerValue").getDoubleValue(i));
idxBizPvPointVarCentralValue
.setAnalysisStdDev(ObjectUtils.isNull(jsonObject.getJSONArray("stdDev").get(i)) ? null
: jsonObject.getJSONArray("stdDev").getDoubleValue(i));
idxBizPvPointVarCentralValue.setAnalysisCenterValue(
ObjectUtils.isNull(jsonObject.getJSONArray("centerValue").get(i)) ? null
: jsonObject.getJSONArray("centerValue").getDoubleValue(i));
idxBizPvPointVarCentralValue.setArae(analysisVariable.getArae());
idxBizPvPointVarCentralValue.setStation(analysisVariable.getStation());
idxBizPvPointVarCentralValue.setSubarray(analysisVariable.getSubarray());
......@@ -869,32 +941,12 @@ public class KafkaConsumerService {
}
}
redisUtils.expire(kafkaTopicConsumerZXZPv, 600);
//redisUtils.del(kafkaTopicConsumerZXZPv);
}
/**
* Wind turbine message handler - deprecated
*
* @param consumerRecord
* @return
*/
......@@ -903,15 +955,22 @@ public class KafkaConsumerService {
try {
Optional<?> kafkaMessage = Optional.ofNullable(consumerRecord.value());
if (kafkaMessage.isPresent()) {
IdxBizFanPointVarCorrelation fanPointVarCorrelation = JSON.parseObject(kafkaMessage.get().toString(), IdxBizFanPointVarCorrelation.class);
IdxBizFanPointVarCorrelation fanPointVarCorrelation = JSON.parseObject(kafkaMessage.get().toString(),
IdxBizFanPointVarCorrelation.class);
List<IndicatorData> tdengineData1 = indicatorDataMapper.selectDataById(fanPointVarCorrelation.getAnalysisIndexAddress() + "_" + fanPointVarCorrelation.getAnalysisGatewayId());
List<IndicatorData> tdengineData1 = indicatorDataMapper
.selectDataById(fanPointVarCorrelation.getAnalysisIndexAddress() + "_"
+ fanPointVarCorrelation.getAnalysisGatewayId());
// List<IndicatorData> tdengineData1 = indicatorDataMapper.selectDataByAddressAndtimeNotDate(fanPointVarCorrelation.getAnalysisIndexAddress(), fanPointVarCorrelation.getAnalysisGatewayId());
List<IndicatorData> tdengineData2 = indicatorDataMapper.selectDataById(fanPointVarCorrelation.getProcessIndexAddress() + "_" + fanPointVarCorrelation.getProcessGatewayId());
List<IndicatorData> tdengineData2 = indicatorDataMapper
.selectDataById(fanPointVarCorrelation.getProcessIndexAddress() + "_"
+ fanPointVarCorrelation.getProcessGatewayId());
// List<IndicatorData> tdengineData2 = indicatorDataMapper.selectDataByAddressAndtimeNotDate(fanPointVarCorrelation.getProcessIndexAddress(), fanPointVarCorrelation.getProcessGatewayId());
List<Double> data1 = tdengineData1.stream().map(t -> Double.parseDouble(t.getValue())).collect(Collectors.toList());
List<Double> data2 = tdengineData2.stream().map(t -> Double.parseDouble(t.getValue())).collect(Collectors.toList());
List<Double> data1 = tdengineData1.stream().map(t -> Double.parseDouble(t.getValue()))
.collect(Collectors.toList());
List<Double> data2 = tdengineData2.stream().map(t -> Double.parseDouble(t.getValue()))
.collect(Collectors.toList());
// List<Double> data1 = new ArrayList<>();
// List<Double> data2 = new ArrayList<>();
......@@ -940,7 +999,8 @@ public class KafkaConsumerService {
if (response.contains("correlation") && !response.contains("warning")) {
com.alibaba.fastjson.JSONObject jsonObject = JSON.parseObject(response);
fanPointVarCorrelation.setCorrelationCoefficient(jsonObject.getDoubleValue("correlation"));
log.info("------------------------------------------风机相关性::计算成功,待更新表数据----------------------------------------");
log.info(
"------------------------------------------风机相关性::计算成功,待更新表数据----------------------------------------");
} else {
fanPointVarCorrelation.setCorrelationCoefficient(0.0);
}
......@@ -948,7 +1008,8 @@ public class KafkaConsumerService {
idxBizFanPointVarCorrelationService.saveOrUpdate(fanPointVarCorrelation);
log.info("表数据已更新");
log.info("----------------------------风机相关性--------------分析变量与工况变量相关性分析算法结束----------------------------------------");
log.info(
"----------------------------风机相关性--------------分析变量与工况变量相关性分析算法结束----------------------------------------");
log.info("kafka消费zhTestGroup消息{}", consumerRecord);
}
} catch (Exception e) {
......@@ -961,21 +1022,28 @@ public class KafkaConsumerService {
/**
* Wind turbine working-condition classification handler - deprecated
*
* @param consumerRecord
* @return
*/
boolean consumerRecordsGKFXFan(ConsumerRecord<String, String> consumerRecord) {
try {
String startTime = DateUtils.convertDateToString(DateUtil.offsetMonth(new Date(), -lastMonthNum), DateUtils.DATE_TIME_PATTERN);
String endTime = DateUtils.convertDateToString(DateUtils.getCurrentDayEndTime(new Date()), DateUtils.DATE_TIME_PATTERN);
String startTime = DateUtils.convertDateToString(DateUtil.offsetMonth(new Date(), -lastMonthNum),
DateUtils.DATE_TIME_PATTERN);
String endTime = DateUtils.convertDateToString(DateUtils.getCurrentDayEndTime(new Date()),
DateUtils.DATE_TIME_PATTERN);
Optional<?> kafkaMessage = Optional.ofNullable(consumerRecord.value());
if (kafkaMessage.isPresent()) {
IdxBizFanPointProcessVariableClassification fanPointVarCorrelation = JSON.parseObject(kafkaMessage.get().toString(), IdxBizFanPointProcessVariableClassification.class);
List<IndicatorData> tdengineData1 = indicatorDataMapper.selectDataByAddressAndtime(fanPointVarCorrelation.getIndexAddress(), startTime, endTime, fanPointVarCorrelation.getGatewayId());
IdxBizFanPointProcessVariableClassification fanPointVarCorrelation = JSON
.parseObject(kafkaMessage.get().toString(), IdxBizFanPointProcessVariableClassification.class);
List<IndicatorData> tdengineData1 = indicatorDataMapper.selectDataByAddressAndtime(
fanPointVarCorrelation.getIndexAddress(), startTime, endTime,
fanPointVarCorrelation.getGatewayId());
HashMap<String, Object> resultMap = new HashMap<>();
resultMap.put("processVariable", tdengineData1.stream().map(t -> Double.parseDouble(t.getValue())).collect(Collectors.toList()));
resultMap.put("processVariable",
tdengineData1.stream().map(t -> Double.parseDouble(t.getValue())).collect(Collectors.toList()));
resultMap.put("processVariableId", fanPointVarCorrelation.getSequenceNbr());
String response = HttpUtil.createPost(baseUrlGKHF).body(JSON.toJSONString(resultMap)).execute().body();
......@@ -987,7 +1055,8 @@ public class KafkaConsumerService {
fanPointVarCorrelation.setIntervalValue3(jsonObject.getDoubleValue("intervalValue3"));
fanPointVarCorrelation.setIntervalValue2(jsonObject.getDoubleValue("intervalValue2"));
fanPointVarCorrelation.setIntervalValue1(jsonObject.getDoubleValue("intervalValue1"));
log.info("------------------------------------------光伏相关性::计算成功,待更新表数据----------------------------------------");
log.info(
"------------------------------------------光伏相关性::计算成功,待更新表数据----------------------------------------");
} else {
fanPointVarCorrelation.setIntervalValue5(0.0);
fanPointVarCorrelation.setIntervalValue4(0.0);
......@@ -1007,25 +1076,32 @@ public class KafkaConsumerService {
return true;
}
/**
* PV message handler - deprecated
*
* @param consumerRecord
* @return
*/
boolean consumerRecordsPv(ConsumerRecord<String, String> consumerRecord) {
String startTime = DateUtils.convertDateToString(DateUtil.offsetMonth(new Date(), -lastMonthNum), DateUtils.DATE_TIME_PATTERN);
String endTime = DateUtils.convertDateToString(DateUtils.getCurrentDayEndTime(new Date()), DateUtils.DATE_TIME_PATTERN);
String startTime = DateUtils.convertDateToString(DateUtil.offsetMonth(new Date(), -lastMonthNum),
DateUtils.DATE_TIME_PATTERN);
String endTime = DateUtils.convertDateToString(DateUtils.getCurrentDayEndTime(new Date()),
DateUtils.DATE_TIME_PATTERN);
try {
Optional<?> kafkaMessage = Optional.ofNullable(consumerRecord.value());
if (kafkaMessage.isPresent()) {
IdxBizPvPointVarCorrelation pvPointVarCorrelation = JSON.parseObject(kafkaMessage.get().toString(), IdxBizPvPointVarCorrelation.class);
List<IndicatorData> tdengineData1 = indicatorDataMapper.selectDataByAddressAndtimeNew(pvPointVarCorrelation.getAnalysisIndexAddress().toString(), startTime, endTime, pvPointVarCorrelation.getAnalysisGatewayId(), pvPointVarCorrelation.getProcessGatewayId(), pvPointVarCorrelation.getProcessIndexAddress());
IdxBizPvPointVarCorrelation pvPointVarCorrelation = JSON.parseObject(kafkaMessage.get().toString(),
IdxBizPvPointVarCorrelation.class);
List<IndicatorData> tdengineData1 = indicatorDataMapper.selectDataByAddressAndtimeNew(
pvPointVarCorrelation.getAnalysisIndexAddress().toString(), startTime, endTime,
pvPointVarCorrelation.getAnalysisGatewayId(), pvPointVarCorrelation.getProcessGatewayId(),
pvPointVarCorrelation.getProcessIndexAddress());
List<Double> data1 = new ArrayList<>();
List<Double> data2 = new ArrayList<>();
tdengineData1.forEach(item -> {
if (item.getAddress().equals(pvPointVarCorrelation.getAnalysisIndexAddress()) && item.getGatewayId().equals(pvPointVarCorrelation.getAnalysisGatewayId())) {
if (item.getAddress().equals(pvPointVarCorrelation.getAnalysisIndexAddress())
&& item.getGatewayId().equals(pvPointVarCorrelation.getAnalysisGatewayId())) {
data1.add(Double.parseDouble(item.getValue()));
} else {
data2.add(Double.parseDouble(item.getValue()));
......@@ -1049,7 +1125,8 @@ public class KafkaConsumerService {
if (response.contains("correlation") && !response.contains("warning")) {
com.alibaba.fastjson.JSONObject jsonObject = JSON.parseObject(response);
pvPointVarCorrelation.setCorrelationCoefficient(jsonObject.getDoubleValue("correlation"));
log.info("------------------------------------------光伏相关性::计算成功,待更新表数据----------------------------------------");
log.info(
"------------------------------------------光伏相关性::计算成功,待更新表数据----------------------------------------");
} else {
pvPointVarCorrelation.setCorrelationCoefficient(0.0);
}
......@@ -1067,23 +1144,30 @@ public class KafkaConsumerService {
return true;
}
/**
* PV working-condition classification handler - deprecated
*
* @param consumerRecord
* @return
*/
boolean consumerRecordsGKFXPv(ConsumerRecord<String, String> consumerRecord) {
try {String startTime = DateUtils.convertDateToString(DateUtil.offsetMonth(new Date(), -lastMonthNum), DateUtils.DATE_TIME_PATTERN);
String endTime = DateUtils.convertDateToString(DateUtils.getCurrentDayEndTime(new Date()), DateUtils.DATE_TIME_PATTERN);
try {
String startTime = DateUtils.convertDateToString(DateUtil.offsetMonth(new Date(), -lastMonthNum),
DateUtils.DATE_TIME_PATTERN);
String endTime = DateUtils.convertDateToString(DateUtils.getCurrentDayEndTime(new Date()),
DateUtils.DATE_TIME_PATTERN);
Optional<?> kafkaMessage = Optional.ofNullable(consumerRecord.value());
if (kafkaMessage.isPresent()) {
IdxBizPvPointProcessVariableClassification pvPointVarCorrelation = JSON.parseObject(kafkaMessage.get().toString(), IdxBizPvPointProcessVariableClassification.class);
List<IndicatorData> tdengineData1 = indicatorDataMapper.selectDataByAddressAndtime(pvPointVarCorrelation.getIndexAddress(), startTime, endTime, pvPointVarCorrelation.getGatewayId());
IdxBizPvPointProcessVariableClassification pvPointVarCorrelation = JSON
.parseObject(kafkaMessage.get().toString(), IdxBizPvPointProcessVariableClassification.class);
List<IndicatorData> tdengineData1 = indicatorDataMapper.selectDataByAddressAndtime(
pvPointVarCorrelation.getIndexAddress(), startTime, endTime,
pvPointVarCorrelation.getGatewayId());
HashMap<String, Object> resultMap = new HashMap<>();
resultMap.put("processVariable", tdengineData1.stream().map(t -> Double.parseDouble(t.getValue())).collect(Collectors.toList()));
resultMap.put("processVariable",
tdengineData1.stream().map(t -> Double.parseDouble(t.getValue())).collect(Collectors.toList()));
resultMap.put("processVariableId", pvPointVarCorrelation.getSequenceNbr());
String response = HttpUtil.createPost(baseUrlGKHF).body(JSON.toJSONString(resultMap)).execute().body();
......@@ -1095,7 +1179,8 @@ public class KafkaConsumerService {
pvPointVarCorrelation.setIntervalValue3(jsonObject.getDoubleValue("intervalValue3"));
pvPointVarCorrelation.setIntervalValue2(jsonObject.getDoubleValue("intervalValue2"));
pvPointVarCorrelation.setIntervalValue1(jsonObject.getDoubleValue("intervalValue1"));
log.info("------------------------------------------光伏相关性::计算成功,待更新表数据----------------------------------------");
log.info(
"------------------------------------------光伏相关性::计算成功,待更新表数据----------------------------------------");
} else {
pvPointVarCorrelation.setIntervalValue5(0.0);
pvPointVarCorrelation.setIntervalValue4(0.0);
......@@ -1115,7 +1200,4 @@ public class KafkaConsumerService {
return true;
}
}
......@@ -4,11 +4,13 @@ import lombok.Data;
@Data
public class BuDunGenDto {
private Double day;
private Double month;
private Double year;
private Double month_complete;
private Double year_complete;
private Double year_hour_number;
private Double cumulative;
private String station_name;
private Double actual_installed_capacity;
private Double day;
private Double month;
private Double year;
private Double month_complete;
private Double year_complete;
private Double year_hour_number;
private Double cumulative;
}
package com.yeejoin.amos.boot.module.jxiop.biz.config;
import com.alibaba.druid.pool.DruidDataSource;
import com.baomidou.mybatisplus.extension.spring.MybatisSqlSessionFactoryBean;
import org.apache.ibatis.session.SqlSessionFactory;
import org.mybatis.spring.annotation.MapperScan;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.io.support.PathMatchingResourcePatternResolver;
import org.springframework.jdbc.datasource.DataSourceTransactionManager;
import javax.sql.DataSource;
/**
* Secondary (analysis) data source configuration.
* To add more data sources, add the data source settings in the yml file and create a matching configuration class.
*/
@Configuration
@MapperScan(basePackages = "com.yeejoin.amos.boot.module.jxiop.biz.tdanalysismapper", sqlSessionFactoryRef = "taosAnalysisSqlSessionFactory")
public class TdEngineAnalysisConfig {
private Logger logger = LoggerFactory.getLogger(TdEngineAnalysisConfig.class);
// Scoped to its own mapper directory so it stays isolated from the other data sources
private static final String MAPPER_LOCATION = "classpath*:mapper/tdengineanalysis/*.xml";
@Value("${spring.db4.datasource.url}")
private String dbUrl;
@Value("${spring.db4.datasource.username}")
private String username;
@Value("${spring.db4.datasource.password}")
private String password;
@Value("${spring.db4.datasource.driver-class-name}")
private String driverClassName;
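// A minimal sketch (assumption, not taken from this repository) of the spring.db4 block these
// @Value placeholders expect in application.yml; the URL and driver values are illustrative only:
// spring:
//   db4:
//     datasource:
//       url: jdbc:TAOS-RS://<host>:6041/<analysis_db>
//       username: <user>
//       password: <password>
//       driver-class-name: com.taosdata.jdbc.rs.RestfulDriver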
@Bean(name = "taosAnalysisDataSource") //声明其为Bean实例
public DataSource clusterDataSource() {
DruidDataSource datasource = new DruidDataSource();
datasource.setUrl(this.dbUrl);
datasource.setUsername(username);
datasource.setPassword(password);
datasource.setDriverClassName(driverClassName);
return datasource;
}
@Bean(name = "taosAnalysisTransactionManager")
public DataSourceTransactionManager clusterTransactionManager() {
return new DataSourceTransactionManager(clusterDataSource());
}
@Bean(name = "taosAnalysisSqlSessionFactory")
public SqlSessionFactory clusterSqlSessionFactory(@Qualifier("taosDataSource") DataSource culsterDataSource)
throws Exception {
final MybatisSqlSessionFactoryBean sessionFactory = new MybatisSqlSessionFactoryBean();
sessionFactory.setDataSource(culsterDataSource);
sessionFactory.setMapperLocations(new PathMatchingResourcePatternResolver()
.getResources(TdEngineAnalysisConfig.MAPPER_LOCATION));
sessionFactory.setTypeAliasesPackage("com.yeejoin.amos.boot.module.jxiop.biz.entity");
// MyBatis: map underscore column names to camel-case entity properties
sessionFactory.getObject().getConfiguration().setMapUnderscoreToCamelCase(true);
return sessionFactory.getObject();
}
}
......@@ -130,9 +130,9 @@ public class CommonConstans {
public static final String YEAR_GEN_ATTAINMENT_RATE = "年计划完成率";
// Annual utilization hours
public static final String YEAR_GEN_HOURS = "年利用小时数";
public static final String WIND_SPEED_THIRTY_SECONDS = "30秒平均风速";
public static final String TOTAL_RADIATION = "辐照度";
public static final String TOTAL_RADIATION_SUM = "累计辐照度";
public static final String WIND_SPEED_THIRTY_SECONDS = "30秒平均风速";
public static final String TOTAL_RADIATION = "辐照度";
public static final String TOTAL_RADIATION_SUM = "累计辐照度";
public static final String ACTIVE_POWER = "有功功率";
......
......@@ -12,21 +12,13 @@ import com.yeejoin.amos.boot.module.jxiop.api.entity.StationBasic;
import com.yeejoin.amos.boot.module.jxiop.api.mapper.MapRegionMapper;
import com.yeejoin.amos.boot.module.jxiop.api.mapper.RegionMapper;
import com.yeejoin.amos.boot.module.jxiop.api.mapper.StationBasicMapper;
import com.yeejoin.amos.boot.module.jxiop.api.service.IMapRegionService;
import com.yeejoin.amos.boot.module.jxiop.biz.constants.CommonConstans;
import com.yeejoin.amos.boot.module.jxiop.biz.dto.*;
import com.yeejoin.amos.boot.module.jxiop.biz.entity.Test;
import com.yeejoin.amos.boot.module.jxiop.biz.service.MonitorService;
import com.yeejoin.amos.boot.module.jxiop.biz.service.impl.*;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import javafx.scene.control.Tab;
import org.aspectj.apache.bcel.classfile.Module;
import org.eclipse.paho.client.mqttv3.MqttException;
import org.influxdb.dto.QueryResult;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
......@@ -37,7 +29,6 @@ import org.typroject.tyboot.core.restful.utils.ResponseHelper;
import org.typroject.tyboot.core.restful.utils.ResponseModel;
import java.util.*;
import java.util.logging.Handler;
import java.util.stream.Collectors;
@RestController
......
......@@ -2,6 +2,7 @@ package com.yeejoin.amos.boot.module.jxiop.biz.entity;
import lombok.Data;
import java.io.PipedReader;
import java.util.Date;
@Data
......@@ -23,7 +24,7 @@ public class IndicatorData {
private String unit;
private String value;
private String value="0";
private Float valueF;
......@@ -35,4 +36,11 @@ public class IndicatorData {
private String displayName;
private String xtime;
private String pointSeq;
private String pointAddress;
private String pointLocation;
private String pointType;
private String pointName;
private String dasTime;
}
......@@ -89,7 +89,6 @@ public interface MonitorService {
IPage<Map> getStationOverViewFanByStationId( String stationId);
/**
* @Description Dynamically fetch nationwide power-generation data
* @param stationId
* @return
*/
Map<String, Object> gettimedateyfd();
......@@ -109,5 +108,15 @@ public interface MonitorService {
*/
ResultsData getElectricQuantityList(int current, int size, StationBasic stationBasic);
/**
* @Description Dynamically fetch electricity meter data
*/
void getTotalData();
/**
* @Description Get the nationwide power curve
* @return
*/
Map<String, Object> getDetailsWindSpeedAlldataqg();
}
......@@ -19,8 +19,10 @@ import com.yeejoin.amos.boot.module.jxiop.api.util.HttpRequestUtil;
import com.yeejoin.amos.boot.module.jxiop.biz.ESDto.ESMoonPowerGeneration;
import com.yeejoin.amos.boot.module.jxiop.biz.constants.CommonConstans;
import com.yeejoin.amos.boot.module.jxiop.biz.dto.*;
import com.yeejoin.amos.boot.module.jxiop.biz.entity.IndicatorData;
import com.yeejoin.amos.boot.module.jxiop.biz.service.CoreCommonService;
import com.yeejoin.amos.boot.module.jxiop.biz.service.MonitorService;
import com.yeejoin.amos.boot.module.jxiop.biz.tdanalysismapper.IndicatorDataNewMapper;
import lombok.RequiredArgsConstructor;
import org.apache.commons.io.IOUtils;
import org.eclipse.paho.client.mqttv3.MqttException;
......@@ -36,13 +38,14 @@ import org.springframework.util.ObjectUtils;
import org.typroject.tyboot.component.emq.EmqKeeper;
import java.io.IOException;;
import java.io.IOException;
import java.math.BigDecimal;
import java.nio.charset.StandardCharsets;
import java.text.DecimalFormat;
import java.text.SimpleDateFormat;
import java.time.LocalDate;
import java.util.*;
import java.util.concurrent.atomic.AtomicReference;
import java.util.stream.Collectors;
import static com.alibaba.fastjson.JSON.parseArray;
......@@ -57,6 +60,7 @@ public class MonitorServiceImpl implements MonitorService {
private final HttpRequestUtil httpRequestUtil;
private final CoreCommonService coreCommonService;
private final StationElectricityMeterMapper stationElectricityMeterMapper;
private final IndicatorDataNewMapper indicatorDataNewMapper;
@Value("classpath:/json/overview.json")
private Resource overview;
@Value("classpath:/json/overviewGF.json")
......@@ -308,7 +312,7 @@ public class MonitorServiceImpl implements MonitorService {
@Override
public SeriesData getSeriesDataqy(String areaCode) {
MapRegion mapRegion = mapRegionMapper.selectOne(new QueryWrapper<MapRegion>().eq("areaCode", areaCode));
MapRegion mapRegion = mapRegionMapper.selectOne(new QueryWrapper<MapRegion>().eq("area_code", areaCode));
String areaName = "";
if (!ObjectUtils.isEmpty(mapRegion) && !ObjectUtils.isEmpty(mapRegion.getName()) && !mapRegion.getName().contains(Constants.areaChinese)) {
areaName = mapRegion.getName() + Constants.areaChinese;
......@@ -348,7 +352,7 @@ public class MonitorServiceImpl implements MonitorService {
@Override
public Page<HashMap<String, String>> socialContribution(String areaCode) {
Double totalAnnual = 0.0;
MapRegion mapRegion = mapRegionMapper.selectOne(new QueryWrapper<MapRegion>().eq("areaCode", areaCode));
MapRegion mapRegion = mapRegionMapper.selectOne(new QueryWrapper<MapRegion>().eq("area_code", areaCode));
String areaName = "";
if (!ObjectUtils.isEmpty(mapRegion) && !ObjectUtils.isEmpty(mapRegion.getName()) && !mapRegion.getName().contains(Constants.areaChinese)) {
areaName = mapRegion.getName() + Constants.areaChinese;
......@@ -388,7 +392,7 @@ public class MonitorServiceImpl implements MonitorService {
String areaName = "";
String requestUrl = Constants.BASE_URL + "?" + Constants.get_hours_num_top + "&topValue=5&tabValue=" + day;
if (areaCode != null) {
MapRegion mapRegion = mapRegionMapper.selectOne(new QueryWrapper<MapRegion>().eq("areaCode", areaCode));
MapRegion mapRegion = mapRegionMapper.selectOne(new QueryWrapper<MapRegion>().eq("area_code", areaCode));
if (!ObjectUtils.isEmpty(mapRegion) && !ObjectUtils.isEmpty(mapRegion.getName()) && !mapRegion.getName().contains(Constants.areaChinese)) {
areaName = mapRegion.getName() + Constants.areaChinese;
}
......@@ -893,4 +897,109 @@ public class MonitorServiceImpl implements MonitorService {
}
}
public Map<String, Object> getDetailsWindSpeedAlldataqg() {
List<StationCacheInfoDto> stationCacheInfoDtos = getListStationCacheInfoDto();
AtomicReference<Double> installedCapacity = new AtomicReference<>(0.0);
stationCacheInfoDtos.forEach( stationCacheInfoDto-> {
installedCapacity.updateAndGet(v -> v + Double.parseDouble(stationCacheInfoDto.getInstalledCapacity()));
});
Map<String, Object> map = new HashMap<>();
List<String> values = new ArrayList<>();
List<String> time = new ArrayList<>();
Map<String, Object> activePowerInfo = new HashMap<>();
List<IndicatorData> activePowerList = new ArrayList<>();
activePowerList = indicatorDataNewMapper.selectDataByequipmentIndexNameAndtimeqgNew(CommonConstans.ACTIVE_POWER);
Map<String,List<IndicatorData>> stringObjectMap =activePowerList.stream().collect(Collectors.groupingBy(IndicatorData::getDasTime));
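// Collectors.groupingBy returns a HashMap, so the dasTime keys iterated below are not guaranteed to be
// in chronological order; each group's values are summed into a single aggregated point.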
stringObjectMap.keySet().stream().forEach(dasTime -> {
time.add(dasTime.split(":")[1]);
Double vl= stringObjectMap.get(dasTime).stream().mapToDouble(IndicatorData::getValueF).sum();
values.add(String.format(CommonConstans.Twodecimalplaces, vl));
});
String max = String.format(CommonConstans.Twodecimalplaces, values.stream().mapToDouble(Double::parseDouble).max().getAsDouble());
String min = String.format(CommonConstans.Twodecimalplaces, values.stream().mapToDouble(Double::parseDouble).min().getAsDouble());
String mean = String.format(CommonConstans.Twodecimalplaces, values.stream().mapToDouble(Double::parseDouble).average().getAsDouble());
activePowerInfo.put("mean", mean);
activePowerInfo.put("max", max);
activePowerInfo.put("min", min);
activePowerInfo.put("maxTime", time.get(values.lastIndexOf(max)));
activePowerInfo.put("minTime", time.get(values.lastIndexOf(min)));
activePowerInfo.put("load", String.format(CommonConstans.Twodecimalplaces, Double.valueOf(values.get(values.size() - 1)) / installedCapacity.get()));
List<Map<String, Object>> seriesData = new ArrayList<>();
Map<String, Object> map3 = new HashMap<>();
Map<String, Object> map1 = new HashMap<>();
Map<String, Object> map2 = new HashMap<>();
map1.put("data", values);
seriesData.add(map1);
map.put("seriesData", seriesData);
map.put("axisData", time);
System.out.println(JSON.toJSONString(map));
try {
emqKeeper.getMqttClient().publish( "all_Power_table", JSON.toJSON(map).toString().getBytes("UTF-8"), 1, true);
emqKeeper.getMqttClient().publish("all_Power_info", JSON.toJSON(activePowerInfo).toString().getBytes("UTF-8"), 1, true);
} catch (Exception exception) {
exception.printStackTrace();
}
return map;
}
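    // Illustrative consumer sketch (assumption, not part of the original class): the maps published above on
    // "all_Power_table" / "all_Power_info" are retained QoS-1 messages, so any Paho client can pull the latest
    // snapshot on demand; broker address and admin/public credentials mirror the emqx.* properties further below.
    private void subscribeAllPowerSnapshotExample() throws org.eclipse.paho.client.mqttv3.MqttException {
        org.eclipse.paho.client.mqttv3.MqttConnectOptions options = new org.eclipse.paho.client.mqttv3.MqttConnectOptions();
        options.setUserName("admin");
        options.setPassword("public".toCharArray());
        org.eclipse.paho.client.mqttv3.MqttClient client = new org.eclipse.paho.client.mqttv3.MqttClient(
                "tcp://172.16.10.220:1883", org.eclipse.paho.client.mqttv3.MqttClient.generateClientId());
        client.connect(options);
        // the retained message of each topic is delivered immediately after subscribing
        client.subscribe("all_Power_table", 1,
                (topic, message) -> System.out.println(topic + " -> " + new String(message.getPayload(), "UTF-8")));
    }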
public Map<String, Object> getDetailsWindqy(String s) {
List<StationCacheInfoDto> stationCacheInfoDtos = getListStationCacheInfoDto();
AtomicReference<Double> installedCapacity = new AtomicReference<>(0.0);
stationCacheInfoDtos = stationCacheInfoDtos.stream().filter(stationCacheInfoDto -> stationCacheInfoDto.getBelongArea().equals(s)).collect(Collectors.toList());
stationCacheInfoDtos.forEach(stationCacheInfoDto -> {
installedCapacity.updateAndGet(v -> v + Double.parseDouble(stationCacheInfoDto.getInstalledCapacity()));
});
Map<String, Object> map = new HashMap<>();
List<String> values = new ArrayList<>();
List<String> fanGateWayIds = stationCacheInfoDtos.stream().filter(stationCacheInfoDto -> stationCacheInfoDto.getFanGatewayId()!=null).map(StationCacheInfoDto::getFanGatewayId).collect(Collectors.toList());
List<String> boostGateWayIds = stationCacheInfoDtos.stream().filter(stationCacheInfoDto -> stationCacheInfoDto.getBoosterGatewayId()!=null).map(StationCacheInfoDto::getBoosterGatewayId).collect(Collectors.toList());
fanGateWayIds.addAll(boostGateWayIds);
String gatewayIds =Optional.ofNullable(fanGateWayIds).orElse(new ArrayList<>()).stream().distinct().collect(Collectors.joining(","));
List<String> time = new ArrayList<>();
Map<String, Object> activePowerInfo = new HashMap<>();
List<IndicatorData> activePowerList = new ArrayList<>();
activePowerList = indicatorDataNewMapper.selectDataByequipmentIndexNameAndtimeqgNew(CommonConstans.ACTIVE_POWER,gatewayIds);
Map<String,List<IndicatorData>> stringObjectMap =activePowerList.stream().collect(Collectors.groupingBy(IndicatorData::getDasTime));
stringObjectMap.keySet().stream().forEach(dasTime -> {
time.add(dasTime.split(":")[1]);
Double vl= stringObjectMap.get(dasTime).stream().mapToDouble(IndicatorData::getValueF).sum();
values.add(String.format(CommonConstans.Twodecimalplaces, vl));
});
String max = String.format(CommonConstans.Twodecimalplaces, values.stream().mapToDouble(Double::parseDouble).max().getAsDouble());
String min = String.format(CommonConstans.Twodecimalplaces, values.stream().mapToDouble(Double::parseDouble).min().getAsDouble());
String mean = String.format(CommonConstans.Twodecimalplaces, values.stream().mapToDouble(Double::parseDouble).average().getAsDouble());
activePowerInfo.put("mean", mean);
activePowerInfo.put("max", max);
activePowerInfo.put("min", min);
activePowerInfo.put("maxTime", time.get(values.lastIndexOf(max)));
activePowerInfo.put("minTime", time.get(values.lastIndexOf(min)));
activePowerInfo.put("load", String.format(CommonConstans.Twodecimalplaces, Double.valueOf(values.get(values.size() - 1)) / installedCapacity.get()));
List<Map<String, Object>> seriesData = new ArrayList<>();
Map<String, Object> map3 = new HashMap<>();
Map<String, Object> map1 = new HashMap<>();
Map<String, Object> map2 = new HashMap<>();
map1.put("data", values);
seriesData.add(map1);
map.put("seriesData", seriesData);
map.put("axisData", time);
try {
emqKeeper.getMqttClient().publish( s+"_Power_table", JSON.toJSON(map).toString().getBytes("UTF-8"), 1, true);
emqKeeper.getMqttClient().publish(s+"_Power_info", JSON.toJSON(activePowerInfo).toString().getBytes("UTF-8"), 1, true);
} catch (Exception exception) {
exception.printStackTrace();
}
return map;
}
}
package com.yeejoin.amos.boot.module.jxiop.biz.tdanalysismapper;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import com.yeejoin.amos.boot.module.jxiop.biz.entity.IndicatorData;
import org.apache.ibatis.annotations.Param;
import org.apache.ibatis.annotations.Select;
import org.springframework.stereotype.Repository;
import java.util.List;
@Repository
public interface IndicatorDataNewMapper extends BaseMapper<IndicatorData> {
@Select("select LAST(address)address,LAST(gateway_id)gatewayId,LAST(data_type)dataType,LAST(equipment_index_name)equipmentIndexName,LAST(`value_f`)valueF,LAST(equipment_number)equipmentNumber,LAST(created_time)createdTime from iot_data.indicator_data where equipment_index_name =#{equipmentIndexName} and gateway_id =#{gatewayId} and ts >=#{startTime} and ts <=#{endTime} group by address ")
List<IndicatorData> selectlastfd(@Param("equipmentIndexName") String equipmentIndexName,@Param("gatewayId") String gatewayId,@Param("startTime") String startTime, @Param("endTime") String endTime);
@Select("select LAST(address)address,LAST(gateway_id)gatewayId,LAST(data_type)dataType,LAST(equipment_index_name)equipmentIndexName,LAST(`value_f`)valueF,LAST(equipment_number)equipmentNumber,LAST(created_time)createdTime from iot_data.indicator_data where equipment_index_name =#{equipmentIndexName} and gateway_id =#{gatewayId} and ts >=#{startTime} and ts <=#{endTime} group by address ")
List<IndicatorData> selectlastgf(@Param("equipmentIndexName") String equipmentIndexName,@Param("gatewayId") String gatewayId,@Param("startTime") String startTime, @Param("endTime") String endTime);
@Select("select created_time createdTime, `value_f` as valueF from iot_data.indicator_data where equipment_index_name =#{equipmentIndexName} and ts >= #{startTime} and ts <= #{endTime} and gateway_id =#{gatewayId}")
List<IndicatorData> selectDataByequipmentIndexNameAndtime(@Param("equipmentIndexName") String equipmentIndexName, @Param("startTime") String startTime, @Param("endTime") String endTime, @Param("gatewayId") String gatewayId);
@Select("select sum(valueFs) valueF,xtime from (select SUBSTR(`created_time`, 11,6) xtime, `value_f` as valueFs from iot_data.indicator_data where equipment_index_name =#{equipmentIndexName} and ts >= #{startTime} and ts <= #{endTime} and gateway_id in (#{gatewayId})) group by xtime order by xtime ")
List<IndicatorData> selectDataByequipmentIndexNameAndtimeqg(@Param("equipmentIndexName") String equipmentIndexName, @Param("startTime") String startTime, @Param("endTime") String endTime, @Param("gatewayId") String gatewayId);
@Select("select `value`, created_time, `value_f` as valueF from iot_data.indicator_data where point_name =#{pointName} and ts >= TODAY()-8h and point_type ='SYNTHETIC'")
List<IndicatorData> selectDataByequipmentIndexNameAndtimeqgNew(@Param("pointName") String pointName);
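// Note on the query above: TODAY() is midnight of the current day in the connection time zone (GMT+8 per the
// datasource URL), so "ts >= TODAY()-8h" widens the window by eight hours (assumption: this compensates for
// timestamps stored in UTC); point_type = 'SYNTHETIC' limits the result to rows flagged as SYNTHETIC points.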
@Select("select `value`, created_time, `value_f` as valueF from iot_data.indicator_data where point_name =#{pointName} and ts >= TODAY()-8h and gateway_id in (#{gatewayId})) and point_type ='SYNTHETIC'")
List<IndicatorData> selectDataByequipmentIndexNameAndtimeqgNew(@Param("equipmentIndexName") String pointName,@Param("gatewayId") String gatewayId);
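// Note on the query above: #{gatewayId} is bound as one string parameter, so a comma-joined id list is matched
// as a single literal by IN (...); expanding it into individual IN items would need ${gatewayId} or a MyBatis
// <foreach> clause inside a <script> block, e.g.
// gateway_id in <foreach collection="gatewayIds" item="id" open="(" separator="," close=")">#{id}</foreach>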
@Select("select `value`, created_time, `value_f` as valueF from iot_data.indicator_data where equipment_index_name =#{equipmentIndexName} and equipment_number = #{equipmentNumber} and ts >= #{startTime} and ts <= #{endTime} and gateway_id = #{gatewayId}")
List<IndicatorData> selectDataByequipmentIndexNameAndtimeAndEquipmentNumber(@Param("equipmentIndexName") String equipmentIndexName, @Param("equipmentNumber") String equipmentNumber, @Param("startTime") String startTime, @Param("endTime") String endTime, @Param("gatewayId") String gatewayId);
@Select("select `value`, created_time, `value_f` as valueF, equipment_index_name from iot_data.indicator_data where equipment_index_name like '%路电流%' and equipment_number = #{equipmentNumber} and ts >= #{startTime} and ts <= #{endTime} and gateway_id =#{gatewayId}")
List<IndicatorData> selectDataByequipmentIndexNameAndtimeAndEquipmentNumberPv(@Param("equipmentNumber") String equipmentNumber, @Param("startTime") String startTime, @Param("endTime") String endTime, @Param("gatewayId") String gatewayId);
}
......@@ -11,13 +11,16 @@ spring.db2.datasource.url=jdbc:mysql://139.9.173.44:3306/jxiop_sync_data?allowMu
spring.db2.datasource.username=root
spring.db2.datasource.password=Yeejoin@2020
spring.db2.datasource.driver-class-name: com.mysql.cj.jdbc.Driver
##db3
spring.db3.datasource.url=jdbc:TAOS-RS://139.9.170.47:6041/iot_data?user=root&password=taosdata&timezone=GMT%2b8&allowMultiQueries=true
spring.db3.datasource.username=root
spring.db3.datasource.password=taosdata
spring.db3.datasource.driver-class-name: com.taosdata.jdbc.rs.RestfulDriver
##db4
spring.db4.datasource.url=jdbc:TAOS-RS://139.9.170.47:6041/iot_data?user=root&password=taosdata&timezone=GMT%2b8&allowMultiQueries=true
spring.db4.datasource.username=root
spring.db4.datasource.password=taosdata
spring.db4.datasource.driver-class-name: com.taosdata.jdbc.rs.RestfulDriver
## eureka properties:
eureka.instance.hostname=172.16.10.220
......
## DB properties:
## db1-production database
spring.db1.datasource.type: com.alibaba.druid.pool.DruidDataSource
spring.db1.datasource.url=jdbc:mysql://10.20.1.157:3306/production?allowMultiQueries=true&serverTimezone=GMT%2B8&characterEncoding=utf8
spring.db1.datasource.username=root
spring.db1.datasource.password=Yeejoin@2020
spring.db1.datasource.driver-class-name: com.mysql.cj.jdbc.Driver
## db2-sync_data
spring.db2.datasource.type: com.alibaba.druid.pool.DruidDataSource
spring.db2.datasource.url=jdbc:mysql://10.20.1.157:3306/jxiop_sync_data?allowMultiQueries=true&serverTimezone=GMT%2B8&characterEncoding=utf8
spring.db2.datasource.username=root
spring.db2.datasource.password=Yeejoin@2020
spring.db2.datasource.driver-class-name: com.mysql.cj.jdbc.Driver
##db3
spring.db3.datasource.url=jdbc:TAOS-RS://10.20.1.157:6041/iot_data?user=root&password=taosdata&timezone=GMT%2b8&allowMultiQueries=true
spring.db3.datasource.username=root
spring.db3.datasource.password=taosdata
spring.db3.datasource.driver-class-name: com.taosdata.jdbc.rs.RestfulDriver
##db4
spring.db4.datasource.url=jdbc:TAOS-RS://10.20.1.157:6041/iot_data?user=root&password=taosdata&timezone=GMT%2b8&allowMultiQueries=true
spring.db4.datasource.username=root
spring.db4.datasource.password=taosdata
spring.db4.datasource.driver-class-name: com.taosdata.jdbc.rs.RestfulDriver
## eureka properties:
eureka.instance.hostname=172.16.10.220
eureka.client.serviceUrl.defaultZone=http://admin:a1234560@${eureka.instance.hostname}:10001/eureka/
## redis properties:
spring.redis.database=1
spring.redis.host=10.20.1.210
spring.redis.port=6379
spring.redis.password=yeejoin@2020
spring.cache.type=GENERIC
j2cache.open-spring-cache=true
j2cache.cache-clean-mode=passive
j2cache.allow-null-values=true
j2cache.redis-client=lettuce
j2cache.l2-cache-open=true
j2cache.broadcast=net.oschina.j2cache.cache.support.redis.SpringRedisPubSubPolicy
j2cache.L1.provider_class=caffeine
j2cache.L2.provider_class=net.oschina.j2cache.cache.support.redis.SpringRedisProvider
j2cache.L2.config_section=lettuce
j2cache.sync_ttl_to_redis=true
j2cache.default_cache_null_object=false
j2cache.serialization=fst
caffeine.properties=/caffeine.properties
lettuce.mode=single
lettuce.namespace=
lettuce.storage=generic
lettuce.channel=j2cache
lettuce.scheme=redis
lettuce.hosts=${spring.redis.host}:${spring.redis.port}
lettuce.password=${spring.redis.password}
lettuce.database=${spring.redis.database}
lettuce.sentinelMasterId=
lettuce.maxTotal=100
lettuce.maxIdle=10
lettuce.minIdle=10
lettuce.timeout=10000
emqx.clean-session=true
emqx.client-id=${spring.application.name}-${random.int[1024,65536]}
emqx.broker=tcp://172.16.10.220:1883
emqx.user-name=admin
emqx.password=public
mqtt.scene.host=mqtt://172.16.10.220:8083/mqtt
mqtt.client.product.id=mqtt
mqtt.topic=topic_mqtt
spring.mqtt.completionTimeout=3000
emqx.max-inflight=1000
## influxDB
#spring.influx.url= http://172.16.3.155:18186
#spring.influx.password=Yeejoin@2020
#spring.influx.user=root
#spring.influx.database=iot_platform
#spring.influx.retention_policy=default
#spring.influx.retention_policy_time=30d
#spring.influx.actions=10000
#spring.influx.bufferLimit=20000
## influxDB
#spring.influx.url= http://139.9.171.247:8086
#spring.influx.password=Yeejoin@2023
#spring.influx.user=admin
#spring.influx.database=iot_platform
#spring.influx.retention_policy=default
#spring.influx.retention_policy_time=30d
#spring.influx.actions=10000
#spring.influx.bufferLimit=20000
#spring.influx.url=http://10.20.1.157:18086
#spring.influx.password=Yeejoin@2020
#spring.influx.user=root
#spring.influx.database=iot_platform
#spring.influx.retention_policy=default
#spring.influx.retention_policy_time=30d
#spring.influx.actions=10000
#spring.influx.bufferLimit=20000
knife4j.production=false
knife4j.enable=true
knife4j.basic.enable=true
knife4j.basic.username=admin
knife4j.basic.password=a1234560
management.security.enabled=true
spring.security.user.name=admin
spring.security.user.password=a1234560
fire-rescue=123
mybatis-plus.global-config.db-config.update-strategy=ignored
# user-amos setting : This value is the secretkey for person manage moudle accout password encryption.please don't change it!!!
amos.secret.key=qaz
# if your service can't be access ,you can use this setting , you need change ip as your.
#eureka.instance.prefer-ip-address=true
#eureka.instance.ip-address=172.16.3.122
spring.activemq.broker-url=tcp://10.20.1.210:61616
spring.activemq.user=admin
spring.activemq.password=admin
spring.jms.pub-sub-domain=false
myqueue=amos.privilege.v1.JXIOP.AQSC_FDGL.userBusiness
spring.elasticsearch.rest.uris=http://10.20.0.223:9200
spring.elasticsearch.rest.connection-timeout=30000
spring.elasticsearch.rest.username=elastic
spring.elasticsearch.rest.password=Yeejoin@2020
spring.elasticsearch.rest.read-timeout=30000
# fan status / picture upload paths
fan.statuts.stattuspath=upload/jxiop/device_status
pictureUrl=upload/jxiop/syz/
# prediction service url
idx.predict.serviceUrl=http://10.20.1.157:8095/jxdj/predict-data
......@@ -2,7 +2,7 @@ spring.application.name=AMOS-JXIOP-BIGSCREEN
server.servlet.context-path=/jxiop-bigscreen
server.port=33300
server.uri-encoding=UTF-8
spring.profiles.active=dev
spring.profiles.active=dev1
spring.jackson.time-zone=GMT+8
spring.jackson.date-format=yyyy-MM-dd HH:mm:ss
logging.config=classpath:logback-${spring.profiles.active}.xml
......
<?xml version="1.0" encoding="UTF-8"?>
<configuration debug="false">
<!--定义日志文件的存储地址 勿在 LogBack 的配置中使用相对路径-->
<property name="LOG_HOME" value="log" />
<property name="LOG_PATTERN" value="%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %-50.50logger{50} - %msg [%file:%line] %n" />
<!-- 按照每天生成日志文件 -->
<appender name="FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<!--日志文件输出的文件名-->
<FileNamePattern>${LOG_HOME}/ccs.log.%d{yyyy-MM-dd}.log</FileNamePattern>
<!--日志文件保留天数-->
<MaxHistory>7</MaxHistory>
</rollingPolicy>
<encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
<!--格式化输出:%d表示日期,%thread表示线程名,%-5level:级别从左显示5个字符宽度%msg:日志消息,%n是换行符-->
<pattern>${LOG_PATTERN}</pattern>
</encoder>
<!--日志文件最大的大小-->
<triggeringPolicy class="ch.qos.logback.core.rolling.SizeBasedTriggeringPolicy">
<MaxFileSize>30mb</MaxFileSize>
</triggeringPolicy>
</appender>
<!-- 控制台输出 -->
<appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
<encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
<!--格式化输出:%d表示日期,%thread表示线程名,%-5level:级别从左显示5个字符宽度%msg:日志消息,%n是换行符-->
<pattern>${LOG_PATTERN}</pattern>
</encoder>
</appender>
<!--myibatis log configure-->
<logger name="com.apache.ibatis" level="DEBUG"/>
<logger name="java.sql.Connection" level="DEBUG"/>
<logger name="java.sql.Statement" level="DEBUG"/>
<logger name="java.sql.PreparedStatement" level="DEBUG"/>
<logger name="com.baomidou.mybatisplus" level="DEBUG"/>
<logger name="org.springframework" level="DEBUG"/>
<logger name="org.typroject" level="DEBUG"/>
<logger name="com.yeejoin" level="DEBUG"/>
<!-- 日志输出级别 -->
<root level="INFO">
<appender-ref ref="FILE" />
<appender-ref ref="STDOUT" />
</root>
</configuration>
package com.yeejoin.amos.boot.module.jxiop.biz.service.impl;
import cn.hutool.core.util.DesensitizedUtil;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
......@@ -63,22 +62,22 @@ import java.util.stream.Collectors;
*/
@Slf4j
@Service
public class PersonBasicServiceImpl extends BaseService<PersonBasicDto, PersonBasic, PersonBasicMapper> implements IPersonBasicService {
public class PersonBasicServiceImpl extends BaseService<PersonBasicDto, PersonBasic, PersonBasicMapper>
implements IPersonBasicService {
@Autowired
PersonBasicMapper personBasicMapper;
//人员账号信息service
// 人员账号信息service
@Autowired
PersonAccountServiceImpl personAccountService;
//人员归属信息service
// 人员归属信息service
@Autowired
PersonAscriptionServiceImpl personAscriptionService;
//人员资质信息service
// 人员资质信息service
@Autowired
PersonCertificateServiceImpl personCertificateService;
//人员教育信息
// 人员教育信息
@Autowired
PersonSkillEducationServiceImpl personSkillEducationService;
@Value("${amos.secret.key}")
......@@ -95,7 +94,6 @@ public class PersonBasicServiceImpl extends BaseService<PersonBasicDto, PersonBa
@Autowired
protected EmqKeeper emqKeeper;
@Autowired
private AgencyuserFeign agencyuserFeign;
......@@ -107,18 +105,16 @@ public class PersonBasicServiceImpl extends BaseService<PersonBasicDto, PersonBa
*/
public static final String RYFM_DATA_MQTT_TOPIC = "ryfm/data/analysis";
/**
* 人员红黄码恢复为绿码
*/
public static final String RYFM_GREEN = "ryfm/person/green";
private String UPDATE="UPDATE";
private String INSERT="INSERT";
private String UPDATE = "UPDATE";
private String INSERT = "INSERT";
@Autowired
PersonAccountFedMapper personAccountFedMapper;
/**
* 分页查询
*/
......@@ -140,7 +136,8 @@ public class PersonBasicServiceImpl extends BaseService<PersonBasicDto, PersonBa
} catch (Exception e) {
e.printStackTrace();
}
Map<String, String> collect = elevatorCategory.stream().collect(Collectors.toMap(DictionarieValueModel::getDictDataKey, DictionarieValueModel::getDictDataDesc));
Map<String, String> collect = elevatorCategory.stream().collect(
Collectors.toMap(DictionarieValueModel::getDictDataKey, DictionarieValueModel::getDictDataDesc));
redisUtils.set(Constants.JXIOP_DICT_POST, collect);
List<DictionarieValueModel> elevator = null;
......@@ -149,14 +146,15 @@ public class PersonBasicServiceImpl extends BaseService<PersonBasicDto, PersonBa
} catch (Exception e) {
e.printStackTrace();
}
Map<String, String> collect2 = elevator.stream().collect(Collectors.toMap(DictionarieValueModel::getDictDataKey, DictionarieValueModel::getDictDataDesc));
Map<String, String> collect2 = elevator.stream().collect(
Collectors.toMap(DictionarieValueModel::getDictDataKey, DictionarieValueModel::getDictDataDesc));
redisUtils.set(Constants.JXIOP_DICT_CERTIFICATES, collect2);
}
public RiskBizInfoVo fetchData(PersonBasic personBasic, PersonAccount personAccount, String content) {
RiskBizInfoVo riskBizInfoVo = new RiskBizInfoVo();
riskBizInfoVo.setWarningObjectName(personAccount.getProjectName() + personBasic.getPostName() + personAccount.getName());
riskBizInfoVo.setWarningObjectName(
personAccount.getProjectName() + personBasic.getPostName() + personAccount.getName());
riskBizInfoVo.setWarningObjectCode(String.valueOf(personBasic.getSequenceNbr()));
riskBizInfoVo.setSourceAttribution(personBasic.getProjectOrgCode());
riskBizInfoVo.setSourceAttributionDesc(personAccount.getProjectName());
......@@ -172,7 +170,6 @@ public class PersonBasicServiceImpl extends BaseService<PersonBasicDto, PersonBa
return riskBizInfoVo;
}
/**
* 新增
*/
......@@ -186,15 +183,15 @@ public class PersonBasicServiceImpl extends BaseService<PersonBasicDto, PersonBa
// 证书临期信息
Map<String, String> certificatesMap = (Map<String, String>) redisUtils.get(Constants.JXIOP_DICT_CERTIFICATES);
//获取人员基本信息数据
// 获取人员基本信息数据
PersonUser personUser = model.getPersonUser();
//获取人员账号信息
// 获取人员账号信息
PersonAccount personAccount = model.getPersonAccount();
personUser.setPhone(personAccount.getPhoneNum());
//人员基础信息
// 人员基础信息
PersonBasic personBasic = new PersonBasic();
BeanUtils.copyProperties(personUser, personBasic);
//默认红码
// 默认红码
personBasic.setQrcodeColor(QrcodeColorEnum.RED.getCode());
personBasic.setQrcodeDesc("证书不全");
// 该岗位应获得的证书
......@@ -206,7 +203,7 @@ public class PersonBasicServiceImpl extends BaseService<PersonBasicDto, PersonBa
}
}
List<String> list = new ArrayList(list2);
//人员资质信息
// 人员资质信息
Integer isInMonth = 0;
Integer isOver = 0;
CertificationInfo personCertificate = model.getPersonCertificate();
......@@ -218,16 +215,17 @@ public class PersonBasicServiceImpl extends BaseService<PersonBasicDto, PersonBa
if (CollectionUtils.isNotEmpty(personCertificate.getCertificationInfo())) {
for (PersonCertificate item : personCertificate.getCertificationInfo()) {
if (StringUtils.isNotEmpty(item.getValidPeriod()) && !Objects.isNull(item.getCertificateTime())) {
int validPeriod = StringUtils.isEmpty(item.getValidPeriod()) ? 3 : Integer.parseInt(item.getValidPeriod());
int validPeriod = StringUtils.isEmpty(item.getValidPeriod()) ? 3
: Integer.parseInt(item.getValidPeriod());
Date date = DateUtils.dateAddYears(item.getCertificateTime(), validPeriod);
if (list.contains(item.getCertificateName()) &&
DateUtils.dateCompare(date, new Date()) == -1) {
if (list.contains(item.getCertificateName()) && DateUtils.dateCompare(date, new Date()) == -1) {
isOver = 1;
overCertificateList.add(item.getCertificateName());
}
if (list.contains(item.getCertificateName()) &&
DateUtils.dateBetweenIncludeToday(new Date(), date) < Integer.valueOf(certificatesMap.get(item.getCertificateName())) &&
DateUtils.dateCompare(date, new Date()) == 1) {
if (list.contains(item.getCertificateName())
&& DateUtils.dateBetweenIncludeToday(new Date(), date) < Integer
.valueOf(certificatesMap.get(item.getCertificateName()))
&& DateUtils.dateCompare(date, new Date()) == 1) {
isInMonth = 1;
inMonthCertificateList.add(item.getCertificateName());
}
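// Worked example (illustrative, hypothetical certificate name and threshold): a required certificate issued
// 2021-06-01 with validPeriod "3" expires on 2024-06-01; on 2024-05-20 it is not yet expired
// (dateCompare(expiry, today) == 1) and the roughly 12 remaining days fall below a 30-day dictionary threshold,
// so isInMonth is set and the name lands in inMonthCertificateList; on 2024-06-10 dateCompare would return -1
// and the certificate would instead be counted in overCertificateList.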
......@@ -239,13 +237,13 @@ public class PersonBasicServiceImpl extends BaseService<PersonBasicDto, PersonBa
List<String> noCertificateList = new ArrayList<>(list);
List<String> strings = new ArrayList<>();
if (CollectionUtils.isNotEmpty(overCertificateList)) {
strings.add("过期证书:" + String.join("," , overCertificateList));
strings.add("过期证书:" + String.join(",", overCertificateList));
}
if (CollectionUtils.isNotEmpty(inMonthCertificateList)) {
strings.add("临期证书:" + String.join("," , inMonthCertificateList));
strings.add("临期证书:" + String.join(",", inMonthCertificateList));
}
if (CollectionUtils.isNotEmpty(noCertificateList)) {
strings.add("缺少证书:" + String.join("," , noCertificateList));
strings.add("缺少证书:" + String.join(",", noCertificateList));
}
String join = "";
if (CollectionUtils.isNotEmpty(strings)) {
......@@ -267,12 +265,11 @@ public class PersonBasicServiceImpl extends BaseService<PersonBasicDto, PersonBa
personBasic.setQrcodeDate(new Date());
}
CompanyModel companyModel = new CompanyModel();
//单位
// 单位
companyModel = this.getCompanyModel(personAccount.getProjectId());
personBasic.setProjectOrgCode(companyModel.getOrgCode());
if (personUser.getNativePlace()!=null) {
if (personUser.getNativePlace() != null) {
personBasic.setNativePlace(JSON.toJSONString(personUser.getNativePlace()));
}
this.baseMapper.insert(personBasic);
......@@ -287,7 +284,8 @@ public class PersonBasicServiceImpl extends BaseService<PersonBasicDto, PersonBa
bizMessage.setBizInfo(riskBizInfoVo);
bizMessage.setTraceId(String.valueOf(personBasic.getSequenceNbr()));
try {
emqKeeper.getMqttClient().publish(RYFM_DATA_MQTT_TOPIC, JSON.toJSONString(bizMessage).getBytes(StandardCharsets.UTF_8), 2, false);
emqKeeper.getMqttClient().publish(RYFM_DATA_MQTT_TOPIC,
JSON.toJSONString(bizMessage).getBytes(StandardCharsets.UTF_8), 2, false);
} catch (MqttException e) {
e.printStackTrace();
}
......@@ -298,27 +296,28 @@ public class PersonBasicServiceImpl extends BaseService<PersonBasicDto, PersonBa
personCertificateService.save(item);
});
}
//人员技能学历信息
// 人员技能学历信息
PersonSkillEducation personSkillEducation = new PersonSkillEducation();
BeanUtils.copyProperties(personUser, personSkillEducation);
personSkillEducation.setPersonId(personBasic.getSequenceNbr());
personSkillEducationService.save(personSkillEducation);
//人员账号信息
// 人员账号信息
personAccount.setPersonId(personBasic.getSequenceNbr());
personAccount.setPassword(DesUtil.encode(personAccount.getPassword(), secretKey));
personAccount.setSecondaryPassword(DesUtil.encode(personAccount.getSecondaryPassword(), secretKey));
personAccountService.save(personAccount);
//新增平台账号
//组装数据
// 新增平台账号
// 组装数据
AgencyUserModel usd = new AgencyUserModel();
//应用
// 应用
usd.setAppCodes(personAccount.getApplication());
//手机号
// 手机号
usd.setMobile(personUser.getPhone());
//角色
// 角色
Map<Long, List<Long>> map = new HashMap<>();
List<Long> cdids = personAccount.getRoles().stream().map(s -> Long.parseLong(s.trim())).collect(Collectors.toList()); //测点数组
List<Long> cdids = personAccount.getRoles().stream().map(s -> Long.parseLong(s.trim()))
.collect(Collectors.toList()); // 测点数组
if (personAccount.getDepartmentId() != null) {
map.put(personAccount.getDepartmentId(), cdids);
......@@ -326,31 +325,31 @@ public class PersonBasicServiceImpl extends BaseService<PersonBasicDto, PersonBa
map.put(personAccount.getProjectId(), cdids);
}
usd.setOrgRoleSeqs(map);
//密码
// 密码
usd.setPassword(personAccount.getPassword());
//二次密码
// 二次密码
usd.setRePassword(personAccount.getSecondaryPassword());
//用户名
// 用户名
usd.setRealName(personAccount.getName());
//账号
// 账号
usd.setUserName(personAccount.getAccountName());
usd.setLockStatus("UNLOCK");
//新增平台用户
// 新增平台用户
AgencyUserModel agencyUserModel = this.setcreateUser(usd);
//设置userID
// 设置userID
usd.setUserId(agencyUserModel.getUserId());
//设置工号
// 设置工号
usd.setUserName(personAccount.getJobNumber());
//创建支持工号登录
// 创建支持工号登录
this.createLoginInfo(usd);
//查询部门
// 查询部门
DepartmentModel departmentModel = null;
if (personAccount.getDepartmentId() != null) {
departmentModel = this.getdepartmentModel(personAccount.getDepartmentId());
}
if (departmentModel != null) {
//personBasic.setProjectOrgCode(departmentModel.getOrgCode());
// personBasic.setProjectOrgCode(departmentModel.getOrgCode());
personAccount.setProjectDepartmentName(departmentModel.getDepartmentName());
}
// personBasic.setProjectOrgCode(companyModel.getOrgCode());
......@@ -363,7 +362,7 @@ public class PersonBasicServiceImpl extends BaseService<PersonBasicDto, PersonBa
personAccount.setProjectName(companyModel.getCompanyName());
// this.personBasicMapper.updateById(personBasic);
this.personAccountService.updateById(personAccount);
//----------------------------权限表中新增数据-----------------------------
// ----------------------------权限表中新增数据-----------------------------
StdUserEmpower stdUserEmpower = new StdUserEmpower();
stdUserEmpower.setPermissionType("YTH");
stdUserEmpower.setRecDate(new Date());
......@@ -371,23 +370,23 @@ public class PersonBasicServiceImpl extends BaseService<PersonBasicDto, PersonBa
stdUserEmpower.setAmosUserId(personAccount.getPuserId());
userEmpowerMapper.insert(stdUserEmpower);
//----------------------------户用管理端区域公司---------------------------
// ----------------------------户用管理端区域公司---------------------------
StdUserEmpower stdUserEmpowerhygf = new StdUserEmpower();
List<CompanyModel> co= userEmpowerMapper.getCompanyBoList("region",null,null);
List<String> re= personAccount.getRegionalCompaniesSeq();
List<CompanyModel> co = userEmpowerMapper.getCompanyBoList("region", null, null);
List<String> re = personAccount.getRegionalCompaniesSeq();
String flag=personAccount.getRegionalCompaniesSeqFlag();
if(flag!=null&&!flag.isEmpty()){
if(flag.equals("all")){
List<String> all=new ArrayList<>();
String flag = personAccount.getRegionalCompaniesSeqFlag();
if (flag != null && !flag.isEmpty()) {
if (flag.equals("all")) {
List<String> all = new ArrayList<>();
all.add("all");
stdUserEmpowerhygf.setAmosOrgCode(all);
}else{
} else {
stdUserEmpowerhygf.setAmosOrgCode(re);
}
}
List<String> exre= personAccount.getExternalRegionalCompaniesSeq();
List<String> exre = personAccount.getExternalRegionalCompaniesSeq();
// if(exre!=null&&!exre.isEmpty()){
// List<String> pexre=new ArrayList<>();
// List<CompanyModel> exreco = co.stream().filter(product -> !"area".equals(product.getLevel())).collect(Collectors.toList());
......@@ -404,29 +403,23 @@ public class PersonBasicServiceImpl extends BaseService<PersonBasicDto, PersonBa
stdUserEmpowerhygf.setAmosUserId(personAccount.getPuserId());
userEmpowerMapper.insert(stdUserEmpowerhygf);
//----------------------------权限表中新增数据-----------------------------
// ----------------------------权限表中新增数据-----------------------------
PersonAccountFed personAccountFed = new PersonAccountFed();
BeanUtils.copyProperties(personAccount, personAccountFed);
personAccountFed.setSyncState(0);
personAccountFed.setSyncDate(new Date());
personAccountFedMapper.insert(personAccountFed);
Map<String, Object> data=new HashMap<>();
data.put("SEQUENCE_NBR",agencyUserModel.getSequenceNbr());
ProduceMsg produceMsg= new ProduceMsg(data, INSERT,agencyUserModel.getUserId());
Map<String, Object> data = new HashMap<>();
data.put("SEQUENCE_NBR", agencyUserModel.getSequenceNbr());
ProduceMsg produceMsg = new ProduceMsg(data, INSERT, agencyUserModel.getUserId());
querueProduce.produceMsg(JSON.toJSONString(produceMsg));
}
@Transactional
public PersonDto updatePerson(PersonDto model, HttpServletRequest httpServletRequest, Long sequenceNbr) throws ParseException {
public PersonDto updatePerson(PersonDto model, HttpServletRequest httpServletRequest, Long sequenceNbr)
throws ParseException {
if (!redisUtils.hasKey(Constants.JXIOP_DICT_POST) || !redisUtils.hasKey(Constants.JXIOP_DICT_CERTIFICATES)) {
addRedisPostAndCerInfo();
......@@ -436,35 +429,36 @@ public class PersonBasicServiceImpl extends BaseService<PersonBasicDto, PersonBa
// 证书临期信息
Map<String, String> certificatesMap = (Map<String, String>) redisUtils.get(Constants.JXIOP_DICT_CERTIFICATES);
//获取人员基本信息数据
// 获取人员基本信息数据
PersonUser personUser = model.getPersonUser();
//获取人员账号信息
// 获取人员账号信息
PersonAccount personAccount = model.getPersonAccount();
PersonAccount oldpersonAccount = new PersonAccount();
personUser.setPhone(personAccount.getPhoneNum());
//人员基础信息
// 人员基础信息
PersonBasic personBasic = personBasicMapper.selectById(sequenceNbr);
personBasic.setSequenceNbr(sequenceNbr);
personAccount.setPassword(DesUtil.encode(personAccount.getPassword(), secretKey));
personAccount.setSecondaryPassword(DesUtil.encode(personAccount.getSecondaryPassword(), secretKey));
//人员归属信息
// 人员归属信息
PersonSkillEducation personSkillEducation = new PersonSkillEducation();
BeanUtils.copyProperties(personUser, personSkillEducation);
personSkillEducation.setPersonId(personBasic.getSequenceNbr());
PersonSkillEducation personSkillEducationd = personSkillEducationService.getOne(new QueryWrapper<PersonSkillEducation>().eq("person_id", personBasic.getSequenceNbr()));
PersonSkillEducation personSkillEducationd = personSkillEducationService
.getOne(new QueryWrapper<PersonSkillEducation>().eq("person_id", personBasic.getSequenceNbr()));
personSkillEducation.setSequenceNbr(personSkillEducationd.getSequenceNbr());
personSkillEducationService.updateById(personSkillEducation);
//获取人员资质信息
// 获取人员资质信息
CertificationInfo personCertificate = model.getPersonCertificate();
LambdaUpdateWrapper<PersonCertificate> wrapper = new LambdaUpdateWrapper<>();
wrapper.eq(PersonCertificate::getPersonId, personBasic.getSequenceNbr());
personCertificateService.remove(wrapper);
//默认红码
// 默认红码
// personBasic.setQrcodeColor(QrcodeColorEnum.RED.getCode());
// 该岗位应获得的证书
List<String> list2 = new ArrayList<>();
......@@ -475,7 +469,7 @@ public class PersonBasicServiceImpl extends BaseService<PersonBasicDto, PersonBa
}
}
List<String> list = new ArrayList(list2);
//人员资质信息
// 人员资质信息
Integer isInMonth = 0;
Integer isOver = 0;
// 过期的证书
......@@ -485,16 +479,17 @@ public class PersonBasicServiceImpl extends BaseService<PersonBasicDto, PersonBa
if (CollectionUtils.isNotEmpty(personCertificate.getCertificationInfo())) {
for (PersonCertificate item : personCertificate.getCertificationInfo()) {
if (StringUtils.isNotEmpty(item.getValidPeriod()) && !Objects.isNull(item.getCertificateTime())) {
int validPeriod = StringUtils.isEmpty(item.getValidPeriod()) ? 3 : Integer.parseInt(item.getValidPeriod());
int validPeriod = StringUtils.isEmpty(item.getValidPeriod()) ? 3
: Integer.parseInt(item.getValidPeriod());
Date date = DateUtils.dateAddYears(item.getCertificateTime(), validPeriod);
if (list.contains(item.getCertificateName()) &&
DateUtils.dateCompare(date, new Date()) == -1) {
if (list.contains(item.getCertificateName()) && DateUtils.dateCompare(date, new Date()) == -1) {
isOver = 1;
overCertificateList.add(item.getCertificateName());
}
if (list.contains(item.getCertificateName()) &&
DateUtils.dateBetweenIncludeToday(new Date(), date) < Integer.valueOf(certificatesMap.get(item.getCertificateName())) &&
DateUtils.dateCompare(date, new Date()) == 1) {
if (list.contains(item.getCertificateName())
&& DateUtils.dateBetweenIncludeToday(new Date(), date) < Integer
.valueOf(certificatesMap.get(item.getCertificateName()))
&& DateUtils.dateCompare(date, new Date()) == 1) {
isInMonth = 1;
inMonthCertificateList.add(item.getCertificateName());
}
......@@ -509,13 +504,13 @@ public class PersonBasicServiceImpl extends BaseService<PersonBasicDto, PersonBa
List<String> strings = new ArrayList<>();
if (CollectionUtils.isNotEmpty(overCertificateList)) {
strings.add("过期证书:" + String.join("," , overCertificateList));
strings.add("过期证书:" + String.join(",", overCertificateList));
}
if (CollectionUtils.isNotEmpty(inMonthCertificateList)) {
strings.add("临期证书:" + String.join("," , inMonthCertificateList));
strings.add("临期证书:" + String.join(",", inMonthCertificateList));
}
if (CollectionUtils.isNotEmpty(noCertificateList)) {
strings.add("缺少证书:" + String.join("," , noCertificateList));
strings.add("缺少证书:" + String.join(",", noCertificateList));
}
String join = "";
......@@ -525,7 +520,7 @@ public class PersonBasicServiceImpl extends BaseService<PersonBasicDto, PersonBa
String missingCertificateOld = personBasic.getMissingCertificate();
personBasic.setMissingCertificate(join);
CompanyModel companyModel = new CompanyModel();
//单位
// 单位
companyModel = this.getCompanyModel(personAccount.getProjectId());
String qrcodeColorOld = personBasic.getQrcodeColor();
......@@ -545,23 +540,21 @@ public class PersonBasicServiceImpl extends BaseService<PersonBasicDto, PersonBa
personBasic.setProjectOrgCode(companyModel.getOrgCode());
personAccount.setProjectName(companyModel.getCompanyName());
//人员账号信息
// 人员账号信息
personAccount.setPersonId(sequenceNbr);
oldpersonAccount=personAccountService.getById(personAccount.getSequenceNbr());
oldpersonAccount = personAccountService.getById(personAccount.getSequenceNbr());
personAccountService.updateById(personAccount);
//新增平台账号
//组装数据
// 新增平台账号
// 组装数据
AgencyUserModel usd = new AgencyUserModel();
//应用
// 应用
usd.setAppCodes(personAccount.getApplication());
//手机号
// 手机号
usd.setMobile(personUser.getPhone());
//角色
// 角色
Map<Long, List<Long>> map = new HashMap<>();
List<Long> cdids = personAccount.getRoles().stream().map(s -> Long.parseLong(s.trim())).collect(Collectors.toList()); //测点数组
List<Long> cdids = personAccount.getRoles().stream().map(s -> Long.parseLong(s.trim()))
.collect(Collectors.toList()); // 测点数组
if (personAccount.getDepartmentId() != null) {
map.put(personAccount.getDepartmentId(), cdids);
......@@ -569,34 +562,34 @@ public class PersonBasicServiceImpl extends BaseService<PersonBasicDto, PersonBa
map.put(personAccount.getProjectId(), cdids);
}
usd.setOrgRoleSeqs(map);
//密码
// 密码
usd.setPassword(personAccount.getPassword());
//二次密码
// 二次密码
usd.setRePassword(personAccount.getSecondaryPassword());
//用户名
// 用户名
usd.setRealName(personAccount.getName());
//账号
// 账号
usd.setUserName(personAccount.getAccountName());
usd.setLockStatus("UNLOCK");
usd.setUserId(personAccount.getPuserId());
usd.setOriginalPassword(oldpersonAccount.getPassword());
//新增平台用户
// 新增平台用户
AgencyUserModel agencyUserModel = this.updateuser(personAccount.getPuserId(), usd);
log.info("更新平台账户信息::"+ JSONObject.toJSONString(usd));
//设置userID
log.info("更新平台账户信息::" + JSONObject.toJSONString(usd));
// 设置userID
usd.setUserId(agencyUserModel.getUserId());
//设置工号
// 设置工号
usd.setUserName(personAccount.getJobNumber());
//创建支持工号登录
this.updateLoginInfo(oldpersonAccount.getJobNumber(),usd);
//查询部门
// 创建支持工号登录
this.updateLoginInfo(oldpersonAccount.getJobNumber(), usd);
// 查询部门
DepartmentModel departmentModel = null;
if (personAccount.getDepartmentId() != null) {
departmentModel = this.getdepartmentModel(personAccount.getDepartmentId());
}
BeanUtils.copyProperties(personUser, personBasic, "qrcodeDesc", "qrcodeColor");
if (departmentModel != null) {
//personBasic.setProjectOrgCode(departmentModel.getOrgCode());
// personBasic.setProjectOrgCode(departmentModel.getOrgCode());
personAccount.setProjectDepartmentName(departmentModel.getDepartmentName());
}
personBasic.setProjectOrgCode(companyModel.getOrgCode());
......@@ -609,8 +602,7 @@ public class PersonBasicServiceImpl extends BaseService<PersonBasicDto, PersonBa
if (!join.equals(missingCertificateOld)) {
personBasic.setRecDate(new Date());
this.personBasicMapper.updateById(personBasic);
if (("证书不全".equals(personBasic.getQrcodeDesc()) ||
"证书临期".equals(personBasic.getQrcodeDesc()))) {
if (("证书不全".equals(personBasic.getQrcodeDesc()) || "证书临期".equals(personBasic.getQrcodeDesc()))) {
BizMessage bizMessage = new BizMessage();
bizMessage.setIndexKey("RYFM");
bizMessage.setIndexValue(personBasic.getPostName() + personBasic.getQrcodeDesc());
......@@ -618,7 +610,8 @@ public class PersonBasicServiceImpl extends BaseService<PersonBasicDto, PersonBa
bizMessage.setBizInfo(riskBizInfoVo);
bizMessage.setDataSource("人员赋码");
try {
emqKeeper.getMqttClient().publish(PersonBasicServiceImpl.RYFM_DATA_MQTT_TOPIC, JSON.toJSONString(bizMessage).getBytes(StandardCharsets.UTF_8), 2, false);
emqKeeper.getMqttClient().publish(PersonBasicServiceImpl.RYFM_DATA_MQTT_TOPIC,
JSON.toJSONString(bizMessage).getBytes(StandardCharsets.UTF_8), 2, false);
} catch (MqttException e) {
e.printStackTrace();
}
......@@ -631,7 +624,8 @@ public class PersonBasicServiceImpl extends BaseService<PersonBasicDto, PersonBa
personMap.put("sourceAttributionDesc", personAccount.getProjectName());
personMap.put("warningSourceType", "人员赋码");
try {
emqKeeper.getMqttClient().publish(PersonBasicServiceImpl.RYFM_GREEN, JSON.toJSONString(personMap).getBytes(StandardCharsets.UTF_8), 2, false);
emqKeeper.getMqttClient().publish(PersonBasicServiceImpl.RYFM_GREEN,
JSON.toJSONString(personMap).getBytes(StandardCharsets.UTF_8), 2, false);
} catch (MqttException e) {
e.printStackTrace();
}
......@@ -639,37 +633,39 @@ public class PersonBasicServiceImpl extends BaseService<PersonBasicDto, PersonBa
} else {
this.personBasicMapper.updateById(personBasic);
}
StdUserEmpower stdUserEmpower = userEmpowerMapper.selectOne(new QueryWrapper<StdUserEmpower>().eq("amos_user_id", personAccount.getPuserId()).eq("permission_type", "YTH"));
if(ObjectUtils.isEmpty(stdUserEmpower)){
StdUserEmpower stdUserEmpower = userEmpowerMapper.selectOne(new QueryWrapper<StdUserEmpower>()
.eq("amos_user_id", personAccount.getPuserId()).eq("permission_type", "YTH"));
if (ObjectUtils.isEmpty(stdUserEmpower)) {
stdUserEmpower = new StdUserEmpower();
stdUserEmpower.setRecDate(new Date());
stdUserEmpower.setPermissionType("YTH");
stdUserEmpower.setAmosOrgCode(Arrays.asList(personAccount.getYthPermission()));
stdUserEmpower.setAmosUserId(personAccount.getPuserId());
userEmpowerMapper.insert(stdUserEmpower);
}else {
} else {
stdUserEmpower.setAmosOrgCode(Arrays.asList(personAccount.getYthPermission()));
stdUserEmpower.setRecDate(new Date());
userEmpowerMapper.updateById(stdUserEmpower);
}
//户用角色权限
StdUserEmpower stdUserEmpowerhygf = userEmpowerMapper.selectOne(new QueryWrapper<StdUserEmpower>().eq("amos_user_id", personAccount.getPuserId()).eq("permission_type", "HYGF"));
if(ObjectUtils.isEmpty(stdUserEmpowerhygf)){
stdUserEmpowerhygf=new StdUserEmpower();
List<CompanyModel> co= userEmpowerMapper.getCompanyBoList("region",null,null);
List<String> re= personAccount.getRegionalCompaniesSeq();
String flag=personAccount.getRegionalCompaniesSeqFlag();
if(flag!=null&&!flag.isEmpty()){
if(flag.equals("all")){
List<String> all=new ArrayList<>();
// 户用角色权限
StdUserEmpower stdUserEmpowerhygf = userEmpowerMapper.selectOne(new QueryWrapper<StdUserEmpower>()
.eq("amos_user_id", personAccount.getPuserId()).eq("permission_type", "HYGF"));
if (ObjectUtils.isEmpty(stdUserEmpowerhygf)) {
stdUserEmpowerhygf = new StdUserEmpower();
List<CompanyModel> co = userEmpowerMapper.getCompanyBoList("region", null, null);
List<String> re = personAccount.getRegionalCompaniesSeq();
String flag = personAccount.getRegionalCompaniesSeqFlag();
if (flag != null && !flag.isEmpty()) {
if (flag.equals("all")) {
List<String> all = new ArrayList<>();
all.add("all");
stdUserEmpowerhygf.setAmosOrgCode(all);
}else{
} else {
stdUserEmpowerhygf.setAmosOrgCode(re);
}
}
List<String> exre= personAccount.getExternalRegionalCompaniesSeq();
List<String> exre = personAccount.getExternalRegionalCompaniesSeq();
// if(exre!=null&&!exre.isEmpty()){
// List<String> pexre=new ArrayList<>();
// List<CompanyModel> exreco = co.stream().filter(product -> !"area".equals(product.getLevel())).collect(Collectors.toList());
......@@ -685,22 +681,22 @@ public class PersonBasicServiceImpl extends BaseService<PersonBasicDto, PersonBa
stdUserEmpowerhygf.setRecDate(new Date());
stdUserEmpowerhygf.setAmosUserId(personAccount.getPuserId());
userEmpowerMapper.insert(stdUserEmpowerhygf);
}else {
} else {
// List<CompanyModel> co= userEmpowerMapper.getCompanyBoList("region",null,null);
List<String> re= personAccount.getRegionalCompaniesSeq();
String flag=personAccount.getRegionalCompaniesSeqFlag();
if(flag!=null&&!flag.isEmpty()){
if(flag.equals("all")){
List<String> all=new ArrayList<>();
List<String> re = personAccount.getRegionalCompaniesSeq();
String flag = personAccount.getRegionalCompaniesSeqFlag();
if (flag != null && !flag.isEmpty()) {
if (flag.equals("all")) {
List<String> all = new ArrayList<>();
all.add("all");
stdUserEmpowerhygf.setAmosOrgCode(all);
}else{
} else {
stdUserEmpowerhygf.setAmosOrgCode(re);
}
}else{
} else {
stdUserEmpowerhygf.setAmosOrgCode(re);
}
List<String> exre= personAccount.getExternalRegionalCompaniesSeq();
List<String> exre = personAccount.getExternalRegionalCompaniesSeq();
// if(exre!=null&&!exre.isEmpty()){
// List<String> pexre=new ArrayList<>();
// List<CompanyModel> exreco = co.stream().filter(product -> !"area".equals(product.getLevel())).collect(Collectors.toList());
......@@ -725,31 +721,31 @@ public class PersonBasicServiceImpl extends BaseService<PersonBasicDto, PersonBa
personAccountFed.setSyncDate(new Date());
personAccountFedMapper.updateById(personAccountFed);
Map<String, Object> data=new HashMap<>();
data.put("SEQUENCE_NBR",agencyUserModel.getSequenceNbr());
ProduceMsg produceMsg= new ProduceMsg(data, UPDATE,agencyUserModel.getUserId());
Map<String, Object> data = new HashMap<>();
data.put("SEQUENCE_NBR", agencyUserModel.getSequenceNbr());
ProduceMsg produceMsg = new ProduceMsg(data, UPDATE, agencyUserModel.getUserId());
querueProduce.produceMsg(JSON.toJSONString(produceMsg));
return model;
}
@Transactional
public PersonDto getPerson(Long sequenceNbr,String type) {
public PersonDto getPerson(Long sequenceNbr, String type) {
PersonDto personDto = new PersonDto();
PersonUser personUser = new PersonUser();
QueryWrapper<PersonBasic> wrapper1 = new QueryWrapper();
wrapper1.eq("sequence_nbr", sequenceNbr);
wrapper1.eq("is_delete", 0);
//人员基础信息
// 人员基础信息
PersonBasic personBasic = this.getOne(wrapper1);
BeanUtils.copyProperties(personBasic, personUser);
//人员技能学历信息
// 人员技能学历信息
QueryWrapper<PersonSkillEducation> wrapper2 = new QueryWrapper();
wrapper2.eq("person_id", sequenceNbr);
PersonSkillEducation personSkillEducation = personSkillEducationService.getOne(wrapper2);
BeanUtils.copyProperties(personSkillEducation, personUser);
//人员资质信息
// 人员资质信息
LambdaQueryWrapper<PersonCertificate> personCertificateLambdaQueryWrapper = new LambdaQueryWrapper<>();
personCertificateLambdaQueryWrapper.eq(PersonCertificate::getPersonId, sequenceNbr);
List<PersonCertificate> list = personCertificateService.list(personCertificateLambdaQueryWrapper);
......@@ -758,40 +754,41 @@ public class PersonBasicServiceImpl extends BaseService<PersonBasicDto, PersonBa
QueryWrapper<PersonAccount> wrapper4 = new QueryWrapper();
wrapper4.eq("person_id", sequenceNbr);
//人员账号信息
// 人员账号信息
PersonAccount personAccount = personAccountService.getOne(wrapper4);
personAccount.setPhoneNum(personBasic.getPhone());
//对于密码进行解密
if("look".equals(type)){
personAccount.setIdNumber(DesensitizedUtil.idCardNum(personAccount.getIdNumber(),0,4));
}else{
// 对于密码进行解密
if ("look".equals(type)) {
personAccount.setIdNumber(DesensitizedUtil.idCardNum(personAccount.getIdNumber(), 0, 4));
} else {
personAccount.setPassword(DesUtil.decode(personAccount.getPassword(), secretKey));
personAccount.setSecondaryPassword(DesUtil.decode(personAccount.getSecondaryPassword(), secretKey));
}
if (personBasic.getNativePlace() != null) {
personUser.setNativePlace(JSON.parseArray(personBasic.getNativePlace(), Integer.class));
}
StdUserEmpower stdUserEmpower = userEmpowerMapper.selectOne(new QueryWrapper<StdUserEmpower>().eq("amos_user_id", personAccount.getPuserId()).eq("permission_type", "YTH"));
if(!ObjectUtils.isEmpty(stdUserEmpower)){
StdUserEmpower stdUserEmpower = userEmpowerMapper.selectOne(new QueryWrapper<StdUserEmpower>()
.eq("amos_user_id", personAccount.getPuserId()).eq("permission_type", "YTH"));
if (!ObjectUtils.isEmpty(stdUserEmpower)) {
personAccount.setYthPermission(stdUserEmpower.getAmosOrgCode().get(0));
}
StdUserEmpower stdUserEmpowerhygf = userEmpowerMapper.selectOne(new QueryWrapper<StdUserEmpower>()
.eq("amos_user_id", personAccount.getPuserId()).eq("permission_type", "HYGF"));
if (!ObjectUtils.isEmpty(stdUserEmpowerhygf)) {
StdUserEmpower stdUserEmpowerhygf = userEmpowerMapper.selectOne(new QueryWrapper<StdUserEmpower>().eq("amos_user_id", personAccount.getPuserId()).eq("permission_type", "HYGF"));
if(!ObjectUtils.isEmpty(stdUserEmpowerhygf)){
if(stdUserEmpowerhygf.getAmosOrgCode()==null||stdUserEmpowerhygf.getAmosOrgCode().size()==0){
if (stdUserEmpowerhygf.getAmosOrgCode() == null || stdUserEmpowerhygf.getAmosOrgCode().size() == 0) {
// List<String> list2 = new ArrayList<>();
// list2.add("all");
personAccount.setRegionalCompaniesSeq(null);
}else if(stdUserEmpowerhygf.getAmosOrgCode().size()==1&&stdUserEmpowerhygf.getAmosOrgCode().get(0).equals("all")){
} else if (stdUserEmpowerhygf.getAmosOrgCode().size() == 1
&& stdUserEmpowerhygf.getAmosOrgCode().get(0).equals("all")) {
personAccount.setRegionalCompaniesSeqFlag("all");
personAccount.setRegionalCompaniesSeq(null);
}else{
} else {
personAccount.setRegionalCompaniesSeqFlag("no");
personAccount.setRegionalCompaniesSeq(stdUserEmpowerhygf.getAmosOrgCode());
......@@ -809,7 +806,7 @@ public class PersonBasicServiceImpl extends BaseService<PersonBasicDto, PersonBa
@Transactional
public int deletePerson(String[] ids) {
//查询所有平台用户
// 查询所有平台用户
QueryWrapper<PersonAccount> wrapper = new QueryWrapper();
wrapper.in("person_id", ids);
List<PersonAccount> list = personAccountService.list(wrapper);
......@@ -819,19 +816,20 @@ public class PersonBasicServiceImpl extends BaseService<PersonBasicDto, PersonBa
userid.add(personAccount.getPuserId());
// loginId.add(personAccount.getJobNumber());
}
//删除平台
// 删除平台
int deleteResult = personBasicMapper.deleteList(ids);
this.deleuser(String.join(",", userid));
//// this.deleteLoginInfo(String.join(",", loginId));
// this.deleteLoginInfo(loginId.get(1));
QueryWrapper<PersonAccountFed> wrapper1 = new QueryWrapper();
wrapper1.in("person_id",ids);
wrapper1.in("person_id", ids);
personAccountFedMapper.delete(wrapper1);
userEmpowerMapper.delete(new QueryWrapper<StdUserEmpower>().in("amos_user_id", userid).eq("permission_type", "YTH"));
userEmpowerMapper
.delete(new QueryWrapper<StdUserEmpower>().in("amos_user_id", userid).eq("permission_type", "YTH"));
return deleteResult;
}
//新增平台用户
// 新增平台用户
private AgencyUserModel setcreateUser(AgencyUserModel userDto) {
FeignClientResult<AgencyUserModel> amosUser = Privilege.agencyUserClient.create(userDto);
......@@ -846,21 +844,30 @@ public class PersonBasicServiceImpl extends BaseService<PersonBasicDto, PersonBa
}
return user;
}
private LoginInfoModel createLoginInfo(AgencyUserModel userDto) {
FeignClientResult<LoginInfoModel> amosLoginfo = null;
try {
amosLoginfo = Privilege.agencyUserClient.createLoginInfo(userDto);
} catch (Exception e) {
FeignClientResult<List<String>> cResult = Privilege.agencyUserClient.multDeleteUser(userDto.getUserId(),true);
FeignClientResult<List<String>> cResult = Privilege.agencyUserClient.multDeleteUser(userDto.getUserId(),
true);
throw new RuntimeException(e);
}
return amosLoginfo.getResult();
}
//修改平台用户
// 修改平台用户
private AgencyUserModel updateuser(String userId, AgencyUserModel userDto) {
FeignClientResult<AgencyUserModel> amosUser = Privilege.agencyUserClient.update(userDto, userId);
FeignClientResult<AgencyUserModel> amosUser1 = Privilege.agencyUserClient.modifyPassword(userId,userDto);
FeignClientResult<AgencyUserModel> amosUser = null;
FeignClientResult<AgencyUserModel> amosUser1 = null;
try {
amosUser = Privilege.agencyUserClient.update(userDto, userId);
amosUser1 = Privilege.agencyUserClient.modifyPassword(userId, userDto);
} catch (Exception e) {
e.printStackTrace();
}
AgencyUserModel user = new AgencyUserModel();
if (!ObjectUtils.isEmpty(amosUser)) {
if (amosUser.getStatus() == 200) {
......@@ -878,16 +885,18 @@ public class PersonBasicServiceImpl extends BaseService<PersonBasicDto, PersonBa
}
return user;
}
private LoginInfoModel updateLoginInfo(String loginId,AgencyUserModel userDto) {
private LoginInfoModel updateLoginInfo(String loginId, AgencyUserModel userDto) {
FeignClientResult<LoginInfoModel> amosLoginfo = null;
try {
amosLoginfo = Privilege.agencyUserClient.updateLoginInfo(userDto,loginId);
//amosLoginfo = agencyuserFeign.updateLoginInfo(userDto, loginId);
amosLoginfo = Privilege.agencyUserClient.updateLoginInfo(userDto, loginId);
// amosLoginfo = agencyuserFeign.updateLoginInfo(userDto, loginId);
} catch (Exception e) {
throw new RuntimeException(e);
}
return amosLoginfo.getResult();
}
private DepartmentModel getdepartmentModel(Long departmentId) {
FeignClientResult<DepartmentModel> de = Privilege.departmentClient.seleteOne(departmentId);
......@@ -918,7 +927,7 @@ public class PersonBasicServiceImpl extends BaseService<PersonBasicDto, PersonBa
}
private void deleuser(String userid) {
FeignClientResult<List<String>> cResult = Privilege.agencyUserClient.multDeleteUser(userid,true);
FeignClientResult<List<String>> cResult = Privilege.agencyUserClient.multDeleteUser(userid, true);
if (!ObjectUtils.isEmpty(cResult)) {
if (cResult.getStatus() != 200) {
......@@ -936,20 +945,16 @@ public class PersonBasicServiceImpl extends BaseService<PersonBasicDto, PersonBa
}
return amosLoginfo.getResult();
}
/**
* 分页查询
*/
public Page<UserMapperDto> queryPage(Page<UserMapperDto> page,
String name,
String accountName,
String projectName, String orgCode) {
List<UserMapperDto> list = personBasicMapper.queryPage((page.getCurrent() - 1) * page.getSize(), page.getSize(), name,
accountName,
projectName, orgCode);
List<UserMapperDto> listcount = personBasicMapper.queryPagecount(name,
accountName,
projectName, orgCode);
public Page<UserMapperDto> queryPage(Page<UserMapperDto> page, String name, String accountName, String projectName,
String orgCode) {
List<UserMapperDto> list = personBasicMapper.queryPage((page.getCurrent() - 1) * page.getSize(), page.getSize(),
name, accountName, projectName, orgCode);
List<UserMapperDto> listcount = personBasicMapper.queryPagecount(name, accountName, projectName, orgCode);
page.setTotal(listcount.size());
page.setRecords(list);
return page;
......@@ -965,7 +970,6 @@ public class PersonBasicServiceImpl extends BaseService<PersonBasicDto, PersonBa
return resultList;
}
// public Page<Map<String, Object>> getPersonYardByPage(String parentCode,
// Integer current,
// Integer size,
......
......@@ -15,22 +15,6 @@
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
</properties>
<dependencies>
<!--
<dependency>
<groupId>com.amosframework.boot</groupId>
<artifactId>amos-boot-module-ugp-api</artifactId>
<version>${amos-biz-boot.version}</version>
</dependency>-->
<!-- <dependency>-->
<!-- <groupId>com.amosframework.boot</groupId>-->
<!-- <artifactId>amos-boot-module-common-biz</artifactId>-->
<!-- <version>${amos-biz-boot.version}</version>-->
<!-- </dependency>-->
<!-- <dependency>-->
<!-- <groupId>com.amosframework.boot</groupId>-->
<!-- <artifactId>amos-boot-biz-common</artifactId>-->
<!-- <version>1.0.0</version>-->
<!-- </dependency>-->
<dependency>
<groupId>com.alibaba</groupId>
<artifactId>druid-spring-boot-starter</artifactId>
......@@ -39,6 +23,7 @@
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-jdbc</artifactId>
<version>2.1.6.RELEASE</version>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
......@@ -66,6 +51,11 @@
<artifactId>fastjson</artifactId>
<version>1.2.47</version>
</dependency>
<dependency>
<groupId>cn.hutool</groupId>
<artifactId>hutool-all</artifactId>
<version>5.8.16</version>
</dependency>
</dependencies>
<build>
......
......@@ -31,7 +31,7 @@ import java.net.InetAddress;
@EnableAsync
@EnableScheduling
@SpringBootApplication(exclude = {DataSourceAutoConfiguration.class, DruidDataSourceAutoConfigure.class})
@MapperScan({"com.yeejoin.amos.boot.module.das.mapper.msyql","com.yeejoin.amos.boot.module.das.mapper.tdengineanalysis","com.yeejoin.amos.boot.module.das.mapper.tdengineiot"})
@MapperScan({"com.yeejoin.amos.boot.module.das.mapper.msyql","com.yeejoin.amos.boot.module.das.mapper.analysis","com.yeejoin.amos.boot.module.das.mapper.iot"})
@ComponentScan({"springfox.documentation.schema", "com.yeejoin.amos.boot.module.das","com.yeejoin.amos.boot.module.das.service.impl","org.typroject.tyboot.component"})
public class AmosJxiopDasApplication {
......
......@@ -16,9 +16,11 @@ public class IndicatorData {
private Date createdTime;
private String gatewayId;
private String dataType;
private String dasTime;
private String pointSeq;
private String pointAddress;
private String pointLocation;
private String pointType;
private String pointName;
private String value="0";
private Double valueF;
......
......@@ -8,6 +8,6 @@ import java.util.List;
@Component
public interface IndicatorDataMapper {
int insertBatch(@Param("list") List<IndicatorData> list, @Param("gatewayId")String gatewayId);
int insertBatch(@Param("list") List<IndicatorData> list, @Param("gatewayId")String gatewayId,@Param("dasTime")String dasTime);
void createTable();
}
......@@ -8,6 +8,6 @@ import java.util.List;
public interface FrontGatewayDevicePointsMapper extends BaseMapper<FrontGatewayDevicePoints> {
@Select("select distinct gateway_id from iot_front_gateway_device_points")
List<String> getGatewayIds();
@Select("select SEQUENCE_NBR,POINT_NAME,POINT_DATA_TYPE,POINT_ADDRESS,POINT_LOCATION from iot_front_gateway_device_points where gateway_id = #{gatewayId}")
@Select("select SEQUENCE_NBR,POINT_NAME,POINT_DATA_TYPE,POINT_ADDRESS,POINT_LOCATION,POINT_TYPE,DATA_TYPE from iot_front_gateway_device_points where gateway_id = #{gatewayId}")
List<FrontGatewayDevicePoints> getFrontGatewayDevicePointsByGatewayId(String gatewayId);
}
package com.yeejoin.amos.boot.module.das.service.impl;
import cn.hutool.core.date.DateUtil;
import com.alibaba.fastjson.JSON;
import com.google.common.collect.Lists;
......@@ -18,6 +19,7 @@ import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Service;
import org.springframework.util.ObjectUtils;
import org.typroject.tyboot.component.emq.EmqKeeper;
import java.util.*;
import java.util.stream.Collectors;
......@@ -41,7 +43,8 @@ public class DasServiceImpl implements DasService {
* 完成后,会记录此次操作所花费的时间。
*/
public void dataSolidification() {
String dasTime = DateUtil.format(new Date(), "yyyy-MM-dd HH:mm:00");
log.info("数据采集开始执行-采集时间::" + dasTime );
// 记录操作开始时间
Long startTime = System.currentTimeMillis();
// 创建新表
......@@ -50,7 +53,7 @@ public class DasServiceImpl implements DasService {
List<String> gateWayIds = frontGatewayDevicePointsMapper.getGatewayIds();
// 并行处理每个网关ID的数据凝固
gateWayIds.parallelStream().forEach(gatewayId -> {
dataSolidificationByGatewayId(gatewayId);
dataSolidificationByGatewayId(gatewayId,dasTime);
});
// 记录操作结束时间
Long endTime = System.currentTimeMillis();
......@@ -66,7 +69,7 @@ public class DasServiceImpl implements DasService {
* @param gatewayId 网关的唯一标识符,用于查询相关设备点信息和数据点值。
*/
@Async("jxiopAsyncExecutor")
public void dataSolidificationByGatewayId(String gatewayId) {
public void dataSolidificationByGatewayId(String gatewayId,String dasTime) {
// 根据网关ID查询设备点信息
List<FrontGatewayDevicePoints> tempPoints = frontGatewayDevicePointsMapper.getFrontGatewayDevicePointsByGatewayId(gatewayId);
if (!ObjectUtils.isEmpty(tempPoints)) {
......@@ -90,6 +93,7 @@ public class DasServiceImpl implements DasService {
indicatorData.setPointAddress(point.getPointAddress());
indicatorData.setPointLocation(point.getPointLocation());
indicatorData.setPointName(point.getPointName());
indicatorData.setPointType(point.getPointType());
// set the point value, converting boolean values where needed
indicatorData.setValue(stbMap.get(point.getSequenceNbr().toString()));
if (!ObjectUtils.isEmpty(indicatorData.getValue()) && !booleans.contains(indicatorData.getValue())) {
......@@ -104,10 +108,9 @@ public class DasServiceImpl implements DasService {
// batch-insert the assembled rows into the database
Lists.partition(listAll, 1000).stream().forEach(
list -> {
indicatorDataMapper.insertBatch(list, gatewayId);
indicatorDataMapper.insertBatch(list, gatewayId,dasTime);
}
);
// publish a message to EMQX to signal that the data sync succeeded
try {
HashMap<String, String> syncFlag = new HashMap<>();
......
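On the per-gateway side, the assembled IndicatorData rows are split into chunks of 1000, and each chunk is handed to insertBatch together with the gatewayId (used as the TDengine child-table tag in the mapper XML below) and the shared dasTime. A rough sketch of that chunked insert path, assuming Guava's Lists.partition is on the classpath and using a print stub in place of the MyBatis mapper:

import com.google.common.collect.Lists;

import java.util.ArrayList;
import java.util.List;

public class GatewayInsertSketch {

    // stand-in for IndicatorDataMapper.insertBatch(list, gatewayId, dasTime)
    static int insertBatch(List<String> rows, String gatewayId, String dasTime) {
        System.out.printf("insert %d rows into indicator_data_%s at %s%n",
                rows.size(), gatewayId, dasTime);
        return rows.size();
    }

    public static void main(String[] args) {
        // pretend these are the assembled IndicatorData rows for one gateway
        List<String> listAll = new ArrayList<>();
        for (int i = 0; i < 2500; i++) {
            listAll.add("row-" + i);
        }
        // same chunk size as the service: 1000 rows per batch INSERT,
        // every chunk carrying the gateway id and the shared dasTime
        Lists.partition(listAll, 1000)
             .forEach(chunk -> insertBatch(chunk, "gw-001", "2024-01-01 10:00:00"));
    }
}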
......@@ -31,6 +31,8 @@ spring.redis.password=yeejoin@2020
emqx.clean-session=true
emqx.client-id=${spring.application.name}-${random.int[1024,65536]}
emqx.broker=tcp://10.20.1.210:2883
emqx.client-user-name=admin
emqx.client-password=public
emqx.user-name=admin
emqx.password=public
mqtt.scene.host=mqtt://10.20.1.210:8083/mqtt
......
......@@ -8,11 +8,13 @@
<update id="createTable">
create STABLE if not exists indicator_data
(created_time timestamp,
das_time VARCHAR(100),
data_type NCHAR(12),
point_seq VARCHAR(100) ,
point_address VARCHAR(100) ,
point_location VARCHAR(500) ,
point_name VARCHAR(200),
point_type VARCHAR(50),
`value` VARCHAR(50),
`value_f` float)
TAGS (gateway_id binary(64));
......@@ -24,11 +26,13 @@
indicator_data_#{gatewayId,jdbcType=VARCHAR} USING indicator_data
TAGS (#{gatewayId,jdbcType=VARCHAR})
VALUES (NOW + #{index}a,
#{dasTime,jdbcType=VARCHAR},
#{item.dataType,jdbcType=VARCHAR},
#{item.pointSeq,jdbcType=VARCHAR},
#{item.pointAddress,jdbcType=VARCHAR},
#{item.pointLocation,jdbcType=VARCHAR},
#{item.pointName,jdbcType=VARCHAR},
#{item.pointType,jdbcType=VARCHAR},
#{item.value,jdbcType=VARCHAR},
#{item.valueF,jdbcType=FLOAT})
</foreach>
......
......@@ -21,7 +21,7 @@ public class StationCacheInfoDto implements Serializable {
// area the station belongs to
private String belongArea;
// installed capacity
private String installedCapacity="0.0";
private String installedCapacity;
// number of devices
private String equipmentNumbers;
// wind turbine gateway
......
......@@ -14,7 +14,7 @@ import java.util.stream.Collectors;
public interface CoreCommonService {
/**
* @deprecated remote call to the core service, querying by plant-manager name
* @deprecated remote call to the core service, querying by station name
* @param stationNames station names to query, comma-separated, e.g. 夏造风机,石灰山风机
* @param pointsNames point names to query, comma-separated, e.g. 日发电量,月发电量,年发电
* @return CoreValuesDto
......
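A hedged usage sketch of the interface above. It assumes an injected CoreCommonService, a String-keyed dataMap, and a getDataMap() getter matching the setDataMap(...) call in the implementation further down; none of these is shown verbatim here, so treat it as illustration only:

// usage sketch, not the project's code
public static void printDailyGeneration(CoreCommonService coreCommonService) {
    // both arguments are comma-separated name lists, as the javadoc above describes
    List<CoreValuesDto> values = coreCommonService
            .getValuesByStationNamesAndPointsNames("夏造风机,石灰山风机", "日发电量,月发电量");
    for (CoreValuesDto dto : values) {
        // dataMap is keyed by point name after setDataMap(...) in the implementation
        System.out.println(dto.getName() + " 日发电量 = " + dto.getDataMap().get("日发电量"));
    }
}

MonitorServiceImpl calls this method with (null, null), which presumably returns values for every station and point the core service exposes.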
......@@ -17,7 +17,6 @@ import com.yeejoin.amos.boot.module.jxiop.biz.dto.StationCacheInfoDto;
import com.yeejoin.amos.boot.module.jxiop.biz.entity.SjglZsjZsbtz;
import com.yeejoin.amos.boot.module.jxiop.biz.mapper2.SjglZsjZsbtzMapper;
import com.yeejoin.amos.boot.module.jxiop.biz.repository.ESEquipmentsRepository;
//import com.yeejoin.amos.component.influxdb.InfluxdbUtil;
import lombok.extern.slf4j.Slf4j;
import org.elasticsearch.index.query.*;
import org.elasticsearch.script.Script;
......@@ -49,8 +48,6 @@ import java.util.stream.Collectors;
@Slf4j
public class CommonServiceImpl {
// @Autowired
// InfluxdbUtil influxdbUtil;
@Autowired
SjglZsjZsbtzMapper sjglZsjZsbtzMapper;
@Autowired
......
......@@ -28,14 +28,19 @@ public class CoreCommonServiceImpl implements CoreCommonService {
@Override
public List<CoreValuesDto> getValuesByStationNamesAndPointsNames(String stationNames, String pointsNames) {
List<CoreValuesDto> result = new ArrayList<>();
try {
FeignClientResult<List<Object>> feignClientResult = coreFeignClient.getValues(stationNames, pointsNames);
List<Object> list = feignClientResult.getResult();
List<CoreValuesDto> result = new ArrayList<>();
list.forEach(o -> {
CoreValuesDto coreValuesDto = JSONObject.parseObject(JSONObject.toJSONString(o), CoreValuesDto.class);
coreValuesDto.setDataMap(coreValuesDto.getData().stream().flatMap(m -> m.entrySet().stream()).collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue, (a, b) -> b)));
result.add(coreValuesDto);
});
}catch (Exception e){
e.printStackTrace();
return new ArrayList<>();
}
return result;
}
......
......@@ -254,7 +254,6 @@ public class MonitorServiceImpl implements MonitorService {
List<StationCoordinate> stationCoordinateList = stationCoordinateMapper.selectList(new QueryWrapper<StationCoordinate>().isNotNull("station_id").eq("is_main_coordinate", 0));
for (int i = 0; i < list.size(); i++) {
HashMap<String, Double> hashMap = new HashMap<>();
StationCacheInfoDto stationCacheInfoDto = list.get(i);
RegionNationWideDto regionNationWideDto = new RegionNationWideDto();
regionNationWideDto.setStaitionId(String.valueOf(stationCacheInfoDto.getStationId()));
......@@ -279,7 +278,13 @@ public class MonitorServiceImpl implements MonitorService {
speendOrirradiate =String.format(CommonConstans.Twodecimalplaces,buDunStationDetailInfo.getWind_as_irradiance());
}
}
BuDunStationDetailInfo buDunStationDetailInfo = buDunStationDetailInfos.stream().filter(buDunStationDetailInfo1 -> buDunStationDetailInfo1.getStation_name().contains(stationCacheInfoDto.getStationName().replace("风电站","").replace("风电场","").replace("光伏电站",""))).findFirst().orElse(null);
if(!ObjectUtils.isEmpty(buDunStationDetailInfo)&&!ObjectUtils.isEmpty(buDunStationDetailInfo.getWind_as_irradiance())){
speendOrirradiate = buDunStationDetailInfo.getWind_as_irradiance().toString();
if(ObjectUtils.isEmpty(regionNationWideDto.getStationCapacity())&&!ObjectUtils.isEmpty(buDunStationDetailInfo.getActual_installed_capacity())){
regionNationWideDto.setStationCapacity(buDunStationDetailInfo.getActual_installed_capacity().toString());
}
}
regionNationWideDto.setSpeendOrirradiate(speendOrirradiate);
StationCoordinate stationCoordinate = stationCoordinateList.stream().filter(stationCoordinate1 -> String.valueOf(stationCoordinate1.getStationId()).equals(stationCacheInfoDto.getStationId())).collect(Collectors.toList()).get(0);
hashMap.put("lng", Double.valueOf(stationCoordinate.getLongitude()));
......@@ -356,24 +361,25 @@ public class MonitorServiceImpl implements MonitorService {
AtomicReference<Double> fdzannualPower = new AtomicReference<>(0.0);
AtomicReference<Double> gfzannualPower = new AtomicReference<>(0.0);
List<CoreValuesDto> coreValuesDtos = coreCommonService.getValuesByStationNamesAndPointsNames(null, null);
String requestUrl = Constants.BASE_URL + "?" + Constants.get_province_station_item + "&provinceName" + provinceName;
String requestUrl = Constants.BASE_URL + "?" + Constants.get_province_station_item + "&provinceName=" + provinceName;
List<BuDunStationDetailInfo> buDunStationDetailInfos = httpRequestUtil.getResPonse(requestUrl, Constants.REQUEST_GET, "", Constants.resovleRule_data, BuDunStationDetailInfo.class);
if (buDunStationDetailInfos.size() > 0) {
buDunStationDetailInfos.forEach(buDunStationDetailInfo -> {
CompletionOfPowerIndicatorsDto completionOfPowerIndicatorsDto = new CompletionOfPowerIndicatorsDto();
completionOfPowerIndicatorsDto.setStationName(buDunStationDetailInfo.getStation_name());
completionOfPowerIndicatorsDto.setInstallCapactity(String.valueOf(buDunStationDetailInfo.getActual_installed_capacity()));
completionOfPowerIndicatorsDto.setWindSpeedOrIrradiance(String.format(CommonConstans.Twodecimalplaces, buDunStationDetailInfo.getWind_as_irradiance()));
completionOfPowerIndicatorsDto.setWindSpeedOrIrradiance(!ObjectUtils.isEmpty(buDunStationDetailInfo.getWind_as_irradiance())?String.format(CommonConstans.Twodecimalplaces, buDunStationDetailInfo.getWind_as_irradiance()):"--");
completionOfPowerIndicatorsDto.setDailyPower(String.format(CommonConstans.Fourdecimalplaces, buDunStationDetailInfo.getDay()));
completionOfPowerIndicatorsDto.setMonthlyPower(String.format(CommonConstans.Fourdecimalplaces, buDunStationDetailInfo.getMonth()));
completionOfPowerIndicatorsDto.setAnnualPower(String.format(CommonConstans.Fourdecimalplaces, buDunStationDetailInfo.getYear()));
StationCacheInfoDto stationCacheInfoDto = stationCacheInfoDtoList.stream().filter(stationCacheInfoDto1 -> stationCacheInfoDto1.getStationName().contains(buDunStationDetailInfo.getStation_name().replace("电场","").replace("电站",""))).findFirst().get();
if (stationCacheInfoDto.getStationCoreName() != null && stationCacheInfoDto.getBoosterCoreName() != null) {
StationCacheInfoDto stationCacheInfoDto = stationCacheInfoDtoList.stream().filter(stationCacheInfoDto1 -> stationCacheInfoDto1.getStationName().contains(buDunStationDetailInfo.getStation_name().replace("电场","").replace("电站","").replace("光伏电站",""))).findFirst().orElse(null);
if (!ObjectUtils.isEmpty(stationCacheInfoDto)&&stationCacheInfoDto.getStationCoreName() != null && stationCacheInfoDto.getBoosterCoreName() != null) {
List<CoreValuesDto> coreValuesDtoList = coreValuesDtos.stream().filter(coreValuesDto -> coreValuesDto.getName().equals(stationCacheInfoDto.getStationCoreName()) || coreValuesDto.getName().equals(stationCacheInfoDto.getBoosterCoreName())).collect(Collectors.toList());
completionOfPowerIndicatorsDto.setActivePower(String.format(CommonConstans.Twodecimalplaces, (coreCommonService.getSumOfByPointName(coreValuesDtoList, CommonConstans.ACTIVE_POWER))));
}else {
completionOfPowerIndicatorsDto.setActivePower("--");
}
completionOfPowerIndicatorsDtoList.add(completionOfPowerIndicatorsDto);
});
}
String provincelUrl = Constants.BASE_URL + "?" + Constants.get_province_item_url + "&provinceName=" + provinceName;
......@@ -481,7 +487,7 @@ public class MonitorServiceImpl implements MonitorService {
String requestUrl = Constants.BASE_URL + "?" + Constants.get_area_item_url;
if (!ObjectUtils.isEmpty(areaName)) {
if (!areaName.contains(Constants.areaChinese)) {
areaName = Constants.areaChinese;
areaName = areaName + Constants.areaChinese;
}
requestUrl = requestUrl + "&areaName=" + areaName;
}
......@@ -573,7 +579,7 @@ public class MonitorServiceImpl implements MonitorService {
List<HashMap<String, String>> mapList = new ArrayList<>();
String requestUrl = Constants.BASE_URL + "?" + Constants.get_month_top_url + "&topValue=5";
if (!ObjectUtils.isEmpty(areaName)) {
if (areaName.contains(Constants.areaChinese)) {
if (!areaName.contains(Constants.areaChinese)) {
areaName = areaName + Constants.areaChinese;
}
requestUrl = requestUrl + "&areaName=" + areaName;
......@@ -640,7 +646,7 @@ public class MonitorServiceImpl implements MonitorService {
List<StationBasic> gfdzlist = new ArrayList<>();
String requestUrl = Constants.BASE_URL + "?" + Constants.get_hours_num_top + "&topValue=3&tabValue=" + tabValue;
if (!ObjectUtils.isEmpty(areaName)) {
if (areaName.contains(Constants.areaChinese)) {
if (!areaName.contains(Constants.areaChinese)) {
areaName = areaName + Constants.areaChinese;
}
requestUrl = requestUrl + "&areaName=" + areaName;
......
......@@ -5,7 +5,6 @@ import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import com.netflix.loadbalancer.RetryRule;
import com.yeejoin.amos.boot.module.jxiop.api.dto.IndexDto;
import com.yeejoin.amos.boot.module.jxiop.api.entity.Region;
import com.yeejoin.amos.boot.module.jxiop.api.entity.StationBasic;
......@@ -17,11 +16,7 @@ import com.yeejoin.amos.boot.module.jxiop.api.mapper.StationPlanMapper;
import com.yeejoin.amos.boot.module.jxiop.biz.ESDto.ESEquipments;
import com.yeejoin.amos.boot.module.jxiop.biz.constants.CommonConstans;
import com.yeejoin.amos.boot.module.jxiop.biz.dto.ESEquipmentsDTO;
import com.yeejoin.amos.boot.module.jxiop.biz.dto.IndicatorsDto;
import com.yeejoin.amos.boot.module.jxiop.biz.dto.SocialContributionDto;
//import com.yeejoin.amos.boot.module.jxiop.biz.utils.InfluxDButils;
//import com.yeejoin.amos.component.influxdb.InfluxdbUtil;
import io.swagger.models.auth.In;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
......