Commit 2b478238 authored by 朱晨阳

Merge remote-tracking branch 'origin/developer' into developer

parents ea0fe745 4943de2f
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<artifactId>amos-boot-data</artifactId>
<groupId>com.amosframework.boot</groupId>
<version>1.0.0</version>
</parent>
<artifactId>amos-boot-data-alarm</artifactId>
<name>amos-boot-data-alarm</name>
<dependencies>
<dependency>
<groupId>org.springframework.cloud</groupId>
<artifactId>spring-cloud-starter-netflix-eureka-client</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-actuator</artifactId>
</dependency>
<dependency>
<groupId>org.typroject</groupId>
<artifactId>tyboot-core-foundation</artifactId>
<version>${tyboot-version}</version>
</dependency>
<dependency>
<groupId>org.typroject</groupId>
<artifactId>tyboot-core-restful</artifactId>
<version>${tyboot-version}</version>
<exclusions>
<exclusion>
<groupId>org.typroject</groupId>
<artifactId>*</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.typroject</groupId>
<artifactId>tyboot-core-auth</artifactId>
<version>${tyboot-version}</version>
<exclusions>
<exclusion>
<groupId>org.typroject</groupId>
<artifactId>*</artifactId>
</exclusion>
</exclusions>
</dependency>
<!-- Kafka dependency -->
<dependency>
<groupId>org.springframework.kafka</groupId>
<artifactId>spring-kafka</artifactId>
</dependency>
<dependency>
<groupId>org.typroject</groupId>
<artifactId>tyboot-component-emq</artifactId>
<version>1.1.20</version>
</dependency>
<dependency>
<groupId>org.typroject</groupId>
<artifactId>tyboot-component-event</artifactId>
<version>${tyboot-version}</version>
<exclusions>
<exclusion>
<groupId>org.typroject</groupId>
<artifactId>*</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.typroject</groupId>
<artifactId>tyboot-component-opendata</artifactId>
<version>${tyboot-version}</version>
<exclusions>
<exclusion>
<groupId>org.typroject</groupId>
<artifactId>*</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>com.yeejoin</groupId>
<artifactId>amos-feign-systemctl</artifactId>
<version>${amos.version}</version>
</dependency>
<dependency>
<groupId>com.yeejoin</groupId>
<artifactId>amos-component-config</artifactId>
<version>${amos.version}</version>
</dependency>
<dependency>
<groupId>org.typroject</groupId>
<artifactId>tyboot-core-rdbms</artifactId>
<version>${tyboot-version}</version>
<exclusions>
<exclusion>
<groupId>org.typroject</groupId>
<artifactId>*</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.typroject</groupId>
<artifactId>tyboot-component-cache</artifactId>
<version>${tyboot-version}</version>
<exclusions>
<exclusion>
<groupId>org.typroject</groupId>
<artifactId>*</artifactId>
</exclusion>
<exclusion>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-data-redis</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-redis</artifactId>
<version>1.4.5.RELEASE</version>
</dependency>
<dependency>
<groupId>org.jetbrains</groupId>
<artifactId>annotations</artifactId>
<version>19.0.0</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>org.codehaus.jettison</groupId>
<artifactId>jettison</artifactId>
<version>1.3.7</version>
</dependency>
<dependency>
<groupId>com.alibaba</groupId>
<artifactId>druid-spring-boot-starter</artifactId>
<version>1.1.10</version>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-maven-plugin</artifactId>
</plugin>
</plugins>
</build>
</project>
package com.yeejoin.amos;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.mybatis.spring.annotation.MapperScan;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.boot.web.servlet.ServletComponentScan;
import org.springframework.cloud.client.discovery.EnableDiscoveryClient;
import org.springframework.cloud.netflix.eureka.EnableEurekaClient;
import org.springframework.cloud.openfeign.EnableFeignClients;
import org.springframework.context.ConfigurableApplicationContext;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.core.env.Environment;
import org.springframework.scheduling.annotation.EnableAsync;
import org.springframework.scheduling.annotation.EnableScheduling;
import org.springframework.transaction.annotation.EnableTransactionManagement;
import org.typroject.tyboot.core.restful.exception.GlobalExceptionHandler;
import java.net.InetAddress;
/**
*
* <pre>
*
* </pre>
*
* @author gwb
* @version $Id: AlarmApplication.java, v 0.1 Sep 27, 2021 3:29:30 PM gwb Exp $
*/
@SpringBootApplication
@EnableTransactionManagement
@EnableConfigurationProperties
@ServletComponentScan
@EnableDiscoveryClient
@EnableFeignClients
@EnableAsync
@EnableEurekaClient
@EnableScheduling
@MapperScan(value = { "org.typroject.tyboot.*.*.face.orm.dao", "com.yeejoin.amos.api.*.face.orm.dao", "org.typroject.tyboot.face.*.orm.dao*",
"com.yeejoin.amos.api.*.mapper","com.yeejoin.amos.boot.biz.common.dao.mapper", "com.yeejoin.amos.api.*.mapper2" })
@ComponentScan({ "org.typroject", "com.yeejoin.amos" })
public class AlarmApplication {
private static final Logger logger = LogManager.getLogger(AlarmApplication.class);
public static void main(String[] args) throws Exception {
ConfigurableApplicationContext context = SpringApplication.run(AlarmApplication.class, args);
GlobalExceptionHandler.setAlwaysOk(true);
Environment env = context.getEnvironment();
String ip = InetAddress.getLocalHost().getHostAddress();
String port = env.getProperty("server.port");
String path = env.getProperty("server.servlet.context-path");
logger.info("\n----------------------------------------------------------\n\t"
+ "Application Amos-Biz-Boot is running! Access URLs:\n\t" + "Swagger文档: \thttp://" + ip + ":" + port
+ path + "/doc.html\n" + "----------------------------------------------------------");
}
}
package com.yeejoin.amos.api.alarm.config;
import com.alibaba.druid.pool.DruidDataSource;
import com.baomidou.mybatisplus.extension.spring.MybatisSqlSessionFactoryBean;
import org.apache.ibatis.session.SqlSessionFactory;
import org.mybatis.spring.annotation.MapperScan;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.io.support.PathMatchingResourcePatternResolver;
import org.springframework.jdbc.datasource.DataSourceTransactionManager;
import javax.sql.DataSource;
/**
* Secondary (cluster) data source configuration.
* To add more data sources, add the corresponding settings in the yml file and create a matching configuration class.
*/
@Configuration
@MapperScan(basePackages = "com.yeejoin.amos.api.alarm.mapper2", sqlSessionFactoryRef = "clusterSqlSessionFactory")
public class ClusterDbConfig {
private Logger logger = LoggerFactory.getLogger(ClusterDbConfig.class);
// Scoped to the cluster directory so its mappers stay isolated from other data sources
private static final String MAPPER_LOCATION = "classpath*:mapper/cluster/*.xml";
@Value("${spring.db2.datasource.url}")
private String dbUrl;
@Value("${spring.db2.datasource.username}")
private String username;
@Value("${spring.db2.datasource.password}")
private String password;
@Value("${spring.db2.datasource.driver-class-name}")
private String driverClassName;
@Bean(name = "clusterDataSource2") //声明其为Bean实例
public DataSource clusterDataSource() {
DruidDataSource datasource = new DruidDataSource();
datasource.setUrl(this.dbUrl);
datasource.setUsername(username);
datasource.setPassword(password);
datasource.setDriverClassName(driverClassName);
return datasource;
}
@Bean(name = "clusterTransactionManager")
public DataSourceTransactionManager clusterTransactionManager() {
return new DataSourceTransactionManager(clusterDataSource());
}
@Bean(name = "clusterSqlSessionFactory")
public SqlSessionFactory clusterSqlSessionFactory(@Qualifier("clusterDataSource2") DataSource clusterDataSource)
throws Exception {
final MybatisSqlSessionFactoryBean sessionFactory = new MybatisSqlSessionFactoryBean();
sessionFactory.setDataSource(clusterDataSource);
sessionFactory.setMapperLocations(new PathMatchingResourcePatternResolver()
.getResources(ClusterDbConfig.MAPPER_LOCATION));
sessionFactory.setTypeAliasesPackage("com.yeejoin.amos.boot.module.jxiop.biz.entity2");
// MyBatis underscore-to-camelCase mapping between database columns and entity properties
sessionFactory.getObject().getConfiguration().setMapUnderscoreToCamelCase(true);
return sessionFactory.getObject();
}
}
package com.yeejoin.amos.api.alarm.config;
import lombok.extern.slf4j.Slf4j;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.scheduling.annotation.EnableAsync;
import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor;
import java.util.concurrent.Executor;
import java.util.concurrent.ThreadPoolExecutor;
@Slf4j
@Configuration
@EnableAsync
public class EquipExecutorConfig {
@Bean(name = "equipAsyncExecutor")
public Executor asyncServiceExecutor() {
ThreadPoolTaskExecutor executor = new ThreadPoolTaskExecutor();
// core pool size
executor.setCorePoolSize(10);
// maximum pool size
executor.setMaxPoolSize(500);
// queue capacity
executor.setQueueCapacity(2000);
// thread name prefix for threads in this pool
executor.setThreadNamePrefix("namePrefix");
// idle time allowed for threads beyond the core size
executor.setKeepAliveSeconds(30);
// rejection policy: how new tasks are handled once the pool reaches max size
// CALLER_RUNS: the task is executed in the caller's thread instead of a pool thread
executor.setRejectedExecutionHandler(new ThreadPoolExecutor.CallerRunsPolicy());
// initialize the executor
executor.initialize();
// wait for all tasks to finish before shutting down the pool
executor.setWaitForTasksToCompleteOnShutdown(true);
return executor;
}
}
//package com.yeejoin.amos.api.alarm.config;
//
//import org.apache.kafka.clients.admin.AdminClient;
//import org.apache.kafka.clients.admin.AdminClientConfig;
//import org.apache.kafka.clients.admin.NewTopic;
//import org.springframework.beans.factory.annotation.Value;
//import org.springframework.context.annotation.Bean;
//import org.springframework.context.annotation.Configuration;
//import org.springframework.kafka.core.KafkaAdmin;
//
//import java.util.HashMap;
//import java.util.Map;
//
//import static org.bouncycastle.asn1.pkcs.PKCSObjectIdentifiers.md5;
//
//@Configuration
//public class KafkaInitialConfiguration {
//
//
//
//
// /***
// * Create a topic with 10 partitions and 1 replica
// * (created via a bean named initialTopic)
// * @return
// */
//
// @Bean
// public NewTopic initialTopic1() {
//
// return new NewTopic("jf1",3, (short) 1 );
// }
//
//
// @Bean
// public KafkaAdmin kafkaAdmin() {
// Map<String, Object> props = new HashMap<>();
// // Kafka broker connection address
// props.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, "121.199.39.218:9092");
// KafkaAdmin admin = new KafkaAdmin(props);
// return admin;
// }
//
// @Bean
// public AdminClient adminClient() {
// return AdminClient.create(kafkaAdmin().getConfig());
// }
//
//
//
//}
package com.yeejoin.amos.api.alarm.config;
import com.alibaba.druid.pool.DruidDataSource;
import com.baomidou.mybatisplus.extension.spring.MybatisSqlSessionFactoryBean;
import org.apache.ibatis.session.SqlSessionFactory;
import org.mybatis.spring.annotation.MapperScan;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Primary;
import org.springframework.core.io.support.PathMatchingResourcePatternResolver;
import org.springframework.jdbc.datasource.DataSourceTransactionManager;
import javax.sql.DataSource;
import java.util.Properties;
@Configuration
@MapperScan(basePackages = "com.yeejoin.amos.api.alarm.mapper", sqlSessionFactoryRef = "masterSqlSessionFactory1")
public class MasterDbConfig {
private Logger logger = LoggerFactory.getLogger(MasterDbConfig.class);
// Scoped to the master mapper location so its mappers stay isolated from other data sources
private static final String MAPPER_LOCATION = "classpath*:mapper/*.xml";
@Value("${spring.datasource.url}")
private String dbUrl;
@Value("${spring.datasource.username}")
private String username;
@Value("${spring.datasource.password}")
private String password;
@Value("${spring.datasource.driver-class-name}")
private String driverClassName;
@Bean(name="masterDataSource") //声明其为Bean实例
@Primary //在同样的DataSource中,首先使用被标注的DataSource
public DataSource masterDataSource() {
DruidDataSource datasource = new DruidDataSource();
datasource.setUrl(this.dbUrl);
datasource.setUsername(username);
datasource.setPassword(password);
datasource.setDriverClassName(driverClassName);
return datasource;
}
@Bean(name = "masterTransactionManager")
@Primary
public DataSourceTransactionManager masterTransactionManager() {
return new DataSourceTransactionManager(masterDataSource());
}
@Bean(name = "masterSqlSessionFactory1")
@Primary
public SqlSessionFactory masterSqlSessionFactory(@Qualifier("masterDataSource") DataSource masterDataSource)
throws Exception {
final MybatisSqlSessionFactoryBean sessionFactory = new MybatisSqlSessionFactoryBean();
sessionFactory.setDataSource(masterDataSource);
sessionFactory.setMapperLocations(new PathMatchingResourcePatternResolver()
.getResources(MasterDbConfig.MAPPER_LOCATION));
sessionFactory.setTypeAliasesPackage("com.yeejoin.amos.boot.module.jxiop.api.entity");
// MyBatis underscore-to-camelCase mapping between database columns and entity properties
sessionFactory.getObject().getConfiguration().setMapUnderscoreToCamelCase(true);
return sessionFactory.getObject();
}
}
package com.yeejoin.amos.api.alarm.dto;
import lombok.Data;
import java.util.List;
/**
* @description:
* @author: tw
* @createDate: 2023/6/19
*/
@Data
public class BizInfo {
private String sourceAttributionDesc;
private String sourceAttribution;
private List<DynamicDetails> dynamicDetails;
private String warningObjectCode;
private String warningTime;
private String warningObjectName;
private String warningObjectType;
private String warningObjectLinkUrl;
public BizInfo(String sourceAttributionDesc,
String sourceAttribution,
List<DynamicDetails> dynamicDetails,
String warningObjectCode,
String warningTime,
String warningObjectName,
String warningObjectType,
String warningObjectLinkUrl) {
this.sourceAttributionDesc = sourceAttributionDesc;
this.sourceAttribution = sourceAttribution;
this.dynamicDetails = dynamicDetails;
this.warningObjectCode = warningObjectCode;
this.warningTime = warningTime;
this.warningObjectName = warningObjectName;
this.warningObjectType = warningObjectType;
this.warningObjectLinkUrl = warningObjectLinkUrl;
}
}
package com.yeejoin.amos.api.alarm.dto;
import lombok.Data;
import java.util.List;
/**
* @description:
* @author: tw
* @createDate: 2023/6/19
*/
@Data
public class DynamicDetails {
private String tabName;
private List<TabContent> tabContent;
public DynamicDetails(String tabName, List<TabContent> tabContent) {
this.tabName = tabName;
this.tabContent = tabContent;
}
}
package com.yeejoin.amos.api.alarm.dto;
import lombok.Data;
/**
* @description:
* @author: tw
* @createDate: 2023/6/19
*/
@Data
public class TabContent {
private String label;
private String type;
private Object value;
private String key;
public TabContent(String label, String type, Object value, String key) {
this.label = label;
this.type = type;
this.value = value;
this.key = key;
}
}
package com.yeejoin.amos.api.alarm.dto;
import lombok.Data;
import java.util.List;
/**
* @description:
* @author: tw
* @createDate: 2023/6/19
*/
@Data
public class WarningDto {
private BizInfo bizInfo;
private String indexKey;
private String indexValue;
private String traceId;
public WarningDto( String indexKey, String indexValue, String traceId,
String sourceAttributionDesc,
String sourceAttribution,
List<DynamicDetails> dynamicDetails,
String warningObjectCode,
String warningTime,
String warningObjectName,
String warningObjectType,
String warningObjectLinkUrl
) {
this.bizInfo = new BizInfo( sourceAttributionDesc, sourceAttribution, dynamicDetails, warningObjectCode, warningTime, warningObjectName,warningObjectType, warningObjectLinkUrl);
this.indexKey = indexKey;
this.indexValue = indexValue;
this.traceId = traceId;
}
}
package com.yeejoin.amos.api.alarm.entity;
import com.baomidou.mybatisplus.annotation.FieldFill;
import com.baomidou.mybatisplus.annotation.IdType;
import com.baomidou.mybatisplus.annotation.TableField;
import com.baomidou.mybatisplus.annotation.TableId;
import com.fasterxml.jackson.databind.annotation.JsonSerialize;
import com.fasterxml.jackson.databind.ser.std.ToStringSerializer;
import lombok.Data;
import lombok.experimental.Accessors;
import java.io.Serializable;
import java.util.Date;
/**
* @description: common base entity
* @author: duanwei
**/
@Data
@Accessors(chain = true)
public class BaseEntity implements Serializable {
private static final long serialVersionUID = -5464322936854328207L;
@TableId(type = IdType.ID_WORKER)
@JsonSerialize(using = ToStringSerializer.class)
private Long id;
/**
* Populated on insert and update
*/
@TableField(value = "create_date", fill = FieldFill.INSERT)
private Date createDate;
}
package com.yeejoin.amos.api.alarm.entity;
import com.baomidou.mybatisplus.annotation.TableField;
import com.baomidou.mybatisplus.annotation.TableName;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;
import lombok.EqualsAndHashCode;
import lombok.experimental.Accessors;
/**
* @description:
* @author: tw
* @createDate: 2023/6/19
*/
@Data
@EqualsAndHashCode(callSuper = true)
@Accessors(chain = true)
@TableName("dz_point_system")
@ApiModel(value="PointSystem对象", description="")
public class PointSystem extends BaseEntity {
@ApiModelProperty(value = "场站")
@TableField("station")
private String station;
@ApiModelProperty(value = "二维码")
@TableField("number")
private String number;
@ApiModelProperty(value = "类型")
@TableField("type")
private String type;
@ApiModelProperty(value = "'地址'")
@TableField("address")
private String address;
@ApiModelProperty(value = "测点类型")
@TableField("point_type")
private String pointType;
@ApiModelProperty(value = "测点值")
@TableField("value")
private String value;
@ApiModelProperty(value = "功能码")
@TableField("function_num")
private String functionNum;
@ApiModelProperty(value = "kks码")
@TableField("kks")
private String kks;
@ApiModelProperty(value = "網管地址")
@TableField("gateway_id")
private String gatewayId;
}
package com.yeejoin.amos.api.alarm.entity2;
import com.baomidou.mybatisplus.annotation.TableField;
import com.baomidou.mybatisplus.annotation.TableName;
import lombok.Data;
import lombok.experimental.Accessors;
@Data
@Accessors(chain = true)
@TableName("jump_config")
public class JumpConfig {
@TableField("id")
private Integer id;
@TableField("url")
private String url;
@TableField("type")
private String type;
}
package com.yeejoin.amos.api.alarm.mapper;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import com.yeejoin.amos.api.alarm.entity.PointSystem;
/**
* @description:
* @author: tw
* @createDate: 2023/6/19
*/
public interface PointSystemMapper extends BaseMapper<PointSystem> {
// push warning message
public void sendWarning();
}
package com.yeejoin.amos.api.alarm.mapper2;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import com.yeejoin.amos.api.alarm.entity2.JumpConfig;
public interface JumpConfigMapper extends BaseMapper<JumpConfig> {
}
package com.yeejoin.amos.api.alarm.service;
/**
* @description:
* @author: tw
* @createDate: 2023/6/19
*/
public interface IPointSystemService {
// trigger a risk warning
public void sendWarning(String address, String value,String valueLabe,String gatewayId);
}
package com.yeejoin.amos.api.alarm.service.impl;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.kafka.annotation.TopicPartition;
import org.springframework.kafka.support.Acknowledgment;
import org.springframework.stereotype.Service;
/**
* @description: listens for equipment alarm messages
* @author: tw
* @createDate: 2023/6/27
*/
@Service
public class AlarmKafkaConsumer {
@Autowired
PointSystemServiceImpl pointSystemServiceImpl;
// consumer that handles incoming messages
@KafkaListener(id="alarmInfo" , topics ={"${kafka.equipment.alarm}"})
public void message1( String record, Acknowledgment ack){
// business handling
String date=record;
System.out.println("消息进来了" +record);
// trigger the warning asynchronously
pointSystemServiceImpl.sendWarningAsync(date);
// manual offset commit
ack.acknowledge();
}
@KafkaListener(id="user2" , topics ={"${kafka.equipment.test}"})
public void message2( String record, Acknowledgment ack){
String date=record;
System.out.println("消息进来了 8888888888888888888888");
}
// public void message1( ConsumerRecord<?, ?> record, Acknowledgment ack){
// // print which topic/partition the message came from, along with its content
//
// StringBuffer sb = new StringBuffer();
// // topic
// sb.append(record.topic() + "-");
// // partition
// sb.append(record.partition() + "-");
// // consumed value
// sb.append(record.value() + "-");
// // offset
// sb.append(record.offset());
//
// System.out.println( "消费者进行消费:"+ sb);
// ack.acknowledge();
//
// }
// // simple consumer; the groupId can be any value
// @KafkaListener(id = "Consumer0", groupId = "jf0-group", topics = "jf1", topicPartitions = {
// @TopicPartition(topic = "jf1", partitions = {"0"}),
// }, containerFactory = "kafkaListenerContainerFactory")
// public void consumer0(ConsumerRecord<String, String> records, Acknowledgment ack) {
// this.message1(records,ack);
// }
//
// @KafkaListener(id = "Consumer1", groupId = "jf1-group", topics = "jf1", topicPartitions = {
// @TopicPartition(topic = "jf1", partitions = {"1"}),
// }, containerFactory = "kafkaListenerContainerFactory")
// public void consumer1(ConsumerRecord<String, String> records, Acknowledgment ack) {
// this.message1(records,ack);
// }
//
// @KafkaListener(id = "Consumer2", groupId = "jf2-group", topics = "jf1", topicPartitions = {
// @TopicPartition(topic = "jf1", partitions = {"2"}),
// }, containerFactory = "kafkaListenerContainerFactory")
// public void consumer3(ConsumerRecord<String, String> records, Acknowledgment ack) {
// this.message1(records,ack);
// }
}
package com.yeejoin.amos.api.alarm.service.impl;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;
import com.github.xiaoymin.knife4j.core.util.StrUtil;
import com.yeejoin.amos.api.alarm.dto.DynamicDetails;
import com.yeejoin.amos.api.alarm.dto.TabContent;
import com.yeejoin.amos.api.alarm.dto.WarningDto;
import com.yeejoin.amos.api.alarm.entity2.JumpConfig;
import com.yeejoin.amos.api.alarm.entity.PointSystem;
import com.yeejoin.amos.api.alarm.mapper.PointSystemMapper;
import com.yeejoin.amos.api.alarm.mapper2.JumpConfigMapper;
import com.yeejoin.amos.api.alarm.service.IPointSystemService;
import com.yeejoin.amos.api.alarm.utils.HttpContentTypeUtil;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.scheduling.annotation.Async;
import org.springframework.stereotype.Service;
import org.typroject.tyboot.component.emq.EmqKeeper;
import java.text.SimpleDateFormat;
import java.util.*;
import java.util.stream.Collectors;
/**
* @description:
* @author: tw
* @createDate: 2023/6/19
*/
@Service
public class PointSystemServiceImpl extends ServiceImpl<PointSystemMapper, PointSystem> implements IPointSystemService {
private static final Logger logger = LogManager.getLogger(PointSystemServiceImpl.class);
@Autowired
PointSystemMapper pointSystemMapper;
@Value("${power.station.url}")
private String powerStationUrl;
private final String TABNAME="预警问题";
private final String TEXT= "text";
@Value("${power.station.warning:104/data/analysis}")
private String STATIONWARNING;
@Autowired
protected EmqKeeper emqKeeper;
@Autowired
private JumpConfigMapper jumpConfigMapper;
public String getJumpUrlByInfo(String sbbm) {
List<JumpConfig> jumpConfigs = jumpConfigMapper.selectList(null);
Map<String, String> collect = jumpConfigs.stream().collect(Collectors.toMap(JumpConfig::getType, JumpConfig::getUrl));
if (StringUtils.isEmpty(sbbm)) {
return "";
}
if (sbbm.indexOf("BAT") != -1) {
return collect.get("箱变");
} else if (sbbm.indexOf("WG") != -1) {
return collect.get("汇流箱");
} else if (sbbm.indexOf("WC") != -1) {
return collect.get("逆变器");
} else if (sbbm.length() == 12 && sbbm.indexOf("MD") != -1) {
return collect.get("风机");
} else if (sbbm.length() > 12 && sbbm.indexOf("MD") != -1) {
return collect.get("风机子系统");
} else {
return collect.get("默认");
}
}
@Async("equipAsyncExecutor")
public void sendWarningAsync( String date){
try {
logger.info("收到告警信息"+date);
com.alibaba.fastjson.JSONObject messageObj = JSON.parseObject(date);
String address= messageObj.get("address").toString();
String value= messageObj.get("value").toString();
String valueLabe=messageObj.get("valueLabel").toString();
String gatewayId=messageObj.get("gatewayId").toString();
this.sendWarning(address, value, valueLabe,gatewayId);
} catch (Exception e) {
e.printStackTrace();
}
}
@Override
public void sendWarning(String address, String value,String valueLabe,String gatewayId) {
try {
// look up the KKS code by measuring-point address and corresponding value
QueryWrapper<PointSystem> pointSystemWrapper = new QueryWrapper<>();
pointSystemWrapper.lambda().eq(PointSystem::getAddress, address);
if(!value.equals("false") && !value.equals("true")){
pointSystemWrapper.lambda().eq(PointSystem::getValue, value);
}
pointSystemWrapper.lambda().eq(PointSystem::getGatewayId, gatewayId);
List<PointSystem> pointSystems = pointSystemMapper.selectList(pointSystemWrapper);
if (pointSystems == null || pointSystems.size() < 1 ) {
throw new RuntimeException("获取kks码失败!");
}
PointSystem pointSystem = pointSystems.get(0);
if (pointSystem.getType().equals("遥信")){
return;
}
// call the external API to fetch equipment information
Map<String, String> maps = new HashMap<>();
maps.put("type", "equipinfo");
maps.put("kksbm", pointSystem.getKks());
String data = HttpContentTypeUtil.sendHttpPost(powerStationUrl, maps);
if (StringUtils.isEmpty(data) || !(Boolean) JSON.parseObject(data).get("success")) {
throw new RuntimeException("获取设备信息失败!");
}
JSONObject json = JSON.parseObject(data);
JSONObject jsond = (JSONObject) json.get("dataset");
JSONArray list = (JSONArray) jsond.get("datas");
JSONObject eqdata = null;
if (list == null || list.isEmpty()) {
throw new RuntimeException("获取设备信息失败!");
}
eqdata = (JSONObject) list.get(0);
// assemble the payload and publish the warning
WarningDto warningDto = setWarningDto(pointSystem, eqdata, valueLabe);
emqKeeper.getMqttClient().publish(STATIONWARNING, JSON.toJSONString(warningDto).getBytes(), 0, false);
} catch (Exception e) {
e.printStackTrace();
throw new RuntimeException("预警消息发送失败!");
}
}
public WarningDto setWarningDto(PointSystem pointSystem,JSONObject eqdata,String valueLabe ){
SimpleDateFormat sdf =new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
String time= sdf.format(new Date());
String warningObjectCode=pointSystem.getKks();
List<TabContent> tabContent=new ArrayList<>();
tabContent.add(new TabContent( "KKS编码", TEXT, warningObjectCode, "key1"));
tabContent.add(new TabContent( "设备名称", TEXT, eqdata.get("kksms"), "key2"));
tabContent.add(new TabContent( "告警原因", TEXT, valueLabe, "key3"));
tabContent.add(new TabContent( "发生时间", TEXT, time, "key4"));
DynamicDetails dynamicDetails=new DynamicDetails( TABNAME, tabContent);
List<DynamicDetails> dynamicDetailsList=new ArrayList<>();
dynamicDetailsList.add(dynamicDetails);
StringBuilder indexKey=new StringBuilder(pointSystem.getStation())
.append("#")
.append(pointSystem.getNumber())
.append("#")
.append(pointSystem.getFunctionNum());
String indexValue=valueLabe;
WarningDto warningDto=new WarningDto(
indexKey.toString(),
indexValue,
null,
(String)eqdata.get("sourceAttributionDesc"),
(String)eqdata.get("sourceAttribution"),
dynamicDetailsList,
warningObjectCode,
time ,
(String)eqdata.get("kksms"),
"equip",
getJumpUrlByInfo(warningObjectCode)
);
return warningDto;
}
}
//package com.yeejoin.amos.api.alarm.service.impl;
//
//import com.alibaba.fastjson.JSON;
//import org.apache.kafka.clients.admin.NewTopic;
//import org.apache.kafka.clients.producer.ProducerRecord;
//import org.checkerframework.checker.units.qual.K;
//import org.springframework.beans.factory.annotation.Autowired;
//import org.springframework.kafka.core.KafkaTemplate;
//import org.springframework.kafka.support.SendResult;
//import org.springframework.scheduling.annotation.Scheduled;
//import org.springframework.stereotype.Service;
//import org.springframework.util.concurrent.ListenableFuture;
//import org.springframework.util.concurrent.ListenableFutureCallback;
//
//import javax.annotation.PostConstruct;
//import javax.annotation.Resource;
//
///**
// * @description:
// * @author: tw
// * @createDate: 2023/6/28
// */
//@Service
//public class producerServers {
//
//
// @Autowired
// private KafkaTemplate<String, String> kafkaTemplate;
//
//@Scheduled(fixedRate = 60000)
// public void send(){
// String gg1="1668801435891929089@18873";
// String gg2="1668801435891929089@18874";
// String gg3="1668801435891929089@18875";
// String gg4="1668801435891929089@18876";
// String gg5="1668801435891929089@18877";
// String gg6="1668801435891929089@18878";
// String gg7="1668801435891929089@18879";
// String gg8="1668801435891929089@18880";
//
//
// String topic="jf1";
//
// ProducerRecord<String, String> producerRecord1 = new ProducerRecord<String, String>( topic, gg1.hashCode()%3, gg1.hashCode()%3+"", gg1+"==============="+gg1.hashCode()%5);
// ProducerRecord<String, String> producerRecord2 = new ProducerRecord<String, String>( topic, gg2.hashCode()%3,gg2.hashCode()%3+"", gg2+"==============="+gg2.hashCode()%5);
// ProducerRecord<String, String> producerRecord3 = new ProducerRecord<String, String>( topic, gg3.hashCode()%3,gg3.hashCode()%3+"", gg3+"==============="+gg3.hashCode()%5);
// ProducerRecord<String, String> producerRecord4 = new ProducerRecord<String, String>( topic, gg4.hashCode()%3,gg4.hashCode()%3+"", gg4+"==============="+gg4.hashCode()%5);
// ProducerRecord<String, String> producerRecord5 = new ProducerRecord<String, String>( topic, gg5.hashCode()%3,gg5.hashCode()%3+"", gg5+"==============="+gg5.hashCode()%5);
// ProducerRecord<String, String> producerRecord6 = new ProducerRecord<String, String>( topic, gg6.hashCode()%3,gg6.hashCode()%3+"", gg6+"==============="+gg6.hashCode()%5);
// ProducerRecord<String, String> producerRecord7 = new ProducerRecord<String, String>( topic, gg7.hashCode()%3,gg7.hashCode()%3+"", gg7+"==============="+gg7.hashCode()%5);
// ProducerRecord<String, String> producerRecord8 = new ProducerRecord<String, String>( topic, gg8.hashCode()%3,gg8.hashCode()%3+"", gg8+"==============="+gg8.hashCode()%5);
//
// System.out.println(gg1.hashCode()%3);
// System.out.println(gg2.hashCode()%3);
// System.out.println(gg3.hashCode()%3);
// System.out.println(gg4.hashCode()%3);
// System.out.println(gg5.hashCode()%3);
// System.out.println(gg6.hashCode()%3);
// System.out.println(gg7.hashCode()%3);
// System.out.println(gg8.hashCode()%3);
//
// kafkaTemplate.send(producerRecord1);
// kafkaTemplate.send(producerRecord2);
// kafkaTemplate.send(producerRecord3);
// kafkaTemplate.send(producerRecord4);
// kafkaTemplate.send(producerRecord5);
// kafkaTemplate.send(producerRecord6);
// kafkaTemplate.send(producerRecord7);
// kafkaTemplate.send(producerRecord8);
//
//
//
//
// }
//
//
//
//
//
//}
# jdbc_config
spring.datasource.driver-class-name=com.mysql.cj.jdbc.Driver
spring.datasource.url = jdbc:mysql://172.16.10.220:3306/equipment?useUnicode=true&allowMultiQueries=true&characterEncoding=utf-8&useJDBCCompliantTimezoneShift=true&useLegacyDatetimeCode=false&serverTimezone=Asia/Shanghai
spring.datasource.username=root
spring.datasource.password=Yeejoin@2020
spring.datasource.type=com.zaxxer.hikari.HikariDataSource
spring.datasource.hikari.pool-name=DatebookHikariCP
spring.datasource.hikari.minimum-idle= 3
spring.datasource.hikari.maximum-pool-size= 30
spring.datasource.hikari.auto-commit= true
spring.datasource.hikari.idle-timeout= 500000
spring.datasource.hikari.max-lifetime= 1800000
spring.datasource.hikari.connection-timeout= 60000
spring.datasource.hikari.connection-test-query= SELECT 1
## db2-sync_data
spring.db2.datasource.type: com.alibaba.druid.pool.DruidDataSource
spring.db2.datasource.url=jdbc:mysql://139.9.173.44:3306/jxiop_sync_data?allowMultiQueries=true&serverTimezone=GMT%2B8&characterEncoding=utf8
spring.db2.datasource.username=root
spring.db2.datasource.password=Yeejoin@2020
spring.db2.datasource.driver-class-name: com.mysql.cj.jdbc.Driver
# REDIS (RedisProperties)
spring.redis.database=1
spring.redis.host=172.16.10.220
spring.redis.port=6379
spring.redis.password=yeejoin@2020
spring.redis.lettuce.pool.max-active=200
spring.redis.lettuce.pool.max-wait=-1
spring.redis.lettuce.pool.max-idle=10
spring.redis.lettuce.pool.min-idle=0
spring.redis.expire.time=30000
# registry (Eureka) settings
eureka.client.registry-fetch-interval-seconds=5
management.endpoint.health.show-details=always
management.endpoints.web.exposure.include=*
eureka.instance.health-check-url-path=/actuator/health
eureka.instance.lease-expiration-duration-in-seconds=10
eureka.instance.lease-renewal-interval-in-seconds=5
eureka.instance.metadata-map.management.context-path=${server.servlet.context-path}/actuator
eureka.instance.status-page-url-path=/actuator/info
eureka.instance.metadata-map.management.api-docs=http://localhost:${server.port}${server.servlet.context-path}/doc.html
eureka.instance.hostname= 172.16.10.220
eureka.instance.prefer-ip-address = true
eureka.client.serviceUrl.defaultZone=http://${spring.security.user.name}:${spring.security.user.password}@172.16.10.220:10001/eureka/
spring.security.user.name=admin
spring.security.user.password=a1234560
## emqx
emqx.clean-session=true
emqx.client-id=${spring.application.name}-${random.int[1024,65536]}
emqx.broker=tcp://172.16.10.220:1883
emqx.user-name=admin
emqx.password=public
mqtt.scene.host=mqtt://172.16.10.220:8083/mqtt
mqtt.client.product.id=mqtt
mqtt.topic=topic_mqtt
spring.mqtt.completionTimeout=3000
# jdbc_config
spring.datasource.driver-class-name=com.kingbase8.Driver
spring.datasource.url=jdbc:kingbase8://10.20.1.176:54321/equipment?allowMultiQueries=true&serverTimezone=GMT%2B8&characterEncoding=utf8&currentSchema=root&binaryTransferDisable=TINYINT
spring.datasource.username=root
spring.datasource.password=Yeejoin@2020
spring.datasource.type=com.zaxxer.hikari.HikariDataSource
spring.datasource.hikari.pool-name=DatebookHikariCP
spring.datasource.hikari.minimum-idle= 3
spring.datasource.hikari.maximum-pool-size= 30
spring.datasource.hikari.auto-commit= true
spring.datasource.hikari.idle-timeout= 500000
spring.datasource.hikari.max-lifetime= 1800000
spring.datasource.hikari.connection-timeout= 60000
spring.datasource.hikari.connection-test-query= SELECT 1
## db2-sync_data
spring.db2.datasource.type: com.alibaba.druid.pool.DruidDataSource
spring.db2.datasource.url=jdbc:kingbase8://10.20.1.176:54321/jxiop_sync_data?allowMultiQueries=true&serverTimezone=GMT%2B8&characterEncoding=utf8&currentSchema=root&binaryTransferDisable=TINYINT
spring.db2.datasource.username=root
spring.db2.datasource.password=Yeejoin@2020
spring.db2.datasource.driver-class-name=com.kingbase8.Driver
# REDIS (RedisProperties)
spring.redis.database=1
spring.redis.host=47.92.234.253
spring.redis.port=6379
spring.redis.password=yeejoin@2020
spring.redis.lettuce.pool.max-active=200
spring.redis.lettuce.pool.max-wait=-1
spring.redis.lettuce.pool.max-idle=10
spring.redis.lettuce.pool.min-idle=0
spring.redis.expire.time=30000
# registry (Eureka) settings
eureka.client.registry-fetch-interval-seconds=5
management.endpoint.health.show-details=always
management.endpoints.web.exposure.include=*
eureka.instance.health-check-url-path=/actuator/health
eureka.instance.lease-expiration-duration-in-seconds=10
eureka.instance.lease-renewal-interval-in-seconds=5
eureka.instance.metadata-map.management.context-path=${server.servlet.context-path}/actuator
eureka.instance.status-page-url-path=/actuator/info
eureka.instance.metadata-map.management.api-docs=http://localhost:${server.port}${server.servlet.context-path}/doc.html
eureka.instance.hostname= 172.16.10.220
eureka.instance.prefer-ip-address = true
eureka.client.serviceUrl.defaultZone=http://${spring.security.user.name}:${spring.security.user.password}@47.92.234.253:10001/eureka/
spring.security.user.name=admin
spring.security.user.password=a1234560
## emqx
emqx.clean-session=true
emqx.client-id=${spring.application.name}-${random.int[1024,65536]}
emqx.broker=tcp://47.92.234.253:1883
emqx.user-name=admin
emqx.password=public
mqtt.scene.host=mqtt://47.92.234.253:8083/mqtt
mqtt.client.product.id=mqtt
mqtt.topic=topic_mqtt
spring.mqtt.completionTimeout=3000
spring.application.name=AMOS-ALARM-WJ
server.servlet.context-path=/alarm
server.port=11007
spring.profiles.active=kingbase8
server.compression.enabled=true
spring.jackson.dateFormat=yyyy-MM-dd HH:mm:ss
logging.config=classpath:logback-${spring.profiles.active}.xml
# file upload size limits
spring.servlet.multipart.maxFileSize=3MB
spring.servlet.multipart.maxRequestSize=3MB
## Redis expiration time
redis.cache.failure.time=10800
# mybatis-plus
mybatis-plus.mapper-locations=classpath:mapper/*Mapper.xml
# consumer group name
# consumer broker address
spring.kafka.consumer.bootstrap-servers=47.92.234.253:9092
# whether offsets are auto-committed
spring.kafka.consumer.enable-auto-commit=false
# where to start consuming when no committed offset exists
spring.kafka.consumer.auto-offset-reset=earliest
# key/value deserializers
spring.kafka.consumer.key-deserializer=org.apache.kafka.common.serialization.StringDeserializer
spring.kafka.consumer.value-deserializer=org.apache.kafka.common.serialization.StringDeserializer
# manual acknowledgment mode
spring.kafka.listener.ack-mode=manual_immediate
# listener type
spring.kafka.listener.type=single
# concurrency
#spring.kafka.listener.concurrency=5
# number of retries after a send error
spring.kafka.producer.retries=1
# producer broker address
spring.kafka.producer.bootstrap-servers=47.92.234.253:9092
# default batch size in bytes
spring.kafka.producer.batch-size=16384
# total memory (bytes) the producer may use to buffer records waiting to be sent
spring.kafka.producer.buffer-memory=33554432
# producer serializers
spring.kafka.producer.key-serializer=org.apache.kafka.common.serialization.StringSerializer
# Kafka default String serializer
spring.kafka.producer.value-serializer=org.apache.kafka.common.serialization.StringSerializer
kafka.equipment.alarm=EQUIPMENT_ALARM
kafka.equipment.test=test88888
# third-party power-station API for looking up equipment KKS codes
power.station.url=http://139.9.169.123:5024/prod-api/fdgl/process/DataInterface
# power-station IEC-104 acquisition warning topic
power.station.warning=104/data/analysis
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8"?>
<configuration debug="false">
<!-- Console output -->
<appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
<encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
<!-- Output format: %d = date, %thread = thread name, %-5level = level left-padded to 5 characters, %msg = log message, %n = newline -->
<pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{50} - %msg%n</pattern>
</encoder>
</appender>
<!-- show parameters for hibernate sql (Hibernate-specific)
<logger name="org.hibernate.type.descriptor.sql.BasicBinder" level="TRACE" />
<logger name="org.hibernate.type.descriptor.sql.BasicExtractor" level="DEBUG" />
<logger name="org.hibernate.SQL" level="DEBUG" />
<logger name="org.hibernate.engine.QueryParameters" level="DEBUG" />
<logger name="org.hibernate.engine.query.HQLQueryPlan" level="DEBUG" />
-->
<!-- MyBatis log configuration -->
<logger name="com.apache.ibatis" level="ERROR"/>
<logger name="java.sql.Connection" level="ERROR"/>
<logger name="java.sql.Statement" level="ERROR"/>
<logger name="java.sql.PreparedStatement" level="ERROR"/>
<logger name="com.baomidou" level="ERROR"/>
<logger name="org.springframework" level="INFO"/>
<logger name="org.apache.activemq" level="INFO"/>
<!-- Log output level -->
<root level="ERROR">
<appender-ref ref="STDOUT" />
</root>
<!-- Asynchronous logging to database -->
<!--<appender name="DB" class="ch.qos.logback.classic.db.DBAppender">-->
<!--&lt;!&ndash;日志异步到数据库 &ndash;&gt;-->
<!--<connectionSource class="ch.qos.logback.core.db.DriverManagerConnectionSource">-->
<!--&lt;!&ndash;连接池 &ndash;&gt;-->
<!--<dataSource class="com.mchange.v2.c3p0.ComboPooledDataSource">-->
<!--<driverClass>com.mysql.jdbc.Driver</driverClass>-->
<!--<url>jdbc:mysql://127.0.0.1:3306/databaseName</url>-->
<!--<user>root</user>-->
<!--<password>root</password>-->
<!--</dataSource>-->
<!--</connectionSource>-->
<!--</appender>-->
</configuration>
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8"?>
<configuration debug="false">
<!-- Console output -->
<appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
<encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
<!-- Output format: %d = date, %thread = thread name, %-5level = level left-padded to 5 characters, %msg = log message, %n = newline -->
<pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{50} - %msg%n</pattern>
</encoder>
</appender>
<!-- show parameters for hibernate sql (Hibernate-specific)
<logger name="org.hibernate.type.descriptor.sql.BasicBinder" level="TRACE" />
<logger name="org.hibernate.type.descriptor.sql.BasicExtractor" level="DEBUG" />
<logger name="org.hibernate.SQL" level="DEBUG" />
<logger name="org.hibernate.engine.QueryParameters" level="DEBUG" />
<logger name="org.hibernate.engine.query.HQLQueryPlan" level="DEBUG" />
-->
<!-- MyBatis log configuration -->
<logger name="com.apache.ibatis" level="ERROR"/>
<logger name="java.sql.Connection" level="ERROR"/>
<logger name="java.sql.Statement" level="ERROR"/>
<logger name="java.sql.PreparedStatement" level="ERROR"/>
<logger name="com.baomidou" level="ERROR"/>
<logger name="org.springframework" level="INFO"/>
<logger name="org.apache.activemq" level="INFO"/>
<!-- Log output level -->
<root level="ERROR">
<appender-ref ref="STDOUT" />
</root>
<!-- Asynchronous logging to database -->
<!--<appender name="DB" class="ch.qos.logback.classic.db.DBAppender">-->
<!--&lt;!&ndash;日志异步到数据库 &ndash;&gt;-->
<!--<connectionSource class="ch.qos.logback.core.db.DriverManagerConnectionSource">-->
<!--&lt;!&ndash;连接池 &ndash;&gt;-->
<!--<dataSource class="com.mchange.v2.c3p0.ComboPooledDataSource">-->
<!--<driverClass>com.mysql.jdbc.Driver</driverClass>-->
<!--<url>jdbc:mysql://127.0.0.1:3306/databaseName</url>-->
<!--<user>root</user>-->
<!--<password>root</password>-->
<!--</dataSource>-->
<!--</connectionSource>-->
<!--</appender>-->
</configuration>
\ No newline at end of file
......@@ -557,7 +557,6 @@ public class KsolarDataAcquisitionServiceImpl implements KSolarDataAcquisitionSe
@Scheduled(cron = "${dataRequstScheduled.keshida}")
@Override
@Async
@PostConstruct
public void collectorDetail() {
long ts = System.currentTimeMillis();
logger.info("-------科士达同步采集器详情/逆变器开始" + ts + "------- " + sdf.format(new Date()));
......
......@@ -12,11 +12,17 @@
<name>amos-boot-data</name>
<dependencies>
<dependency>
<groupId>com.kingbase8</groupId>
<artifactId>kingbase8</artifactId>
<version>8.6.0</version>
</dependency>
</dependencies>
<modules>
<module>amos-boot-data-housepvapi</module>
<module>amos-boot-data-alarm</module>
</modules>
</project>
......@@ -613,6 +613,8 @@ public class TDBigScreenAnalyseController extends BaseController {
query.eq(IdxBizFanHealthLevel::getStatus, item.get("station"));
query.le(IdxBizFanHealthLevel::getGroupLowerLimit, equipmentHealthScore);
query.ge(IdxBizFanHealthLevel::getGroupUpperLimit, equipmentHealthScore);
query.orderByDesc(IdxBizFanHealthLevel::getGroupUpperLimit);
query.last("LIMIT 1");
IdxBizFanHealthLevel idxBizFanHealthLevel = idxBizFanHealthLevelMapper.selectOne(query);
if (ObjectUtils.isNotEmpty(idxBizFanHealthLevel)) {
item.put("warningName", idxBizFanHealthLevel.getHealthLevel());
......
......@@ -2762,6 +2762,7 @@ public class HealthStatusIndicatorServiceImpl {
bizMessage.setIndexValue(idxBizFanWarningRecord.getPointName());
// bizMessage.setTraceId(idxBizFanWarningRecord.getSequenceNbr());
RiskBizInfoVo riskBizInfoVo = new RiskBizInfoVo();
riskBizInfoVo.setBussId(String.valueOf(idxBizFanWarningRecord.getTs()));
riskBizInfoVo.setWarningObjectName(idxBizFanWarningRecord.getEquipmentName());
riskBizInfoVo.setWarningObjectCode(idxBizFanWarningRecord.getKks());
riskBizInfoVo.setSourceAttribution(stationMap.get(idxBizFanWarningRecord.getGatewayId()).getProjectOrgCode());
......
spring.application.name=AMOS-JXIOP-ANALYSE
spring.application.name=AMOS-JXIOP-ANALYSE-WJ
server.servlet.context-path=/jxiop-analyse
server.port=33400
server.uri-encoding=UTF-8
......
......@@ -124,7 +124,7 @@
${tableName}
<where>
ANALYSIS_TYPE = '按天'
AND DATE_FORMAT( REC_DATE, "%Y-%m-%d" ) = CURRENT_DATE
AND DATE_ADD(DATE_FORMAT( REC_DATE, "%Y-%m-%d" ),INTERVAL 1 DAY) = CURRENT_DATE
<if test="stationCode != null and stationCode != ''">
AND GATEWAY_ID = #{stationCode}
AND ANALYSIS_OBJ_TYPE = '场站'
......
......@@ -6,10 +6,10 @@
update idx_biz_pv_point_var_correlation
<set>
<if test="processPointIds == null || processPointIds.size() == 0">
MATCH_PROCESS_PONIT = null
MATCH_PROCESS_POINT = null
</if>
<if test="processPointIds != null and processPointIds.size() > 0">
MATCH_PROCESS_PONIT = '匹配'
MATCH_PROCESS_POINT = '匹配'
</if>
</set>
WHERE ANALYSIS_GATEWAY_ID = #{gatewayId}
......
......@@ -205,8 +205,8 @@
<if test="dto.analysisType!= null and dto.analysisType!= ''">and analysis_type = #{dto.analysisType}</if>
<if test="dto.endDate!= null and dto.endDate!= ''"> and ts &lt;= #{dto.endDate} </if>
<if test="dto.startDate!= null and dto.startDate!= ''"> and ts &gt;= #{dto.startDate} </if>
<if test="dto.area!= null and dto.area!= ''"> AND area = #{area} </if>
<if test="dto.subarray!= null and dto.subarray!= ''"> AND `subarray` = #{dto.subarray} </if>
<if test="dto.area!= null and dto.area!= ''"> AND area = #{dto.area} </if>
<if test="dto.subarray!= null and dto.subarray!= ''"> AND subarray = #{dto.subarray} </if>
<if test="dto.pointName!= null and dto.pointName!= ''">AND point_name = #{dto.pointName} </if>
<if test="dto.station!= null and dto.station!= ''">AND station = #{dto.station} </if>
<if test="dto.healthLevel!= null and dto.healthLevel!= ''">AND health_level = #{dto.healthLevel} </if>
......@@ -227,14 +227,14 @@
</select>
<select id="getInfoByPageTotal" resultType="java.lang.Integer">
SELECT count(1) FROM fan_health_index_data
SELECT count(1) FROM pv_health_index_data
<where>
<if test="dto.analysisObjType!= null and dto.analysisObjType!= ''">analysis_obj_type = #{dto.analysisObjType}</if>
<if test="dto.analysisType!= null and dto.analysisType!= ''">and analysis_type = #{dto.analysisType}</if>
<if test="dto.endDate!= null and dto.endDate!= ''"> and ts &lt;= #{dto.endDate} </if>
<if test="dto.startDate!= null and dto.startDate!= ''"> and ts &gt;= #{dto.startDate} </if>
<if test="dto.area!= null and dto.area!= ''"> AND area = #{area} </if>
<if test="dto.subarray!= null and dto.subarray!= ''"> AND `subarray` = #{dto.subarray} </if>
<if test="dto.area!= null and dto.area!= ''"> AND area = #{dto.area} </if>
<if test="dto.subarray!= null and dto.subarray!= ''"> AND subarray = #{dto.subarray} </if>
<if test="dto.pointName!= null and dto.pointName!= ''">AND point_name = #{dto.pointName} </if>
<if test="dto.station!= null and dto.station!= ''">AND station = #{dto.station} </if>
<if test="dto.healthLevel!= null and dto.healthLevel!= ''">AND health_level = #{dto.healthLevel} </if>
......
......@@ -291,6 +291,7 @@
'' as unitName,
IFNULL(a.qrcode_color, 'green') as qrCodeColor,
IFNULL(a.rec_date, '') AS recDate,
IFNULL(a.qrcode_date, '') AS qrcodeDate,
IFNULL(head_photo, '') AS headPhoto
FROM
person_basic a
......
......@@ -212,6 +212,7 @@
) warnNum LEFT join amos_mcb.mcb_warning_question_info question on question.NUM = warnNum.QUESTION_NUM
LEFT JOIN amos_mcb.mcb_warning_base_source_attribution source ON source.CODE = question.SOURCE_ATTRIBUTION
<where>
question.COMPLETION_STATUS != 1
<if test="projectOrgCodes != null and projectOrgCodes.size() > 0">
AND question.SOURCE_ATTRIBUTION IN
<foreach collection="projectOrgCodes" item="item" open="(" separator="," close=")">
......@@ -240,13 +241,19 @@
(SELECT
QUESTION_NUM
FROM
amos_mcb.mcb_warning_warning_info
mcb_warning_warning_info
WHERE
mcb_warning_warning_info.PROCESSING_STATUS = 2
AND QUESTION_NUM IS NOT NULL
AND mcb_warning_warning_info.WARNING_SOURCE_TYPE_CODE IN ( SELECT CODE FROM amos_mcb.mcb_data_dictionary WHERE remark = 'OVERVIEW' )
) warnNum LEFT join amos_mcb.mcb_warning_question_info question on question.NUM = warnNum.QUESTION_NUM
LEFT JOIN amos_mcb.mcb_warning_base_source_attribution source ON source.CODE = question.SOURCE_ATTRIBUTION
AND mcb_warning_warning_info.WARNING_SOURCE_TYPE_CODE IN ( SELECT CODE FROM mcb_data_dictionary WHERE remark = 'OVERVIEW' )
) warnNum inner join mcb_warning_question_info question on question.NUM = warnNum.QUESTION_NUM
<if test="completionStatus != null and completionStatus == 0">
AND question.COMPLETION_STATUS in (0,2)
</if>
<if test="completionStatus != null and completionStatus != 0">
AND question.COMPLETION_STATUS = #{completionStatus}
</if>
LEFT JOIN mcb_warning_base_source_attribution source ON source.CODE = question.SOURCE_ATTRIBUTION
<where>
<if test="projectOrgCodes != null and projectOrgCodes.size() > 0">
AND question.SOURCE_ATTRIBUTION IN
......@@ -254,9 +261,7 @@
#{item}
</foreach>
</if>
<if test="completionStatus != null">
AND question.COMPLETION_STATUS = #{completionStatus}
</if>
<if test="startTime != null and startTime != ''">
AND question.CREATE_DATE &gt;= concat(#{startTime}, ' 00:00:00')
</if>
......
......@@ -329,6 +329,10 @@ public class PersonQrCodeController extends BaseController {
Map<String, Object> resultMap = new HashMap<>();
if ("V1".equals(column)) {
resultMap = personBasicServiceImpl.getPersonDetailInfoByObjectId(objectId);
if (resultMap.get("recDate").equals("")){
resultMap.put("recDate",resultMap.get("qrcodeDate"));
}
} else if ("S1".equals(column)) {
resultMap = sjglZsjZsbtzMapper.getEquipDetailInfoByObjectId(objectId);
} else if ("P1".equals(column)) {
......
......@@ -305,7 +305,7 @@ public class McbWarningServiceImpl implements IMcbWarningService {
private List<String> getProjectOrgCodes() {
List<String> projectOrgCodes = permissionService.getCurrentUserProjectOrgCodes();
if (Objects.isNull(projectOrgCodes)) {
projectOrgCodes = Collections.emptyList();
projectOrgCodes = new ArrayList<>();
}
log.info(JSON.toJSONString("当前用户转换获取的orgcode列表是 " + projectOrgCodes));
return projectOrgCodes;
......
## DB properties:
spring.db1.datasource.driver-class-name=com.kingbase8.Driver
spring.db1.datasource.url=jdbc:kingbase8://10.20.1.176:54321/production?allowMultiQueries=true&serverTimezone=GMT%2B8&characterEncoding=utf8&currentSchema=root
spring.db1.datasource.username=root
spring.db1.datasource.password=Yeejoin@2020
spring.db2.datasource.driver-class-name=com.kingbase8.Driver
spring.db2.datasource.url=jdbc:kingbase8://10.20.1.176:54321/amos_project?allowMultiQueries=true&serverTimezone=GMT%2B8&characterEncoding=utf8&currentSchema=root
spring.db2.datasource.username=root
spring.db2.datasource.password=Yeejoin@2020
spring.db3.datasource.driver-class-name=com.kingbase8.Driver
spring.db3.datasource.url=jdbc:kingbase8://10.20.1.176:54321/amos_mcb?allowMultiQueries=true&serverTimezone=GMT%2B8&characterEncoding=utf8&zeroDateTimeBehavior=convertToNull&currentSchema=root
spring.db3.datasource.username=root
spring.db3.datasource.password=Yeejoin@2020
## eureka properties:
eureka.instance.hostname=47.92.234.253
eureka.client.serviceUrl.defaultZone=http://admin:a1234560@${eureka.instance.hostname}:10001/eureka/
## redis properties:
spring.redis.database=1
spring.redis.host=47.92.234.253
spring.redis.port=6379
spring.redis.password=yeejoin@2020
spring.cache.type=GENERIC
j2cache.open-spring-cache=true
j2cache.cache-clean-mode=passive
j2cache.allow-null-values=true
j2cache.redis-client=lettuce
j2cache.l2-cache-open=true
j2cache.broadcast=net.oschina.j2cache.cache.support.redis.SpringRedisPubSubPolicy
j2cache.L1.provider_class=caffeine
j2cache.L2.provider_class=net.oschina.j2cache.cache.support.redis.SpringRedisProvider
j2cache.L2.config_section=lettuce
j2cache.sync_ttl_to_redis=true
j2cache.default_cache_null_object=false
j2cache.serialization=fst
caffeine.properties=/caffeine.properties
lettuce.mode=single
lettuce.namespace=
lettuce.storage=generic
lettuce.channel=j2cache
lettuce.scheme=redis
lettuce.hosts=${spring.redis.host}:${spring.redis.port}
lettuce.password=${spring.redis.password}
lettuce.database=${spring.redis.database}
lettuce.sentinelMasterId=
lettuce.maxTotal=100
lettuce.maxIdle=10
lettuce.minIdle=10
lettuce.timeout=10000
emqx.clean-session=true
emqx.client-id=${spring.application.name}-${random.int[1024,65536]}
emqx.broker=tcp://47.92.234.253:2883
emqx.user-name=super
emqx.password=a123456
emqx.max-inflight=1000
spring.influx.url=http://39.98.246.31:8086
spring.influx.password=Yeejoin@2020
spring.influx.user=root
spring.influx.database=iot_platform
spring.influx.retention_policy=default
spring.influx.retention_policy_time=30d
spring.influx.actions=10000
spring.influx.bufferLimit=20000
knife4j.production=false
knife4j.enable=true
knife4j.basic.enable=true
knife4j.basic.username=admin
knife4j.basic.password=a1234560
management.security.enabled=true
spring.security.user.name=admin
spring.security.user.password=a1234560
fire-rescue=123
mybatis-plus.global-config.db-config.update-strategy=ignored
# user-amos setting : This value is the secretkey for person manage moudle accout password encryption.please don't change it!!!
amos.secret.key=qaz
# if your service can't be access ,you can use this setting , you need change ip as your.
#eureka.instance.prefer-ip-address=true
#eureka.instance.ip-address=172.16.3.122
spring.activemq.broker-url=tcp://47.92.234.253:61616
spring.activemq.user=admin
spring.activemq.password=admin
spring.jms.pub-sub-domain=false
myqueue=amos.privilege.v1.JXIOP.AQSC_FDGL.userBusiness
modifypasswordqueue= amos.privilege.v1.JXIOP.AMOS_ADMIN.modifyPassword
yth.qg.id=1
\ No newline at end of file
......@@ -2,7 +2,7 @@ spring.application.name=AMOS-JXIOP-CT
server.servlet.context-path=/jxiop
server.port=33100
server.uri-encoding=UTF-8
spring.profiles.active=dev
spring.profiles.active=king
spring.jackson.time-zone=GMT+8
spring.jackson.date-format=yyyy-MM-dd HH:mm:ss
logging.config=classpath:logback-${spring.profiles.active}.xml
......
<?xml version="1.0" encoding="UTF-8"?>
<configuration debug="false">
<!-- Define the log file storage location; do not use relative paths in Logback configuration -->
<property name="LOG_HOME" value="log" />
<property name="LOG_PATTERN" value="%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %-50.50logger{50} - %msg [%file:%line] %n" />
<!-- Roll over to a new log file every day -->
<appender name="FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<!-- Log file name pattern -->
<FileNamePattern>${LOG_HOME}/ccs.log.%d{yyyy-MM-dd}.log</FileNamePattern>
<!-- Number of days to keep log files -->
<MaxHistory>7</MaxHistory>
</rollingPolicy>
<encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
<!-- Output format: %d = date, %thread = thread name, %-5level = level left-padded to 5 characters, %msg = log message, %n = newline -->
<pattern>${LOG_PATTERN}</pattern>
</encoder>
<!-- Maximum log file size -->
<triggeringPolicy class="ch.qos.logback.core.rolling.SizeBasedTriggeringPolicy">
<MaxFileSize>30mb</MaxFileSize>
</triggeringPolicy>
</appender>
<!-- Console output -->
<appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
<encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
<!-- Output format: %d = date, %thread = thread name, %-5level = level left-padded to 5 characters, %msg = log message, %n = newline -->
<pattern>${LOG_PATTERN}</pattern>
</encoder>
</appender>
<!-- MyBatis log configuration -->
<logger name="com.apache.ibatis" level="DEBUG"/>
<logger name="java.sql.Connection" level="DEBUG"/>
<logger name="java.sql.Statement" level="DEBUG"/>
<logger name="java.sql.PreparedStatement" level="DEBUG"/>
<logger name="com.baomidou.mybatisplus" level="DEBUG"/>
<logger name="org.springframework" level="DEBUG"/>
<logger name="org.typroject" level="DEBUG"/>
<logger name="com.yeejoin" level="DEBUG"/>
<!-- Log output level -->
<root level="INFO">
<appender-ref ref="FILE" />
<appender-ref ref="STDOUT" />
</root>
</configuration>
......@@ -95,7 +95,7 @@ public class AppletMonitorServiceImpl {
data.put("value",stationMark.get("value"));
double yearPower = planPower.stream().mapToDouble(StationPlan::getValue).sum();
Double monthPower = planPower.stream().filter(e -> e.getMonthly().equals(DateUtils.getMonth(new Date()))).findFirst().get().getValue();
Double monthPower = planPower.stream().filter(e -> e.getMonthly().equals(String.valueOf(DateUtils.getMonth(new Date())))).findFirst().get().getValue();
double dayPower = monthPower / DateUtils.getDaysOfMonth(new Date());
data.put("yearPower",yearPower);
data.put("monthPower",monthPower);
......@@ -162,7 +162,7 @@ public class AppletMonitorServiceImpl {
//计划发电量
List<StationPlan> planPower = stationPlanMapper.getPlanGenByStationIdAndYear(String.valueOf(stationBasic.getSequenceNbr()), String.valueOf(DateUtils.getYear(new Date())));
double yearPower = planPower.stream().mapToDouble(StationPlan::getValue).sum();
Double monthPower = planPower.stream().filter(e -> e.getMonthly().equals(DateUtils.getMonth(new Date()))).findFirst().get().getValue();
Double monthPower = planPower.stream().filter(e -> e.getMonthly().equals(String.valueOf(DateUtils.getMonth(new Date())))).findFirst().get().getValue();
double dayPower = monthPower / DateUtils.getDaysOfMonth(new Date());
data.put("yearPower",yearPower);
data.put("monthPower",monthPower);
......@@ -252,7 +252,7 @@ public class AppletMonitorServiceImpl {
String join = String.join(",", collect1);
List<StationPlan> plansPower = stationPlanMapper.getPlansGenByStationIdAndYear(join, String.valueOf(DateUtils.getYear(new Date())));
double yearPower = plansPower.stream().mapToDouble(StationPlan::getValue).sum();
Double monthPower = plansPower.stream().filter(e -> e.getMonthly().equals(DateUtils.getMonth(new Date()))).mapToDouble(StationPlan::getValue).sum();
Double monthPower = plansPower.stream().filter(e -> e.getMonthly().equals(String.valueOf(DateUtils.getMonth(new Date())))).mapToDouble(StationPlan::getValue).sum();
double dayPower = monthPower / DateUtils.getDaysOfMonth(new Date());
result.put("yearPower",yearPower);
result.put("monthPower",monthPower);
......
......@@ -103,6 +103,13 @@
<artifactId>pooled-jms</artifactId>
<version>1.0.5</version>
</dependency>
<dependency>
<groupId>com.kingbase8</groupId>
<artifactId>kingbase8</artifactId>
<version>8.6.0</version>
</dependency>
</dependencies>
</project>
......@@ -313,7 +313,16 @@
<name>thirdparty</name>
<url>http://113.142.68.105:8081/nexus/content/repositories/thirdparty/</url>
</repository>
</repositories>
<distributionManagement>
<repository>
<id>Releases</id>
<name>Releases</name>
<url>http://113.142.68.105:8081/nexus/content/repositories/releases/</url>
</repository>
</distributionManagement>
<modules>
<module>amos-boot-biz-common</module>
......