Commit d85aac8d authored by wujiang

提交装备

parent 8d30ebd2

Too many changes to show.

To preserve performance only 1000 of 1000+ files are displayed.

package com.yeejoin.amos.boot.biz.common.annotation;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
 * Marks a method for duplicate-submission protection; enforced by the
 * {@code ResubmitCheckAspect} advice.
 *
 * @author DELL
 */
@Target(ElementType.METHOD)
@Retention(RetentionPolicy.RUNTIME)
public @interface ResubmitCheck {
    /**
     * Expiry time in seconds, i.e. the minimum interval before an identical
     * submission is accepted again.
     */
    long expireTime() default 3;
    /**
     * Message reported to the caller when a duplicate submission is rejected.
     */
    String message() default "您的操作过于频繁,请稍后重试";
}
package com.yeejoin.amos.boot.biz.common.aop;
import com.yeejoin.amos.boot.biz.common.annotation.ResubmitCheck;
import com.yeejoin.amos.boot.biz.common.utils.RedisUtils;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.aspectj.lang.JoinPoint;
import org.aspectj.lang.annotation.Aspect;
import org.aspectj.lang.annotation.Before;
import org.aspectj.lang.annotation.Pointcut;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import org.springframework.util.DigestUtils;
import org.typroject.tyboot.core.foundation.context.RequestContext;
import javax.annotation.Resource;
import javax.servlet.http.HttpServletRequest;
/**
 * Aspect backing {@code @ResubmitCheck}: computes an MD5 fingerprint of the
 * caller token plus the method arguments and rejects the call if the same
 * fingerprint was seen within the annotation's expiry window (tracked in Redis).
 *
 * @author DELL
 */
@Aspect
@Component
@Slf4j
public class ResubmitCheckAspect {
    @Resource
    HttpServletRequest request;
    @Autowired
    RedisUtils redisUtils;

    /** Pointcut: any method annotated with {@code @ResubmitCheck}. */
    @Pointcut(value = "@annotation(com.yeejoin.amos.boot.biz.common.annotation.ResubmitCheck)")
    public void submit() {
    }

    /**
     * Runs before the annotated method. Throws a {@link RuntimeException} with
     * the annotation's message when a duplicate submission is detected;
     * otherwise records the fingerprint with the annotation's expiry time.
     *
     * @param joinPoint     the advised invocation (its args feed the fingerprint)
     * @param resubmitCheck the annotation instance carrying message/expiry
     */
    @Before("submit()&&@annotation(resubmitCheck)")
    public void doBefore(JoinPoint joinPoint, ResubmitCheck resubmitCheck) {
        // Prefer the explicit "token" header, fall back to the framework token.
        String token = !StringUtils.isEmpty(request.getHeader("token")) ? request.getHeader("token") : RequestContext.getToken();
        StringBuilder md5Builder = new StringBuilder(StringUtils.isEmpty(token) ? "unknown" : token);
        if (joinPoint.getArgs() != null) {
            for (Object obj : joinPoint.getArgs()) {
                // Fix: the original called obj.toString() directly and threw an
                // NPE whenever an advised method received a null argument.
                md5Builder.append(obj == null ? "null" : obj.toString());
            }
        }
        // NOTE(review): getBytes() uses the platform charset; consider
        // StandardCharsets.UTF_8 for deterministic fingerprints across JVMs.
        String md5String = DigestUtils.md5DigestAsHex(md5Builder.toString().getBytes());
        Object cache = redisUtils.get(md5String);
        if (cache != null) {
            throw new RuntimeException(resubmitCheck.message());
        }
        // Assumes RedisUtils.set takes the TTL in seconds — TODO confirm.
        redisUtils.set(md5String, 1, resubmitCheck.expireTime());
    }
}
package com.yeejoin.amos.boot.biz.common.utils;
public class SnowFlakeGenerateIdWorker {
/**
* 开始时间截
*/
private final long twepoch = 1420041600000L;
/**
* 机器id所占的位数
*/
private final long workerIdBits = 5L;
/**
* 数据标识id所占的位数
*/
private final long datacenterIdBits = 5L;
/**
* 支持的最大机器id,结果是31 (这个移位算法可以很快的计算出几位二进制数所能表示的最大十进制数)
*/
private final long maxWorkerId = -1L ^ (-1L << workerIdBits);
/**
* 支持的最大数据标识id,结果是31
*/
private final long maxDatacenterId = -1L ^ (-1L << datacenterIdBits);
/**
* 序列在id中占的位数
*/
private final long sequenceBits = 12L;
/**
* 机器ID向左移12位
*/
private final long workerIdShift = sequenceBits;
/**
* 数据标识id向左移17位(12+5)
*/
private final long datacenterIdShift = sequenceBits + workerIdBits;
/**
* 时间截向左移22位(5+5+12)
*/
private final long timestampLeftShift = sequenceBits + workerIdBits + datacenterIdBits;
/**
* 生成序列的掩码,这里为4095 (0b111111111111=0xfff=4095)
*/
private final long sequenceMask = -1L ^ (-1L << sequenceBits);
/**
* 工作机器ID(0~31)
*/
private long workerId;
/**
* 数据中心ID(0~31)
*/
private long datacenterId;
/**
* 毫秒内序列(0~4095)
*/
private long sequence = 0L;
/**
* 上次生成ID的时间截
*/
private long lastTimestamp = -1L;
/**
* 构造函数
*
* @param workerId 工作ID (0~31)
* @param datacenterId 数据中心ID (0~31)
*/
public SnowFlakeGenerateIdWorker(long workerId, long datacenterId) {
if (workerId > maxWorkerId || workerId < 0) {
throw new IllegalArgumentException(String.format("worker Id can't be greater than %d or less than 0", maxWorkerId));
}
if (datacenterId > maxDatacenterId || datacenterId < 0) {
throw new IllegalArgumentException(String.format("datacenter Id can't be greater than %d or less than 0", maxDatacenterId));
}
this.workerId = workerId;
this.datacenterId = datacenterId;
}
/**
* 获得下一个ID (该方法是线程安全的)
*
* @return long
*/
public synchronized long nextId() {
long timestamp = timeGen();
timestamp = generateId(timestamp);
return ((timestamp - twepoch) << timestampLeftShift) //
| (datacenterId << datacenterIdShift) //
| (workerId << workerIdShift) //
| sequence;
}
private long generateId(long timestamp){
//如果当前时间小于上一次ID生成的时间戳,说明系统时钟回退过这个时候应当抛出异常
if(timestamp < lastTimestamp){
throw new RuntimeException(
String.format("Clock moved backwards. Refusing to generate id for %d milliseconds", lastTimestamp - timestamp));
}
//如果是同一时间生成的,则进行毫秒内序列
if(lastTimestamp == timestamp)
{
sequence = (sequence + 1) & sequenceMask;
//毫秒内序列溢出
if(sequence == 0)
//阻塞到下一个毫秒,获得新的时间戳
timestamp = tilNextMillis(lastTimestamp);
}
else//时间戳改变,毫秒内序列重置
{
sequence = 0L;
}
//上次生成ID的时间截
lastTimestamp = timestamp;
return timestamp;
}
/**
*获得下一个ID (string)
**/
public synchronized String generateNextId() {
long timestamp = timeGen();
timestamp = generateId(timestamp);
//移位并通过或运算拼到一起组成64位的ID
return String.valueOf(((timestamp - twepoch) << timestampLeftShift)
| (datacenterId << datacenterIdShift)
| (workerId << workerIdShift)
| sequence);
}
/**
* 阻塞到下一个毫秒,直到获得新的时间戳
*
* @param lastTimestamp 上次生成ID的时间截
* @return 当前时间戳
*/
protected long tilNextMillis(long lastTimestamp) {
long timestamp = timeGen();
while (timestamp <= lastTimestamp) {
timestamp = timeGen();
}
return timestamp;
}
/**
* 返回以毫秒为单位的当前时间
*
* @return 当前时间(毫秒)
*/
protected long timeGen() {
return System.currentTimeMillis();
}
}
This diff is collapsed.
<?xml version="1.0" encoding="UTF-8"?>
<!-- Maven module: data-layer service for equipment (amos-boot-data-equip). -->
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>
    <parent>
        <artifactId>amos-boot-data</artifactId>
        <groupId>com.amosframework.boot</groupId>
        <version>1.0.0</version>
    </parent>
    <artifactId>amos-boot-data-equip</artifactId>
    <name>amos-boot-data-equip</name>
    <dependencies>
        <dependency>
            <groupId>org.springframework.cloud</groupId>
            <artifactId>spring-cloud-starter-netflix-eureka-client</artifactId>
        </dependency>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-actuator</artifactId>
        </dependency>
        <dependency>
            <groupId>org.springframework.kafka</groupId>
            <artifactId>spring-kafka</artifactId>
        </dependency>
        <!-- NOTE(review): pinned to 1.1.23 while the other tyboot modules use
             ${tyboot-version} — confirm this divergence is intentional. -->
        <dependency>
            <groupId>org.typroject</groupId>
            <artifactId>tyboot-component-emq</artifactId>
            <version>1.1.23</version>
        </dependency>
        <dependency>
            <groupId>org.typroject</groupId>
            <artifactId>tyboot-core-restful</artifactId>
            <version>${tyboot-version}</version>
            <exclusions>
                <exclusion>
                    <groupId>org.typroject</groupId>
                    <artifactId>*</artifactId>
                </exclusion>
            </exclusions>
        </dependency>
        <dependency>
            <groupId>org.typroject</groupId>
            <artifactId>tyboot-core-auth</artifactId>
            <version>${tyboot-version}</version>
            <exclusions>
                <exclusion>
                    <groupId>org.typroject</groupId>
                    <artifactId>*</artifactId>
                </exclusion>
            </exclusions>
        </dependency>
        <dependency>
            <groupId>org.typroject</groupId>
            <artifactId>tyboot-core-rdbms</artifactId>
            <version>${tyboot-version}</version>
            <exclusions>
                <exclusion>
                    <groupId>org.typroject</groupId>
                    <artifactId>*</artifactId>
                </exclusion>
            </exclusions>
        </dependency>
        <!-- NOTE(review): spring-boot-starter-redis was superseded by
             spring-boot-starter-data-redis; 1.4.5.RELEASE is a legacy artifact.
             Confirm compatibility with the parent's Spring Boot version before
             upgrading. -->
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-redis</artifactId>
            <version>1.4.5.RELEASE</version>
        </dependency>
        <dependency>
            <groupId>net.sf.json-lib</groupId>
            <artifactId>json-lib</artifactId>
            <version>2.4</version>
            <classifier>jdk15</classifier>
        </dependency>
        <dependency>
            <groupId>org.springframework.data</groupId>
            <artifactId>spring-data-elasticsearch</artifactId>
            <scope>compile</scope>
        </dependency>
        <!-- TDengine JDBC driver used by the tdengine datasource config. -->
        <dependency>
            <groupId>com.taosdata.jdbc</groupId>
            <artifactId>taos-jdbcdriver</artifactId>
            <version>3.0.0</version>
        </dependency>
    </dependencies>
    <build>
        <plugins>
            <plugin>
                <groupId>org.springframework.boot</groupId>
                <artifactId>spring-boot-maven-plugin</artifactId>
            </plugin>
        </plugins>
    </build>
</project>
package com.yeejoin.equip;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.mybatis.spring.annotation.MapperScan;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.boot.web.servlet.ServletComponentScan;
import org.springframework.cloud.client.discovery.EnableDiscoveryClient;
import org.springframework.cloud.netflix.eureka.EnableEurekaClient;
import org.springframework.cloud.openfeign.EnableFeignClients;
import org.springframework.context.ConfigurableApplicationContext;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.core.env.Environment;
import org.springframework.scheduling.annotation.EnableAsync;
import org.springframework.scheduling.annotation.EnableScheduling;
import org.springframework.transaction.annotation.EnableTransactionManagement;
import org.typroject.tyboot.core.restful.exception.GlobalExceptionHandler;
import java.net.InetAddress;
/**
 * Spring Boot entry point for the amos-boot-data-equip service.
 *
 * @author LiuLin
 * @date 2023/6/25
 * @apiNote Application bootstrap class
 */
@SpringBootApplication
@EnableTransactionManagement
@EnableConfigurationProperties
@ServletComponentScan
@EnableDiscoveryClient
@EnableFeignClients
@EnableAsync
@EnableEurekaClient
@EnableScheduling
@MapperScan(value = { "org.typroject.tyboot.*.*.face.orm.dao", "com.yeejoin.amos.api.*.face.orm.dao", "org.typroject.tyboot.face.*.orm.dao*",
        "com.yeejoin.equip.mapper","com.yeejoin.amos.boot.biz.common.dao.mapper" })
@ComponentScan({ "org.typroject", "com.yeejoin.equip","com.yeejoin.amos" })
public class EquipDataApplication {
    private static final Logger logger = LogManager.getLogger(EquipDataApplication.class);

    /**
     * Boots the application and logs the Swagger URL derived from the
     * environment's server.port / context-path properties.
     */
    public static void main(String[] args) throws Exception {
        ConfigurableApplicationContext context = SpringApplication.run(EquipDataApplication.class, args);
        // NOTE(review): presumably makes the global exception handler always
        // respond with HTTP 200 (errors wrapped in the body) — confirm against
        // tyboot's GlobalExceptionHandler documentation.
        GlobalExceptionHandler.setAlwaysOk(true);
        Environment env = context.getEnvironment();
        String ip = InetAddress.getLocalHost().getHostAddress();
        String port = env.getProperty("server.port");
        // May be null if server.servlet.context-path is unset; the banner would
        // then print "null" — acceptable for a startup log line.
        String path = env.getProperty("server.servlet.context-path");
        logger.info("\n----------------------------------------------------------\n\t"
                + "Application Amos-Biz-Boot is running! Access URLs:\n\t" + "Swagger文档: \thttp://" + ip + ":" + port
                + path + "/doc.html\n" + "----------------------------------------------------------");
    }
}
package com.yeejoin.equip.config;
import lombok.*;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.stereotype.Component;
/**
 * ElasticSearch connection settings, bound from the {@code elasticsearch.*}
 * properties; only registered when {@code elasticsearch.address} is set.
 */
@Data
@AllArgsConstructor
@NoArgsConstructor
@Component
@ConfigurationProperties(prefix = "elasticsearch")
@ConditionalOnProperty("elasticsearch.address")
public class ElasticSearchConfig {
    /**
     * ES connection address; multiple addresses are comma-separated.
     */
    private String address;
    /**
     * ES username.
     */
    private String username;
    /**
     * ES password.
     */
    private String password;
    /**
     * Protocol scheme (e.g. http/https).
     */
    private String scheme;
    /**
     * Connect timeout.
     */
    private int connectTimeout;
    /**
     * Socket timeout.
     */
    private int socketTimeout;
    /**
     * Timeout for obtaining a connection from the pool.
     */
    private int connectionRequestTimeout;
    /**
     * Maximum total connections.
     */
    private int maxConnectNum;
    /**
     * Maximum connections per route.
     */
    private int maxConnectPerRoute;
}
\ No newline at end of file
package com.yeejoin.equip.config;
import com.alibaba.fastjson.JSONObject;
import com.yeejoin.equip.entity.EquipmentIndexVO;
import com.yeejoin.equip.mapper.mysql.EquipmentSpecificIndexMapper;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.CommandLineRunner;
import org.springframework.stereotype.Component;
import org.springframework.transaction.annotation.Transactional;
import redis.clients.jedis.Jedis;
import redis.clients.jedis.Pipeline;
import javax.annotation.Resource;
import java.util.List;
/**
 * Preloads all equipment metric definitions into Redis at application startup.
 * Keys are {@code <indexAddress>_<gatewayId>}, values the JSON-serialized VO.
 *
 * @author LiuLin
 * @date 2023/6/15
 */
@Slf4j
@Component
@Transactional(transactionManager = "mysqlTransactionManager")
public class EquipmentIndexCacheRunner implements CommandLineRunner {
    @Resource
    private EquipmentSpecificIndexMapper equipmentSpecificIndexMapper;
    @Value("${spring.redis.host}")
    private String redisHost;
    @Value("${spring.redis.port}")
    private Integer redisPort;
    @Value("${spring.redis.password}")
    private String redisPassword;

    @Override
    public void run(String... args) throws Exception {
        // Fix: try-with-resources — the original never closed the Jedis
        // connection, leaking a socket on every startup.
        try (Jedis jedis = new Jedis(redisHost, redisPort)) {
            jedis.auth(redisPassword);
            // Pipeline batches all SETs into one round trip set.
            Pipeline pipeline = jedis.pipelined();
            List<EquipmentIndexVO> equipSpecificIndexList = equipmentSpecificIndexMapper.getEquipSpecificIndexList(null);
            equipSpecificIndexList.forEach(vo -> {
                String key = vo.getIndexAddress() + "_" + vo.getGatewayId();
                pipeline.set(key, JSONObject.toJSONString(vo));
            });
            pipeline.syncAndReturnAll();
        }
        log.info(">>>>>>>>>>>>>>>>服务启动执行Redis缓存预加载指标数据完成!>>>>>>>>>>>>>>>>");
    }
}
\ No newline at end of file
package com.yeejoin.equip.config;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.annotation.EnableKafka;
import java.util.Properties;
/**
 * Kafka consumer configuration: exposes a raw {@link Properties} bean used to
 * construct consumers manually.
 *
 * @author LiuLin
 * @date 2023/6/25
 */
@Configuration
@EnableKafka
public class KafkaConsumerConfig {
    @Value("${spring.kafka.bootstrap-servers}")
    private String kafkaBootstrapServers;
    @Value("${spring.kafka.consumer.group-id}")
    private String kafkaGroupId;
    @Value("${spring.kafka.consumer.enable-auto-commit}")
    private boolean enableAutoCommit;

    /**
     * Consumer properties bean ("consumerConfig").
     *
     * @return fully-populated consumer {@link Properties}
     */
    @Bean("consumerConfig")
    public Properties consumerConfigs() {
        Properties props = new Properties();
        // Fix: use the consumer-side constant (same key string, but the original
        // referenced ProducerConfig inside a consumer configuration).
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, kafkaBootstrapServers);
        // Fix: honor the injected spring.kafka.consumer.enable-auto-commit value;
        // the original hard-coded true and silently ignored the property.
        props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, enableAutoCommit);
        // If the broker sees no heartbeat within this window it removes the
        // consumer from the group and triggers a rebalance (30s here).
        props.put(ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG, 30000);
        // Fetch sizes per poll; these matter most when consuming over a public network.
        props.put(ConsumerConfig.MAX_PARTITION_FETCH_BYTES_CONFIG, 32000);
        props.put(ConsumerConfig.FETCH_MAX_BYTES_CONFIG, 32000);
        // Max records per poll: keep small enough to finish processing before the
        // next poll, otherwise a rebalance may be triggered.
        props.put(ConsumerConfig.MAX_POLL_RECORDS_CONFIG, 30);
        // Message key/value deserialization.
        props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringDeserializer");
        props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringDeserializer");
        // Consumers sharing this group id load-balance partitions among themselves.
        props.put(ConsumerConfig.GROUP_ID_CONFIG, kafkaGroupId);
        return props;
    }
}
package com.yeejoin.equip.config;
import lombok.Data;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.context.annotation.Configuration;
/**
* @author LiuLin
* @date 2023年08月18日 11:08
*/
@Data
@ConfigurationProperties(prefix = "emqx")
@Configuration
public class MqttPropertyConfig {
private String broker;
private String clientUserName;
private String clientPassword;
private String clientId;
private Boolean cleanSession;
private String bizClientId;
private String[] bizTopic;
private int maxInflight;
private int keepAliveInterval;
}
package com.yeejoin.equip.config;
import org.apache.ibatis.session.SqlSessionFactory;
import org.mybatis.spring.SqlSessionFactoryBean;
import org.mybatis.spring.SqlSessionTemplate;
import org.mybatis.spring.annotation.MapperScan;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.boot.jdbc.DataSourceBuilder;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Primary;
import org.springframework.core.io.support.PathMatchingResourcePatternResolver;
import org.springframework.jdbc.datasource.DataSourceTransactionManager;
import javax.sql.DataSource;
/**
* @author LiuLin
* @date 2023年09月15日 10:42
*/
@Configuration
@MapperScan(basePackages = {"com.yeejoin.equip.mapper.mysql"}, sqlSessionTemplateRef = "mysqlSqlSessionTemplate")
public class MysqlServerConfig {
@Bean(name = "mysqlDataSource")
@ConfigurationProperties(prefix = "spring.datasource.mysql-server")
@Primary
public DataSource mysqlDataSource() {
return DataSourceBuilder.create().build();
}
@Bean(name = "mysqlSqlSessionFactory")
@Primary
public SqlSessionFactory mysqlSqlSessionFactory(@Qualifier("mysqlDataSource") DataSource dataSource) throws Exception {
SqlSessionFactoryBean bean = new SqlSessionFactoryBean();
bean.setDataSource(dataSource);
bean.setMapperLocations(new PathMatchingResourcePatternResolver().getResources("classpath:mapper/mysql/*.xml"));
return bean.getObject();
}
@Bean(name = "mysqlTransactionManager")
@Primary
public DataSourceTransactionManager mysqlTransactionManager(@Qualifier("mysqlDataSource") DataSource dataSource) {
return new DataSourceTransactionManager(dataSource);
}
@Bean(name = "mysqlSqlSessionTemplate")
@Primary
public SqlSessionTemplate mysqlSqlSessionTemplate(@Qualifier("mysqlSqlSessionFactory") SqlSessionFactory sqlSessionFactory) throws Exception {
return new SqlSessionTemplate(sqlSessionFactory);
}
}
package com.yeejoin.equip.config;
import com.fasterxml.jackson.annotation.JsonAutoDetect;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
import com.fasterxml.jackson.annotation.PropertyAccessor;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.SerializationFeature;
import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.redis.connection.RedisConnectionFactory;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.data.redis.serializer.Jackson2JsonRedisSerializer;
import org.springframework.data.redis.serializer.StringRedisSerializer;
/**
 * RedisTemplate configuration: String keys and hash keys, String values,
 * Jackson-JSON hash values.
 *
 * @author duanwei
 */
@Configuration
public class RedisConfig {
    /**
     * Builds the shared {@code RedisTemplate<String, Object>}.
     *
     * @param factory connection factory supplied by Spring
     * @return initialized template
     */
    @Bean
    public RedisTemplate<String, Object> redisTemplate(RedisConnectionFactory factory) {
        RedisTemplate<String, Object> template = new RedisTemplate<>();
        template.setConnectionFactory(factory);
        Jackson2JsonRedisSerializer<Object> j2jrs = new Jackson2JsonRedisSerializer<>(Object.class);
        ObjectMapper om = new ObjectMapper();
        om.setVisibility(PropertyAccessor.ALL, JsonAutoDetect.Visibility.ANY);
        // Let jackson2 serialize java.time types (LocalDateTime etc.) as ISO strings.
        om.disable(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS);
        om.registerModule(new JavaTimeModule());
        // NOTE(review): enableDefaultTyping is deprecated and unsafe against
        // untrusted input (polymorphic deserialization gadget attacks); migrate to
        // activateDefaultTyping(...) with a PolymorphicTypeValidator once the
        // Jackson version in use supports it.
        om.enableDefaultTyping(ObjectMapper.DefaultTyping.NON_FINAL, JsonTypeInfo.As.PROPERTY);
        j2jrs.setObjectMapper(om);
        // Hash values are stored as JSON; plain values use the String serializer below.
        template.setHashValueSerializer(j2jrs);
        StringRedisSerializer srs = new StringRedisSerializer();
        template.setKeySerializer(srs);
        template.setHashKeySerializer(srs);
        template.setValueSerializer(srs);
        // Fix: initialize only after ALL serializers are configured; the original
        // called afterPropertiesSet() before setValueSerializer().
        template.afterPropertiesSet();
        return template;
    }
}
package com.yeejoin.equip.config;
import org.apache.ibatis.session.SqlSessionFactory;
import org.mybatis.spring.SqlSessionFactoryBean;
import org.mybatis.spring.SqlSessionTemplate;
import org.mybatis.spring.annotation.MapperScan;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.boot.jdbc.DataSourceBuilder;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.io.support.PathMatchingResourcePatternResolver;
import org.springframework.jdbc.datasource.DataSourceTransactionManager;
import javax.sql.DataSource;
/**
 * TDEngine datasource and MyBatis wiring (secondary, non-primary datasource).
 * Mappers under {@code com.yeejoin.equip.mapper.tdengine} are bound to the
 * "tdEngineSqlSessionTemplate".
 *
 * @author LiuLin
 * @date 2023-09-15
 */
@Configuration
@MapperScan(basePackages = {"com.yeejoin.equip.mapper.tdengine"}, sqlSessionTemplateRef = "tdEngineSqlSessionTemplate")
public class TDEngineServerConfig {
    /** Datasource bound from {@code spring.datasource.td-engine-server.*}. */
    @Bean(name = "tdEngineDataSource")
    @ConfigurationProperties(prefix = "spring.datasource.td-engine-server")
    public DataSource tdEngineDataSource() {
        return DataSourceBuilder.create().build();
    }

    /** Session factory over the TDEngine datasource; XML mappers in classpath:mapper/tdengine/. */
    @Bean(name = "tdEngineSqlSessionFactory")
    public SqlSessionFactory tdEngineSqlSessionFactory(@Qualifier("tdEngineDataSource") DataSource dataSource) throws Exception {
        SqlSessionFactoryBean bean = new SqlSessionFactoryBean();
        bean.setDataSource(dataSource);
        bean.setMapperLocations(new PathMatchingResourcePatternResolver().getResources("classpath:mapper/tdengine/*.xml"));
        return bean.getObject();
    }

    /** Transaction manager for the TDEngine datasource. */
    @Bean(name = "tdEngineTransactionManager")
    public DataSourceTransactionManager tdEngineTransactionManager(@Qualifier("tdEngineDataSource") DataSource dataSource) {
        return new DataSourceTransactionManager(dataSource);
    }

    /** Template shared by the scanned TDEngine mappers. */
    @Bean(name = "tdEngineSqlSessionTemplate")
    public SqlSessionTemplate tdEngineSqlSessionTemplate(@Qualifier("tdEngineSqlSessionFactory") SqlSessionFactory sqlSessionFactory) throws Exception {
        return new SqlSessionTemplate(sqlSessionFactory);
    }
}
package com.yeejoin.equip.entity;
import com.baomidou.mybatisplus.annotation.FieldFill;
import com.baomidou.mybatisplus.annotation.IdType;
import com.baomidou.mybatisplus.annotation.TableField;
import com.baomidou.mybatisplus.annotation.TableId;
import com.fasterxml.jackson.databind.annotation.JsonSerialize;
import com.fasterxml.jackson.databind.ser.std.ToStringSerializer;
import lombok.Data;
import lombok.experimental.Accessors;
import java.io.Serializable;
import java.util.Date;
/**
 * Common persistence base: snowflake id plus creation date.
 *
 * @author duanwei
 */
@Data
@Accessors(chain = true)
public class BaseEntity implements Serializable {
    private static final long serialVersionUID = -5464322936854328207L;
    // Snowflake-style id assigned by MyBatis-Plus (ID_WORKER); serialized as a
    // String so 64-bit values survive JavaScript's number precision.
    @TableId(type = IdType.ID_WORKER)
    @JsonSerialize(using = ToStringSerializer.class)
    private Long id;
    /**
     * Filled on insert ({@code FieldFill.INSERT}). (The original comment said
     * "insert and update", but the fill strategy here is insert-only.)
     */
    @TableField(value = "create_date", fill = FieldFill.INSERT)
    private Date createDate;
}
package com.yeejoin.equip.entity;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import lombok.ToString;
import java.util.Date;
/**
 * Simple value holder for a metric sample (raw value, numeric value, label,
 * unit, timestamp).
 *
 * @author LiuLin
 * @date 2023-10-11
 */
@Data
@ToString
@AllArgsConstructor
@NoArgsConstructor
public class Book {
    // Raw value as reported.
    private String value;
    // Numeric form of the value, when applicable.
    private Float valueF;
    // Human-readable label for the value.
    private String valueLabel;
    // Measurement unit.
    private String unit;
    // Sample creation time.
    private Date createdTime;
}
package com.yeejoin.equip.entity;
import io.github.classgraph.json.Id;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import org.springframework.data.elasticsearch.annotations.DateFormat;
import org.springframework.data.elasticsearch.annotations.Document;
import org.springframework.data.elasticsearch.annotations.Field;
import org.springframework.data.elasticsearch.annotations.FieldType;
import java.util.Date;
/**
 * Elasticsearch document for the {@code jxiop_equipments} index: one record per
 * equipment metric, combining identity fields and the latest reported value.
 *
 * @author LiuLin
 * @createDate 2023/09/18
 */
@Data
@AllArgsConstructor
@NoArgsConstructor
@Document(indexName = "jxiop_equipments")
public class ESEquipments {
    @Id
    private String id;
    @Field(type = FieldType.Text, index = false)
    private String address;
    @Field(type = FieldType.Text)
    private String dataType;
    @Field(type = FieldType.Text)
    private String equipmentSpecificName;
    @Field(type = FieldType.Keyword)
    private String gatewayId;
    @Field(type = FieldType.Text)
    private String isAlarm;
    //@Field(type = FieldType.Date, format = DateFormat.basic_date_time, index = false)
    //private Date createdTime;
    @Field(type = FieldType.Text , index = false)
    private String unit;
    @Field(type = FieldType.Text)
    private String value;
    @Field(type = FieldType.Float, index = false)
    private Float valueF;
    @Field(type = FieldType.Text)
    private String valueLabel;
    @Field(type = FieldType.Text , index = false)
    private String traceId;
    @Field(type = FieldType.Keyword )
    private String equipmentIndexName;
    @Field(type = FieldType.Keyword )
    private String equipmentNumber;
    @Field(type = FieldType.Text)
    private String frontModule;
    @Field(type = FieldType.Text)
    private String systemType;
    @Field(type = FieldType.Text , index = false)
    private String pictureName;
    @Field(type = FieldType.Text)
    private String displayName;
}
package com.yeejoin.equip.entity;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import lombok.ToString;
import java.io.Serializable;
import java.util.Date;
/**
 * View object describing one equipment performance metric definition; cached in
 * Redis at startup (see EquipmentIndexCacheRunner) under the key
 * {@code <indexAddress>_<gatewayId>}.
 */
@Data
@ToString
@AllArgsConstructor
@NoArgsConstructor
@ApiModel(value = "性能指标详情返回vo实体", description = "性能指标详情返回vo实体")
public class EquipmentIndexVO implements Serializable {
    @ApiModelProperty(value = "id")
    private Long id;
    @ApiModelProperty(value = "值")
    private String value;
    @ApiModelProperty(value = "equipment_id")
    private String equipmentId;
    @ApiModelProperty(value = "性能指标名称")
    private String perfQuotaName;
    @ApiModelProperty(value = "性能指标id")
    private String perfQuotaDefinitionId;
    @ApiModelProperty(value = "数量单位名称")
    private String unitName;
    @ApiModelProperty(value = "否物联指标")
    private Integer isIot;
    @ApiModelProperty(value = "物联指标")
    private String typeName;
    @ApiModelProperty(value = "物联指标ID")
    private String typeCode;
    @ApiModelProperty(value = "分类名称")
    private String groupName;
    @ApiModelProperty(value = "指标原始id,从iot平台接口获取")
    private String indexId;
    @ApiModelProperty(value = "性能指标")
    private String perfQuotaStr;
    @ApiModelProperty(value = "是否是核心参数")
    private Boolean isImportentParameter;
    @ApiModelProperty(value = "排序")
    private Integer sortNum;
    @ApiModelProperty(value = "類型")
    private Integer type;
    @ApiModelProperty(value = "物联nameKey")
    private String nameKey;
    @ApiModelProperty(value = "创建日期")
    private Date createDate;
    @ApiModelProperty(value = "更新日期")
    private Date updateDate;
    @ApiModelProperty(value = "是否支持趋势查看")
    private Integer isTrend;
    @ApiModelProperty(value = "是否告警")
    private int isAlarm;
    @ApiModelProperty(value = "指标枚举")
    private String valueEnum;
    @ApiModelProperty(value = "信号的索引键key,用于唯一索引信号")
    private String indexAddress;
    @ApiModelProperty(value = "测点类型,analog/state")
    private String dataType;
    @ApiModelProperty(value = "网关标识")
    private String gatewayId;
    @ApiModelProperty(value = "装备名称")
    private String equipmentSpecificName;
    @ApiModelProperty(value = "装备指标名称")
    private String equipmentIndexName;
}
package com.yeejoin.equip.entity;
import com.baomidou.mybatisplus.annotation.TableField;
import com.baomidou.mybatisplus.annotation.TableName;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;
import lombok.EqualsAndHashCode;
import java.util.Date;
/**
 * MyBatis-Plus entity for table {@code wl_equipment_specific_index}: the live
 * metric value of one specific equipment unit, plus many non-persistent
 * ({@code exist = false}) fields populated by joins/queries.
 *
 * @author ZeHua Li
 * @date 2020/10/30 11:12
 * @since v2.0
 */
@Data
@EqualsAndHashCode(callSuper = true)
@TableName("wl_equipment_specific_index")
@ApiModel(value = "EquipmentSpecificIndex对象", description = "性能指标参数")
public class EquipmentSpecificIndex extends BaseEntity {
    @ApiModelProperty(value = "单个设备id")
    @TableField("equipment_specific_id")
    private Long equipmentSpecificId;
    @ApiModelProperty(value = "值")
    @TableField("value")
    private String value;
    @ApiModelProperty(value = "值说明")
    @TableField("value_label")
    private String valueLabel;
    @ApiModelProperty(value = "性能指标对应id")
    @TableField("equipment_index_id")
    private Long equipmentIndexId;
    @ApiModelProperty(value = "更新时间")
    @TableField("update_date")
    private Date updateDate;
    @ApiModelProperty(value = "装备名称(冗余字段)")
    @TableField("equipment_specific_name")
    private String equipmentSpecificName;
    @ApiModelProperty(value = "指标名称(冗余字段)")
    @TableField("equipment_index_name")
    private String equipmentIndexName;
    @ApiModelProperty(value = "指标key(冗余字段)")
    @TableField("equipment_index_key")
    private String equipmentIndexKey;
    /**
     * Display color for the emergency level.
     */
    @TableField(value = "emergency_level_color")
    private String emergencyLevelColor;
    /**
     * Whether alarming: 0 - no; 1 - yes.
     */
    @TableField(value = "is_alarm")
    private Integer isAlarm;
    /**
     * Emergency level enum (1: critical, 2: severe, 3: minor, 4: normal,
     * 5: invalid, 6: standby, 7: other).
     */
    @TableField(value = "emergency_level")
    private String emergencyLevel;
    /**
     * Emergency level description.
     */
    @TableField(value = "emergency_level_describe")
    private String emergencyLevelDescribe;
    @ApiModelProperty(value = "iot数据上报唯一id")
    private String traceId;
    // Fields below marked exist = false are not columns of this table; they are
    // filled by join queries or application code.
    @TableField(exist = false)
    private String nameKey;
    @TableField(exist = false)
    private String code;
    @TableField(exist = false)
    private String iotCode;
    @TableField(exist = false)
    private String type;
    @TableField(exist = false)
    private String orgCode;
    @TableField(exist = false)
    private String typeCode;
    @TableField(exist = false)
    private String typeName;
    @TableField(exist = false)
    private String indexName;
    @TableField(exist = false)
    private String equipmentSpecificIndexName;
    @TableField(exist = false)
    private String indexUnitName;
    @TableField(exist = false)
    private String qrCode;
    @TableField(exist = false)
    private String equipmentCode;
    @TableField(exist = false)
    private Long equipmentId;
    @TableField(exist = false)
    private Long equipmentDetailId;
    @TableField(exist = false)
    private String alamReason;
    @ApiModelProperty(value = "设备CODE")
    @TableField(exist = false)
    private String equipmentSpecificCode;
    @ApiModelProperty(value = "设备所属系统ids")
    @TableField(exist = false)
    private String systemId;
    @ApiModelProperty(value = "详细位置")
    @TableField(exist = false)
    private String location;
    @ApiModelProperty(value = "所属建筑id")
    @TableField(exist = false)
    private String buildId;
    @ApiModelProperty(value = "是否遥测")
    @TableField(exist = false)
    private Integer isTrend;
    @ApiModelProperty(value = "绑定视屏数量")
    @TableField(exist = false)
    private int num;
    /**
     * Organization/department name.
     */
    @TableField(exist = false)
    private String bizOrgName;
    /**
     * Organization code.
     */
    @TableField(exist = false)
    private String bizOrgCode;
    @ApiModelProperty(value = "装备系统code")
    @TableField(exist = false)
    private String specificCode;
    @ApiModelProperty(value = "装备定义名称")
    @TableField(exist = false)
    private String equipmentName;
    @TableField(exist = false)
    private String equipmentType;
    // Used by the DC-center front end for refresh/display only.
    @TableField(exist = false)
    private String UUID;
    @TableField(value = "unit")
    private String unit;
    @ApiModelProperty(value = "信号的索引键key,用于唯一索引信号")
    @TableField(value = "index_address")
    private String indexAddress;
    @ApiModelProperty(value = "品质,0为有效,1为无效")
    @TableField(value = "quality")
    private String quality;
    @ApiModelProperty(value = "测点类型,analog/state")
    @TableField(value = "data_type")
    private String dataType;
    @ApiModelProperty(value = "时间")
    @TableField(value = "time_stamp")
    private String timeStamp;
    /**
     * Metric value enum.
     */
    @ApiModelProperty(value = "指标值枚举")
    @TableField("value_enum")
    private String valueEnum;
    @ApiModelProperty(value = "网关标识")
    @TableField(value = "gateway_id")
    private String gatewayId;
}
package com.yeejoin.equip.entity;
import lombok.Getter;
/**
 * Lightweight id-plus-payload wrapper used when shipping documents to
 * Elasticsearch.
 *
 * @param <T> payload type
 * @author LiuLin
 * @date 2023-10-11
 */
@Getter
public final class EsEntity<T> {
    private String id;
    private T data;

    /** No-arg constructor for frameworks and serializers. */
    public EsEntity() {
    }

    /**
     * Convenience constructor.
     *
     * @param id   document id
     * @param data payload
     */
    public EsEntity(String id, T data) {
        this.id = id;
        this.data = data;
    }

    public void setId(String id) {
        this.id = id;
    }

    public void setData(T data) {
        this.data = data;
    }
}
package com.yeejoin.equip.entity;
import com.yeejoin.equip.mqtt.message.MqttTopicEnum;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import java.util.Date;
/**
 * Metric data record flowing through the pipeline (one sample per instance).
 *
 * @author LiuLin
 * @date 2023/09/14 14:30
 */
@Data
@AllArgsConstructor
@NoArgsConstructor
public class IndicatorData {
    private String address;
    private String gatewayId;
    // Composite equipment index key — presumably address + gateway; confirm with producer.
    private String equipmentsIdx;
    // Point type, analog/state (mirrors the entity field of the same name).
    private String dataType;
    private String isAlarm;
    private String equipmentSpecificName;
    private String equipmentIndexName;
    private String valueLabel;
    // Raw value and its numeric form.
    private String value;
    private float valueF;
    private String unit;
    private String signalType;
    private Date createdTime;
    // Topic this sample was received on / should be routed to.
    private MqttTopicEnum mqttTopicEnum;
}
package com.yeejoin.equip.eqmx;
import com.alibaba.fastjson.JSON;
import com.yeejoin.equip.kafka.KafkaProducerService;
import lombok.extern.slf4j.Slf4j;
import net.sf.json.JSONObject;
import org.eclipse.paho.client.mqttv3.MqttMessage;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;
import org.typroject.tyboot.component.emq.EmqKeeper;
import org.typroject.tyboot.component.emq.EmqxListener;
import javax.annotation.PostConstruct;
import java.util.concurrent.*;
/**
* @author LiuLin
* @date 2023/6/25
* @apiNote Emq消息转发Kafka
*/
@Slf4j
@Component
public class EmqMessageService extends EmqxListener {
@Autowired
protected EmqKeeper emqKeeper;
@Autowired
protected KafkaProducerService kafkaProducerService;
@Value("${emq.topic}")
private String emqTopic;
@Value("${kafka.topic}")
private String kafkaTopic;
ExecutorService service = Executors.newFixedThreadPool(10);
private static final BlockingQueue<JSONObject> blockingQueue = new LinkedBlockingQueue<>();
@PostConstruct
void init() throws Exception {
service.execute(new NumberThread());
emqKeeper.subscript(emqTopic, 0, this);
}
@Override
public void processMessage(String topic, MqttMessage message) throws Exception {
JSONObject result = JSONObject.fromObject(new String(message.getPayload()));
JSONObject messageResult = new JSONObject();
messageResult.put("result", result);
messageResult.put("topic", topic);
blockingQueue.add(messageResult);
}
class NumberThread implements Runnable {
@Override
public void run() {
while (true) {
try {
JSONObject messageResult = blockingQueue.take();
JSONObject result = messageResult.getJSONObject("result");
if ((messageResult.getString("topic")).equals(emqTopic)) {
String dataType = result.getString("dataType");
String address = result.getString("address");
String gatewayId = result.getString("gatewayId");
String value = result.getString("value");
String signalType = result.getString("signalType");
log.info("===========接收IOT订阅消息,address:{},gatewayId:{},dateType:{},value:{},signalType:{}", address,gatewayId,dataType,value,signalType);
kafkaProducerService.sendMessageAsync(kafkaTopic,JSON.toJSONString(result));
}
} catch (Exception e) {
Thread.currentThread().interrupt();
}
}
}
}
}
//package com.yeejoin.equip.kafka;
//
//import com.alibaba.fastjson.JSON;
//import com.alibaba.fastjson.JSONArray;
//import com.alibaba.fastjson.JSONObject;
//import com.yeejoin.amos.component.influxdb.InfluxDbConnection;
//import com.yeejoin.equip.entity.EquipmentIndexVO;
//import com.yeejoin.equip.entity.IndicatorData;
//import com.yeejoin.equip.eqmx.EmqMessageService;
//import com.yeejoin.equip.mapper.tdengine.IndicatorDataMapper;
//import com.yeejoin.equip.utils.ElasticSearchUtil;
//import com.yeejoin.equip.utils.RedisUtils;
//import lombok.extern.slf4j.Slf4j;
//import org.apache.commons.lang3.ObjectUtils;
//import org.apache.kafka.clients.consumer.ConsumerRecord;
//import org.springframework.beans.factory.annotation.Autowired;
//import org.springframework.beans.factory.annotation.Value;
//import org.springframework.kafka.annotation.KafkaListener;
//import org.springframework.kafka.support.Acknowledgment;
//import org.springframework.stereotype.Service;
//
//import javax.annotation.PostConstruct;
//import java.text.SimpleDateFormat;
//import java.util.*;
//import java.util.concurrent.ExecutorService;
//import java.util.concurrent.Executors;
//
///**
// * @author LiuLin
// * @date 2023/6/25
// * @apiNote kafka 消费服务类
// */
//@Slf4j
//@Service
//public class KafkaConsumerService {
//
// //装备更新最新消息存入influxdb前缀
// private static final String TRUE = "true";
// private static final String FALSE = "false";
// private static final String ES_INDEX_NAME_JX = "jxiop_equipments";
//
// private static final String MEASUREMENT = "iot_data_";
// private static final String TOTAL_DATA_ = "total_data_";
// @Autowired
// protected KafkaProducerService kafkaProducerService;
//
// @Autowired
// private RedisUtils redisUtils;
//
// @Autowired
// private ElasticSearchUtil elasticSearchUtil;
//
// @Autowired
// private InfluxDbConnection influxDbConnection;
//
// @Autowired
// private IndicatorDataMapper indicatorDataMapper;
// ExecutorService service = Executors.newFixedThreadPool(10);
//
// @Value("${kafka.alarm.topic}")
// private String alarmTopic;
//
// /**
// * 批量消费kafka消息
// * Kafka消息转emq
// *
// * @param consumerRecords messages
// * @param ack ack
// */
// @KafkaListener(id = "consumerSingle", topics = "#{'${kafka.topic}'.split(',')}", groupId = "messageConsumerGroup")
// public void listen1(List<ConsumerRecord<String, String>> consumerRecords, Acknowledgment ack) {
// try {
// for (ConsumerRecord<String, String> consumerRecord : consumerRecords) {
// Optional<?> kafkaMessage = Optional.ofNullable(consumerRecord.value());
// kafkaMessage.ifPresent(o -> this.handleMessage((String) o));
// }
// } catch (Exception e) {
// log.error("kafka失败,当前失败的批次: data:{}", consumerRecords);
// } finally {
// ack.acknowledge();
// }
// }
//
// private void handleMessage(String record) {
// JSONObject jsonObject = JSONObject.parseObject(record);
// IndicatorData indicatorData = JSON.parseObject(record, IndicatorData.class);
// String dataType = jsonObject.getString("dataType");
// String indexAddress = jsonObject.getString("address");
// String gatewayId = jsonObject.getString("gatewayId");
// String value = jsonObject.getString("value");
// String key = indexAddress + "_" + gatewayId;
// String signalType = jsonObject.getString("signalType");
// log.info("接收Kafka消息! address: {}, gatewayId: {},value:{}", indexAddress, gatewayId, value);
// try {
// if (redisUtils.hasKey(key)) {
// SimpleDateFormat simpleDateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
// EquipmentIndexVO equipmentSpeIndex = JSONObject.parseObject(redisUtils.get(key), EquipmentIndexVO.class);
// String valueLabel = valueTranslate(value, equipmentSpeIndex.getValueEnum());
//
// //更新数据入ES库
// Map<String, Object> paramJson = new HashMap<>();
// if (!Arrays.asList(TRUE, FALSE).contains(value)) {
// paramJson.put("valueF", Float.parseFloat(value));
// }
// paramJson.put("value", value);
// paramJson.put("valueLabel", valueLabel.isEmpty() ? value : valueLabel);
// paramJson.put("createdTime", new Date());
// paramJson.put("unit", equipmentSpeIndex.getUnitName());
// elasticSearchUtil.updateData(ES_INDEX_NAME_JX, key, JSON.toJSONString(paramJson));
//
// Map<String, String> tagsMap = new HashMap<>();
// Map<String, Object> fieldsMap = new HashMap<>();
// tagsMap.put("equipmentsIdx", key);
// fieldsMap.put("address", indexAddress);
// fieldsMap.put("gatewayId", gatewayId);
// fieldsMap.put("dataType", dataType);
// fieldsMap.put("isAlarm", String.valueOf(equipmentSpeIndex.getIsAlarm()));
// fieldsMap.put("equipmentSpecificName", equipmentSpeIndex.getEquipmentSpecificName());
//
// fieldsMap.put("value", value);
// fieldsMap.put("valueLabel", valueLabel.isEmpty() ? value : valueLabel);
// fieldsMap.put("equipmentIndexName", equipmentSpeIndex.getEquipmentIndexName());
// fieldsMap.put("unit", equipmentSpeIndex.getUnitName());
// fieldsMap.put("createdTime", simpleDateFormat.format(new Date()));
//
// indicatorData.setIsAlarm(String.valueOf(equipmentSpeIndex.getIsAlarm()));
// indicatorData.setEquipmentIndexName(equipmentSpeIndex.getEquipmentIndexName());
// indicatorData.setEquipmentSpecificName(equipmentSpeIndex.getEquipmentSpecificName());
// indicatorData.setUnit(equipmentSpeIndex.getUnitName());
// indicatorData.setEquipmentsIdx(key);
// indicatorData.setValueLabel(valueLabel.isEmpty() ? value : valueLabel);
//
// //变位存入influxdb
// if ("transformation".equalsIgnoreCase(signalType)) {
// influxDbConnection.insert(MEASUREMENT + gatewayId, tagsMap, fieldsMap);
// indicatorDataMapper.insert(indicatorData);
// log.info("TDEngine入库成功,{},value:{}", indicatorData.getEquipmentsIdx(), indicatorData.getValue());
// } else {
// influxDbConnection.insert(TOTAL_DATA_ + indicatorData.getGatewayId(), tagsMap, fieldsMap);
// log.info("总召入库,key:{}", indicatorData.getEquipmentsIdx());
// }
//
// if (0 != equipmentSpeIndex.getIsAlarm()) {
// fieldsMap.putAll(tagsMap);
// kafkaProducerService.sendMessageAsync(alarmTopic, JSON.toJSONString(fieldsMap));
// log.info("===========发送告警信息,key:{}", indicatorData.getEquipmentsIdx());
// }
// }
// } catch (Exception e) {
// log.error("Iot透传消息解析入库失败" + e.getMessage(), e);
// }
// }
//
// private String valueTranslate(String value, String enumStr) {
// if (ObjectUtils.isEmpty(enumStr)) {
// return "";
// }
// try {
// JSONArray jsonArray = JSONArray.parseArray(enumStr);
// for (int i = 0; i < jsonArray.size(); i++) {
// JSONObject jsonObject = jsonArray.getJSONObject(i);
// if (jsonObject.get("key").equals(value)) {
// return jsonObject.getString("label");
// }
// }
// } catch (Exception e) {
// log.error("告警枚举转换异常" + e.getMessage(), e);
// }
// return "";
// }
//}
//
//
//package com.yeejoin.equip.kafka;
//
//import com.alibaba.fastjson.JSON;
//import com.alibaba.fastjson.JSONArray;
//import com.alibaba.fastjson.JSONObject;
//import com.yeejoin.amos.component.influxdb.InfluxDbConnection;
//import com.yeejoin.equip.config.KafkaConsumerConfig;
//import com.yeejoin.equip.entity.EquipmentIndexVO;
//import com.yeejoin.equip.entity.IndicatorData;
//import com.yeejoin.equip.mapper.tdengine.IndicatorDataMapper;
//import com.yeejoin.equip.utils.ElasticSearchUtil;
//import com.yeejoin.equip.utils.RedisUtils;
//import lombok.extern.slf4j.Slf4j;
//import org.apache.commons.lang3.ObjectUtils;
//import org.apache.kafka.clients.consumer.ConsumerRecord;
//import org.apache.kafka.clients.consumer.ConsumerRecords;
//import org.apache.kafka.clients.consumer.KafkaConsumer;
//import org.springframework.beans.factory.annotation.Autowired;
//import org.springframework.beans.factory.annotation.Value;
//import org.springframework.boot.CommandLineRunner;
//import org.springframework.stereotype.Component;
//import java.text.SimpleDateFormat;
//import java.time.Duration;
//import java.util.*;
//import java.util.concurrent.*;
//import java.util.concurrent.atomic.AtomicLong;
//
///**
// * @author LiuLin
// * @date 2023年08月01日 17:27
// */
//@Slf4j
//@Component
//public class KafkaConsumerWithThread implements CommandLineRunner {
// final private static AtomicLong sendThreadPoolCounter = new AtomicLong(0);
// final public static ExecutorService pooledExecutor =
// Executors.newFixedThreadPool(60 + Runtime.getRuntime().availableProcessors(),
// createThreadFactory());
// //iot转发实时消息存入influxdb前缀
// private static final String MEASUREMENT = "iot_data_";
// private static final String TOTAL_DATA_ = "total_data_";
// private static final String ES_INDEX_NAME_JX = "jxiop_equipments";
// //装备更新最新消息存入influxdb前缀
// private static final String TRUE = "true";
// private static final String FALSE = "false";
// @Autowired
// protected KafkaProducerService kafkaProducerService;
// @Autowired
// private KafkaConsumerConfig consumerConfig;
// @Autowired
// private InfluxDbConnection influxDbConnection;
// @Autowired
// private RedisUtils redisUtils;
// @Autowired
// private IndicatorDataMapper indicatorDataMapper;
//
// @Value("${kafka.alarm.topic}")
// private String alarmTopic;
//
// @Value("${kafka.topic}")
// private String topic;
//
// @Autowired
// private ElasticSearchUtil elasticSearchUtil;
//
// private static ThreadFactory createThreadFactory() {
// return runnable -> {
// Thread thread = new Thread(runnable);
// thread.setName(String.format("kafka-consumer-iot-pool-%d", KafkaConsumerWithThread.sendThreadPoolCounter.getAndIncrement()));
// return thread;
// };
// }
//
// @Override
// public void run(String... args) {
// Thread thread = new Thread(new KafkaConsumerThread(consumerConfig.consumerConfigs(), topic));
// thread.start();
// }
//
// private void processRecord(ConsumerRecord<String, String> record) {
// JSONObject jsonObject = JSONObject.parseObject(record.value());
// IndicatorData indicatorData = JSON.parseObject(record.value(), IndicatorData.class);
// String dataType = jsonObject.getString("dataType");
// String indexAddress = jsonObject.getString("address");
// String gatewayId = jsonObject.getString("gatewayId");
// String value = jsonObject.getString("value");
// String key = indexAddress + "_" + gatewayId;
// String signalType = jsonObject.getString("signalType");
// log.info("接收Kafka消息! address: {}, gatewayId: {},value:{}", indexAddress, gatewayId,value);
// try {
// if (redisUtils.hasKey(key)) {
// SimpleDateFormat simpleDateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
// EquipmentIndexVO equipmentSpeIndex = JSONObject.parseObject(redisUtils.get(key), EquipmentIndexVO.class);
// String valueLabel = valueTranslate(value, equipmentSpeIndex.getValueEnum());
//
// //更新数据入ES库
// Map<String, Object> paramJson = new HashMap<>();
// if (!Arrays.asList(TRUE, FALSE).contains(value)) {
// paramJson.put("valueF", Float.parseFloat(value));
// }
// paramJson.put("value", value);
// paramJson.put("valueLabel", valueLabel.isEmpty() ? value : valueLabel);
// paramJson.put("createdTime", new Date());
// paramJson.put("unit", equipmentSpeIndex.getUnitName());
// elasticSearchUtil.updateData(ES_INDEX_NAME_JX, key, JSON.toJSONString(paramJson));
//
// Map<String, String> tagsMap = new HashMap<>();
// Map<String, Object> fieldsMap = new HashMap<>();
// tagsMap.put("equipmentsIdx", key);
// fieldsMap.put("address", indexAddress);
// fieldsMap.put("gatewayId", gatewayId);
// fieldsMap.put("dataType", dataType);
// fieldsMap.put("isAlarm", String.valueOf(equipmentSpeIndex.getIsAlarm()));
// fieldsMap.put("equipmentSpecificName", equipmentSpeIndex.getEquipmentSpecificName());
//
// fieldsMap.put("value", value);
// fieldsMap.put("valueLabel", valueLabel.isEmpty() ? value : valueLabel);
// fieldsMap.put("equipmentIndexName", equipmentSpeIndex.getEquipmentIndexName());
// fieldsMap.put("unit", equipmentSpeIndex.getUnitName());
// fieldsMap.put("createdTime", simpleDateFormat.format(new Date()));
//
// indicatorData.setIsAlarm(String.valueOf(equipmentSpeIndex.getIsAlarm()));
// indicatorData.setEquipmentIndexName(equipmentSpeIndex.getEquipmentIndexName());
// indicatorData.setEquipmentSpecificName(equipmentSpeIndex.getEquipmentSpecificName());
// indicatorData.setUnit(equipmentSpeIndex.getUnitName());
// indicatorData.setEquipmentsIdx(key);
// indicatorData.setValueLabel(valueLabel.isEmpty() ? value : valueLabel);
//
// //变位存入influxdb
// if ("transformation".equalsIgnoreCase(signalType)) {
// influxDbConnection.insert(MEASUREMENT + gatewayId, tagsMap, fieldsMap);
// indicatorDataMapper.insert(indicatorData);
// log.info("TDEngine入库成功,{},value:{}",indicatorData.getEquipmentsIdx(),indicatorData.getValue());
// } else {
// influxDbConnection.insert(TOTAL_DATA_ + indicatorData.getGatewayId(), tagsMap, fieldsMap);
// log.info("总召入库,key:{}",indicatorData.getEquipmentsIdx());
// }
//
// if (0 != equipmentSpeIndex.getIsAlarm()) {
// fieldsMap.putAll(tagsMap);
// kafkaProducerService.sendMessageAsync(alarmTopic, JSON.toJSONString(fieldsMap));
// log.info("===========发送告警信息,key:{}",indicatorData.getEquipmentsIdx());
// }
// }
// } catch (Exception e) {
// log.error("Iot透传消息解析入库失败" + e.getMessage(), e);
// }
// }
//
// private String valueTranslate(String value, String enumStr) {
// if (ObjectUtils.isEmpty(enumStr)) {
// return "";
// }
// try {
// JSONArray jsonArray = JSONArray.parseArray(enumStr);
// for (int i = 0; i < jsonArray.size(); i++) {
// JSONObject jsonObject = jsonArray.getJSONObject(i);
// if (jsonObject.get("key").equals(value)) {
// return jsonObject.getString("label");
// }
// }
// } catch (Exception e) {
// log.error("告警枚举转换异常" + e.getMessage(), e);
// }
// return "";
// }
//
// public class KafkaConsumerThread implements Runnable {
// private final KafkaConsumer<String, String> kafkaConsumer;
//
// public KafkaConsumerThread(Properties props, String topic) {
// this.kafkaConsumer = new KafkaConsumer<>(props);
// this.kafkaConsumer.subscribe(Collections.singletonList(topic));
// }
//
// @Override
// public void run() {
// while (true) {
// ConsumerRecords<String, String> records = kafkaConsumer.poll(Duration.ofMillis(500));
// for (ConsumerRecord<String, String> record : records) {
// pooledExecutor.submit(() -> {
// processRecord(record);
// });
// }
// }
// }
// }
//}
package com.yeejoin.equip.kafka;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.yeejoin.equip.config.KafkaConsumerConfig;
import com.yeejoin.equip.entity.Book;
import com.yeejoin.equip.entity.EquipmentIndexVO;
import com.yeejoin.equip.entity.EsEntity;
import com.yeejoin.equip.entity.IndicatorData;
import com.yeejoin.equip.mapper.tdengine.IndicatorDataMapper;
import com.yeejoin.equip.utils.ElasticSearchUtil;
import com.yeejoin.equip.utils.RedisUtils;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.ObjectUtils;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.CommandLineRunner;
import org.springframework.stereotype.Component;
import java.time.Duration;
import java.util.*;
import java.util.concurrent.*;
import java.util.concurrent.atomic.AtomicLong;
import java.util.stream.Collectors;
import java.util.stream.StreamSupport;
/**
* @author LiuLin
* @date 2023年08月01日 17:27
*/
@Slf4j
@Component
public class KafkaConsumerWorker implements CommandLineRunner {

    /** Numbers the consumer worker threads: kafka-consumer-iot-pool-N. */
    private static final AtomicLong sendThreadPoolCounter = new AtomicLong(0);

    /** Shared pool that processes each polled Kafka batch off the poll thread. */
    public static final ExecutorService pooledExecutor =
            Executors.newFixedThreadPool(200 + Runtime.getRuntime().availableProcessors(),
                    createThreadFactory());

    /** ES index holding the latest value per equipment signal. */
    private static final String ES_INDEX_NAME_JX = "jxiop_equipments";

    /** Signal type whose records are additionally persisted to TDengine. */
    private static final String TRANSFORMATION = "transformation";

    // Boolean-valued signals arrive as these literal strings and carry no float value.
    private static final String TRUE = "true";
    private static final String FALSE = "false";

    @Autowired
    protected KafkaProducerService kafkaProducerService;
    @Autowired
    private KafkaConsumerConfig consumerConfig;
    @Autowired
    private RedisUtils redisUtils;
    @Autowired
    private IndicatorDataMapper indicatorDataMapper;

    /** Kafka topic receiving alarm notifications (config: kafka.alarm.topic). */
    @Value("${kafka.alarm.topic}")
    private String alarmTopic;

    /** Kafka topic consumed for raw IoT signals (config: kafka.topic). */
    @Value("${kafka.topic}")
    private String topic;

    @Autowired
    private ElasticSearchUtil elasticSearchUtil;

    /** Names pool threads so they are identifiable in thread dumps. */
    private static ThreadFactory createThreadFactory() {
        return runnable -> {
            Thread thread = new Thread(runnable);
            thread.setName(String.format("kafka-consumer-iot-pool-%d",
                    KafkaConsumerWorker.sendThreadPoolCounter.getAndIncrement()));
            return thread;
        };
    }

    /**
     * Starts the single Kafka polling thread once the Spring context is up.
     */
    @Override
    public void run(String... args) {
        Thread thread = new Thread(new KafkaConsumerThread(consumerConfig.consumerConfigs(), topic));
        thread.start();
    }

    /**
     * Enriches one raw Kafka record with the signal metadata cached in Redis.
     *
     * @param record raw JSON record with address/gatewayId/value/signalType fields
     * @return the enriched {@link IndicatorData}, or empty when the signal is
     *         unknown (no Redis entry under "address_gatewayId")
     */
    private Optional<IndicatorData> processSignal(ConsumerRecord<String, String> record) {
        JSONObject jsonObject = JSONObject.parseObject(record.value());
        String address = jsonObject.getString("address");
        String gatewayId = jsonObject.getString("gatewayId");
        String value = jsonObject.getString("value");
        String key = address + "_" + gatewayId;
        log.info("===========收到Kafka消息,key:{},value:{}", key, value);
        IndicatorData indicatorData = JSON.parseObject(record.value(), IndicatorData.class);
        if (redisUtils.hasKey(key)) {
            EquipmentIndexVO equipmentSpeIndex = JSONObject.parseObject(redisUtils.get(key), EquipmentIndexVO.class);
            String valueLabel = valueTranslate(value, equipmentSpeIndex.getValueEnum());
            indicatorData.setIsAlarm(String.valueOf(equipmentSpeIndex.getIsAlarm()));
            indicatorData.setEquipmentIndexName(equipmentSpeIndex.getEquipmentIndexName());
            indicatorData.setEquipmentSpecificName(equipmentSpeIndex.getEquipmentSpecificName());
            indicatorData.setUnit(equipmentSpeIndex.getUnitName());
            indicatorData.setEquipmentsIdx(key);
            indicatorData.setValueLabel(valueLabel.isEmpty() ? value : valueLabel);
            indicatorData.setValueF(parseFloatValue(value));
            //发送告警信息
            if (0 != equipmentSpeIndex.getIsAlarm()) {
                kafkaProducerService.sendMessageAsync(alarmTopic, JSON.toJSONString(indicatorData));
                log.info("===========发送告警信息,key:{}", indicatorData.getEquipmentsIdx());
            }
            return Optional.of(indicatorData);
        }
        return Optional.empty();
    }

    /**
     * Converts a signal value to its float form; boolean literals map to 0.
     * Fix: a malformed numeric value previously threw NumberFormatException
     * inside the parallel stream and aborted the whole batch in processRecord.
     */
    private static float parseFloatValue(String value) {
        if (TRUE.equals(value) || FALSE.equals(value)) {
            return 0f;
        }
        try {
            return Float.parseFloat(value);
        } catch (NumberFormatException e) {
            log.warn("value不是合法浮点数,按0处理:{}", value);
            return 0f;
        }
    }

    /**
     * Processes one polled batch: enrich each record, group by gateway, then per
     * gateway (1) bulk-update the ES latest-value index and (2) batch-insert
     * "transformation" signals into the per-gateway TDengine table.
     */
    private void processRecord(ConsumerRecords<String, String> records) {
        Map<String, List<IndicatorData>> data = StreamSupport.stream(records.spliterator(), true)
                .map(this::processSignal)
                .filter(Optional::isPresent)
                .map(Optional::get)
                .collect(Collectors.groupingBy(IndicatorData::getGatewayId));
        data.forEach((gatewayId, list) -> {
            //1.update es
            List<EsEntity<Book>> batchList = new ArrayList<>(list.size());
            list.forEach(item -> batchList.add(new EsEntity<>(item.getEquipmentsIdx(),
                    new Book(item.getValue(), item.getValueF(), item.getValueLabel(),
                            item.getUnit(), new Date()))));
            elasticSearchUtil.updateBatch(ES_INDEX_NAME_JX, batchList);
            //2.save TDengine
            List<IndicatorData> tdDataList = list.stream()
                    .filter(t -> Objects.equals(t.getSignalType(), TRANSFORMATION))
                    .collect(Collectors.toList());
            indicatorDataMapper.insertBatch(tdDataList, gatewayId);
            tdDataList.forEach(s -> log.info("===========TDEngine入库成功,id:【{}】,value:【{}】修改成功",
                    s.getEquipmentsIdx(), s.getValueF()));
        });
    }

    /**
     * Maps a raw value to its display label via the signal's value-enum JSON
     * ([{"key":...,"label":...}, ...]); returns "" when absent or unmatched.
     */
    private String valueTranslate(String value, String enumStr) {
        if (ObjectUtils.isEmpty(enumStr)) {
            return "";
        }
        try {
            JSONArray jsonArray = JSONArray.parseArray(enumStr);
            for (int i = 0; i < jsonArray.size(); i++) {
                JSONObject jsonObject = jsonArray.getJSONObject(i);
                if (jsonObject.get("key").equals(value)) {
                    return jsonObject.getString("label");
                }
            }
        } catch (Exception e) {
            log.error("告警枚举转换异常" + e.getMessage(), e);
        }
        return "";
    }

    /**
     * Endless poll loop feeding the shared processing pool.
     */
    public class KafkaConsumerThread implements Runnable {
        private final KafkaConsumer<String, String> kafkaConsumer;

        public KafkaConsumerThread(Properties props, String topic) {
            this.kafkaConsumer = new KafkaConsumer<>(props);
            this.kafkaConsumer.subscribe(Collections.singletonList(topic));
        }

        @Override
        public void run() {
            while (true) {
                ConsumerRecords<String, String> records = kafkaConsumer.poll(Duration.ofMillis(100));
                // Fix: skip empty polls instead of submitting no-op tasks to the pool.
                if (!records.isEmpty()) {
                    pooledExecutor.submit(() -> processRecord(records));
                }
                // NOTE(review): offsets are committed while the submitted batch may
                // still be running, so a crash mid-processing drops those records
                // (at-most-once). Commit after processing if at-least-once is needed.
                kafkaConsumer.commitSync();
            }
        }
    }
}
package com.yeejoin.equip.kafka;
import com.alibaba.fastjson.JSONObject;
import lombok.extern.slf4j.Slf4j;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.support.SendResult;
import org.springframework.stereotype.Service;
import org.springframework.util.concurrent.ListenableFuture;
import org.springframework.util.concurrent.ListenableFutureCallback;
import javax.annotation.Resource;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
/**
* @author LiuLin
* @date 2023/6/25
* @apiNote kafka 生产服务类
*/
@Slf4j
@Service
public class KafkaProducerService {
@Resource
private KafkaTemplate<String, String> kafkaTemplate;
@Resource
private KafkaTemplate<String, String> kafkaTemplateWithTransaction;
/**
* 发送消息(同步)
* @param topic 主题
* @param key 键
* @param message 值
*/
public void sendMessageSync(String topic, String key, String message) throws InterruptedException, ExecutionException, TimeoutException {
//可以指定最长等待时间,也可以不指定
kafkaTemplate.send(topic, message).get(10, TimeUnit.SECONDS);
log.info("sendMessageSync => topic: {}, key: {}, message: {}", topic, key, message);
}
/**
* 发送消息并获取结果
* @param topic
* @param message
* @throws ExecutionException
* @throws InterruptedException
*/
public void sendMessageGetResult(String topic, String key, String message) throws ExecutionException, InterruptedException {
SendResult<String, String> result = kafkaTemplate.send(topic, message).get();
log.info("The partition the message was sent to: " + result.getRecordMetadata().partition());
}
/**
* 发送消息(异步)
* @param topic 主题
* @param message 消息内容
*/
public void sendMessageAsync(String topic, String message) {
ListenableFuture<SendResult<String, String>> future = kafkaTemplate.send(topic, message);
future.addCallback(new ListenableFutureCallback<SendResult<String, String>>() {
@Override
public void onFailure(Throwable throwable) {
log.error("发送消息(异步) failure! topic : {}, message: {}", topic, message);
}
@Override
public void onSuccess(SendResult<String, String> stringStringSendResult) {
JSONObject jsonObject = JSONObject.parseObject(message);
String address = jsonObject.getString("address");
String gatewayId = jsonObject.getString("gatewayId");
String value = jsonObject.getString("value");
log.info("===========Kafka发送消息 success! address: {}, gatewayId: {},value:{}", address, gatewayId,value);
}
});
}
}
//package com.yeejoin.equip.kafka;
//
//import org.apache.kafka.clients.consumer.ConsumerConfig;
//import org.apache.kafka.clients.consumer.ConsumerRecords;
//import org.apache.kafka.clients.consumer.KafkaConsumer;
//import org.apache.kafka.common.serialization.StringDeserializer;
//import javax.annotation.PostConstruct;
//import java.time.Duration;
//import java.util.Collections;
//import java.util.Properties;
//import java.util.concurrent.ExecutorService;
//import java.util.concurrent.Executors;
//
///**
// * @author LiuLin
// * @date 2023年10月11日 09:31
// */
//public class WorkerConsumer {
// private static final ExecutorService executor = Executors.newFixedThreadPool(100);
// @PostConstruct
// void init() throws Exception {
// String topicName = "topic_t40";
// KafkaConsumer<String, String> consumer = getKafkaConsumer();
// consumer.subscribe(Collections.singletonList(topicName));
// try {
// while (true) {
// ConsumerRecords<String, String> records = consumer.poll(Duration.ofSeconds(1));
// if(!records.isEmpty()){
// executor.execute(new MessageHandler(records));
// }
// }
// }finally {
// consumer.close();
// }
// }
//
// private static KafkaConsumer<String, String> getKafkaConsumer() {
// Properties props = new Properties();
// props.put("bootstrap.servers", "localhost:9092");
// props.put(ConsumerConfig.GROUP_ID_CONFIG, "app_w");
// props.put("client.id", "client_02");
// props.put("enable.auto.commit", true);
// props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "latest");
// props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
// props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
//
// return new KafkaConsumer<>(props);
// }
//
//
// static class MessageHandler implements Runnable{
//
// private final ConsumerRecords<String, String> records;
//
// public MessageHandler(ConsumerRecords<String, String> records) {
// this.records = records;
// }
//
// @Override
// public void run() {
// records.forEach(record -> {
// System.out.println(" 开始处理消息: " + record.value() + ", partition " + record.partition());
// });
// }
// }
//}
package com.yeejoin.equip.mapper.mysql;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import com.yeejoin.equip.entity.EquipmentIndexVO;
import com.yeejoin.equip.entity.EquipmentSpecificIndex;
import org.springframework.stereotype.Component;
import java.util.List;
/**
* @author ZeHua Li
* @date 2020/10/30 11:16
* @since v2.0
*/
@Component
public interface EquipmentSpecificIndexMapper extends BaseMapper<EquipmentSpecificIndex> {
// Returns equipment-specific index records matching the (possibly partial)
// filter fields on the given VO; the SQL lives in the corresponding mapper XML.
List<EquipmentIndexVO> getEquipSpecificIndexList(EquipmentIndexVO equipmentIndexVo);
}
package com.yeejoin.equip.mapper.tdengine;
import com.yeejoin.equip.entity.ESEquipments;
import com.yeejoin.equip.entity.IndicatorData;
import org.apache.ibatis.annotations.Param;
import org.springframework.stereotype.Component;
import java.util.List;
/**
* @author CuiXi
* @version 1.0
* @Description:
* @date 2021/3/11 14:30
*/
@Component
public interface ESEquipmentsMapper {
// Batch-inserts equipment snapshots; "time" is passed to the SQL — its exact
// use (table suffix vs. column value) is defined in the mapper XML, TODO confirm.
int batchInsert(@Param("list")List<ESEquipments> esEquipmentsList,@Param("time")String time);
// Inserts a single equipment snapshot.
int insert(ESEquipments esEquipments);
// Creates the backing table if missing (DDL in the mapper XML).
void createTable();
}
package com.yeejoin.equip.mapper.tdengine;
import com.yeejoin.equip.entity.IndicatorData;
import org.apache.ibatis.annotations.Param;
import org.springframework.stereotype.Component;
import java.util.List;
/**
* @author CuiXi
* @version 1.0
* @Description:
* @date 2021/3/11 14:30
*/
@Component
public interface IndicatorDataMapper {
// Inserts one indicator record.
int insert(IndicatorData indicatorData);
// Batch-inserts indicator records; gatewayId selects the per-gateway table
// (see the TDengine mapper XML for the exact table naming).
int insertBatch(@Param("list")List<IndicatorData> indicatorDataList, @Param("gatewayId")String gatewayId);
// Creates the TDengine database if missing (DDL in the mapper XML).
void createDB();
// Creates the (super) table if missing (DDL in the mapper XML).
void createTable();
}
//package com.yeejoin.equip.mqtt;
//
//import com.yeejoin.equip.config.MqttPropertyConfig;
//import com.yeejoin.equip.entity.IndicatorData;
//import com.yeejoin.equip.mqtt.message.MqttTopicEnum;
//import com.yeejoin.equip.utils.ExecutorFactory;
//import org.eclipse.paho.client.mqttv3.MqttConnectOptions;
//import org.springframework.context.annotation.Bean;
//import org.springframework.context.annotation.Configuration;
//import org.springframework.integration.annotation.ServiceActivator;
//import org.springframework.integration.channel.DirectChannel;
//import org.springframework.integration.dsl.IntegrationFlow;
//import org.springframework.integration.dsl.IntegrationFlows;
//import org.springframework.integration.endpoint.MessageProducerSupport;
//import org.springframework.integration.mqtt.core.DefaultMqttPahoClientFactory;
//import org.springframework.integration.mqtt.core.MqttPahoClientFactory;
//import org.springframework.integration.mqtt.inbound.MqttPahoMessageDrivenChannelAdapter;
//import org.springframework.integration.mqtt.outbound.MqttPahoMessageHandler;
//import org.springframework.integration.mqtt.support.DefaultPahoMessageConverter;
//import org.springframework.messaging.MessageChannel;
//import org.springframework.messaging.MessageHandler;
//import javax.annotation.Resource;
//import java.util.Objects;
//import static com.yeejoin.equip.mqtt.message.MqttConstant.*;
//
///**
// * 消息处理器
// *
// * @author LiuLin
// * @date 2023年08月18日 10:56
// */
//@Configuration
//public class MessageIntegration {
//
// @Resource
// private MqttPropertyConfig mqttPropertyConfig;
// @Bean
// public MqttConnectOptions mqttConnectOptions() {
// MqttConnectOptions options = new MqttConnectOptions();
// options.setServerURIs(new String[]{mqttPropertyConfig.getBroker()});
// options.setUserName(mqttPropertyConfig.getClientUserName());
// options.setPassword(mqttPropertyConfig.getClientPassword().toCharArray());
// options.setConnectionTimeout(DEFAULT_CONNECTION_TIMEOUT);
// // 设置心跳:1.5*20秒
// options.setKeepAliveInterval(mqttPropertyConfig.getKeepAliveInterval());
// // 设置最大并发数
// options.setMaxInflight(mqttPropertyConfig.getMaxInflight());
// options.setAutomaticReconnect(true);
// //options.setCleanSession(false);
// return options;
// }
//
// @Bean
// public MqttPahoClientFactory mqttClientFactory() {
// DefaultMqttPahoClientFactory factory = new DefaultMqttPahoClientFactory();
// factory.setConnectionOptions(mqttConnectOptions());
// return factory;
// }
//
// @Bean
// public MessageProducerSupport bizInbound() {
// MqttPahoMessageDrivenChannelAdapter adapter = new MqttPahoMessageDrivenChannelAdapter(
// mqttPropertyConfig.getBizClientId(),
// mqttClientFactory(),
// mqttPropertyConfig.getBizTopic()
// );
// adapter.setCompletionTimeout(DEFAULT_COMPLETION_TIMEOUT);
// adapter.setConverter(new DefaultPahoMessageConverter());
// adapter.setQos(QOS_DEFAULT);
// return adapter;
// }
//
// @Bean
// public MessageChannel mqttOutboundChannel() {
// return new DirectChannel();
// }
//
// @Bean
// @ServiceActivator(inputChannel = "mqttOutboundChannel")
// public MessageHandler mqttOutbound() {
// MqttPahoMessageHandler messageHandler = new MqttPahoMessageHandler(
// mqttPropertyConfig.getClientId(),
// mqttClientFactory()
// );
// messageHandler.setAsync(true);
// messageHandler.setDefaultQos(QOS_DEFAULT);
// return messageHandler;
// }
//
// @Bean
// public IntegrationFlow bizMsgFlow() {
// return IntegrationFlows
// .from(bizInbound())
// .channel(channels -> channels.executor(ExecutorFactory.buildBizExecutor()))
// .handle(MessageTransfer::mqttMessage2RawMessage)
// //根据Topic后缀进行分流
// .<IndicatorData, MqttTopicEnum>route(IndicatorData::getMqttTopicEnum,
// mapping -> mapping
// .subFlowMapping(MqttTopicEnum.perspective, flow -> flow
// .handle("handleMessageService", "processMessage")
// .filter(Objects::nonNull)
// .handle(mqttOutbound()))
// .defaultOutputToParentFlow())
// .get();
// }
//}
//package com.yeejoin.equip.mqtt;
//
//import com.alibaba.fastjson.JSON;
//import com.alibaba.fastjson.JSONArray;
//import com.alibaba.fastjson.JSONObject;
//import com.yeejoin.equip.entity.EquipmentIndexVO;
//import com.yeejoin.equip.entity.IndicatorData;
//import com.yeejoin.equip.mqtt.message.MqttTopicEnum;
//import com.yeejoin.equip.utils.RedisUtils;
//import com.yeejoin.equip.utils.SpringUtils;
//import lombok.extern.slf4j.Slf4j;
//import org.apache.commons.lang3.ObjectUtils;
//import org.springframework.integration.mqtt.support.MqttHeaders;
//import org.springframework.stereotype.Component;
//import java.util.Arrays;
//import java.util.Date;
//import java.util.Map;
//import static com.yeejoin.equip.mqtt.message.MqttConstant.*;
//
///**
// * @author LiuLin
// * @date 2023年07月13日 09:58
// */
//@Slf4j
//@Component
//public class MessageTransfer {
//
// /**
// * 转为原生数据,payload为字节数组
// **/
// public static IndicatorData mqttMessage2RawMessage(String payload, Map<String, Object> headers) {
// //log.info("received raw message, header >>> {}, payload >>> {}", headers, JSONObject.toJSONString(payload));
// RedisUtils redisUtils = (RedisUtils) SpringUtils.getBean("redisUtils");
// IndicatorData indicatorData = JSON.parseObject(payload, IndicatorData.class);
// if(!indicatorData.getSignalType().equals("transformation")){
// log.info("received TotalSummon message:address{},gateway:{},time:{},signalType:{}", indicatorData.getAddress(),indicatorData.getGatewayId(),
// new Date(),indicatorData.getSignalType());
// }
// //log.info("received raw message, header >>> {}, payload >>> {}", headers, JSONObject.toJSONString(payload));
// try {
// String topic = headers.get(MqttHeaders.RECEIVED_TOPIC).toString();
// String[] topicItems = topic.split(TOPIC_SPLITTER);
// indicatorData.setMqttTopicEnum(MqttTopicEnum.of(topicItems[topicItems.length - 1]));
// String key = indicatorData.getAddress() + "_" + indicatorData.getGatewayId();
// if (redisUtils.hasKey(key)) {
// EquipmentIndexVO equipmentSpeIndex = JSONObject.parseObject(redisUtils.get(key),EquipmentIndexVO.class) ;
// String valueLabel = valueTranslate(indicatorData.getValue(), equipmentSpeIndex.getValueEnum());
// indicatorData.setIsAlarm(String.valueOf(equipmentSpeIndex.getIsAlarm()));
// indicatorData.setEquipmentIndexName(equipmentSpeIndex.getEquipmentIndexName());
// indicatorData.setEquipmentSpecificName(equipmentSpeIndex.getEquipmentSpecificName());
// indicatorData.setUnit(equipmentSpeIndex.getUnitName());
// indicatorData.setEquipmentsIdx(key);
// indicatorData.setValueLabel(valueLabel.isEmpty() ? indicatorData.getValue() : valueLabel);
// if (!Arrays.asList(TRUE, FALSE).contains(indicatorData.getValue())) {
// indicatorData.setValueF(Float.parseFloat(indicatorData.getValue()));
// }
// }else {
// return null;
// }
// } catch (Exception e) {
// log.error("mqttMessage2RawMessage解析消息数据异常", e);
// }
// return indicatorData;
// }
//
// private static String valueTranslate(String value, String enumStr) {
// if (ObjectUtils.isEmpty(enumStr)) {
// return "";
// }
// try {
// JSONArray jsonArray = JSONArray.parseArray(enumStr);
// for (int i = 0; i < jsonArray.size(); i++) {
// JSONObject jsonObject = jsonArray.getJSONObject(i);
// if (jsonObject.get("key").equals(value)) {
// return jsonObject.getString("label");
// }
// }
// } catch (Exception e) {
// log.error("告警枚举转换异常" + e.getMessage(), e);
// }
// return "";
// }
//}
package com.yeejoin.equip.mqtt.message;
import com.google.common.collect.ImmutableMap;
import java.util.Map;
/**
 * Shared MQTT constants used by the message-flow configuration and converters.
 *
 * NOTE(review): constants-in-interface is a known anti-pattern (Effective Java,
 * "use classes, not interfaces, to export constants"); consider a final class
 * with a private constructor if no type implements this interface for field
 * inheritance.
 *
 * @author LiuLin
 * @date 2023-08-02 11:02
 */
public interface MqttConstant {
    /** Paho broker connect timeout — passed to MqttConnectOptions (units per Paho API). */
    int DEFAULT_CONNECTION_TIMEOUT = 5000;
    /** Inbound adapter completion timeout, in milliseconds. */
    long DEFAULT_COMPLETION_TIMEOUT = 5000;
    /** Default QoS level 1: at-least-once delivery. */
    int QOS_DEFAULT = 1;
    /** Separator used when splitting MQTT topic paths into segments. */
    String TOPIC_SPLITTER = "/";
    /** String form of boolean telemetry values received in payloads. */
    String TRUE = "true";
    String FALSE = "false";
}
package com.yeejoin.equip.mqtt.message;
/**
 * Known MQTT topic suffixes handled by the integration flow.
 *
 * @author LiuLin
 * @date 2023-07-13 09:54
 */
public enum MqttTopicEnum {
    /** Suffix of {@code iot/data/perspective}. */
    perspective,
    ;

    /**
     * Resolves a topic suffix to its enum constant.
     *
     * @param name topic suffix, e.g. {@code "perspective"}; may be null
     * @return the matching constant, or {@code null} when the suffix is unknown
     */
    public static MqttTopicEnum of(String name) {
        MqttTopicEnum match = null;
        for (MqttTopicEnum candidate : values()) {
            if (candidate.name().equals(name)) {
                match = candidate;
                break;
            }
        }
        return match;
    }
}
\ No newline at end of file
package com.yeejoin.equip.service;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.baomidou.mybatisplus.core.toolkit.CollectionUtils;
import com.google.common.collect.Lists;
import com.yeejoin.equip.entity.ESEquipments;
import com.yeejoin.equip.mapper.tdengine.ESEquipmentsMapper;
import com.yeejoin.equip.utils.ElasticSearchUtil;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;
import org.springframework.transaction.annotation.Transactional;
import org.typroject.tyboot.component.emq.EmqKeeper;
import java.text.SimpleDateFormat;
import java.util.HashMap;
import java.util.List;
/**
* @author LiuLin
* @date 2023/6/25
* @apiNote ES数据十分钟存入TDEngine
*/
@Slf4j
@Component
public class HandleESMessage2TDService {
public static final String ES_INDEX = "jxiop_equipments";
public static final int SIZE = 1000;
private static final String datePattern = "yyyy-MM-dd HH:mm:ss.SSS";
@Autowired
private ESEquipmentsMapper esEquipmentsMapper;
@Autowired
private ElasticSearchUtil elasticSearchUtil;
@Autowired
private EmqKeeper emqKeeper;
/**
* 十分钟拉取ES数据存入TdEngine
*/
@Scheduled(cron = "0 */10 * * * ?")
@Transactional(rollbackFor = Exception.class)
public void syncEsData2TDEngine() throws Exception {
SimpleDateFormat format = new SimpleDateFormat(datePattern);
List<ESEquipments> result = elasticSearchUtil.searchResponse(ES_INDEX, null, hit -> JSONObject.parseObject(hit.getSourceAsString(), ESEquipments.class));
List<List<ESEquipments>> allDataList = Lists.partition(result, SIZE);
String time = format.format(System.currentTimeMillis());
for (List<ESEquipments> tempDataList : allDataList) {
if (CollectionUtils.isNotEmpty(tempDataList)) {
esEquipmentsMapper.batchInsert(tempDataList, time);
}
}
log.info("同步ES数据至TDEngine成功!共同步{}条!", result.size());
try {
HashMap<String, String> syncFlag = new HashMap<>();
syncFlag.put("sync_flag", "success");
emqKeeper.getMqttClient().publish("sync_esdata_to_tdengine_notice", JSON.toJSONString(syncFlag).getBytes(), 0, false);
log.info("同步ES数据至发发送消息给业务发送通知成功!");
} catch (Exception exception) {
log.info("同步ES数据至发发送消息给业务发送通知失败!");
}
}
}
//package com.yeejoin.equip.service;
//
//import com.alibaba.fastjson.JSON;
//import com.yeejoin.amos.component.influxdb.InfluxDbConnection;
//import com.yeejoin.equip.entity.IndicatorData;
//import com.yeejoin.equip.kafka.KafkaProducerService;
//import com.yeejoin.equip.mapper.tdengine.IndicatorDataMapper;
//import com.yeejoin.equip.utils.ElasticSearchUtil;
//import lombok.extern.slf4j.Slf4j;
//import org.springframework.beans.factory.annotation.Autowired;
//import org.springframework.beans.factory.annotation.Value;
//import org.springframework.stereotype.Component;
//import java.text.SimpleDateFormat;
//import java.util.Date;
//import java.util.HashMap;
//import java.util.Map;
///**
// * @author LiuLin
// * @date 2023/6/25
// * @apiNote Emq消息转发Kafka
// */
//@Slf4j
//@Component("handleMessageService")
//public class HandleMessageService {
// private static final String MEASUREMENT = "iot_data_";
// private static final String TOTAL_DATA_ = "total_data_";
// private static final String ES_INDEX_NAME_JX = "jxiop_equipments";
// @Autowired
// protected KafkaProducerService kafkaProducerService;
// @Autowired
// private InfluxDbConnection influxDbConnection;
// @Autowired
// private IndicatorDataMapper indicatorDataMapper;
// @Value("${kafka.alarm.topic}")
// private String alarmTopic;
// @Autowired
// private ElasticSearchUtil elasticSearchUtil;
//
// public void processMessage(IndicatorData indicatorData) {
// try {
// SimpleDateFormat simpleDateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
// Map<String, String> tagsMap = new HashMap<>();
// Map<String, Object> fieldsMap = new HashMap<>();
// tagsMap.put("equipmentsIdx", indicatorData.getEquipmentsIdx());
//
// fieldsMap.put("address", indicatorData.getAddress());
// fieldsMap.put("gatewayId", indicatorData.getGatewayId());
// fieldsMap.put("dataType", indicatorData.getDataType());
// fieldsMap.put("isAlarm", indicatorData.getIsAlarm());
// fieldsMap.put("equipmentSpecificName", indicatorData.getEquipmentSpecificName());
// fieldsMap.put("value", indicatorData.getValue());
// fieldsMap.put("valueLabel", indicatorData.getValueLabel());
// fieldsMap.put("equipmentIndexName", indicatorData.getEquipmentIndexName());
// fieldsMap.put("unit", indicatorData.getUnit());
// fieldsMap.put("createdTime", simpleDateFormat.format(new Date()));
//
// //更新数据入ES库
// Map<String, Object> paramJson = new HashMap<>();
// paramJson.put("valueF", indicatorData.getValueF());
// paramJson.put("value", indicatorData.getValue());
// paramJson.put("valueLabel", indicatorData.getValueLabel());
// paramJson.put("createdTime", new Date());
// paramJson.put("unit", indicatorData.getUnit());
// elasticSearchUtil.updateData(ES_INDEX_NAME_JX, indicatorData.getEquipmentsIdx(), JSON.toJSONString(paramJson));
//
// if (indicatorData.getIsAlarm() != null && "1".equals(indicatorData.getIsAlarm())) {
// fieldsMap.putAll(tagsMap);
// kafkaProducerService.sendMessageAsync(alarmTopic, JSON.toJSONString(fieldsMap));
// }
//
// if ("transformation".equals(indicatorData.getSignalType())) {
// influxDbConnection.insert(MEASUREMENT + indicatorData.getGatewayId(), tagsMap, fieldsMap);
// indicatorDataMapper.insert(indicatorData);
// log.info("TDEngine入库成功,{},value:{}",indicatorData.getEquipmentsIdx(),indicatorData.getValue());
// }else{
// influxDbConnection.insert(TOTAL_DATA_ + indicatorData.getGatewayId(), tagsMap, fieldsMap);
// }
//
// } catch (Exception e) {
// log.error("Iot透传消息解析入库失败" + e.getMessage(), e);
// }
// }
//}
package com.yeejoin.equip.service;
import com.yeejoin.equip.mapper.tdengine.ESEquipmentsMapper;
import com.yeejoin.equip.mapper.tdengine.IndicatorDataMapper;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
/**
 * One-shot initializer that creates the TDEngine database and its tables.
 *
 * @author LiuLin
 * @date 2023-07-12 10:44
 */
@Slf4j
@Service
@Transactional(transactionManager = "tdEngineTransactionManager")
public class InitTDEngineDbService {
    @Autowired
    private IndicatorDataMapper indicatorDataMapper;
    @Autowired
    private ESEquipmentsMapper esEquipmentsMapper;

    // Order matters: the database must exist before either table is created.
    // Idempotence depends on the mappers' DDL (presumably CREATE ... IF NOT
    // EXISTS) — TODO confirm against the mapper XML.
    public void init() {
        indicatorDataMapper.createDB();
        indicatorDataMapper.createTable();
        esEquipmentsMapper.createTable();
    }
}
package com.yeejoin.equip.utils;
import org.apache.http.HttpResponse;
import org.apache.http.conn.ConnectionKeepAliveStrategy;
import org.apache.http.protocol.HTTP;
import org.apache.http.protocol.HttpContext;
import java.util.Arrays;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.math.NumberUtils;
/**
 * Keep-alive strategy that honours the server's {@code Keep-Alive: timeout=N}
 * hint and falls back to 30 seconds when the hint is absent or non-numeric.
 */
public class CustomConnectionKeepAliveStrategy implements ConnectionKeepAliveStrategy {

    /** Fallback keep-alive, in seconds, when the server sends no usable hint. */
    private static final long DEFAULT_SECONDS = 30;

    /** Shared stateless instance. */
    public static final CustomConnectionKeepAliveStrategy INSTANCE = new CustomConnectionKeepAliveStrategy();

    /**
     * @return keep-alive duration in milliseconds: the server-provided
     *         {@code timeout} element when present and numeric, else 30s
     */
    @Override
    public long getKeepAliveDuration(HttpResponse response, HttpContext context) {
        // FIX: the "timeout" token is a header ELEMENT of the "Keep-Alive"
        // header, not a header name. The previous filter compared the header
        // name (always "Keep-Alive") against "timeout", so it never matched
        // and the strategy always returned the default.
        return Arrays.stream(response.getHeaders(HTTP.CONN_KEEP_ALIVE))
                .flatMap(header -> Arrays.stream(header.getElements()))
                .filter(element -> StringUtils.equalsIgnoreCase(element.getName(), "timeout")
                        && StringUtils.isNumeric(element.getValue()))
                .findFirst()
                .map(element -> NumberUtils.toLong(element.getValue(), DEFAULT_SECONDS))
                .orElse(DEFAULT_SECONDS) * 1000;
    }
}
\ No newline at end of file
package com.yeejoin.equip.utils;
import com.yeejoin.equip.config.ElasticSearchConfig;
import lombok.extern.slf4j.Slf4j;
import org.apache.http.HttpHost;
import org.apache.http.auth.AuthScope;
import org.apache.http.auth.UsernamePasswordCredentials;
import org.apache.http.client.CredentialsProvider;
import org.apache.http.impl.client.BasicCredentialsProvider;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.client.RestClientBuilder;
import org.elasticsearch.client.RestHighLevelClient;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
/**
 * Elasticsearch client configuration — builds the singleton
 * {@link RestHighLevelClient} for a single-node cluster.
 *
 * @author LiuLin
 * @date 2023-08-08 16:30
 */
@Slf4j
@Configuration
public class ElasticSearchClient {

    @Autowired(required = false)
    private ElasticSearchConfig elasticSearchConfig;

    /**
     * Builds the client from the configured {@code host:port} address with
     * basic auth, timeouts, connection limits and a custom keep-alive strategy.
     * (Bean name defaults to the method name when unspecified; it is pinned
     * explicitly here.)
     *
     * @return configured high-level REST client
     */
    @Bean(name = "restHighLevelClient")
    public RestHighLevelClient restHighLevelClient() {
        // Single-node ES: split "host:port" once instead of twice.
        String[] hostAndPort = elasticSearchConfig.getAddress().split(":");
        HttpHost httpHost = new HttpHost(hostAndPort[0], Integer.parseInt(hostAndPort[1]));
        RestClientBuilder builder = RestClient.builder(httpHost);
        // Basic credentials.
        CredentialsProvider credentialsProvider = new BasicCredentialsProvider();
        credentialsProvider.setCredentials(AuthScope.ANY, new UsernamePasswordCredentials(elasticSearchConfig.getUsername(), elasticSearchConfig.getPassword()));
        // Connect / socket / request timeouts.
        builder.setRequestConfigCallback(requestConfigBuilder -> {
            requestConfigBuilder.setConnectTimeout(elasticSearchConfig.getConnectTimeout());
            requestConfigBuilder.setSocketTimeout(elasticSearchConfig.getSocketTimeout());
            requestConfigBuilder.setConnectionRequestTimeout(elasticSearchConfig.getConnectionRequestTimeout());
            return requestConfigBuilder;
        });
        // Connection pool sizing plus the Keep-Alive strategy tuned above.
        builder.setHttpClientConfigCallback(httpClientBuilder -> {
            httpClientBuilder.setMaxConnTotal(elasticSearchConfig.getMaxConnectNum());
            httpClientBuilder.setMaxConnPerRoute(elasticSearchConfig.getMaxConnectPerRoute());
            httpClientBuilder.setDefaultCredentialsProvider(credentialsProvider);
            httpClientBuilder.setKeepAliveStrategy(CustomConnectionKeepAliveStrategy.INSTANCE);
            return httpClientBuilder;
        });
        return new RestHighLevelClient(builder);
    }
}
\ No newline at end of file
package com.yeejoin.equip.utils;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.yeejoin.equip.entity.Book;
import com.yeejoin.equip.entity.EsEntity;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.ArrayUtils;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.DocWriteRequest;
import org.elasticsearch.action.DocWriteResponse;
import org.elasticsearch.action.bulk.BulkItemResponse;
import org.elasticsearch.action.bulk.BulkRequest;
import org.elasticsearch.action.bulk.BulkResponse;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.search.ClearScrollRequest;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.search.SearchScrollRequest;
import org.elasticsearch.action.support.WriteRequest;
import org.elasticsearch.action.update.UpdateRequest;
import org.elasticsearch.action.update.UpdateResponse;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.core.TimeValue;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.search.Scroll;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.function.Function;
/**
 * Elasticsearch helper wrapping common single/bulk update and scroll-search
 * operations on the shared {@link RestHighLevelClient}.
 *
 * @author LiuLin
 * @date 2023-08-08 16:30
 */
@Slf4j
@Component
public class ElasticSearchUtil {

    /** Scroll context keep-alive, in milliseconds. */
    private static final long SCROLL_TIMEOUT = 180000;
    /** Page size per scroll batch. */
    private static final int SIZE = 1000;

    @Autowired
    private RestHighLevelClient restHighLevelClient;

    /**
     * Upserts a document (creates it when absent) and refreshes immediately so
     * the change is visible to searches.
     *
     * @param indexName index name
     * @param id        document id
     * @param paramJson partial document as a JSON string
     * @return true when the document was created, updated, or a no-op; false on failure
     */
    public boolean updateData(String indexName, String id, String paramJson) {
        log.info("更新ES数据,value:{}", id);
        UpdateRequest updateRequest = new UpdateRequest(indexName, id);
        // Create the document when it does not exist yet.
        updateRequest.docAsUpsert(true);
        // Make the change searchable immediately.
        updateRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
        updateRequest.doc(paramJson, XContentType.JSON);
        try {
            UpdateResponse updateResponse = restHighLevelClient.update(updateRequest, RequestOptions.DEFAULT);
            if (DocWriteResponse.Result.CREATED.equals(updateResponse.getResult())) {
                log.info("索引:【{}】,主键:【{}】新增成功", indexName, id);
                return true;
            } else if (DocWriteResponse.Result.UPDATED.equals(updateResponse.getResult())) {
                log.info("索引:【{}】,主键:【{}】修改成功", indexName, id);
                return true;
            } else if (DocWriteResponse.Result.NOOP.equals(updateResponse.getResult())) {
                log.info("索引:[{}],主键:[{}]无变化", indexName, id);
                return true;
            }
        } catch (IOException e) {
            log.error("索引:[{}],主键:【{}】", indexName, id, e);
            return false;
        }
        return false;
    }

    /**
     * Single-document update (no upsert) that waits until the change is
     * searchable before returning.
     *
     * @param indexName index name
     * @param id        document id
     * @param data      POJO serialized into a field map
     * @return true when the document was actually updated
     * @throws IOException on transport failure
     */
    public boolean updateData(String indexName, String id, Object data) throws IOException {
        UpdateRequest updateRequest = new UpdateRequest(indexName, id);
        // Prepare the partial document.
        String jsonString = JSONObject.toJSONString(data);
        Map jsonMap = JSONObject.parseObject(jsonString, Map.class);
        updateRequest.doc(jsonMap);
        updateRequest.timeout(TimeValue.timeValueSeconds(1));
        updateRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.WAIT_UNTIL);
        UpdateResponse update = restHighLevelClient.update(updateRequest, RequestOptions.DEFAULT);
        // FIX: compare the operation-result enum. The previous code compared
        // getGetResult() (a GetResult fetch payload) against the enum constant,
        // which could never be equal, so the method always returned false.
        return update.getResult() == DocWriteResponse.Result.UPDATED;
    }

    /**
     * Bulk-applies the same partial document to every id in the list.
     *
     * @param indexName index name
     * @param idList    document ids to update (must be non-null)
     * @param map       partial document applied to each id
     * @return true when the whole bulk succeeded without item failures
     */
    public boolean update(String indexName, List<String> idList, Map map) {
        // One bulk request for all ids.
        BulkRequest bulkRequest = new BulkRequest();
        for (String id : idList) {
            UpdateRequest updateRequest = new UpdateRequest(indexName, id).doc(map);
            bulkRequest.add(updateRequest);
        }
        try {
            bulkRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
            BulkResponse bulk = restHighLevelClient.bulk(bulkRequest, RequestOptions.DEFAULT);
            // FIX: hasFailures() is true when something FAILED; returning it
            // directly inverted the success flag reported to callers.
            return !bulk.hasFailures();
        } catch (IOException e) {
            // FIX: log the failure instead of silently returning false.
            log.error("索引:[{}]批量修改失败", indexName, e);
            return false;
        }
    }

    /**
     * Bulk-updates documents, one partial doc per entry.
     *
     * @param index index name
     * @param list  entries carrying id + data payload
     * @author LiuLin
     */
    public <T> void updateBatch(String index, List<EsEntity<T>> list) {
        BulkRequest request = new BulkRequest();
        list.forEach(item -> request.add(new UpdateRequest(index, item.getId())
                .doc(JSON.toJSONString(item.getData()), XContentType.JSON)));
        try {
            restHighLevelClient.bulk(request, RequestOptions.DEFAULT);
            list.forEach(s -> log.info("===========索引:【{}】,主键:【{}】修改成功", index, s.getId()));
        } catch (Exception e) {
            log.error("索引:[{}]", index, e);
        }
    }

    /**
     * Bulk-inserts documents, one index request per entry.
     *
     * @param index index name
     * @param list  entries carrying id + data payload
     * @author LiuLin
     */
    public <T> void insertBatch(String index, List<EsEntity<T>> list) {
        BulkRequest request = new BulkRequest();
        list.forEach(item -> request.add(new IndexRequest(index).id(item.getId())
                .source(JSON.toJSONString(item.getData()), XContentType.JSON)));
        try {
            restHighLevelClient.bulk(request, RequestOptions.DEFAULT);
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * Asynchronous upsert with immediate refresh; success/failure is only logged.
     *
     * @param indexName index name
     * @param id        document id
     * @param paramJson partial document as a JSON string
     */
    public void updateDataAsync(String indexName, String id, String paramJson) throws IOException {
        UpdateRequest updateRequest = new UpdateRequest(indexName, id);
        updateRequest.docAsUpsert(true);
        updateRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
        updateRequest.doc(paramJson, XContentType.JSON);
        restHighLevelClient.updateAsync(updateRequest, RequestOptions.DEFAULT, new ActionListener<UpdateResponse>() {
            @Override
            public void onResponse(UpdateResponse updateResponse) {
                if (DocWriteResponse.Result.UPDATED.equals(updateResponse.getResult())) {
                    log.info("索引:【{}】,主键:【{}】修改成功", indexName, id);
                }
            }

            @Override
            public void onFailure(Exception e) {
                log.error("索引:[{}],主键:【{}】", indexName, id, e);
            }
        });
    }

    /**
     * Scroll-searches an index and maps every hit through {@code fun},
     * clearing all scroll contexts when done.
     *
     * @param indices index to search
     * @param query   query builder (null for match-all)
     * @param fun     mapper from a raw hit to the result type
     * @param <T>     result element type
     * @return all mapped hits
     * @throws Exception on search/transport failure
     */
    public <T> List<T> searchResponse(String indices, QueryBuilder query, Function<SearchHit, T> fun) throws Exception {
        SearchRequest request = new SearchRequest(indices);
        Scroll scroll = new Scroll(TimeValue.timeValueMillis(SCROLL_TIMEOUT));
        SearchSourceBuilder sourceBuilder = new SearchSourceBuilder();
        sourceBuilder.query(query);
        sourceBuilder.size(SIZE);
        request.scroll(scroll);
        request.source(sourceBuilder);
        List<String> scrollIdList = new ArrayList<>();
        List<T> result = new ArrayList<>();
        SearchResponse searchResponse = restHighLevelClient.search(request, RequestOptions.DEFAULT);
        String scrollId = searchResponse.getScrollId();
        SearchHit[] hits = searchResponse.getHits().getHits();
        scrollIdList.add(scrollId);
        try {
            while (ArrayUtils.isNotEmpty(hits)) {
                for (SearchHit hit : hits) {
                    result.add(fun.apply(hit));
                }
                // A short page means the scroll is exhausted; skip the extra round-trip.
                if (hits.length < SIZE) {
                    break;
                }
                SearchScrollRequest searchScrollRequest = new SearchScrollRequest(scrollId);
                searchScrollRequest.scroll(scroll);
                SearchResponse searchScrollResponse = restHighLevelClient.scroll(searchScrollRequest, RequestOptions.DEFAULT);
                scrollId = searchScrollResponse.getScrollId();
                hits = searchScrollResponse.getHits().getHits();
                scrollIdList.add(scrollId);
            }
        } finally {
            // Always release server-side scroll contexts, even on mapper failure.
            ClearScrollRequest clearScrollRequest = new ClearScrollRequest();
            clearScrollRequest.setScrollIds(scrollIdList);
            restHighLevelClient.clearScroll(clearScrollRequest, RequestOptions.DEFAULT);
        }
        return result;
    }
}
//package com.yeejoin.equip.utils;
//
//import com.yeejoin.equip.config.ElasticSearchConfig;
//import lombok.RequiredArgsConstructor;
//import org.apache.http.HttpHost;
//import org.apache.http.auth.AuthScope;
//import org.apache.http.auth.UsernamePasswordCredentials;
//import org.apache.http.client.CredentialsProvider;
//import org.apache.http.impl.client.BasicCredentialsProvider;
//import org.elasticsearch.client.RestClient;
//import org.elasticsearch.client.RestClientBuilder;
//import org.elasticsearch.client.RestHighLevelClient;
//import org.springframework.beans.factory.annotation.Autowired;
//import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean;
//import org.springframework.boot.context.properties.EnableConfigurationProperties;
//import org.springframework.context.annotation.Bean;
//import org.springframework.context.annotation.Configuration;
//import org.springframework.util.Assert;
//import org.springframework.util.StringUtils;
//import java.util.ArrayList;
//import java.util.List;
//
///**
// * Elasticsearch自动配置
// *
// * @author LiuLin
// * @version v1.0
// * @since 2024-01-15
// */
//@Configuration
//@RequiredArgsConstructor(onConstructor_ = @Autowired)
//@EnableConfigurationProperties(ElasticSearchConfig.class)
//public class ElasticsearchAutoConfiguration {
//
// private final ElasticSearchConfig elasticSearchConfig;
// private final List<HttpHost> httpHosts = new ArrayList<>();
//
// @Bean
// @ConditionalOnMissingBean
// public RestHighLevelClient restHighLevelClient() {
//
// List<String> clusterNodes = elasticSearchConfig.getClusterNodes();
// clusterNodes.forEach(node -> {
// try {
// String[] parts = StringUtils.split(node, ":");
// Assert.notNull(parts, "Must defined");
// Assert.state(parts.length == 2, "Must be defined as 'host:port'");
// httpHosts.add(new HttpHost(parts[0], Integer.parseInt(parts[1]), elasticSearchConfig.getSchema()));
// } catch (Exception e) {
// throw new IllegalStateException("Invalid ES nodes " + "property '" + node + "'", e);
// }
// });
// RestClientBuilder builder = RestClient.builder(httpHosts.toArray(new HttpHost[0]));
//
// return getRestHighLevelClient(builder, elasticSearchConfig);
// }
//
//
// /**
// * get restHistLevelClient
// *
// * @param builder RestClientBuilder
// * @param elasticSearchConfig elasticsearch default properties
// * @return {@link RestHighLevelClient}
// */
// private static RestHighLevelClient getRestHighLevelClient(RestClientBuilder builder, ElasticSearchConfig elasticSearchConfig) {
//
// // Callback used the default {@link RequestConfig} being set to the {@link CloseableHttpClient}
// builder.setRequestConfigCallback(requestConfigBuilder -> {
// requestConfigBuilder.setConnectTimeout(elasticSearchConfig.getConnectTimeout());
// requestConfigBuilder.setSocketTimeout(elasticSearchConfig.getSocketTimeout());
// requestConfigBuilder.setConnectionRequestTimeout(elasticSearchConfig.getConnectionRequestTimeout());
// return requestConfigBuilder;
// });
//
// // Callback used to customize the {@link CloseableHttpClient} instance used by a {@link RestClient} instance.
// builder.setHttpClientConfigCallback(httpClientBuilder -> {
// httpClientBuilder.setMaxConnTotal(elasticSearchConfig.getMaxConnectTotal());
// httpClientBuilder.setMaxConnPerRoute(elasticSearchConfig.getMaxConnectPerRoute());
// return httpClientBuilder;
// });
//
// // Callback used the basic credential auth
// ElasticSearchConfig.Account account = elasticSearchConfig.getAccount();
// if (!StringUtils.isEmpty(account.getUsername()) && !StringUtils.isEmpty(account.getUsername())) {
// final CredentialsProvider credentialsProvider = new BasicCredentialsProvider();
//
// credentialsProvider.setCredentials(AuthScope.ANY, new UsernamePasswordCredentials(account.getUsername(), account.getPassword()));
// }
// return new RestHighLevelClient(builder);
// }
//}
\ No newline at end of file
package com.yeejoin.equip.utils;
import com.google.common.util.concurrent.ThreadFactoryBuilder;

import java.util.concurrent.Executor;
import java.util.concurrent.LinkedBlockingDeque;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
/**
 * Factory for the business thread pool used by the MQTT message flow.
 *
 * @author LiuLin
 * @date 2023-07-12 18:11
 */
public class ExecutorFactory {

    /** Number of available CPU cores at class-load time. */
    private static final Integer DEFAULT_THREAD_NUM = Runtime.getRuntime().availableProcessors();
    /** Core pool size: 2x cores (workload is presumably I/O-heavy — TODO confirm). */
    private static final Integer THREAD_NUM_BIZ = DEFAULT_THREAD_NUM * 2;

    /** Utility class: not instantiable. */
    private ExecutorFactory() {
    }

    /**
     * Builds the business executor: core = 2x cores, max = 4x cores, bounded
     * queue of 1024, threads named {@code mqtt-pool-N}.
     *
     * <p>Uses a plain JDK {@link ThreadFactory} instead of Guava's
     * ThreadFactoryBuilder — naming was the only feature used, and this removes
     * the third-party dependency from the hot path.
     *
     * @return a new thread pool executor
     */
    public static Executor buildBizExecutor() {
        ThreadFactory namedFactory = new ThreadFactory() {
            private final AtomicInteger seq = new AtomicInteger();

            @Override
            public Thread newThread(Runnable task) {
                // Matches Guava's "mqtt-pool-%d": non-daemon, default priority.
                return new Thread(task, "mqtt-pool-" + seq.getAndIncrement());
            }
        };
        return new ThreadPoolExecutor(
                THREAD_NUM_BIZ,
                THREAD_NUM_BIZ * 2,
                0L,
                TimeUnit.MILLISECONDS,
                new LinkedBlockingDeque<>(1024),
                namedFactory
        );
    }
}
package com.yeejoin.equip.utils;
import org.apache.http.HeaderElement;
import org.apache.http.HeaderElementIterator;
import org.apache.http.HttpResponse;
import org.apache.http.conn.ConnectionKeepAliveStrategy;
import org.apache.http.message.BasicHeaderElementIterator;
import org.apache.http.protocol.HTTP;
import org.apache.http.protocol.HttpContext;
import org.apache.http.util.Args;
/**
 * Keep-alive strategy that uses the server's {@code Keep-Alive: timeout=N}
 * element when present, capping idle time at 10 minutes (600 seconds)
 * otherwise.
 */
public class OdpConnectionKeepAliveStrategy implements ConnectionKeepAliveStrategy {

    /**
     * Instance object.
     */
    public static final OdpConnectionKeepAliveStrategy INSTANCE = new OdpConnectionKeepAliveStrategy();

    @Override
    public long getKeepAliveDuration(final HttpResponse response, final HttpContext context) {
        Args.notNull(response, "HTTP response");
        final HeaderElementIterator elements = new BasicHeaderElementIterator(
                response.headerIterator(HTTP.CONN_KEEP_ALIVE));
        while (elements.hasNext()) {
            final HeaderElement element = elements.nextElement();
            // Only a numeric "timeout=N" element is usable; skip everything else.
            if (element.getValue() == null || !element.getName().equalsIgnoreCase("timeout")) {
                continue;
            }
            try {
                return Long.parseLong(element.getValue()) * 1000;
            } catch (final NumberFormatException ignored) {
                // Non-numeric hint — fall through to the next element.
            }
        }
        // No usable server hint: cap idle time at 10 minutes.
        return 600 * 1000;
    }
}
\ No newline at end of file
package com.yeejoin.equip.utils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.stereotype.Component;
/**
 * Minimal Redis helper over the injected {@link RedisTemplate}.
 *
 * @author LiuLin
 */
@Component
public class RedisUtils {

    @Autowired
    private RedisTemplate<String, Object> redisTemplate;

    /**
     * Reads a plain cached value.
     *
     * @param key cache key
     * @return the cached value cast to String, or null when absent
     */
    public String get(String key) {
        Object cached = redisTemplate.opsForValue().get(key);
        return (String) cached;
    }

    /**
     * Stores a plain value without expiry.
     *
     * @param key   cache key
     * @param value value to cache
     * @return always true (kept for caller compatibility)
     */
    public boolean set(String key, Object value) {
        redisTemplate.opsForValue().set(key, value);
        return true;
    }

    /**
     * Checks whether a key exists.
     *
     * @param key cache key
     * @return true when the key exists, false otherwise
     */
    public boolean hasKey(String key) {
        Boolean exists = redisTemplate.hasKey(key);
        return Boolean.TRUE.equals(exists);
    }
}
package com.yeejoin.equip.utils;
import org.springframework.beans.BeansException;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;
import org.springframework.stereotype.Component;
import java.lang.annotation.Annotation;
import java.util.Map;
/**
 * Static accessor for the Spring {@link ApplicationContext}, populated via the
 * {@link ApplicationContextAware} callback during context startup.
 *
 * @author duanwei
 * @since 2020-05-28 13:57
 */
@Component
public class SpringUtils implements ApplicationContextAware {

    private static ApplicationContext applicationContext;

    /**
     * Captures the context injected by Spring's aware callback.
     *
     * @param applicationContext the running application context
     * @throws BeansException never thrown here; declared by the interface
     */
    @Override
    public void setApplicationContext(ApplicationContext applicationContext) throws BeansException {
        SpringUtils.applicationContext = applicationContext;
    }

    /**
     * Looks up a bean by name from the captured context.
     *
     * @param name bean name
     * @return the bean instance
     */
    public static Object getBean(String name) {
        return getApplicationContext().getBean(name);
    }

    private static ApplicationContext getApplicationContext() {
        return applicationContext;
    }
}
# MySQL datasource configuration
spring.datasource.mysql-server.driver-class-name=com.mysql.cj.jdbc.Driver
spring.datasource.mysql-server.jdbc-url = jdbc:mysql://139.9.173.44:3306/equipment?useUnicode=true&allowMultiQueries=true&characterEncoding=utf-8&useJDBCCompliantTimezoneShift=true&useLegacyDatetimeCode=false&serverTimezone=Asia/Shanghai
spring.datasource.mysql-server.username=root
spring.datasource.mysql-server.password=Yeejoin@2020
spring.datasource.mysql-server.type=com.zaxxer.hikari.HikariDataSource
spring.datasource.mysql-server.hikari.pool-name=DatebookHikariCP
spring.datasource.mysql-server.hikari.minimum-idle= 3
spring.datasource.mysql-server.hikari.maximum-pool-size= 30
spring.datasource.mysql-server.hikari.auto-commit= true
spring.datasource.mysql-server.hikari.idle-timeout= 500000
spring.datasource.mysql-server.hikari.max-lifetime= 1800000
spring.datasource.mysql-server.hikari.connection-timeout= 60000
spring.datasource.mysql-server.hikari.connection-test-query= SELECT 1
#TDengine datasource (original comment garbled by encoding)
spring.datasource.tdengine-server.driver-class-name=com.taosdata.jdbc.rs.RestfulDriver
spring.datasource.tdengine-server.jdbc-url = jdbc:TAOS-RS://139.9.170.47:6041/iot_data?user=root&password=taosdata&timezone=GMT%2b8&allowMultiQueries=true
spring.datasource.tdengine-server.username=root
spring.datasource.tdengine-server.password=taosdata
spring.datasource.tdengine-server.type=com.zaxxer.hikari.HikariDataSource
spring.datasource.tdengine-server.hikari.minimum-idle= 30
spring.datasource.tdengine-server.hikari.maximum-pool-size= 150
spring.datasource.tdengine-server.hikari.auto-commit= true
spring.datasource.tdengine-server.hikari.pool-name=TDEngineDruidCP
spring.datasource.tdengine-server.hikari.idle-timeout= 500000
spring.datasource.tdengine-server.hikari.max-lifetime= 1800000
spring.datasource.tdengine-server.hikari.connection-timeout= 60000
spring.datasource.tdengine-server.hikari.connection-test-query= show tables
spring.redis.database=0
spring.redis.host=139.9.173.44
spring.redis.port=6379
spring.redis.password=yeejoin@2020
spring.redis.timeout=3000
spring.redis.lettuce.pool.max-active=300
spring.redis.lettuce.pool.max-wait=-1
spring.redis.lettuce.pool.max-idle=64
spring.redis.lettuce.pool.min-idle=0
eureka.client.registry-fetch-interval-seconds=5
management.endpoint.health.show-details=always
management.endpoints.web.exposure.include=*
eureka.instance.health-check-url-path=/actuator/health
eureka.instance.lease-expiration-duration-in-seconds=10
eureka.instance.lease-renewal-interval-in-seconds=5
eureka.instance.metadata-map.management.context-path=${server.servlet.context-path}/actuator
eureka.instance.status-page-url-path=/actuator/info
eureka.instance.metadata-map.management.api-docs=http://localhost:${server.port}${server.servlet.context-path}/doc.html
eureka.instance.hostname= 139.9.173.44
eureka.instance.prefer-ip-address = true
eureka.client.serviceUrl.defaultZone=http://${spring.security.user.name}:${spring.security.user.password}@139.9.173.44:10001/eureka/
spring.security.user.name=admin
spring.security.user.password=a1234560
emqx.clean-session=true
emqx.client-id=${spring.application.name}-${random.int[1,65536]}
emqx.biz-client-id=consumer-${random.int[1,65536]}
emqx.broker=tcp://139.9.173.44:1883
emqx.client-user-name=admin
emqx.client-password=public
emqx.max-inflight=1000
emqx.keep-alive-interval=10
emqx.biz-topic[0]= iot/data/perspective
#kafka
spring.kafka.bootstrap-servers=139.9.173.44:9092
spring.kafka.producer.retries=1
spring.kafka.producer.bootstrap-servers=139.9.173.44:9092
spring.kafka.producer.batch-size=16384
spring.kafka.producer.buffer-memory=33554432
spring.kafka.producer.acks=1
spring.kafka.producer.key-serializer=org.apache.kafka.common.serialization.StringSerializer
spring.kafka.producer.value-serializer=org.apache.kafka.common.serialization.StringSerializer
spring.kafka.consumer.group-id=messageConsumerGroup
spring.kafka.consumer.bootstrap-servers=139.9.173.44:9092
spring.kafka.consumer.enable-auto-commit=true
spring.kafka.consumer.auto-offset-reset=earliest
spring.kafka.consumer.key-deserializer=org.apache.kafka.common.serialization.StringDeserializer
spring.kafka.consumer.value-deserializer=org.apache.kafka.common.serialization.StringDeserializer
spring.kafka.consumer.fetch-max-wait= 1000
spring.kafka.consumer.max-poll-records=1000
spring.kafka.listener.ack-mode=manual_immediate
spring.kafka.listener.type=batch
kafka.alarm.topic=EQUIPMENT_ALARM
kafka.topic=PERSPECTIVE
emq.topic=iot/data/perspective
elasticsearch.address= 139.9.173.44:9200
elasticsearch.username= elastic
elasticsearch.password= Yeejoin@2020
elasticsearch.scheme= http
elasticsearch.connectTimeout= 50000
elasticsearch.socketTimeout= 50000
elasticsearch.connectionRequestTimeout= 50000
elasticsearch.maxConnectNum= 1000
elasticsearch.maxConnectPerRoute= 1000
\ No newline at end of file
#mysql datasource (original comment garbled by encoding)
spring.datasource.mysql-server.driver-class-name=com.kingbase8.Driver
spring.datasource.mysql-server.jdbc-url = jdbc:kingbase8://10.20.1.176:54321/equipment?useUnicode=true&allowMultiQueries=true&characterEncoding=utf-8&useJDBCCompliantTimezoneShift=true&useLegacyDatetimeCode=false&serverTimezone=Asia/Shanghai&currentSchema=root
spring.datasource.mysql-server.username=root
spring.datasource.mysql-server.password=Yeejoin@2020
spring.datasource.mysql-server.type=com.zaxxer.hikari.HikariDataSource
spring.datasource.mysql-server.hikari.pool-name=DatebookHikariCP
spring.datasource.mysql-server.hikari.minimum-idle= 3
spring.datasource.mysql-server.hikari.maximum-pool-size= 30
spring.datasource.mysql-server.hikari.auto-commit= true
spring.datasource.mysql-server.hikari.idle-timeout= 500000
spring.datasource.mysql-server.hikari.max-lifetime= 1800000
spring.datasource.mysql-server.hikari.connection-timeout= 60000
spring.datasource.mysql-server.hikari.connection-test-query= SELECT 1
#TDengine datasource (original comment garbled by encoding)
spring.datasource.tdengine-server.driver-class-name=com.taosdata.jdbc.rs.RestfulDriver
spring.datasource.tdengine-server.jdbc-url = jdbc:TAOS-RS://139.9.170.47:6041/iot_data?user=root&password=taosdata&timezone=GMT%2b8&allowMultiQueries=true
spring.datasource.tdengine-server.username=root
spring.datasource.tdengine-server.password=taosdata
spring.datasource.tdengine-server.type=com.zaxxer.hikari.HikariDataSource
spring.datasource.tdengine-server.hikari.minimum-idle= 30
spring.datasource.tdengine-server.hikari.maximum-pool-size= 150
spring.datasource.tdengine-server.hikari.auto-commit= true
spring.datasource.tdengine-server.hikari.pool-name=TDEngineDruidCP
spring.datasource.tdengine-server.hikari.idle-timeout= 500000
spring.datasource.tdengine-server.hikari.max-lifetime= 1800000
spring.datasource.tdengine-server.hikari.connection-timeout= 60000
spring.datasource.tdengine-server.hikari.connection-test-query= show tables
spring.redis.database=0
spring.redis.host=139.9.173.44
spring.redis.port=6379
spring.redis.password=yeejoin@2020
spring.redis.timeout=3000
spring.redis.lettuce.pool.max-active=300
spring.redis.lettuce.pool.max-wait=-1
spring.redis.lettuce.pool.max-idle=64
spring.redis.lettuce.pool.min-idle=0
eureka.client.registry-fetch-interval-seconds=5
management.endpoint.health.show-details=always
management.endpoints.web.exposure.include=*
eureka.instance.health-check-url-path=/actuator/health
eureka.instance.lease-expiration-duration-in-seconds=10
eureka.instance.lease-renewal-interval-in-seconds=5
eureka.instance.metadata-map.management.context-path=${server.servlet.context-path}/actuator
eureka.instance.status-page-url-path=/actuator/info
eureka.instance.metadata-map.management.api-docs=http://localhost:${server.port}${server.servlet.context-path}/doc.html
eureka.instance.hostname= 139.9.173.44
eureka.instance.prefer-ip-address = true
eureka.client.serviceUrl.defaultZone=http://${spring.security.user.name}:${spring.security.user.password}@139.9.173.44:10001/eureka/
spring.security.user.name=admin
spring.security.user.password=a1234560
emqx.clean-session=true
emqx.client-id=${spring.application.name}-${random.int[1,65536]}
emqx.biz-client-id=consumer-${random.int[1,65536]}
emqx.broker=tcp://139.9.173.44:1883
emqx.client-user-name=admin
emqx.client-password=public
emqx.max-inflight=1000
emqx.keep-alive-interval=10
emqx.biz-topic[0]= iot/data/perspective
#kafka
spring.kafka.bootstrap-servers=139.9.173.44:9092
spring.kafka.producer.retries=1
spring.kafka.producer.bootstrap-servers=139.9.173.44:9092
spring.kafka.producer.batch-size=16384
spring.kafka.producer.buffer-memory=33554432
spring.kafka.producer.acks=1
spring.kafka.producer.key-serializer=org.apache.kafka.common.serialization.StringSerializer
spring.kafka.producer.value-serializer=org.apache.kafka.common.serialization.StringSerializer
spring.kafka.consumer.group-id=messageConsumerGroup
spring.kafka.consumer.bootstrap-servers=139.9.173.44:9092
spring.kafka.consumer.enable-auto-commit=true
spring.kafka.consumer.auto-offset-reset=earliest
spring.kafka.consumer.key-deserializer=org.apache.kafka.common.serialization.StringDeserializer
spring.kafka.consumer.value-deserializer=org.apache.kafka.common.serialization.StringDeserializer
spring.kafka.consumer.fetch-max-wait= 1000
spring.kafka.consumer.max-poll-records=1000
spring.kafka.listener.ack-mode=manual_immediate
spring.kafka.listener.type=batch
kafka.alarm.topic=EQUIPMENT_ALARM
kafka.topic=PERSPECTIVE
emq.topic=iot/data/perspective
elasticsearch.address= 139.9.173.44:9200
elasticsearch.username= elastic
elasticsearch.password= Yeejoin@2020
elasticsearch.scheme= http
elasticsearch.connectTimeout= 50000
elasticsearch.socketTimeout= 50000
elasticsearch.connectionRequestTimeout= 50000
elasticsearch.maxConnectNum= 1000
elasticsearch.maxConnectPerRoute= 1000
\ No newline at end of file
spring.application.name=AMOS-DATA-EQUIP
server.servlet.context-path=/data-equip
server.port=8100
spring.profiles.active=dev
server.compression.enabled=true
spring.jackson.dateFormat=yyyy-MM-dd HH:mm:ss
spring.servlet.multipart.maxFileSize=3MB
spring.servlet.multipart.maxRequestSize=3MB
mybatis-plus.mapper-locations=classpath:mapper/*Mapper.xml
#\u65E5\u5FD7\u7EA7\u522BALL < TRACE < DEBUG < INFO < WARN < ERROR < OFF
logging.level.root=INFO
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8"?>
<configuration scan="true" scanPeriod="60 seconds" debug="false">
<springProperty scope="context" name="LOG_LEVEL" source="logging.level.root" defaultValue="info"/>
<springProperty scope="context" name="MAX_FILE_SIZE" source="logging.max_file_size" defaultValue="128MB"/>
<springProperty scope="context" name="MAX_HISTORY" source="log.max_history" defaultValue="7"/>
<springProperty scope="context" name="PATTERN" source="log.pattern"
defaultValue="-|%d{yyyy-MM-dd HH:mm:ss.SSS}|%-5level|%X{tid}|%thread|%logger{36}.%M:%L-%msg%n"/>
<property name="LOG_NAME" value="equip"/>
<property name="LOG_PATH" value="./logs"/>
<property name="LOG_DIR" value="${LOG_PATH}/${LOG_NAME}/%d{yyyyMMdd}"/>
<property name="CHARSET" value="UTF-8"/>
<!-- 控制台输出 -->
<appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
<encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
<pattern>${PATTERN}</pattern>
</encoder>
<filter class="ch.qos.logback.classic.filter.ThresholdFilter">
<level>DEBUG</level>
</filter>
</appender>
<appender name="ERROR" class="ch.qos.logback.core.rolling.RollingFileAppender">
<filter class="ch.qos.logback.classic.filter.LevelFilter">
<level>ERROR</level> <!-- 只记录error级别的日志 -->
<onMatch>ACCEPT</onMatch>
<onMismatch>DENY</onMismatch>
</filter>
<!-- 定义文件的名称 -->
<file>${LOG_PATH}/${LOG_NAME}/error.log</file>
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<FileNamePattern>${LOG_DIR}/err_${LOG_NAME}%i.log</FileNamePattern>
<MaxHistory>${MAX_HISTORY}</MaxHistory>
<timeBasedFileNamingAndTriggeringPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP">
<maxFileSize>${MAX_FILE_SIZE}</maxFileSize>
</timeBasedFileNamingAndTriggeringPolicy>
</rollingPolicy>
<layout class="ch.qos.logback.classic.PatternLayout">
<Pattern>${PATTERN}</Pattern>
</layout>
</appender>
<appender name="FILE_ALL" class="ch.qos.logback.core.rolling.RollingFileAppender">
<!-- 定义文件的名称 -->
<file>${LOG_PATH}/${LOG_NAME}/total.log</file>
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<FileNamePattern>${LOG_DIR}/all_${LOG_NAME}%i.log</FileNamePattern>
<MaxHistory>${MAX_HISTORY}</MaxHistory>
<timeBasedFileNamingAndTriggeringPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP">
<maxFileSize>${MAX_FILE_SIZE}</maxFileSize>
</timeBasedFileNamingAndTriggeringPolicy>
</rollingPolicy>
<layout class="ch.qos.logback.classic.PatternLayout">
<pattern>${PATTERN}</pattern>
</layout>
</appender>
<appender name="ASYNC_STDOUT" class="ch.qos.logback.classic.AsyncAppender">
<appender-ref ref="STDOUT"/>
<queueSize>256</queueSize>
<neverBlock>true</neverBlock>
<includeCallerData>true</includeCallerData>
</appender>
<appender name="ASYNC_FILE_ALL" class="ch.qos.logback.classic.AsyncAppender">
<appender-ref ref="FILE_ALL"/>
<queueSize>1024</queueSize>
<neverBlock>true</neverBlock>
<includeCallerData>true</includeCallerData>
</appender>
<appender name="ASYNC_ERROR" class="ch.qos.logback.classic.AsyncAppender">
<appender-ref ref="ERROR"/>
<queueSize>256</queueSize>
<neverBlock>true</neverBlock>
<includeCallerData>true</includeCallerData>
</appender>
<!--统一日志输出级别,其他appender中如果有高于此处等级设置的也会被输出 -->
<root level="${LOG_LEVEL}">
<springProfile name="test,dev">
<appender-ref ref="ASYNC_STDOUT"/>
</springProfile>
<appender-ref ref="ASYNC_FILE_ALL"/>
<appender-ref ref="ASYNC_ERROR"/>
</root>
</configuration>
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8"?>
<configuration scan="true" scanPeriod="60 seconds" debug="false">
<springProperty scope="context" name="LOG_LEVEL" source="logging.level.root" defaultValue="info"/>
<springProperty scope="context" name="MAX_FILE_SIZE" source="logging.max_file_size" defaultValue="128MB"/>
<springProperty scope="context" name="MAX_HISTORY" source="log.max_history" defaultValue="7"/>
<springProperty scope="context" name="PATTERN" source="log.pattern"
defaultValue="-|%d{yyyy-MM-dd HH:mm:ss.SSS}|%-5level|%X{tid}|%thread|%logger{36}.%M:%L-%msg%n"/>
<property name="LOG_NAME" value="equip"/>
<property name="LOG_PATH" value="./logs"/>
<property name="LOG_DIR" value="${LOG_PATH}/${LOG_NAME}/%d{yyyyMMdd}"/>
<property name="CHARSET" value="UTF-8"/>
<!-- 控制台输出 -->
<appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
<encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
<pattern>${PATTERN}</pattern>
</encoder>
<filter class="ch.qos.logback.classic.filter.ThresholdFilter">
<level>DEBUG</level>
</filter>
</appender>
<appender name="ERROR" class="ch.qos.logback.core.rolling.RollingFileAppender">
<filter class="ch.qos.logback.classic.filter.LevelFilter">
<level>ERROR</level> <!-- 只记录error级别的日志 -->
<onMatch>ACCEPT</onMatch>
<onMismatch>DENY</onMismatch>
</filter>
<!-- 定义文件的名称 -->
<file>${LOG_PATH}/${LOG_NAME}/error.log</file>
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<FileNamePattern>${LOG_DIR}/err_${LOG_NAME}%i.log</FileNamePattern>
<MaxHistory>${MAX_HISTORY}</MaxHistory>
<timeBasedFileNamingAndTriggeringPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP">
<maxFileSize>${MAX_FILE_SIZE}</maxFileSize>
</timeBasedFileNamingAndTriggeringPolicy>
</rollingPolicy>
<layout class="ch.qos.logback.classic.PatternLayout">
<Pattern>${PATTERN}</Pattern>
</layout>
</appender>
<appender name="FILE_ALL" class="ch.qos.logback.core.rolling.RollingFileAppender">
<!-- 定义文件的名称 -->
<file>${LOG_PATH}/${LOG_NAME}/total.log</file>
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<FileNamePattern>${LOG_DIR}/all_${LOG_NAME}%i.log</FileNamePattern>
<MaxHistory>${MAX_HISTORY}</MaxHistory>
<timeBasedFileNamingAndTriggeringPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP">
<maxFileSize>${MAX_FILE_SIZE}</maxFileSize>
</timeBasedFileNamingAndTriggeringPolicy>
</rollingPolicy>
<layout class="ch.qos.logback.classic.PatternLayout">
<pattern>${PATTERN}</pattern>
</layout>
</appender>
<appender name="ASYNC_STDOUT" class="ch.qos.logback.classic.AsyncAppender">
<appender-ref ref="STDOUT"/>
<queueSize>256</queueSize>
<neverBlock>true</neverBlock>
<includeCallerData>true</includeCallerData>
</appender>
<appender name="ASYNC_FILE_ALL" class="ch.qos.logback.classic.AsyncAppender">
<appender-ref ref="FILE_ALL"/>
<queueSize>1024</queueSize>
<neverBlock>true</neverBlock>
<includeCallerData>true</includeCallerData>
</appender>
<appender name="ASYNC_ERROR" class="ch.qos.logback.classic.AsyncAppender">
<appender-ref ref="ERROR"/>
<queueSize>256</queueSize>
<neverBlock>true</neverBlock>
<includeCallerData>true</includeCallerData>
</appender>
<!--统一日志输出级别,其他appender中如果有高于此处等级设置的也会被输出 -->
<root level="${LOG_LEVEL}">
<springProfile name="test,dev">
<appender-ref ref="ASYNC_STDOUT"/>
</springProfile>
<appender-ref ref="ASYNC_FILE_ALL"/>
<appender-ref ref="ASYNC_ERROR"/>
</root>
</configuration>
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8" ?>
<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN" "http://mybatis.org/dtd/mybatis-3-mapper.dtd">
<mapper namespace="com.yeejoin.equip.mapper.mysql.EquipmentSpecificIndexMapper">
<!-- Maps joined rows of wl_equipment_specific_index / wl_equipment_index
     onto EquipmentIndexVO.
     NOTE(review): the select below aliases equipment_specific_id AS
     equipmentId, so the explicit column="equipment_specific_id" mapping
     here cannot match the result set; equipmentId presumably fills in via
     auto-mapping on the alias instead - confirm mapUnderscoreToCamelCase /
     autoMapping settings. Also ei.is_iot is selected but has no <result>
     entry - confirm whether it is intentionally dropped. -->
<resultMap id="ComplementCode" type="com.yeejoin.equip.entity.EquipmentIndexVO">
<result property="equipmentId" column="equipment_specific_id"/>
<result property="id" column="id"/>
<result property="nameKey" column="name_key"/>
<result property="value" column="value"/>
<result property="valueEnum" column="value_enum"/>
<result property="unitName" column="unit"/>
<result property="indexAddress" column="index_address"/>
<result property="gatewayId" column="gateway_id"/>
<result property="isAlarm" column="is_alarm"/>
<result property="equipmentIndexName" column="equipment_index_name"/>
<result property="equipmentSpecificName" column="equipment_specific_name"/>
<result property="dataType" column="data_type"/>
</resultMap>
<!-- Lists specific-index rows joined with their index definitions,
     optionally filtered by equipment id and by the IoT flag. -->
<select id="getEquipSpecificIndexList" resultMap="ComplementCode">
SELECT
si.equipment_specific_id AS equipmentId,
ei.id,
ei.name_key,
si.value,
si.value_enum,
ei.is_iot,
si.index_address,
si.gateway_id,
si.data_type,
si.equipment_specific_name,
si.equipment_index_name,
si.is_alarm
FROM
wl_equipment_specific_index si
LEFT JOIN wl_equipment_index ei ON si.equipment_index_id = ei.id
<where>
<if test="equipmentId != null and equipmentId !=''">
si.equipment_specific_id = #{equipmentId}
</if>
<if test="isIot != null and isIot !=''">
AND ei.is_iot = #{isIot}
</if>
</where>
</select>
</mapper>
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN" "http://mybatis.org/dtd/mybatis-3-mapper.dtd">
<mapper namespace="com.yeejoin.equip.mapper.tdengine.ESEquipmentsMapper">
<!-- Batch insert into the flat indicator_data table (explicit column list,
     one tuple per item). "now + #{index}a" offsets each row by `index`
     milliseconds so rows in a batch get distinct timestamps; the bare-space
     separator produces TDengine multi-row VALUES syntax.
     NOTE(review): #{time} (bound to created_time) is not a property of the
     foreach item "equip" - it must come from a separate @Param("time")
     argument on the mapper method; confirm the interface declares it,
     otherwise this statement fails to bind. -->
<insert id="batchInsert" parameterType="java.util.List" >
insert into indicator_data
(ts,
id,
address,
gateway_id,
data_type,
is_alarm,
unit,
equipment_index_name,
equipment_specific_name,
`value`,
`value_f` ,
value_label,
equipment_number,
display_name,
created_time)
values
<foreach separator=" " collection="list" item="equip" index="index" >
(now + #{index}a,
#{equip.id},
#{equip.address},
#{equip.gatewayId},
#{equip.dataType},
#{equip.isAlarm},
#{equip.unit},
#{equip.equipmentIndexName},
#{equip.equipmentSpecificName},
#{equip.value},
#{equip.valueF},
#{equip.valueLabel},
#{equip.equipmentNumber},
#{equip.displayName},
#{time})
</foreach>
</insert>
<!-- Single-row insert into indicator_data with the server clock (NOW) as
     the first value.
     NOTE(review): the column list starts with "createdTime", but the
     createTable statement below defines the columns as "ts" (TIMESTAMP)
     and "created_time" (VARCHAR) - there is no "createdTime" column.
     Since NOW is a timestamp value this was probably meant to be "ts";
     confirm against the live schema before relying on this statement. -->
<insert id="insert" parameterType="com.yeejoin.equip.entity.ESEquipments" >
insert into indicator_data
(createdTime,
id,
address,
gateway_id,
data_type,
is_alarm,
unit,
equipment_index_name,
equipment_specific_name,
`value`,
`value_f` ,
value_label,
equipment_number,
display_name)
values
(NOW,
#{id},
#{address},
#{gatewayId},
#{dataType},
#{isAlarm},
#{unit},
#{equipmentIndexName},
#{equipmentSpecificName},
#{value},
#{valueF},
#{valueLabel},
#{equipmentNumber},
#{displayName})
</insert>
<!-- Create the flat indicator_data table if absent (plain table, no
     super table / tags). Column order here is what the batchInsert
     statement above relies on. -->
<update id="createTable" >
create table if not exists indicator_data
(ts TIMESTAMP,
id binary(64),
address binary(64),
gateway_id binary(64),
data_type NCHAR(12),
is_alarm BIGINT,
unit NCHAR(24),
equipment_index_name VARCHAR(255) ,
equipment_specific_name VARCHAR(255),
`value` VARCHAR(12),
`value_f` FLOAT,
value_label VARCHAR(64),
equipment_number BINARY(64),
display_name VARCHAR(200),
created_time VARCHAR(64));
</update>
</mapper>
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN" "http://mybatis.org/dtd/mybatis-3-mapper.dtd">
<mapper namespace="com.yeejoin.equip.mapper.tdengine.IndicatorDataMapper">
<!-- Create the iot_data database if absent: 10 vgroups, 10 buffer units,
     compression level 2, nanosecond timestamp precision. -->
<update id="createDB">
create database if not exists iot_data vgroups 10 buffer 10 COMP 2 PRECISION 'ns';
</update>
<!-- Create the s_indicator_his super table (STABLE) if absent, tagged by
     gateway_id; per-gateway subtables are created on first insert via the
     USING ... TAGS clause. Inserts without an explicit column list must
     follow this column order exactly. -->
<update id="createTable">
create STABLE if not exists s_indicator_his
(created_time timestamp,
address binary(64),
equipments_idx NCHAR(64),
data_type NCHAR(12),
is_alarm BIGINT,
equipment_index_name VARCHAR(200) ,
equipment_specific_name VARCHAR(200),
`value` VARCHAR(12),
`value_f` float,
value_label VARCHAR(24),
unit NCHAR(12))
TAGS (gateway_id binary(64));
</update>
<!-- Batch insert into per-gateway subtables of s_indicator_his (auto-created
     via USING ... TAGS). No explicit column list is given, so each VALUES
     tuple must follow the super table's column order exactly:
     created_time, address, equipments_idx, data_type, is_alarm,
     equipment_index_name, equipment_specific_name, value, value_f,
     value_label, unit.
     FIX: equipment_specific_name / equipment_index_name were previously
     supplied in swapped order, so each landed in the other's column.
     "NOW + #{index}a" offsets each row by `index` milliseconds so rows in
     one batch get distinct timestamps; the bare-space separator yields
     TDengine multi-row insert syntax. -->
<insert id="insertBatch" parameterType="java.util.List">
insert into
<foreach separator=" " collection="list" item="item" index="index">
indicator_his_#{gatewayId,jdbcType=VARCHAR} USING s_indicator_his
TAGS (#{item.gatewayId,jdbcType=VARCHAR})
VALUES (NOW + #{index}a,
#{item.address,jdbcType=VARCHAR},
#{item.equipmentsIdx,jdbcType=VARCHAR},
#{item.dataType,jdbcType=VARCHAR},
#{item.isAlarm,jdbcType=VARCHAR},
#{item.equipmentIndexName,jdbcType=VARCHAR},
#{item.equipmentSpecificName,jdbcType=VARCHAR},
#{item.value,jdbcType=VARCHAR},
#{item.valueF,jdbcType=FLOAT},
#{item.valueLabel,jdbcType=VARCHAR},
#{item.unit,jdbcType=VARCHAR})
</foreach>
</insert>
<!-- <insert id="insertBatch" parameterType="java.util.List">-->
<!-- INSERT INTO indicator_#{gatewayId,jdbcType=VARCHAR} (created_time, `value`,`value_f`, value_label,unit,-->
<!-- address,gateway_id,equipments_idx,data_type,is_alarm,equipment_index_name,equipment_specific_name)-->
<!-- VALUES-->
<!-- <foreach collection="list" item="item" separator="UNION ALL" index="index">-->
<!-- SELECT NOW + #{index}a, #{item.value}, #{item.valueF}, #{item.valueLabel}, #{item.unit},-->
<!-- #{item.address}, #{item.gatewayId}, #{item.equipmentsIdx}, #{item.dataType}, #{item.isAlarm},-->
<!-- #{item.equipmentSpecificName},#{item.equipmentIndexName}-->
<!-- </foreach>-->
<!-- </insert>-->
<!-- Single-row insert into a per-gateway subtable of the "indicator" super
     table (subtable auto-created via USING ... TAGS).
     NOTE(review): the "indicator" STABLE is not created anywhere in this
     file, so the TAGS list order (address, gateway_id, equipments_idx,
     data_type, is_alarm, equipment_specific_name, equipment_index_name)
     cannot be checked here - verify it against that table's definition. -->
<insert id="insert" parameterType="com.yeejoin.equip.entity.IndicatorData">
insert into indicator_#{gatewayId,jdbcType=VARCHAR} USING indicator
TAGS (#{address,jdbcType=VARCHAR},
#{gatewayId,jdbcType=VARCHAR},
#{equipmentsIdx,jdbcType=VARCHAR},
#{dataType,jdbcType=VARCHAR},
#{isAlarm,jdbcType=VARCHAR},
#{equipmentSpecificName,jdbcType=VARCHAR},
#{equipmentIndexName,jdbcType=VARCHAR})
VALUES (NOW, #{value,jdbcType=VARCHAR}, #{valueF,jdbcType=FLOAT}, #{valueLabel,jdbcType=VARCHAR}, #{unit,jdbcType=VARCHAR})
</insert>
</mapper>
\ No newline at end of file
spring.application.name=AMOS-API-HOUSEPVAPI-WJ
server.servlet.context-path=/housepvapi
server.port=11006
## DB properties hygf
## db1-production database
spring.db1.datasource.type: com.alibaba.druid.pool.DruidDataSource
......
spring.application.name=AMOS-API-ACCESSAPI
server.servlet.context-path=/accessapi
server.port=11005
# jdbc_config
spring.datasource.driver-class-name=org.postgresql.Driver
spring.datasource.url=jdbc:postgresql://172.16.10.213:5432/per_tzs_amos_tzs_biz?currentSchema=per_tzs_amos_data_accessapi
......
## DB properties hygf
## db1-production database
spring.db1.datasource.type: com.alibaba.druid.pool.DruidDataSource
spring.db1.datasource.url=jdbc:kingbase8://10.20.1.176:54321/amos_openapi?allowMultiQueries=true&serverTimezone=GMT%2B8&characterEncoding=utf8&currentSchema=root
spring.db1.datasource.username=root
spring.db1.datasource.password=Yeejoin@2020
spring.db1.datasource.driver-class-name=com.kingbase8.Driver
## db2-sync_data
spring.db2.datasource.type: com.alibaba.druid.pool.DruidDataSource
spring.db2.datasource.url=jdbc:kingbase8://10.20.1.176:54321/amos_project?allowMultiQueries=true&serverTimezone=GMT%2B8&characterEncoding=utf8&currentSchema=root
spring.db2.datasource.username=root
spring.db2.datasource.password=Yeejoin@2020
spring.db2.datasource.driver-class-name=com.kingbase8.Driver
## db3-taosiData
spring.db3.datasource.type: com.alibaba.druid.pool.DruidDataSource
spring.db3.datasource.url=jdbc:TAOS-RS://47.92.234.253:6041/house_pv_data?user=root&password=taosdata&characterEncoding=utf8
spring.db3.datasource.username=root
spring.db3.datasource.password=taosdata
spring.db3.datasource.driver-class-name=com.taosdata.jdbc.rs.RestfulDriver
# REDIS (RedisProperties)
spring.redis.database=1
spring.redis.host=47.92.234.253
spring.redis.port=6379
spring.redis.password=yeejoin@2020
spring.redis.lettuce.pool.max-active=200
spring.redis.lettuce.pool.max-wait=-1
spring.redis.lettuce.pool.max-idle=10
spring.redis.lettuce.pool.min-idle=0
spring.redis.expire.time=300
#注册中心地址
eureka.client.service-url.defaultZone =http://admin:a1234560@47.92.234.253:10001/eureka/
eureka.instance.prefer-ip-address=true
management.endpoint.health.show-details=always
management.endpoints.web.exposure.include=*
eureka.instance.health-check-url=http://localhost:${server.port}${server.servlet.context-path}/actuator/health
eureka.instance.metadata-map.management.context-path=${server.servlet.context-path}/actuator
eureka.instance.status-page-url=http://localhost:${server.port}${server.servlet.context-path}/actuator/info
eureka.instance.metadata-map.management.api-docs=http://localhost:${server.port}${server.servlet.context-path}/swagger-ui.html
eureka.instance.ip-address=localhost
eureka.instance.instance-id=${eureka.instance.ip-address}:${server.port}
##emqx
emqx.clean-session=true
emqx.client-id=${spring.application.name}-${random.int[1024,65536]}
emqx.broker=tcp://47.92.234.253:1883
emqx.client-user-name=admin
emqx.client-password=public
##biz custem properties
biz.lxyd.lift.url=http://39.106.181.149:8088/elevatorapi
# influxDB
#spring.influx.url=http://139.9.173.44:8086
#spring.influx.password=Yeejoin@2020
#spring.influx.user=root
#spring.influx.database=station_platform
#spring.influx.retention_policy=default
#spring.influx.retention_policy_time=30d
#spring.influx.actions=10000
#spring.influx.bufferLimit=20000
# per-vendor data pull schedules, cron expressions (original comment garbled by encoding)
dataRequstScheduled.jinlangyun=0 0/50 * * * *
dataRequstScheduled.huawei=0 0/50 * * * *
dataRequstScheduled.keshida=0 0/50 * * * *
dataRequstScheduled.Sunlight=0 0/50 * * * *
dataRequstScheduled.GoodWe=0 0/3 * * * *
dataRequstScheduled.Sofar=0 0/50 * * * *
\ No newline at end of file
spring.application.name=AMOS-API-ACCESSAPI
server.servlet.context-path=/housepvapi
server.port=11006
spring.profiles.active=dev
server.compression.enabled=true
spring.jackson.dateFormat=yyyy-MM-dd HH:mm:ss
......
<?xml version="1.0" encoding="UTF-8"?>
<configuration debug="false">
<!--定义日志文件的存储地址 勿在 LogBack 的配置中使用相对路径-->
<property name="LOG_HOME" value="/opt/log/qa" />
<!--格式化输出:%d表示日期,%thread表示线程名,%-5level:级别从左显示5个字符宽度%msg:日志消息,%n是换行符-->
<property name="LOG_PATTERN" value="%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %-50.50logger{50} - %msg [%file:%line] %n" />
<!-- 按照每天生成日志文件 -->
<appender name="FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<!--日志文件输出的文件名-->
<FileNamePattern>${LOG_HOME}/tzs.log.%d{yyyy-MM-dd}.%i.log</FileNamePattern>
<!--日志文件保留天数-->
<MaxHistory>7</MaxHistory>
<!--日志文件大小-->
<MaxFileSize>10mb</MaxFileSize>
</rollingPolicy>
<encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
<pattern>${LOG_PATTERN}</pattern>
</encoder>
</appender>
<!-- 控制台输出 -->
<appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
<encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
<pattern>${LOG_PATTERN}</pattern>
</encoder>
</appender>
<!-- show parameters for hibernate sql 专为 Hibernate 定制
<logger name="org.hibernate.type.descriptor.sql.BasicBinder" level="TRACE" />
<logger name="org.hibernate.type.descriptor.sql.BasicExtractor" level="DEBUG" />
<logger name="org.hibernate.SQL" level="DEBUG" />
<logger name="org.hibernate.engine.QueryParameters" level="DEBUG" />
<logger name="org.hibernate.engine.query.HQLQueryPlan" level="DEBUG" />
-->
<!--myibatis log configure-->
<logger name="com.apache.ibatis" level="INFO"/>
<logger name="org.mybatis" level="INFO" />
<logger name="java.sql.Connection" level="INFO"/>
<logger name="java.sql.Statement" level="INFO"/>
<logger name="java.sql.PreparedStatement" level="INFO"/>
<logger name="org.springframework" level="INFO"/>
<logger name="com.baomidou.mybatisplus" level="INFO"/>
<logger name="org.apache.activemq" level="INFO"/>
<logger name="org.typroject" level="INFO"/>
<logger name="com.yeejoin" level="INFO"/>
<!-- 日志输出级别 -->
<root level="DEBUG">
<appender-ref ref="FILE" />
<appender-ref ref="STDOUT" />
</root>
</configuration>
\ No newline at end of file
This diff is collapsed.
/.apt_generated/
/.apt_generated_tests/
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<artifactId>amos-boot-module</artifactId>
<groupId>com.amosframework.boot</groupId>
<version>1.0.0</version>
</parent>
<artifactId>amos-boot-cloud-gateway</artifactId>
<dependencies>
<dependency>
<groupId>com.amosframework.boot</groupId>
<artifactId>amos-boot-module-jcs-api</artifactId>
<version>${amos-biz-boot.version}</version>
</dependency>
<dependency>
<groupId>com.amosframework.boot</groupId>
<artifactId>amos-boot-module-tzs-api</artifactId>
<version>${amos-biz-boot.version}</version>
</dependency>
<!-- spring-cloud网关-->
<dependency>
<groupId>org.springframework.cloud</groupId>
<artifactId>spring-cloud-starter-gateway</artifactId>
</dependency>
<!--Spring Webflux-->
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-webflux</artifactId>
</dependency>
<!-- 熔断、降级 -->
<dependency>
<groupId>org.springframework.cloud</groupId>
<artifactId>spring-cloud-starter-netflix-hystrix</artifactId>
</dependency>
<!--健康监控-->
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-actuator</artifactId>
</dependency>
<!-- 限流Redis实现 -->
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-data-redis-reactive</artifactId>
</dependency>
<!--springboot2.X默认使用lettuce连接池,需要引入commons-pool2-->
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-pool2</artifactId>
</dependency>
<!--server-api-->
<dependency>
<groupId>javax.servlet</groupId>
<artifactId>javax.servlet-api</artifactId>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-maven-plugin</artifactId>
</plugin>
</plugins>
<resources>
<resource>
<directory>src/main/resources</directory>
<filtering>true</filtering>
</resource>
</resources>
</build>
</project>
package com.yeejoin.amos;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.cloud.client.discovery.EnableDiscoveryClient;
import org.springframework.cloud.openfeign.EnableFeignClients;
import org.springframework.context.ConfigurableApplicationContext;
@EnableFeignClients
@EnableDiscoveryClient
@SpringBootApplication
public class AmosBizGatewayApplication {

    /**
     * Boots the gateway service, then echoes the {@code jeecg.test}
     * property to standard error as a start-up sanity check.
     *
     * @param args command-line arguments forwarded to Spring Boot
     */
    public static void main(String[] args) {
        ConfigurableApplicationContext context =
                SpringApplication.run(AmosBizGatewayApplication.class, args);
        String userName = context.getEnvironment().getProperty("jeecg.test");
        System.err.println("user name :" + userName);
    }
}
package com.yeejoin.amos.server.gateway.config;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.http.HttpHeaders;
import org.springframework.http.HttpMethod;
import org.springframework.http.HttpStatus;
import org.springframework.http.server.reactive.ServerHttpRequest;
import org.springframework.http.server.reactive.ServerHttpResponse;
import org.springframework.web.cors.reactive.CorsUtils;
import org.springframework.web.server.ServerWebExchange;
import org.springframework.web.server.WebFilter;
import org.springframework.web.server.WebFilterChain;
import reactor.core.publisher.Mono;
import static org.springframework.web.cors.CorsConfiguration.ALL;
@Configuration
public class Cors {

    /**
     * Preflight cache lifetime in seconds for the Access-Control-Max-Age header.
     * BUG FIX: the original value was the string "18000L" — the Java long-literal
     * suffix leaked into the header value, which browsers cannot parse. The header
     * value must be a plain decimal number of seconds.
     */
    private static final String MAX_AGE = "18000";

    /**
     * Registers the global filter that de-duplicates CORS response headers.
     */
    @Bean
    public CorsResponseHeaderFilter corsResponseHeaderFilter() {
        return new CorsResponseHeaderFilter();
    }

    /**
     * WebFilter that adds permissive CORS headers to every cross-origin request
     * and short-circuits OPTIONS preflight requests with 200 OK.
     */
    @Bean
    public WebFilter corsFilter() {
        return (ServerWebExchange ctx, WebFilterChain chain) -> {
            ServerHttpRequest request = ctx.getRequest();
            // Non-CORS requests pass through untouched.
            if (!CorsUtils.isCorsRequest(request)) {
                return chain.filter(ctx);
            }
            HttpHeaders requestHeaders = request.getHeaders();
            ServerHttpResponse response = ctx.getResponse();
            HttpMethod requestMethod = requestHeaders.getAccessControlRequestMethod();
            HttpHeaders headers = response.getHeaders();
            headers.add(HttpHeaders.ACCESS_CONTROL_ALLOW_ORIGIN, "*");
            // Echo back whatever headers the preflight asked for.
            headers.addAll(HttpHeaders.ACCESS_CONTROL_ALLOW_HEADERS, requestHeaders.getAccessControlRequestHeaders());
            if (requestMethod != null) {
                headers.add(HttpHeaders.ACCESS_CONTROL_ALLOW_METHODS, requestMethod.name());
            }
            headers.add(HttpHeaders.ACCESS_CONTROL_ALLOW_CREDENTIALS, "true");
            headers.add(HttpHeaders.ACCESS_CONTROL_EXPOSE_HEADERS, ALL);
            headers.add(HttpHeaders.ACCESS_CONTROL_MAX_AGE, MAX_AGE);
            // Preflight requests are answered here without hitting downstream routes.
            if (request.getMethod() == HttpMethod.OPTIONS) {
                response.setStatusCode(HttpStatus.OK);
                return Mono.empty();
            }
            return chain.filter(ctx);
        };
    }
}
package com.yeejoin.amos.server.gateway.config;
import org.springframework.cloud.gateway.filter.GatewayFilterChain;
import org.springframework.cloud.gateway.filter.GlobalFilter;
import org.springframework.cloud.gateway.filter.NettyWriteResponseFilter;
import org.springframework.core.Ordered;
import org.springframework.http.HttpHeaders;
import org.springframework.web.server.ServerWebExchange;
import reactor.core.publisher.Mono;
import java.util.ArrayList;
public class CorsResponseHeaderFilter implements GlobalFilter, Ordered{
@Override
public int getOrder() {
// 指定此过滤器位于NettyWriteResponseFilter之后
// 即待处理完响应体后接着处理响应头
return NettyWriteResponseFilter.WRITE_RESPONSE_FILTER_ORDER + 1;
}
@Override
@SuppressWarnings("serial")
public Mono<Void> filter(ServerWebExchange exchange, GatewayFilterChain chain) {
//过滤相同的ACCESS_CONTROL_ALLOW_ORIGIN 和 ACCESS_CONTROL_ALLOW_CREDENTIALS
return chain.filter(exchange).then(Mono.defer(() -> {
exchange.getResponse().getHeaders().entrySet().stream()
.filter(kv -> (kv.getValue() != null && kv.getValue().size() > 1))
.filter(kv -> (kv.getKey().equals(HttpHeaders.ACCESS_CONTROL_ALLOW_ORIGIN)
|| kv.getKey().equals(HttpHeaders.ACCESS_CONTROL_ALLOW_CREDENTIALS)))
.forEach(kv ->
{
kv.setValue(new ArrayList<String>() {{add(kv.getValue().get(0));}});
});
return chain.filter(exchange);
}));
}
}
package com.yeejoin.amos.server.gateway.config;
import org.springframework.cloud.gateway.filter.GatewayFilter;
import org.springframework.cloud.gateway.filter.GatewayFilterChain;
import org.springframework.web.server.ServerWebExchange;
import reactor.core.publisher.Mono;
/**
 * Placeholder gateway filter with no custom behavior yet.
 * BUG FIX: the original returned {@code null}, which violates the reactive
 * contract and would throw a NullPointerException if this filter were ever
 * applied to a route. A no-op filter must delegate to the rest of the chain.
 */
public class CustomeGatewayFilter implements GatewayFilter {

    @Override
    public Mono<Void> filter(ServerWebExchange exchange, GatewayFilterChain chain) {
        // Pass the exchange through unchanged; add custom logic here as needed.
        return chain.filter(exchange);
    }
}
package com.yeejoin.amos.server.gateway.config;
import org.springframework.cloud.gateway.filter.GatewayFilter;
import org.springframework.cloud.gateway.filter.factory.StripPrefixGatewayFilterFactory;
import org.springframework.cloud.gateway.route.RouteLocator;
import org.springframework.cloud.gateway.route.builder.RouteLocatorBuilder;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
@Configuration
public class GatewayInfo {

    // Factory for building StripPrefix filters; stateless, so a single shared
    // instance is sufficient. Made final: it is only ever assigned once.
    static final StripPrefixGatewayFilterFactory stripPrefixGatewayFilterFactory = new StripPrefixGatewayFilterFactory();

    // Pre-built StripPrefix(1) filter (drops the first path segment before
    // forwarding). NOTE(review): not referenced by the routes below — presumably
    // kept for the commented-out routes that used to strip a prefix; confirm
    // before removing.
    static final GatewayFilter gatewayFilter;

    static {
        StripPrefixGatewayFilterFactory.Config config = new StripPrefixGatewayFilterFactory.Config();
        config.setParts(1);
        gatewayFilter = stripPrefixGatewayFilterFactory.apply(config);
    }

    /**
     * Static route table for the gateway. Routes for the other AMOS services
     * (autosys, patrol, duty, equipment, morphic, intelligent-box, ...) were
     * previously defined here via @Value-injected service names but have been
     * disabled; discovery-locator routing (see application.properties) covers
     * registered services.
     *
     * @param builder Spring-provided route locator builder
     * @return the configured route locator
     */
    @Bean
    public RouteLocator myRoutes(RouteLocatorBuilder builder) {
        return builder.routes()
                .route(p -> p.path("/privilege/**").uri("lb://AMOS-API-PRIVILEGE"))
                .route(p -> p.path("/systemctl/**").uri("lb://AMOS-API-SYSTEMCTL"))
                .route(p -> p.path("/urule/**").uri("lb://AMOS-API-RULE"))
                .build();
    }
}
package com.yeejoin.amos.server.gateway.face.model;
import java.util.LinkedHashMap;
import java.util.Map;
/**
* 过滤器模型
* zhuyu 2019-01-17
*/
/**
 * Serializable model of a gateway filter used by the dynamic-route API.
 * zhuyu 2019-01-17
 */
public class GatewayFilterDefinition {

    /** Filter name (e.g. "StripPrefix"). */
    private String name;

    /** Filter arguments keyed by argument name; insertion order is preserved. */
    private Map<String, String> args = new LinkedHashMap<>();

    public String getName() {
        return this.name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public Map<String, String> getArgs() {
        return this.args;
    }

    public void setArgs(Map<String, String> args) {
        this.args = args;
    }
}
package com.yeejoin.amos.server.gateway.face.model;
import java.util.LinkedHashMap;
import java.util.Map;
/**
* 路由断言模型
* zhuyu 2019-01-17
*/
/**
 * Serializable model of a route predicate used by the dynamic-route API.
 * zhuyu 2019-01-17
 */
public class GatewayPredicateDefinition {

    /** Predicate name (e.g. "Path"). */
    private String name;

    /** Predicate arguments keyed by argument name; insertion order is preserved. */
    private Map<String, String> args = new LinkedHashMap<>();

    public String getName() {
        return this.name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public Map<String, String> getArgs() {
        return this.args;
    }

    public void setArgs(Map<String, String> args) {
        this.args = args;
    }
}
package com.yeejoin.amos.server.gateway.face.model;
import java.util.ArrayList;
import java.util.List;
/**
* 路由模型
* zhuyu 2019-01-17
*/
/**
 * Serializable model of a complete gateway route used by the dynamic-route API.
 * zhuyu 2019-01-17
 */
public class GatewayRouteDefinition {

    /** Unique route id. */
    private String id;

    /** Predicates deciding whether this route matches a request. */
    private List<GatewayPredicateDefinition> predicates = new ArrayList<>();

    /** Filters applied to requests forwarded by this route. */
    private List<GatewayFilterDefinition> filters = new ArrayList<>();

    /** Target URI the route forwards matching requests to. */
    private String uri;

    /** Evaluation order among routes; lower runs first. */
    private int order = 0;

    public String getId() {
        return this.id;
    }

    public void setId(String id) {
        this.id = id;
    }

    public List<GatewayPredicateDefinition> getPredicates() {
        return this.predicates;
    }

    public void setPredicates(List<GatewayPredicateDefinition> predicates) {
        this.predicates = predicates;
    }

    public List<GatewayFilterDefinition> getFilters() {
        return this.filters;
    }

    public void setFilters(List<GatewayFilterDefinition> filters) {
        this.filters = filters;
    }

    public String getUri() {
        return this.uri;
    }

    public void setUri(String uri) {
        this.uri = uri;
    }

    public int getOrder() {
        return this.order;
    }

    public void setOrder(int order) {
        this.order = order;
    }
}
package com.yeejoin.amos.server.gateway.face.service;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import java.util.HashMap;
import java.util.Map;
/**
* 默认降级处理
*/
/**
 * Fallback endpoint invoked when a routed service is unavailable (circuit
 * breaker degradation); returns a fixed "service error" payload.
 */
@RestController
public class DefaultHystrixController {

    /**
     * Handles /defaultfallback and returns the degraded-service response body.
     *
     * @return map with resultCode/resultMessage/resultObj describing the failure
     */
    @RequestMapping("/defaultfallback")
    public Map<String, String> defaultfallback() {
        System.out.println("降级操作...");
        Map<String, String> result = new HashMap<>();
        result.put("resultCode", "fail");
        result.put("resultMessage", "服务异常");
        result.put("resultObj", "null");
        return result;
    }
}
package com.yeejoin.amos.server.gateway.face.service;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.cloud.gateway.event.RefreshRoutesEvent;
import org.springframework.cloud.gateway.route.RouteDefinition;
import org.springframework.cloud.gateway.route.RouteDefinitionWriter;
import org.springframework.context.ApplicationEventPublisher;
import org.springframework.context.ApplicationEventPublisherAware;
import org.springframework.stereotype.Service;
import reactor.core.publisher.Mono;
/**
* 动态路由服务
*/
/**
 * Service that registers new gateway routes at runtime and triggers a
 * route-table refresh so they take effect immediately.
 */
@Service
public class DynamicRouteServiceImpl implements ApplicationEventPublisherAware {

    @Autowired
    private RouteDefinitionWriter routeDefinitionWriter;

    /** Publisher used to fire RefreshRoutesEvent after a route change. */
    private ApplicationEventPublisher eventPublisher;

    @Override
    public void setApplicationEventPublisher(ApplicationEventPublisher applicationEventPublisher) {
        this.eventPublisher = applicationEventPublisher;
    }

    /**
     * Saves the given route definition and publishes a refresh event.
     *
     * @param definition the route to add
     * @return "success" once the save has been subscribed and the event fired
     */
    public String add(RouteDefinition definition) {
        this.routeDefinitionWriter.save(Mono.just(definition)).subscribe();
        this.eventPublisher.publishEvent(new RefreshRoutesEvent(this));
        return "success";
    }

    // NOTE(review): update/delete variants existed here as commented-out code in
    // the original; reinstate from history if dynamic route removal is needed.
}
package com.yeejoin.amos.server.gateway.face.service;
import org.springframework.cloud.gateway.filter.ratelimit.KeyResolver;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import reactor.core.publisher.Mono;
/**
* 路由限流配置
* @author zhuyu
* @date 2019/1/15
*/
@Configuration
public class RateLimiterConfig {

    /**
     * KeyResolver for the gateway rate limiter that keys requests by the client
     * IP address.
     *
     * BUG FIX: ServerHttpRequest.getRemoteAddress() may return null (e.g. for
     * certain proxied or test exchanges), and the original chained calls would
     * throw a NullPointerException. Fall back to a constant key instead so such
     * requests share one rate-limit bucket rather than erroring.
     *
     * @return resolver mapping each exchange to its client host address
     */
    @Bean(value = "remoteAddrKeyResolver")
    public KeyResolver remoteAddrKeyResolver() {
        return exchange -> {
            java.net.InetSocketAddress remote = exchange.getRequest().getRemoteAddress();
            String key = (remote != null && remote.getAddress() != null)
                    ? remote.getAddress().getHostAddress()
                    : "unknown";
            return Mono.just(key);
        };
    }
}
\ No newline at end of file
# REDIS (RedisProperties)
spring.redis.database=0
spring.redis.host=172.16.10.85
spring.redis.port=6379
spring.redis.password=amos2019Redis
spring.redis.timeout=0
##服务注册
eureka.client.service-url.defaultZone =http://172.16.10.72:10001/eureka/
eureka.instance.prefer-ip-address=true
management.endpoint.health.show-details=always
management.endpoints.web.exposure.include=*
eureka.instance.health-check-url=http://localhost:${server.port}/actuator/health
eureka.instance.metadata-map.management.context-path=/actuator
eureka.instance.status-page-url=http://localhost:${server.port}/actuator/info
#服务跟踪
spring.zipkin.base-url=http://172.16.10.72:10002
spring.zipkin.service.name=${spring.application.name}
spring.sleuth.sampler.probability=1.0
elecfire_converter_statton_service=AMOS-AUTOSYS
elecfire_converter_statton_patrol=AMOS-PATROL
elecfire_converter_statton_3dservice=AMOS-SAFETY3D
elecfire_converter_statton_duty=AMOS-DUTY
elecfire_converter_statton_equipmanage=AMOS-EQUIPMANAGE
visual_morphic = VISUAL-API-MORPHIC
elecfire_converter_statton_intelligentbox=AMOS-API-INTELLIGENTBOX
elecfire_converter_statton_aixah=http://172.16.10.102:9089/
##服务注册
eureka.client.service-url.defaultZone =http://amos-eurka:10001/eureka/
eureka.instance.prefer-ip-address=true
management.endpoint.health.show-details=always
management.endpoints.web.exposure.include=*
eureka.instance.health-check-url=http://amos-gateway:${server.port}/actuator/health
eureka.instance.metadata-map.management.context-path=/actuator
eureka.instance.status-page-url=http://amos-gateway:${server.port}/actuator/info
#服务跟踪
spring.zipkin.base-url=http://amos-tracking:10002
spring.zipkin.service.name=${spring.application.name}
spring.sleuth.sampler.probability=1.0
elecfire_converter_statton_service=AMOS-AUTOSYS
elecfire_converter_statton_patrol=AMOS-PATROL
elecfire_converter_statton_3dservice=AMOS-SAFETY3D
elecfire_converter_statton_duty=AMOS-DUTY
elecfire_converter_statton_equipmanage=AMOS-EQUIPMANAGE
visual_morphic = VISUAL-API-MORPHIC
\ No newline at end of file
##服务注册
eureka.client.service-url.defaultZone =http://localhost:10001/eureka/
eureka.instance.prefer-ip-address=true
management.endpoint.health.show-details=always
management.endpoints.web.exposure.include=*
eureka.instance.health-check-url=http://localhost:${server.port}/actuator/health
eureka.instance.metadata-map.management.context-path=/actuator
eureka.instance.status-page-url=http://localhost:${server.port}/actuator/info
#服务跟踪
spring.zipkin.base-url=http://localhost:10002
spring.zipkin.service.name=${spring.application.name}
spring.sleuth.sampler.probability=1.0
elecfire_converter_statton_service=AMOS-AUTOSYS
elecfire_converter_statton_patrol=AMOS-PATROL
elecfire_converter_statton_3dservice=AMOS-SAFETY3D
elecfire_converter_statton_duty=AMOS-DUTY
elecfire_converter_statton_equipmanage=AMOS-EQUIPMANAGE
visual_morphic = VISUAL-API-MORPHIC
\ No newline at end of file
server.port=20005
server.http2.enabled=true
spring.profiles.active=dev
spring.application.name=amos-biz-gateway
logging.config=classpath:logback-dev.xml
spring.cloud.gateway.discovery.locator.enabled=true
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8"?>
<configuration debug="false">
<!-- 控制台输出 -->
<appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
<encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
<!--格式化输出:%d表示日期,%thread表示线程名,%-5level:级别从左显示5个字符宽度%msg:日志消息,%n是换行符-->
<pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{50} - %msg%n</pattern>
</encoder>
</appender>
<!-- show parameters for hibernate sql 专为 Hibernate 定制
<logger name="org.hibernate.type.descriptor.sql.BasicBinder" level="TRACE" />
<logger name="org.hibernate.type.descriptor.sql.BasicExtractor" level="DEBUG" />
<logger name="org.hibernate.SQL" level="DEBUG" />
<logger name="org.hibernate.engine.QueryParameters" level="DEBUG" />
<logger name="org.hibernate.engine.query.HQLQueryPlan" level="DEBUG" />
-->
    <!--mybatis log configuration-->
<logger name="com.apache.ibatis" level="INFO"/>
<logger name="java.sql.Connection" level="INFO"/>
<logger name="java.sql.Statement" level="INFO"/>
<logger name="java.sql.PreparedStatement" level="INFO"/>
<logger name="com.baomidou" level="INFO"/>
<logger name="org.tycloud" level="INFO"/>
<logger name="org.springframework" level="INFO"/>
<!-- 日志输出级别 -->
<root level="INFO">
<appender-ref ref="STDOUT" />
</root>
<!--日志异步到数据库 -->
<!--<appender name="DB" class="ch.qos.logback.classic.db.DBAppender">-->
<!--&lt;!&ndash;日志异步到数据库 &ndash;&gt;-->
<!--<connectionSource class="ch.qos.logback.core.db.DriverManagerConnectionSource">-->
<!--&lt;!&ndash;连接池 &ndash;&gt;-->
<!--<dataSource class="com.mchange.v2.c3p0.ComboPooledDataSource">-->
<!--<driverClass>com.mysql.jdbc.Driver</driverClass>-->
<!--<url>jdbc:mysql://127.0.0.1:3306/databaseName</url>-->
<!--<user>root</user>-->
<!--<password>root</password>-->
<!--</dataSource>-->
<!--</connectionSource>-->
<!--</appender>-->
</configuration>
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8"?>
<configuration debug="false">
<!--定义日志文件的存储地址 勿在 LogBack 的配置中使用相对路径-->
<property name="LOG_HOME" value="/opt/amos/log" />
<!-- 按照每天生成日志文件 -->
<appender name="FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<!--日志文件输出的文件名-->
<FileNamePattern>${LOG_HOME}/amos_biz_gateway.log.%d{yyyy-MM-dd}.log</FileNamePattern>
<!--日志文件保留天数-->
<MaxHistory>30</MaxHistory>
</rollingPolicy>
<encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
<!--格式化输出:%d表示日期,%thread表示线程名,%-5level:级别从左显示5个字符宽度%msg:日志消息,%n是换行符-->
<pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{50} - %msg%n</pattern>
</encoder>
<!--日志文件最大的大小-->
<triggeringPolicy class="ch.qos.logback.core.rolling.SizeBasedTriggeringPolicy">
<MaxFileSize>30mb</MaxFileSize>
</triggeringPolicy>
</appender>
<!-- show parameters for hibernate sql 专为 Hibernate 定制
<logger name="org.hibernate.type.descriptor.sql.BasicBinder" level="TRACE" />
<logger name="org.hibernate.type.descriptor.sql.BasicExtractor" level="DEBUG" />
<logger name="org.hibernate.SQL" level="DEBUG" />
<logger name="org.hibernate.engine.QueryParameters" level="DEBUG" />
<logger name="org.hibernate.engine.query.HQLQueryPlan" level="DEBUG" />
-->
    <!--mybatis log configuration-->
<logger name="com.apache.ibatis" level="INFO"/>
<logger name="org.mybatis" level="INFO" />
<logger name="java.sql.Connection" level="INFO"/>
<logger name="java.sql.Statement" level="INFO"/>
<logger name="java.sql.PreparedStatement" level="INFO"/>
<logger name="com.baomidou.mybatisplus" level="INFO"/>
<logger name="org.typroject" level="INFO"/>
<logger name="com.yeejoin" level="INFO"/>
<logger name="org.springframework" level="INFO"/>
<!-- 日志输出级别 -->
<root level="INFO">
<appender-ref ref="FILE" />
</root>
</configuration>
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8"?>
<configuration debug="false">
<!--定义日志文件的存储地址 勿在 LogBack 的配置中使用相对路径-->
<property name="LOG_HOME" value="/opt/log/qa" />
<!-- 按照每天生成日志文件 -->
<appender name="FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<!--日志文件输出的文件名-->
<FileNamePattern>${LOG_HOME}/amos_biz_gateway.log.%d{yyyy-MM-dd}.log</FileNamePattern>
<!--日志文件保留天数-->
<MaxHistory>30</MaxHistory>
</rollingPolicy>
<encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
<!--格式化输出:%d表示日期,%thread表示线程名,%-5level:级别从左显示5个字符宽度%msg:日志消息,%n是换行符-->
<pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{50} - %msg%n</pattern>
</encoder>
<!--日志文件最大的大小-->
<triggeringPolicy class="ch.qos.logback.core.rolling.SizeBasedTriggeringPolicy">
<MaxFileSize>30mb</MaxFileSize>
</triggeringPolicy>
</appender>
<!-- show parameters for hibernate sql 专为 Hibernate 定制
<logger name="org.hibernate.type.descriptor.sql.BasicBinder" level="TRACE" />
<logger name="org.hibernate.type.descriptor.sql.BasicExtractor" level="DEBUG" />
<logger name="org.hibernate.SQL" level="DEBUG" />
<logger name="org.hibernate.engine.QueryParameters" level="DEBUG" />
<logger name="org.hibernate.engine.query.HQLQueryPlan" level="DEBUG" />
-->
    <!--mybatis log configuration-->
<logger name="com.apache.ibatis" level="INFO"/>
<logger name="org.mybatis" level="INFO" />
<logger name="java.sql.Connection" level="INFO"/>
<logger name="java.sql.Statement" level="INFO"/>
<logger name="java.sql.PreparedStatement" level="INFO"/>
<logger name="com.baomidou.mybatisplus" level="INFO"/>
<logger name="org.typroject" level="INFO"/>
<logger name="com.yeejoin" level="INFO"/>
<logger name="org.springframework" level="INFO"/>
<!-- 日志输出级别 -->
<root level="INFO">
<appender-ref ref="FILE" />
</root>
</configuration>
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<parent>
<groupId>com.amosframework.boot</groupId>
<artifactId>amos-boot-module-api</artifactId>
<version>1.0.0</version>
</parent>
<modelVersion>4.0.0</modelVersion>
<artifactId>amos-boot-module-ccs-api</artifactId>
</project>
\ No newline at end of file
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment