项目统一框架 / amos-boot-biz · Commits

Commit 4fdb52b4, authored Oct 13, 2023 by 刘林
parent 750b645c

fix(equip): optimize the IoT integration code and the ES bulk query

Showing 13 changed files with 488 additions and 54 deletions (+488 -54)
- Book.java (+24 -0)
- EsEntity.java (+31 -0)
- EmqMessageService.java (+1 -1)
- KafkaConsumerService.java (+0 -0)
- KafkaConsumerWithThread.java (+0 -0)
- KafkaConsumerWorker.java (+180 -0)
- KafkaProducerService.java (+6 -1)
- WorkerConsumer.java (+66 -0)
- IndicatorDataMapper.java (+2 -0)
- ElasticSearchUtil.java (+114 -15)
- application-dev.properties (+18 -18)
- IndicatorDataMapper.xml (+43 -16)
- pom.xml (+3 -3)
amos-boot-data/amos-boot-data-equip/src/main/java/com/yeejoin/equip/entity/Book.java (new file, mode 100644)
package com.yeejoin.equip.entity;

import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import lombok.ToString;

import java.util.Date;

/**
 * @author LiuLin
 * @date 2023-10-11 09:31
 */
@Data
@ToString
@AllArgsConstructor
@NoArgsConstructor
public class Book {
    private String value;
    private Float valueF;
    private String valueLabel;
    private String unit;
    private Date createdTime;
}
amos-boot-data/amos-boot-data-equip/src/main/java/com/yeejoin/equip/entity/EsEntity.java (new file, mode 100644)
package com.yeejoin.equip.entity;

import lombok.Getter;

/**
 * @author LiuLin
 * @date 2023-10-11 09:31
 */
@Getter
public final class EsEntity<T> {

    private String id;
    private T data;

    public EsEntity() {
    }

    public EsEntity(String id, T data) {
        this.data = data;
        this.id = id;
    }

    public void setId(String id) {
        this.id = id;
    }

    public void setData(T data) {
        this.data = data;
    }
}
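
For reference, this holder just pairs an Elasticsearch document id with an arbitrary payload. A minimal usage sketch, assuming the address_gatewayId id convention and the Book payload introduced elsewhere in this commit (the literal values are illustrative):

import com.yeejoin.equip.entity.Book;
import com.yeejoin.equip.entity.EsEntity;

import java.util.Collections;
import java.util.Date;
import java.util.List;

public class EsEntityUsageSketch {
    public static void main(String[] args) {
        // document id follows the address + "_" + gatewayId convention (illustrative value)
        EsEntity<Book> doc = new EsEntity<>("4817_gw01",
                new Book("0.97", 0.97f, "normal", "kW", new Date()));
        List<EsEntity<Book>> batch = Collections.singletonList(doc);
        System.out.println(batch.get(0).getId()); // getId()/getData() are generated by Lombok @Getter
    }
}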
amos-boot-data/amos-boot-data-equip/src/main/java/com/yeejoin/equip/eqmx/EmqMessageService.java
@@ -63,7 +63,7 @@ public class EmqMessageService extends EmqxListener {
                String gatewayId = result.getString("gatewayId");
                String value = result.getString("value");
                String signalType = result.getString("signalType");
-               log.info("订阅emq消息 ====> address:{},gatewayId:{},dateType:{},value:{},signalType:{}", address, gatewayId, dataType, value, signalType);
+               log.info("===========接收IOT订阅消息,address:{},gatewayId:{},dateType:{},value:{},signalType:{}", address, gatewayId, dataType, value, signalType);
                kafkaProducerService.sendMessageAsync(kafkaTopic, JSON.toJSONString(result));
            }
        } catch (Exception e) {
...
amos-boot-data/amos-boot-data-equip/src/main/java/com/yeejoin/equip/kafka/KafkaConsumerService.java
(diff collapsed, not shown)
amos-boot-data/amos-boot-data-equip/src/main/java/com/yeejoin/equip/kafka/KafkaConsumerWithThread.java
(diff collapsed, not shown)
amos-boot-data/amos-boot-data-equip/src/main/java/com/yeejoin/equip/kafka/KafkaConsumerWorker.java (new file, mode 100644)
package com.yeejoin.equip.kafka;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.yeejoin.equip.config.KafkaConsumerConfig;
import com.yeejoin.equip.entity.Book;
import com.yeejoin.equip.entity.EquipmentIndexVO;
import com.yeejoin.equip.entity.EsEntity;
import com.yeejoin.equip.entity.IndicatorData;
import com.yeejoin.equip.mapper.tdengine.IndicatorDataMapper;
import com.yeejoin.equip.utils.ElasticSearchUtil;
import com.yeejoin.equip.utils.RedisUtils;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.ObjectUtils;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.CommandLineRunner;
import org.springframework.stereotype.Component;

import java.time.Duration;
import java.util.*;
import java.util.concurrent.*;
import java.util.concurrent.atomic.AtomicLong;
import java.util.stream.Collectors;
import java.util.stream.StreamSupport;

/**
 * @author LiuLin
 * @date 2023-08-01 17:27
 */
@Slf4j
@Component
public class KafkaConsumerWorker implements CommandLineRunner {

    private static final AtomicLong sendThreadPoolCounter = new AtomicLong(0);
    public static final ExecutorService pooledExecutor =
            Executors.newFixedThreadPool(100 + Runtime.getRuntime().availableProcessors(), createThreadFactory());
    private static final int CPU_COUNT = Runtime.getRuntime().availableProcessors();
    private static final int CORE_POOL_SIZE = 6 * CPU_COUNT;
    private static final int MAX_POOL_SIZE = 6 * CPU_COUNT + 2;
    private static final ThreadPoolExecutor exec = new ThreadPoolExecutor(CORE_POOL_SIZE, MAX_POOL_SIZE,
            60L, TimeUnit.SECONDS, new LinkedBlockingQueue<>(1000));
    private static final String ES_INDEX_NAME_JX = "jxiop_equipments";
    private static final String TRANSFORMATION = "transformation";
    // prefix for storing an equipment's latest update message into influxdb
    private static final String TRUE = "true";
    private static final String FALSE = "false";

    @Autowired
    protected KafkaProducerService kafkaProducerService;
    @Autowired
    private KafkaConsumerConfig consumerConfig;
    @Autowired
    private RedisUtils redisUtils;
    @Autowired
    private IndicatorDataMapper indicatorDataMapper;
    @Value("${kafka.alarm.topic}")
    private String alarmTopic;
    @Value("${kafka.topic}")
    private String topic;
    @Autowired
    private ElasticSearchUtil elasticSearchUtil;

    private static ThreadFactory createThreadFactory() {
        return runnable -> {
            Thread thread = new Thread(runnable);
            thread.setName(String.format("kafka-consumer-iot-pool-%d",
                    KafkaConsumerWorker.sendThreadPoolCounter.getAndIncrement()));
            return thread;
        };
    }

    @Override
    public void run(String... args) {
        Thread thread = new Thread(new KafkaConsumerThread(consumerConfig.consumerConfigs(), topic));
        thread.start();
    }

    private Optional<IndicatorData> processSignal(ConsumerRecord<String, String> record) {
        JSONObject jsonObject = JSONObject.parseObject(record.value());
        String address = jsonObject.getString("address");
        String gatewayId = jsonObject.getString("gatewayId");
        String value = jsonObject.getString("value");
        String key = address + "_" + gatewayId;
        log.info("===========收到Kafka消息,key:{},value:{}", key, value);
        IndicatorData indicatorData = JSON.parseObject(record.value(), IndicatorData.class);
        if (redisUtils.hasKey(key)) {
            EquipmentIndexVO equipmentSpeIndex = JSONObject.parseObject(redisUtils.get(key), EquipmentIndexVO.class);
            String valueLabel = valueTranslate(value, equipmentSpeIndex.getValueEnum());
            indicatorData.setIsAlarm(String.valueOf(equipmentSpeIndex.getIsAlarm()));
            indicatorData.setEquipmentIndexName(equipmentSpeIndex.getEquipmentIndexName());
            indicatorData.setEquipmentSpecificName(equipmentSpeIndex.getEquipmentSpecificName());
            indicatorData.setUnit(equipmentSpeIndex.getUnitName());
            indicatorData.setEquipmentsIdx(key);
            indicatorData.setValueLabel(valueLabel.isEmpty() ? value : valueLabel);
            indicatorData.setValueF(!Arrays.asList(TRUE, FALSE).contains(value) ? Float.parseFloat(value) : 0);
            // send the alarm message
            if (0 != equipmentSpeIndex.getIsAlarm()) {
                kafkaProducerService.sendMessageAsync(alarmTopic, JSON.toJSONString(indicatorData));
                log.info("===========发送告警信息,key:{}", indicatorData.getEquipmentsIdx());
            }
            return Optional.of(indicatorData);
        }
        return Optional.empty();
    }

    private void processRecord(ConsumerRecords<String, String> records) {
        Map<String, List<IndicatorData>> data = StreamSupport.stream(records.spliterator(), true)
                .map(this::processSignal)
                .filter(Optional::isPresent)
                .map(Optional::get)
                .collect(Collectors.groupingBy(IndicatorData::getGatewayId));
        data.forEach((gatewayId, list) -> {
            // 1. update ES
            List<EsEntity<Book>> batchList = new ArrayList<>(list.size());
            list.forEach(item -> batchList.add(new EsEntity<>(item.getEquipmentsIdx(),
                    new Book(item.getValue(), item.getValueF(), item.getValueLabel(), item.getUnit(), new Date()))));
            elasticSearchUtil.updateBatch(ES_INDEX_NAME_JX, batchList);
            // 2. save to TDengine
            List<IndicatorData> tdDataList = list.stream()
                    .filter(t -> Objects.equals(t.getSignalType(), TRANSFORMATION))
                    .collect(Collectors.toList());
            indicatorDataMapper.insertBatch(tdDataList, gatewayId);
            tdDataList.forEach(s -> log.info("===========TDEngine入库成功,id:【{}】,value:【{}】修改成功",
                    s.getEquipmentsIdx(), s.getValueF()));
        });
    }

    private String valueTranslate(String value, String enumStr) {
        if (ObjectUtils.isEmpty(enumStr)) {
            return "";
        }
        try {
            JSONArray jsonArray = JSONArray.parseArray(enumStr);
            for (int i = 0; i < jsonArray.size(); i++) {
                JSONObject jsonObject = jsonArray.getJSONObject(i);
                if (jsonObject.get("key").equals(value)) {
                    return jsonObject.getString("label");
                }
            }
        } catch (Exception e) {
            log.error("告警枚举转换异常" + e.getMessage(), e);
        }
        return "";
    }

    public class KafkaConsumerThread implements Runnable {

        private final KafkaConsumer<String, String> kafkaConsumer;

        public KafkaConsumerThread(Properties props, String topic) {
            this.kafkaConsumer = new KafkaConsumer<>(props);
            this.kafkaConsumer.subscribe(Collections.singletonList(topic));
        }

        @Override
        public void run() {
            while (true) {
                ConsumerRecords<String, String> records = kafkaConsumer.poll(Duration.ofMillis(100));
                exec.submit(() -> processRecord(records));
                kafkaConsumer.commitSync();
                exec.shutdown();
            }
        }
    }
}
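
KafkaConsumerThread above follows a poll, dispatch-to-pool, commit loop. A stripped-down, self-contained sketch of that hand-off pattern, assuming a placeholder broker and group id (the topic name is borrowed from the commented-out WorkerConsumer below; processing is simulated with a print):

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.serialization.StringDeserializer;

import java.time.Duration;
import java.util.Collections;
import java.util.Properties;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

public class PollDispatchCommitSketch {
    public static void main(String[] args) {
        Properties props = new Properties();
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); // placeholder broker
        props.put(ConsumerConfig.GROUP_ID_CONFIG, "sketch-group");            // placeholder group id
        props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "false");         // offsets committed by hand below
        props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        ExecutorService pool = Executors.newFixedThreadPool(4);
        try (KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props)) {
            consumer.subscribe(Collections.singletonList("topic_t40")); // topic borrowed from WorkerConsumer
            while (true) {
                ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(100));
                if (!records.isEmpty()) {
                    // hand the batch to the pool; stand-in for processRecord(records)
                    pool.submit(() -> records.forEach(r -> System.out.println(r.value())));
                    // commit as soon as the batch is handed off, mirroring KafkaConsumerThread
                    consumer.commitSync();
                }
            }
        }
    }
}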
amos-boot-data/amos-boot-data-equip/src/main/java/com/yeejoin/equip/kafka/KafkaProducerService.java
package com.yeejoin.equip.kafka;

import com.alibaba.fastjson.JSONObject;
import lombok.extern.slf4j.Slf4j;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.support.SendResult;
...
@@ -63,7 +64,11 @@ public class KafkaProducerService {
            }

            @Override
            public void onSuccess(SendResult<String, String> stringStringSendResult) {
                //log.info("发送消息(异步) success! topic: {}, message: {}", topic, message);
+               JSONObject jsonObject = JSONObject.parseObject(message);
+               String address = jsonObject.getString("address");
+               String gatewayId = jsonObject.getString("gatewayId");
+               String value = jsonObject.getString("value");
+               log.info("===========Kafka发送消息 success! address: {}, gatewayId: {},value:{}", address, gatewayId, value);
            }
        });
    }
...
amos-boot-data/amos-boot-data-equip/src/main/java/com/yeejoin/equip/kafka/WorkerConsumer.java (new file, mode 100644)
//package com.yeejoin.equip.kafka;
//
//import org.apache.kafka.clients.consumer.ConsumerConfig;
//import org.apache.kafka.clients.consumer.ConsumerRecords;
//import org.apache.kafka.clients.consumer.KafkaConsumer;
//import org.apache.kafka.common.serialization.StringDeserializer;
//import javax.annotation.PostConstruct;
//import java.time.Duration;
//import java.util.Collections;
//import java.util.Properties;
//import java.util.concurrent.ExecutorService;
//import java.util.concurrent.Executors;
//
///**
// * @author LiuLin
// * @date 2023-10-11 09:31
// */
//public class WorkerConsumer {
// private static final ExecutorService executor = Executors.newFixedThreadPool(100);
// @PostConstruct
// void init() throws Exception {
// String topicName = "topic_t40";
// KafkaConsumer<String, String> consumer = getKafkaConsumer();
// consumer.subscribe(Collections.singletonList(topicName));
// try {
// while (true) {
// ConsumerRecords<String, String> records = consumer.poll(Duration.ofSeconds(1));
// if(!records.isEmpty()){
// executor.execute(new MessageHandler(records));
// }
// }
// }finally {
// consumer.close();
// }
// }
//
// private static KafkaConsumer<String, String> getKafkaConsumer() {
// Properties props = new Properties();
// props.put("bootstrap.servers", "localhost:9092");
// props.put(ConsumerConfig.GROUP_ID_CONFIG, "app_w");
// props.put("client.id", "client_02");
// props.put("enable.auto.commit", true);
// props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "latest");
// props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
// props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
//
// return new KafkaConsumer<>(props);
// }
//
//
// static class MessageHandler implements Runnable{
//
// private final ConsumerRecords<String, String> records;
//
// public MessageHandler(ConsumerRecords<String, String> records) {
// this.records = records;
// }
//
// @Override
// public void run() {
// records.forEach(record -> {
// System.out.println(" 开始处理消息: " + record.value() + ", partition " + record.partition());
// });
// }
// }
//}
amos-boot-data/amos-boot-data-equip/src/main/java/com/yeejoin/equip/mapper/tdengine/IndicatorDataMapper.java
@@ -16,6 +16,8 @@ public interface IndicatorDataMapper {

    int insert(IndicatorData indicatorData);

+   int insertBatch(@Param("list") List<IndicatorData> indicatorDataList, @Param("gatewayId") String gatewayId);
+
    void createDB();

    void createTable();
...
amos-boot-data/amos-boot-data-equip/src/main/java/com/yeejoin/equip/utils/ElasticSearchUtil.java
package com.yeejoin.equip.utils;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.yeejoin.equip.entity.Book;
import com.yeejoin.equip.entity.EsEntity;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.ArrayUtils;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.DocWriteRequest;
import org.elasticsearch.action.DocWriteResponse;
import org.elasticsearch.action.bulk.BulkItemResponse;
import org.elasticsearch.action.bulk.BulkRequest;
import org.elasticsearch.action.bulk.BulkResponse;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.search.ClearScrollRequest;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchResponse;
@@ -21,9 +30,12 @@ import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;

import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.function.Function;

/**
...
@@ -48,6 +60,7 @@ public class ElasticSearchUtil {
     * @return
     */
    public boolean updateData(String indexName, String id, String paramJson) {
+       log.info("更新ES数据,value:{}", id);
        UpdateRequest updateRequest = new UpdateRequest(indexName, id);
        // upsert: create the document if it does not exist in the index yet
        updateRequest.docAsUpsert(true);
...
@@ -75,6 +88,90 @@
    }

+   /**
+    * Update a single document
+    *
+    * @param indexName
+    * @param id
+    * @param data
+    * @return
+    * @throws IOException
+    */
+   public boolean updateData(String indexName, String id, Object data) throws IOException {
+       UpdateRequest updateRequest = new UpdateRequest(indexName, id);
+       // prepare the document
+       String jsonString = JSONObject.toJSONString(data);
+       Map jsonMap = JSONObject.parseObject(jsonString, Map.class);
+       updateRequest.doc(jsonMap);
+       updateRequest.timeout(TimeValue.timeValueSeconds(1));
+       updateRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.WAIT_UNTIL);
+       // the data is stored rather than merely updated
+       UpdateResponse update = restHighLevelClient.update(updateRequest, RequestOptions.DEFAULT);
+       return update.getGetResult().equals(DocWriteResponse.Result.UPDATED);
+   }
+
+   /**
+    * An ids collection must be passed in
+    *
+    * @param indexName
+    * @param idList
+    * @param map
+    * @return
+    */
+   public boolean update(String indexName, List<String> idList, Map map) {
+       // build the bulk request
+       BulkRequest bulkRequest = new BulkRequest();
+       for (String id : idList) {
+           UpdateRequest updateRequest = new UpdateRequest(indexName, id).doc(map);
+           bulkRequest.add(updateRequest);
+       }
+       try {
+           bulkRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
+           BulkResponse bulk = restHighLevelClient.bulk(bulkRequest, RequestOptions.DEFAULT);
+           return bulk.hasFailures();
+       } catch (IOException e) {
+           return false;
+       }
+   }
+
+   /**
+    * Description: batch update
+    *
+    * @param index index
+    * @param list  update list
+    * @author LiuLin
+    */
+   public <T> void updateBatch(String index, List<EsEntity<T>> list) {
+       BulkRequest request = new BulkRequest();
+       list.forEach(item -> request.add(new UpdateRequest(index, item.getId())
+               .doc(JSON.toJSONString(item.getData()), XContentType.JSON)));
+       try {
+           restHighLevelClient.bulk(request, RequestOptions.DEFAULT);
+           list.forEach(s -> log.info("===========索引:【{}】,主键:【{}】修改成功", index, s.getId()));
+       } catch (Exception e) {
+           log.error("索引:[{}]", index, e);
+       }
+   }
+
+   /**
+    * Description: batch insert
+    *
+    * @param index index
+    * @param list  insert list
+    * @author LiuLin
+    */
+   public <T> void insertBatch(String index, List<EsEntity<T>> list) {
+       BulkRequest request = new BulkRequest();
+       list.forEach(item -> request.add(new IndexRequest(index).id(item.getId())
+               .source(JSON.toJSONString(item.getData()), XContentType.JSON)));
+       try {
+           restHighLevelClient.bulk(request, RequestOptions.DEFAULT);
+       } catch (Exception e) {
+           throw new RuntimeException(e);
+       }
+   }

    /**
     * Asynchronous ES update
     *
     * @param indexName index name
...
@@ -86,26 +183,28 @@
        updateRequest.docAsUpsert(true);
        updateRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
        updateRequest.doc(paramJson, XContentType.JSON);
        restHighLevelClient.updateAsync(updateRequest, RequestOptions.DEFAULT, new ActionListener<UpdateResponse>() {
            @Override
            public void onResponse(UpdateResponse updateResponse) {
                if (DocWriteResponse.Result.UPDATED.equals(updateResponse.getResult())) {
                    log.info("索引:【{}】,主键:【{}】修改成功", indexName, id);
                }
            }

            @Override
            public void onFailure(Exception e) {
                log.error("索引:[{}],主键:【{}】", indexName, id, e);
            }
        });
    }

    /**
     * Build a SearchResponse
     *
     * @param indices index
     * @param query   queryBuilder
     * @param fun     result-mapping function
     * @param <T>     result type
     * @return List; entries can be converted to T via fun
     * @throws Exception e
     */
...
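
The two bulk helpers added here differ in write semantics: updateBatch issues partial-document UpdateRequests (fields outside the payload are preserved), while insertBatch issues IndexRequests that replace the whole document. A hedged call-site sketch, assuming Spring injects the utility and reusing the jxiop_equipments index from this commit (document ids and payload values are illustrative):

import com.yeejoin.equip.entity.Book;
import com.yeejoin.equip.entity.EsEntity;
import com.yeejoin.equip.utils.ElasticSearchUtil;

import java.util.Arrays;
import java.util.Date;
import java.util.List;

public class BatchWriteSketch {
    private final ElasticSearchUtil es; // assumed to be injected by Spring

    public BatchWriteSketch(ElasticSearchUtil es) {
        this.es = es;
    }

    public void write() {
        List<EsEntity<Book>> docs = Arrays.asList(
                new EsEntity<>("4817_gw01", new Book("1", 1f, "on", "", new Date())),
                new EsEntity<>("4818_gw01", new Book("0", 0f, "off", "", new Date())));
        es.updateBatch("jxiop_equipments", docs);    // partial update: merges fields into existing docs
        // es.insertBatch("jxiop_equipments", docs); // full index: would overwrite whole documents
    }
}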
amos-boot-data/amos-boot-data-equip/src/main/resources/application-dev.properties
@@ -14,12 +14,21 @@ spring.datasource.mysql-server.hikari.connection-timeout= 60000
spring.datasource.mysql-server.hikari.connection-test-query=SELECT 1

#TDengine ???
spring.datasource.tdengine-server.driver-class-name=com.taosdata.jdbc.rs.RestfulDriver
-spring.datasource.tdengine-server.jdbc-url=jdbc:TAOS-RS://139.9.170.47:6041/iot_data_test?user=root&password=taosdata&timezone=GMT%2b8&allowMultiQueries=true
+spring.datasource.tdengine-server.jdbc-url=jdbc:TAOS-RS://139.9.170.47:6041/iot_data?user=root&password=taosdata&timezone=GMT%2b8&allowMultiQueries=true
spring.datasource.tdengine-server.username=root
spring.datasource.tdengine-server.password=taosdata
spring.datasource.tdengine-server.type=com.zaxxer.hikari.HikariDataSource
spring.datasource.tdengine-server.hikari.minimum-idle=30
spring.datasource.tdengine-server.hikari.maximum-pool-size=150
spring.datasource.tdengine-server.hikari.auto-commit=true
spring.datasource.tdengine-server.hikari.pool-name=TDEngineDruidCP
spring.datasource.tdengine-server.hikari.idle-timeout=500000
spring.datasource.tdengine-server.hikari.max-lifetime=1800000
spring.datasource.tdengine-server.hikari.connection-timeout=60000
spring.datasource.tdengine-server.hikari.connection-test-query=show tables

spring.redis.database=0
-spring.redis.host=172.16.11.201
+spring.redis.host=139.9.173.44
spring.redis.port=6379
spring.redis.password=yeejoin@2020
spring.redis.timeout=3000
...
@@ -37,9 +46,9 @@ eureka.instance.lease-renewal-interval-in-seconds=5
eureka.instance.metadata-map.management.context-path=${server.servlet.context-path}/actuator
eureka.instance.status-page-url-path=/actuator/info
eureka.instance.metadata-map.management.api-docs=http://localhost:${server.port}${server.servlet.context-path}/doc.html
-eureka.instance.hostname=172.16.11.201
+eureka.instance.hostname=139.9.173.44
eureka.instance.prefer-ip-address=true
-eureka.client.serviceUrl.defaultZone=http://${spring.security.user.name}:${spring.security.user.password}@172.16.11.201:10001/eureka/
+eureka.client.serviceUrl.defaultZone=http://${spring.security.user.name}:${spring.security.user.password}@139.9.173.44:10001/eureka/
spring.security.user.name=admin
spring.security.user.password=a1234560
...
@@ -53,16 +62,6 @@ emqx.max-inflight=1000
emqx.keep-alive-interval=10
emqx.biz-topic[0]=iot/data/perspective

-# influxDB
-spring.influx.url=http://139.9.173.44:8086
-spring.influx.password=Yeejoin@2020
-spring.influx.user=root
-spring.influx.database=iot_platform_test
-spring.influx.retention_policy=default
-spring.influx.retention_policy_time=30d
-spring.influx.actions=10000
-spring.influx.bufferLimit=20000

#kafka
spring.kafka.bootstrap-servers=139.9.173.44:9092
spring.kafka.producer.retries=1
...
@@ -74,7 +73,7 @@ spring.kafka.producer.key-serializer=org.apache.kafka.common.serialization.StringSerializer
spring.kafka.producer.value-serializer=org.apache.kafka.common.serialization.StringSerializer
spring.kafka.consumer.group-id=messageConsumerGroup
spring.kafka.consumer.bootstrap-servers=139.9.173.44:9092
-spring.kafka.consumer.enable-auto-commit=false
+spring.kafka.consumer.enable-auto-commit=true
spring.kafka.consumer.auto-offset-reset=earliest
spring.kafka.consumer.key-deserializer=org.apache.kafka.common.serialization.StringDeserializer
spring.kafka.consumer.value-deserializer=org.apache.kafka.common.serialization.StringDeserializer
...
@@ -90,8 +89,8 @@ elasticsearch.address= 139.9.173.44:9200
elasticsearch.username=elastic
elasticsearch.password=Yeejoin@2020
elasticsearch.scheme=http
-elasticsearch.connectTimeout=5000
-elasticsearch.socketTimeout=5000
-elasticsearch.connectionRequestTimeout=5000
+elasticsearch.connectTimeout=50000
+elasticsearch.socketTimeout=50000
+elasticsearch.connectionRequestTimeout=50000
elasticsearch.maxConnectNum=1000
elasticsearch.maxConnectPerRoute=1000
\ No newline at end of file
amos-boot-data/amos-boot-data-equip/src/main/resources/mapper/tdengine/IndicatorDataMapper.xml
@@ -3,28 +3,58 @@
<mapper namespace="com.yeejoin.equip.mapper.tdengine.IndicatorDataMapper">

    <!-- create the database with the given compression ratio -->
    <update id="createDB">
        create database if not exists iot_data vgroups 10 buffer 10 COMP 2 PRECISION 'ns';
    </update>

    <!-- create the super table -->
    <update id="createTable">
-       CREATE STABLE if not exists indicator
-       (created_time timestamp,
-       `value` VARCHAR(12),
-       `value_f` float,
-       value_label VARCHAR(24),
-       unit NCHAR(12))
-       TAGS (address binary(64),
-       gateway_id binary(64),
-       equipments_idx NCHAR(64),
-       data_type NCHAR(12),
-       is_alarm BIGINT,
-       equipment_index_name VARCHAR(200),
-       equipment_specific_name VARCHAR(200));
+       create STABLE if not exists s_indicator_his
+       (created_time timestamp,
+       address binary(64),
+       equipments_idx NCHAR(64),
+       data_type NCHAR(12),
+       is_alarm BIGINT,
+       equipment_index_name VARCHAR(200),
+       equipment_specific_name VARCHAR(200),
+       `value` VARCHAR(12),
+       `value_f` float,
+       value_label VARCHAR(24),
+       unit NCHAR(12))
+       TAGS (gateway_id binary(64));
    </update>

+   <insert id="insertBatch" parameterType="java.util.List">
+       insert into
+       <foreach separator=" " collection="list" item="item" index="index">
+           indicator_his_#{gatewayId,jdbcType=VARCHAR} USING s_indicator_his
+           TAGS (#{item.gatewayId,jdbcType=VARCHAR})
+           VALUES (NOW + #{index}a,
+           #{item.address,jdbcType=VARCHAR},
+           #{item.equipmentsIdx,jdbcType=VARCHAR},
+           #{item.dataType,jdbcType=VARCHAR},
+           #{item.isAlarm,jdbcType=VARCHAR},
+           #{item.equipmentSpecificName,jdbcType=VARCHAR},
+           #{item.equipmentIndexName,jdbcType=VARCHAR},
+           #{item.value,jdbcType=VARCHAR},
+           #{item.valueF,jdbcType=FLOAT},
+           #{item.valueLabel,jdbcType=VARCHAR},
+           #{item.unit,jdbcType=VARCHAR})
+       </foreach>
+   </insert>

+   <!-- <insert id="insertBatch" parameterType="java.util.List">-->
+   <!-- INSERT INTO indicator_#{gatewayId,jdbcType=VARCHAR} (created_time, `value`,`value_f`, value_label,unit,-->
+   <!-- address,gateway_id,equipments_idx,data_type,is_alarm,equipment_index_name,equipment_specific_name)-->
+   <!-- VALUES-->
+   <!-- <foreach collection="list" item="item" separator="UNION ALL" index="index">-->
+   <!-- SELECT NOW + #{index}a, #{item.value}, #{item.valueF}, #{item.valueLabel}, #{item.unit},-->
+   <!-- #{item.address}, #{item.gatewayId}, #{item.equipmentsIdx}, #{item.dataType}, #{item.isAlarm},-->
+   <!-- #{item.equipmentSpecificName},#{item.equipmentIndexName}-->
+   <!-- </foreach>-->
+   <!-- </insert>-->

    <insert id="insert" parameterType="com.yeejoin.equip.entity.IndicatorData">
        insert into indicator_#{gatewayId,jdbcType=VARCHAR} USING indicator
        TAGS (#{address,jdbcType=VARCHAR},
        #{gatewayId,jdbcType=VARCHAR},
...
@@ -33,10 +63,6 @@
        #{isAlarm,jdbcType=VARCHAR},
        #{equipmentSpecificName,jdbcType=VARCHAR},
        #{equipmentIndexName,jdbcType=VARCHAR})
-       VALUES (NOW,
-       #{value,jdbcType=VARCHAR},
-       #{valueF,jdbcType=FLOAT},
-       #{valueLabel,jdbcType=VARCHAR},
-       #{unit,jdbcType=VARCHAR})
+       VALUES (NOW, #{value,jdbcType=VARCHAR}, #{valueF,jdbcType=FLOAT}, #{valueLabel,jdbcType=VARCHAR}, #{unit,jdbcType=VARCHAR})
    </insert>
</mapper>
\ No newline at end of file
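
For one list element at index 0, the new foreach above expands into a TDengine auto-create insert roughly like the following (all values illustrative; the sub-table name carries the gateway id, and `a` is TDengine's millisecond duration unit, so NOW + #{index}a staggers rows within a batch to keep timestamps distinct):

INSERT INTO indicator_his_gw01 USING s_indicator_his
TAGS ('gw01')
VALUES (NOW + 0a, '4817', '4817_gw01', 'transformation', 0, 'Pump #1', 'Running state', '1', 1.0, 'on', '');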
pom.xml
@@ -311,12 +311,12 @@
    <repository>
        <id>Releases</id>
        <name>Releases</name>
-       <url>http://36.46.149.14:8081/nexus/content/repositories/releases/</url>
+       <url>http://113.142.68.105:8081/nexus/content/repositories/releases/</url>
    </repository>
    <repository>
        <id>Snapshots</id>
        <name>Snapshots</name>
-       <url>http://36.46.149.14:8081/nexus/content/repositories/snapshots/</url>
+       <url>http://113.142.68.105:8081/nexus/content/repositories/snapshots/</url>
    </repository>
    <repository>
        <id>com.e-iceblue</id>
...
@@ -326,7 +326,7 @@
    <repository>
        <id>thirdparty</id>
        <name>thirdparty</name>
-       <url>http://36.46.149.14:8081/nexus/content/repositories/thirdparty/</url>
+       <url>http://113.142.68.105:8081/nexus/content/repositories/thirdparty/</url>
    </repository>
</repositories>
...