Commit ed5a85ad authored by KeYong

Merge remote-tracking branch 'origin/develop_dl_plan6' into develop_dl_plan6

parents 35b4f1f9 ede3e0f8
@@ -2,6 +2,7 @@ package com.yeejoin.amos.boot.module.common.api.enums;
 public enum DataSyncTopicEnum {
     EQM_PATROL_CREATED("user", "emq.user.created"),
+    ORG_USR_DEL("userDel", "user/sync/orgUsrDel"),
     ORG_USR("orgUsr", "user/sync/orgUsr");
     private String type;
......
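For context, this enum apparently pairs a logical message type with the MQTT topic it is published to. A minimal sketch of how the updated constants plausibly fit together; the constructor, the topic field name, and the getters are assumptions inferred from the getTopic() calls elsewhere in this commit, not copied from the source file:

    public enum DataSyncTopicEnum {
        EQM_PATROL_CREATED("user", "emq.user.created"),
        ORG_USR_DEL("userDel", "user/sync/orgUsrDel"),   // new in this commit: deletion-sync topic
        ORG_USR("orgUsr", "user/sync/orgUsr");

        private String type;   // logical message type
        private String topic;  // MQTT topic the message is published to (assumed field name)

        DataSyncTopicEnum(String type, String topic) {
            this.type = type;
            this.topic = topic;
        }

        public String getType() { return type; }

        public String getTopic() { return topic; }  // used by DataSyncServiceImpl below
    }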
@@ -10,4 +10,6 @@ public interface IDataSyncService {
     void OrgUsrSyncDtoDataSync(Long id);
+    void OrgUsrSyncDtoDelDataSync(Long id);
 }
@@ -600,7 +600,7 @@ WHERE
 ou.sequence_nbr as id,
 ou.biz_org_name as userName,
 IFNULL( MAX( CASE WHEN cfi.field_code = 'telephone' THEN field_value END ), '' ) AS telephone,
-IF(MAX( CASE WHEN cfi.field_code = 'postTypeName' THEN field_value END ) = '', NULL, MAX( CASE WHEN cfi.field_code = 'postTypeName' THEN field_value END )) AS postTypeName,
+IFNULL( MAX( CASE WHEN cfi.field_code = 'positionType' THEN field_value_label END ), '' ) AS postTypeName,
 IFNULL( MAX( CASE WHEN cfi.field_code = 'personImg' THEN field_value END ), '' ) AS personImg,
 IFNULL( MAX( CASE WHEN cfi.field_code = 'peopleType' THEN field_value END ), '' ) AS peopleType
 FROM
......
@@ -1067,115 +1067,57 @@ LEFT JOIN (
 <select id="getOrgUsrSyncDataList" resultType="com.yeejoin.amos.boot.module.common.api.dto.OrgUsrSyncDto">
 SELECT
 u.*,
-case
-when u.biz_org_type = 'person'
-and (
-SELECT
-s.biz_org_type
-from
-cb_org_usr s
-WHERE
-s.sequence_nbr = u.parent_id) = 'DEPARTMENT'
-then (
-SELECT
-s.biz_org_name
-from
-cb_org_usr s
-WHERE
-s.sequence_nbr = (
-SELECT
-s.parent_id
-from
-cb_org_usr s
-WHERE
-s.sequence_nbr = u.parent_id))
-when u.biz_org_type = 'person'
-and (
-SELECT
-s.biz_org_type
-from
-cb_org_usr s
-WHERE
-s.sequence_nbr = u.parent_id) = 'COMPANY'
-then u.parent_name
-else u.biz_org_name
-end as company_biz_name,
-case
-when u.biz_org_type = 'person'
-and (
-SELECT
-s.biz_org_type
-from
-cb_org_usr s
-WHERE
-s.sequence_nbr = u.parent_id) = 'DEPARTMENT'
-then (
-SELECT
-s.biz_org_code
-from
-cb_org_usr s
-WHERE
-s.sequence_nbr = (
-SELECT
-s.parent_id
-from
-cb_org_usr s
-WHERE
-s.sequence_nbr = u.parent_id))
-when u.biz_org_type = 'person'
-and (
-SELECT
-s.biz_org_type
-from
-cb_org_usr s
-WHERE
-s.sequence_nbr = u.parent_id) = 'COMPANY'
-then (
-SELECT
-s.biz_org_code
-from
-cb_org_usr s
-WHERE
-s.sequence_nbr = u.parent_id)
-else u.biz_org_code
-end as company_biz_code,
+CASE
+WHEN u.biz_org_type = 'person'
+AND ( SELECT s.biz_org_type FROM cb_org_usr s WHERE s.sequence_nbr = u.parent_id ) = 'DEPARTMENT' THEN
+(
+SELECT
+s.biz_org_name
+FROM
+cb_org_usr s
+WHERE
+s.sequence_nbr = ( SELECT s.parent_id FROM cb_org_usr s WHERE s.sequence_nbr = u.parent_id )
+)
+WHEN u.biz_org_type = 'person'
+AND ( SELECT s.biz_org_type FROM cb_org_usr s WHERE s.sequence_nbr = u.parent_id ) = 'COMPANY' THEN
+u.parent_name ELSE u.biz_org_name
+END AS company_biz_name,
+CASE
+WHEN u.biz_org_type = 'person'
+AND ( SELECT s.biz_org_type FROM cb_org_usr s WHERE s.sequence_nbr = u.parent_id ) = 'DEPARTMENT' THEN
+(
+SELECT
+s.biz_org_code
+FROM
+cb_org_usr s
+WHERE
+s.sequence_nbr = ( SELECT s.parent_id FROM cb_org_usr s WHERE s.sequence_nbr = u.parent_id )
+)
+WHEN u.biz_org_type = 'person'
+AND ( SELECT s.biz_org_type FROM cb_org_usr s WHERE s.sequence_nbr = u.parent_id ) = 'COMPANY' THEN
+( SELECT s.biz_org_code FROM cb_org_usr s WHERE s.sequence_nbr = u.parent_id ) ELSE u.biz_org_code
+END AS company_biz_code,
 cfp.fire_management_post,
 cft.type_code,
 cf.certificate_number certificatesNumber,
-cf.employee_number ,
-cf.mobile_phone telephone,
-( CASE
-WHEN cfp.post_qualification IS NULL THEN 0
-ELSE 1
-END ) AS is_certificate,
+cf.employee_number,
+( SELECT field_value FROM cb_dynamic_form_instance dfi WHERE dfi.field_code = 'telephone' AND dfi.instance_id = u.sequence_nbr ) AS telephone,
+( CASE WHEN cfp.post_qualification IS NULL THEN 0 ELSE 1 END ) AS is_certificate,
 IF
-((
-SELECT
-field_value
-FROM
-cb_dynamic_form_instance dfi
-WHERE
-dfi.field_code = 'peopleType'
-AND dfi.instance_id = u.sequence_nbr )= 2,
-1,
-0 ) AS is_firefighters,
-(
-SELECT
-field_value
-FROM
-cb_dynamic_form_instance dfi
-WHERE
-dfi.field_code = 'personImg'
-AND dfi.instance_id = u.sequence_nbr ) AS personImg,
+(
+( SELECT field_value FROM cb_dynamic_form_instance dfi WHERE dfi.field_code = 'peopleType' AND dfi.instance_id = u.sequence_nbr ) = 2,
+1,
+0
+) AS is_firefighters,
+( SELECT field_value FROM cb_dynamic_form_instance dfi WHERE dfi.field_code = 'personImg' AND dfi.instance_id = u.sequence_nbr ) AS personImg,
 cfp.job_title
 FROM
 cb_org_usr u
-LEFT JOIN cb_firefighters_post cfp ON
-cfp.org_usr_id = u.sequence_nbr
-LEFT JOIN cb_firefighters cf ON
-cf.org_usr_id = u.sequence_nbr
-LEFT JOIN cb_fire_team cft ON
-cft.sequence_nbr = cf.fire_team_id
+LEFT JOIN cb_firefighters_post cfp ON cfp.org_usr_id = u.sequence_nbr
+LEFT JOIN cb_firefighters cf ON cf.org_usr_id = u.sequence_nbr
+LEFT JOIN cb_fire_team cft ON cft.sequence_nbr = cf.fire_team_id
 WHERE
 u.is_delete = 0
 <if test="id != null">
......
@@ -47,15 +47,24 @@ public class DataSyncServiceImpl implements IDataSyncService {
                     emqKeeper.getMqttClient().publish(DataSyncTopicEnum.ORG_USR.getTopic(), JSONObject.toJSONString(x).getBytes(), RuleConfig.DEFAULT_QOS, false);
                 }
             }
-            } else {
+            }
+        } catch (Exception e) {
+            log.error("站端与中心级人员数据【OrgUsrSyncDto】同步推送失败-----------" + e.getMessage());
+        }
+    }
+
+    @Override
+    public void OrgUsrSyncDtoDelDataSync(Long id) {
+        try {
+            if (id != null) {
                 // Synchronize the deleted personnel record
                 OrgUsrSyncDto orgUsrSyncDto = new OrgUsrSyncDto();
                 orgUsrSyncDto.setSequenceNbr(id);
-                String message = buildSyncMessage(DataSyncTopicEnum.ORG_USR.getTopic(), orgUsrSyncDto);
+                String message = buildSyncMessage(DataSyncTopicEnum.ORG_USR_DEL.getTopic(), orgUsrSyncDto);
                 emqKeeper.getMqttClient().publish(DataSyncTopicEnum.EQM_PATROL_CREATED.getTopic(), message.getBytes(), RuleConfig.DEFAULT_QOS, false);
                 if (orgUsrSyncSwitch) {
-                    emqKeeper.getMqttClient().publish(DataSyncTopicEnum.ORG_USR.getTopic(), JSONObject.toJSONString(orgUsrSyncDto).getBytes(), RuleConfig.DEFAULT_QOS, false);
+                    emqKeeper.getMqttClient().publish(DataSyncTopicEnum.ORG_USR_DEL.getTopic(), JSONObject.toJSONString(orgUsrSyncDto).getBytes(), RuleConfig.DEFAULT_QOS, false);
+                }
             }
         }
     } catch (Exception e) {
......
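The publish calls above go through emqKeeper, which by all appearances wraps an Eclipse Paho MqttClient. A minimal, self-contained sketch of the same delete-sync publish against a plain Paho client; the broker URL, client id, and sample id are placeholders, and QoS 0 merely stands in for RuleConfig.DEFAULT_QOS:

    import org.eclipse.paho.client.mqttv3.MqttClient;
    import org.eclipse.paho.client.mqttv3.MqttException;
    import com.alibaba.fastjson.JSONObject;

    public class OrgUsrDelSyncExample {
        public static void main(String[] args) throws MqttException {
            // Hypothetical broker URL and client id; the real ones come from EmqKeeper's configuration.
            MqttClient client = new MqttClient("tcp://127.0.0.1:1883", "org-usr-del-sync-demo");
            client.connect();

            // Payload mirrors OrgUsrSyncDto with only the primary key set, as in OrgUsrSyncDtoDelDataSync.
            JSONObject dto = new JSONObject();
            dto.put("sequenceNbr", 12345L);

            // Publish to the new deletion-sync topic; retained = false as in the service code.
            client.publish("user/sync/orgUsrDel", dto.toJSONString().getBytes(), 0, false);
            client.disconnect();
        }
    }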
@@ -3340,7 +3340,7 @@ public class OrgUsrServiceImpl extends BaseService<OrgUsrDto, OrgUsr, OrgUsrMapp
     @Override
     public void afterCommit() {
         // Business logic to run after the transaction commits
-        dataSyncService.OrgUsrSyncDtoDataSync(id);
+        dataSyncService.OrgUsrSyncDtoDelDataSync(id);
     }
 });
 return "0";
......
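The afterCommit() override above indicates the delete sync is deferred until the surrounding transaction commits, so no MQTT message goes out for a delete that ends up rolled back. A sketch of that registration pattern using Spring's transaction synchronization API; the service call is the one from this diff, while the class, method, and variable names around it are illustrative (on Spring Framework 5.3+ the interface has default methods, so only afterCommit needs overriding; older versions would extend TransactionSynchronizationAdapter instead):

    import org.springframework.transaction.support.TransactionSynchronization;
    import org.springframework.transaction.support.TransactionSynchronizationManager;

    public class OrgUsrDeleteSyncSketch {

        private final IDataSyncService dataSyncService;   // injected in the real service

        public OrgUsrDeleteSyncSketch(IDataSyncService dataSyncService) {
            this.dataSyncService = dataSyncService;
        }

        // Called from within the @Transactional delete method, after the row has been marked deleted.
        public void scheduleDeleteSync(final Long id) {
            TransactionSynchronizationManager.registerSynchronization(new TransactionSynchronization() {
                @Override
                public void afterCommit() {
                    // Runs only if the transaction commits, so a rollback never emits the sync message.
                    dataSyncService.OrgUsrSyncDtoDelDataSync(id);
                }
            });
        }
    }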
@@ -132,7 +132,9 @@ public class EquipmentIndexController {
     x.setEmergencyLevel("4");
     x.setEmergencyLevelDescribe("正常");
 });
+if (list.size() > 0) {
     carPropertyService.updateBatchById(list);
+}
 } else {
     LambdaUpdateWrapper<EquipmentSpecificIndex> equipWrapper = new LambdaUpdateWrapper<>();
     equipWrapper.eq(EquipmentSpecificIndex::getEquipmentIndexId, equipmentIndex.getId());
......
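The added guard avoids handing MyBatis-Plus's updateBatchById an empty collection, which in some versions fails outright and is wasted work in the rest. An equivalent, slightly more idiomatic form of the same check, assuming Spring's CollectionUtils is on the classpath:

    import org.springframework.util.CollectionUtils;

    // Same intent as the added if (list.size() > 0) check:
    if (!CollectionUtils.isEmpty(list)) {
        carPropertyService.updateBatchById(list);
    }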
@@ -23,7 +23,7 @@ spring.redis.password=1234560
 # Kafka topics to subscribe to; configure according to whether this node is the center or a station
 kafka.topics=null.topic
-kafka.init.topics=akka.iot.created,akka.patrol.created,akka.sign.created,akka.bussSign.created,akka.user.created
+kafka.init.topics=
 # EMQ topics to subscribe to; configure according to whether this node is the center or a station
 emq.topic=emq.iot.created,emq.patrol.created,emq.sign.created,emq.bussSign.created,emq.user.created
\ No newline at end of file
@@ -5,59 +5,59 @@ spring.profiles.active=dev
 spring.jackson.time-zone=GMT+8
 spring.jackson.date-format=yyyy-MM-dd HH:mm:ss
 spring.jackson.serialization.write-dates-as-timestamps=true
 # Kafka cluster
 spring.kafka.bootstrap-servers=172.16.3.100:9092
 # Producer configuration
 # Retry count: with a value greater than 0, the client resends records whose send failed
-spring.kafka.producer.retries=3
+spring.kafka.producer.retries=0
 # 16K
 spring.kafka.producer.batch-size=16384
 spring.kafka.producer.buffer-memory=33554432
 # Acknowledgement level
 # acks=0: the send is treated as successful as soon as the message is handed to Kafka
 # acks=1: the send is treated as successful once the message is written to the leader partition's disk
 # acks=all: the send is treated as successful only after the leader partition's follower replicas have synchronized the message
 spring.kafka.producer.acks=1
 # Serialization for the message key and value
 # # Maximum batch size, in bytes
 # batch-size: 4096
 # # Send delay: the producer hands messages to Kafka once the accumulated batch reaches batch-size or linger.ms elapses
 # buffer-memory: 33554432
 # # Client ID
 # client-id: hello-kafka
 # # Message compression: none, lz4, gzip, snappy; the default is none.
 # compression-type: gzip
 spring.kafka.producer.key-serializer=org.apache.kafka.common.serialization.StringSerializer
 spring.kafka.producer.value-serializer=org.apache.kafka.common.serialization.StringSerializer
 # Consumer group
 # When Kafka has no initial offset, or the offset is out of range, the offset is reset automatically
 # earliest: reset to the smallest offset in the partition
 # latest: reset to the newest offset in the partition (consume only newly produced data)
 # none: throw an exception if any partition has no committed offset
 spring.kafka.consumer.group-id=zhTestGroup
 spring.kafka.consumer.enable-auto-commit=false
 # If a partition has a committed offset, consume from it; otherwise consume from the beginning
 # # Auto-commit interval, in ms
 # auto-commit-interval: 1000
 # # Maximum number of records per batch poll
 # max-poll-records: 100
 spring.kafka.consumer.auto-offset-reset=earliest
 spring.kafka.consumer.key-deserializer=org.apache.kafka.common.serialization.StringDeserializer
 spring.kafka.consumer.value-deserializer=org.apache.kafka.common.serialization.StringDeserializer
 # Commit after each record is handled by the listener (ListenerConsumer)
 # RECORD
 # Commit after each batch returned by poll() is handled by the listener
 # BATCH
 # Commit after each poll() batch is handled and more than TIME has elapsed since the last commit
 # TIME
 # Commit after each poll() batch is handled and at least COUNT records have been processed
 # COUNT
 # Commit when either the TIME or the COUNT condition is met
 # COUNT_TIME
 # Commit after each poll() batch is handled and Acknowledgment.acknowledge() has been called manually
 # MANUAL
 # Commit immediately when Acknowledgment.acknowledge() is called; this is the mode most commonly used
 # MANUAL_IMMEDIATE
 spring.kafka.listener.ack-mode=manual_immediate
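Because ack-mode is manual_immediate, each listener has to call Acknowledgment.acknowledge() itself; a minimal sketch of such a listener with Spring Kafka, where the topic name is a placeholder rather than a value taken from this configuration:

    import org.springframework.kafka.annotation.KafkaListener;
    import org.springframework.kafka.support.Acknowledgment;
    import org.springframework.stereotype.Component;

    @Component
    public class ManualAckListenerSketch {

        // Placeholder topic; the real topics come from kafka.topics / kafka.init.topics above.
        @KafkaListener(topics = "emq.user.created", groupId = "zhTestGroup")
        public void onMessage(String message, Acknowledgment ack) {
            // Process the record; if this throws, acknowledge() is skipped and the record can be re-polled.
            System.out.println("received: " + message);
            // With manual_immediate, the offset is committed as soon as acknowledge() is called.
            ack.acknowledge();
        }
    }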
@@ -20,5 +20,6 @@
     <module>amos-boot-utils-jpush</module>
     <module>amos-boot-utils-video</module>
     <module>amos-boot-utils-speech</module>
+    <module>amos-boot-utils-message</module>
 </modules>
 </project>
\ No newline at end of file