Commit 9b8137c6 authored by caotao

Analysis: add new production configuration

parent 989b3ed0
## DB properties:
## db1-production database
spring.db1.datasource.type: com.alibaba.druid.pool.DruidDataSource
spring.db1.datasource.url=jdbc:mysql://10.20.1.157:3306/production?allowMultiQueries=true&serverTimezone=GMT%2B8&characterEncoding=utf8
spring.db1.datasource.username=root
spring.db1.datasource.password=Yeejoin@2020
spring.db1.datasource.driver-class-name: com.mysql.cj.jdbc.Driver
## db2-amos_idx_biz
spring.db2.datasource.type: com.alibaba.druid.pool.DruidDataSource
spring.db2.datasource.url=jdbc:mysql://10.20.1.157:3306/amos_idx_biz?allowMultiQueries=true&serverTimezone=GMT%2B8&characterEncoding=utf8
spring.db2.datasource.username=root
spring.db2.datasource.password=Yeejoin@2020
spring.db2.datasource.driver-class-name: com.mysql.cj.jdbc.Driver
## db5-sync_data
spring.db5.datasource.type: com.alibaba.druid.pool.DruidDataSource
spring.db5.datasource.url=jdbc:mysql://10.20.1.157:3306/jxiop_sync_data?allowMultiQueries=true&serverTimezone=GMT%2B8&characterEncoding=utf8
spring.db5.datasource.username=root
spring.db5.datasource.password=Yeejoin@2020
spring.db5.datasource.driver-class-name: com.mysql.cj.jdbc.Driver
## amos-project
spring.db6.datasource.type: com.alibaba.druid.pool.DruidDataSource
spring.db6.datasource.url=jdbc:mysql://10.20.1.157:3306/amos_project?allowMultiQueries=true&serverTimezone=GMT%2B8&characterEncoding=utf8
spring.db6.datasource.username=root
spring.db6.datasource.password=Yeejoin@2020
spring.db6.datasource.driver-class-name: com.mysql.cj.jdbc.Driver
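The four MySQL blocks above (db1, db2, db5, db6) each describe a separate Druid pool. Below is a minimal sketch of how one such block could be bound to its own DataSource bean; the class and bean names are illustrative assumptions, not the project's actual wiring.

```java
// Minimal sketch (assumption): binding the spring.db1.datasource.* block above
// to a dedicated Druid pool. The other dbN blocks would get analogous beans.
import javax.sql.DataSource;

import com.alibaba.druid.pool.DruidDataSource;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

@Configuration
public class Db1DataSourceConfig {

    // url, username, password and driver-class-name are picked up via relaxed binding
    @Bean(name = "db1DataSource")
    @ConfigurationProperties(prefix = "spring.db1.datasource")
    public DataSource db1DataSource() {
        return new DruidDataSource();
    }
}
```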
## eureka properties:
eureka.instance.hostname=10.20.1.160
eureka.client.serviceUrl.defaultZone=http://admin:a1234560@${eureka.instance.hostname}:10001/eureka/
## redis properties:
spring.redis.database=1
spring.redis.host=10.20.1.210
spring.redis.port=6379
spring.redis.password=yeejoin@2020
openHealth=false
spring.cache.type=GENERIC
j2cache.open-spring-cache=true
j2cache.cache-clean-mode=passive
j2cache.allow-null-values=true
j2cache.redis-client=lettuce
j2cache.l2-cache-open=true
j2cache.broadcast=net.oschina.j2cache.cache.support.redis.SpringRedisPubSubPolicy
j2cache.L1.provider_class=caffeine
j2cache.L2.provider_class=net.oschina.j2cache.cache.support.redis.SpringRedisProvider
j2cache.L2.config_section=lettuce
j2cache.sync_ttl_to_redis=true
j2cache.default_cache_null_object=false
j2cache.serialization=fst
caffeine.properties=/caffeine.properties
lettuce.mode=single
lettuce.namespace=
lettuce.storage=generic
lettuce.channel=j2cache
lettuce.scheme=redis
lettuce.hosts=${spring.redis.host}:${spring.redis.port}
lettuce.password=${spring.redis.password}
lettuce.database=${spring.redis.database}
lettuce.sentinelMasterId=
lettuce.maxTotal=100
lettuce.maxIdle=10
lettuce.minIdle=10
lettuce.timeout=10000
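With j2cache.open-spring-cache=true and spring.cache.type=GENERIC, cached reads go through Caffeine (L1) and the Redis instance above (L2). A minimal usage sketch, assuming the standard Spring cache abstraction is used; the cache region and service class are hypothetical.

```java
// Minimal sketch (assumption): standard Spring @Cacheable backed by the
// j2cache (Caffeine + Redis) setup above. "userCache" is a hypothetical region.
import org.springframework.cache.annotation.Cacheable;
import org.springframework.stereotype.Service;

@Service
public class UserQueryService {

    @Cacheable(value = "userCache", key = "#userId", unless = "#result == null")
    public String findUserName(Long userId) {
        // hypothetical slow lookup that benefits from the two-level cache
        return "user-" + userId;
    }
}
```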
emqx.clean-session=true
emqx.client-id=${spring.application.name}-${random.int[1024,65536]}
emqx.broker=tcp://10.20.1.210:2883
emqx.user-name=admin
emqx.password=public
mqtt.scene.host=mqtt://10.20.1.210:8083/mqtt
mqtt.client.product.id=mqtt
mqtt.topic=topic_mqtt
spring.mqtt.completionTimeout=3000
emqx.max-inflight=1000
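A minimal sketch of connecting to the EMQX broker with the settings above, using the Eclipse Paho MQTT v3 client; the client id below stands in for the ${spring.application.name}-${random.int[1024,65536]} expression and is only illustrative.

```java
// Minimal sketch (assumption): Paho client wired from the emqx.* values above.
import org.eclipse.paho.client.mqttv3.MqttClient;
import org.eclipse.paho.client.mqttv3.MqttConnectOptions;
import org.eclipse.paho.client.mqttv3.persist.MemoryPersistence;

public class EmqxConnectDemo {
    public static void main(String[] args) throws Exception {
        MqttConnectOptions options = new MqttConnectOptions();
        options.setUserName("admin");                // emqx.user-name
        options.setPassword("public".toCharArray()); // emqx.password
        options.setCleanSession(true);               // emqx.clean-session
        options.setMaxInflight(1000);                // emqx.max-inflight

        MqttClient client = new MqttClient(
                "tcp://10.20.1.210:2883",           // emqx.broker
                "demo-client-" + System.nanoTime(), // placeholder client id
                new MemoryPersistence());
        client.connect(options);
        client.subscribe("topic_mqtt", 1);           // mqtt.topic
    }
}
```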
## tdengine-server
tdengine-server.driver-class-name=com.taosdata.jdbc.rs.RestfulDriver
tdengine-server.jdbc-url=jdbc:TAOS-RS://10.20.0.203:6041/iot_data_1?user=root&password=taosdata&timezone=GMT%2b8&allowMultiQueries=true
tdengine-server.username=root
tdengine-server.password=taosdata
#spring.db3.datasource.type: com.alibaba.druid.pool.DruidDataSource
spring.db3.datasource.url=jdbc:TAOS-RS://10.20.0.203:6041/iot_data?user=root&password=taosdata&timezone=GMT%2b8&allowMultiQueries=true
spring.db3.datasource.username=root
spring.db3.datasource.password=taosdata
spring.db3.datasource.driver-class-name: com.taosdata.jdbc.rs.RestfulDriver
# Dedicated TDengine database for analysis (analysis_data)
#spring.db4.datasource.type: com.alibaba.druid.pool.DruidDataSource
spring.db4.datasource.url=jdbc:TAOS-RS://10.20.0.203:6041/analysis_data?user=root&password=taosdata&timezone=GMT%2b8&allowMultiQueries=true
spring.db4.datasource.username=root
spring.db4.datasource.password=taosdata
spring.db4.datasource.driver-class-name: com.taosdata.jdbc.rs.RestfulDriver
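A minimal sketch of talking to the TDengine REST endpoint configured above through plain JDBC; it only runs a version query and makes no assumption about the project's schema.

```java
// Minimal sketch (assumption): plain JDBC against the TAOS-RS URL used by db3 above.
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

public class TdengineQueryDemo {
    public static void main(String[] args) throws Exception {
        Class.forName("com.taosdata.jdbc.rs.RestfulDriver");
        String url = "jdbc:TAOS-RS://10.20.0.203:6041/iot_data?user=root&password=taosdata";
        try (Connection conn = DriverManager.getConnection(url);
             Statement stmt = conn.createStatement();
             ResultSet rs = stmt.executeQuery("SELECT server_version()")) {
            while (rs.next()) {
                System.out.println("TDengine version: " + rs.getString(1));
            }
        }
    }
}
```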
## influxDB
#spring.influx.url= http://172.16.3.155:18186
#spring.influx.password=Yeejoin@2020
#spring.influx.user=root
#spring.influx.database=iot_platform
#spring.influx.retention_policy=default
#spring.influx.retention_policy_time=30d
#spring.influx.actions=10000
#spring.influx.bufferLimit=20000
## influxDB
#spring.influx.url= http://139.9.171.247:8086
#spring.influx.password=Yeejoin@2023
#spring.influx.user=admin
#spring.influx.database=iot_platform
#spring.influx.retention_policy=default
#spring.influx.retention_policy_time=30d
#spring.influx.actions=10000
#spring.influx.bufferLimit=20000
spring.influx.url=http://139.9.173.44:8086
spring.influx.password=Yeejoin@2020
spring.influx.user=root
spring.influx.database=iot_platform
spring.influx.retention_policy=default
spring.influx.retention_policy_time=30d
spring.influx.actions=10000
spring.influx.bufferLimit=20000
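A minimal sketch of writing one point with the influxdb-java client against the active spring.influx.* endpoint above; the measurement, tag and field names are hypothetical.

```java
// Minimal sketch (assumption): one write via influxdb-java to the endpoint above.
import java.util.concurrent.TimeUnit;

import org.influxdb.InfluxDB;
import org.influxdb.InfluxDBFactory;
import org.influxdb.dto.Point;

public class InfluxWriteDemo {
    public static void main(String[] args) {
        InfluxDB influxDB = InfluxDBFactory.connect(
                "http://139.9.173.44:8086", "root", "Yeejoin@2020");
        influxDB.setDatabase("iot_platform");   // spring.influx.database
        influxDB.setRetentionPolicy("default"); // spring.influx.retention_policy

        influxDB.write(Point.measurement("device_metric") // hypothetical measurement
                .time(System.currentTimeMillis(), TimeUnit.MILLISECONDS)
                .tag("station", "demo")
                .addField("value", 1.0)
                .build());
        influxDB.close();
    }
}
```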
knife4j.production=false
knife4j.enable=true
knife4j.basic.enable=true
knife4j.basic.username=admin
knife4j.basic.password=a1234560
management.security.enabled=true
spring.security.user.name=admin
spring.security.user.password=a1234560
fire-rescue=123
mybatis-plus.global-config.db-config.update-strategy=ignored
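update-strategy=ignored tells MyBatis-Plus to include every entity field in generated UPDATE statements, so a field left null overwrites its column with NULL instead of being skipped. A minimal sketch of that effect, with a hypothetical entity and mapper:

```java
// Minimal sketch (assumption): effect of update-strategy=ignored on updateById.
// User and UserMapper are hypothetical; they are not part of this commit.
import com.baomidou.mybatisplus.annotation.TableName;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;

@TableName("demo_user")
class User {
    private Long id;
    private String name;
    private String email;

    public void setId(Long id) { this.id = id; }
    public void setName(String name) { this.name = name; }
    public void setEmail(String email) { this.email = email; }
}

interface UserMapper extends BaseMapper<User> {
}

class UserUpdateDemo {
    static void rename(UserMapper userMapper, Long id, String newName) {
        User patch = new User();
        patch.setId(id);
        patch.setName(newName);
        // email was never set: with update-strategy=ignored the generated SQL
        // still contains "email = ?" bound to NULL, so the column is cleared.
        userMapper.updateById(patch);
    }
}
```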
# user-amos setting: this value is the secret key used by the person management module to encrypt account passwords. Please don't change it!
amos.secret.key=qaz
# If your service cannot be accessed, enable the settings below and change the IP to your own.
#eureka.instance.prefer-ip-address=true
#eureka.instance.ip-address=172.16.3.122
spring.activemq.broker-url=tcp://10.20.1.210:61616
spring.activemq.user=admin
spring.activemq.password=admin
spring.jms.pub-sub-domain=false
myqueue=amos.privilege.v1.JXIOP.AQSC_FDGL.userBusiness
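A minimal sketch of producing to the myqueue destination above with Spring's JmsTemplate; because spring.jms.pub-sub-domain=false it is addressed as a queue. The sender class is hypothetical.

```java
// Minimal sketch (assumption): sending to ${myqueue} over the ActiveMQ broker above.
import org.springframework.beans.factory.annotation.Value;
import org.springframework.jms.core.JmsTemplate;
import org.springframework.stereotype.Component;

@Component
public class PrivilegeQueueSender {

    private final JmsTemplate jmsTemplate;

    @Value("${myqueue}")
    private String queueName; // amos.privilege.v1.JXIOP.AQSC_FDGL.userBusiness

    public PrivilegeQueueSender(JmsTemplate jmsTemplate) {
        this.jmsTemplate = jmsTemplate;
    }

    public void send(String payload) {
        // pub-sub-domain=false, so this goes to a point-to-point queue
        jmsTemplate.convertAndSend(queueName, payload);
    }
}
```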
# Fan status and picture upload paths
fan.statuts.stattuspath=upload/jxiop/device_status
pictureUrl=upload/jxiop/syz/
#kafka
spring.kafka.bootstrap-servers=10.20.0.223:9092,10.20.0.133:9200
spring.kafka.producer.retries=1
spring.kafka.producer.bootstrap-servers=10.20.0.223:9092,10.20.0.133:9200
spring.kafka.producer.batch-size=16384
spring.kafka.producer.buffer-memory=33554432
spring.kafka.producer.acks=1
spring.kafka.producer.key-serializer=org.apache.kafka.common.serialization.StringSerializer
spring.kafka.producer.value-serializer=org.apache.kafka.common.serialization.StringSerializer
spring.kafka.consumer.group-id=consumerGroup
spring.kafka.consumer.bootstrap-servers=10.20.0.223:9092,10.20.0.133:9200
spring.kafka.consumer.enable-auto-commit=false
spring.kafka.consumer.auto-offset-reset=earliest
spring.kafka.consumer.key-deserializer=org.apache.kafka.common.serialization.StringDeserializer
spring.kafka.consumer.value-deserializer=org.apache.kafka.common.serialization.StringDeserializer
spring.kafka.listener.ack-mode=manual_immediate
spring.kafka.listener.type=batch
# Records fetched per poll && thread count
spring.kafka.consumer.max-poll-records=30
#spring.kafka.consumer.fetch-max-wait= 10000
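A minimal sketch of a consumer matching the listener settings above (listener.type=batch, ack-mode=manual_immediate, enable-auto-commit=false); the topic name is hypothetical.

```java
// Minimal sketch (assumption): batch listener with manual acknowledgment,
// matching the spring.kafka.listener.* and consumer settings above.
import java.util.List;

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.kafka.support.Acknowledgment;
import org.springframework.stereotype.Component;

@Component
public class DemoBatchConsumer {

    @KafkaListener(topics = "demo-topic", groupId = "consumerGroup")
    public void onBatch(List<ConsumerRecord<String, String>> records, Acknowledgment ack) {
        for (ConsumerRecord<String, String> record : records) {
            // at most max-poll-records (30) messages arrive per batch
            System.out.println(record.topic() + " -> " + record.value());
        }
        ack.acknowledge(); // manual_immediate: commit once the batch is processed
    }
}
```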
# Number of months to look back from the current time (historical offset in months)
last.month.num = 12
# Correlation (XGX) algorithm endpoint
base.url.XGX=http://139.9.171.247:8052/intelligent-analysis/correlation
# Working-condition division (GKHF) algorithm endpoint
base.url.GKHF=http://139.9.171.247:8052/intelligent-analysis/working-condition-division
# Central value (ZXZ) algorithm endpoint
base.url.ZXZ=http://139.9.171.247:8052/intelligent-analysis/central-value
# Index analysis (zsfx) algorithm endpoint
base.url.zsfx=http://139.9.171.247:8052/intelligent-analysis/index-analysis
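A minimal sketch of calling one of the intelligent-analysis endpoints above with RestTemplate; the request and response payloads are not documented in this commit, so a raw JSON string and a String response are used as placeholders.

```java
// Minimal sketch (assumption): POST to base.url.XGX with placeholder payloads.
import org.springframework.http.HttpEntity;
import org.springframework.http.HttpHeaders;
import org.springframework.http.MediaType;
import org.springframework.web.client.RestTemplate;

public class CorrelationClientDemo {
    public static void main(String[] args) {
        RestTemplate restTemplate = new RestTemplate();
        HttpHeaders headers = new HttpHeaders();
        headers.setContentType(MediaType.APPLICATION_JSON);

        String url = "http://139.9.171.247:8052/intelligent-analysis/correlation";
        HttpEntity<String> request = new HttpEntity<>("{}", headers); // placeholder body
        String response = restTemplate.postForObject(url, request, String.class);
        System.out.println(response);
    }
}
```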
<?xml version="1.0" encoding="UTF-8"?>
<configuration debug="false">
    <!-- Define the log file storage location; do not use relative paths in the LogBack configuration -->
    <property name="LOG_HOME" value="log" />
    <property name="LOG_PATTERN" value="%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %-50.50logger{50} - %msg [%file:%line] %n" />
    <!-- Roll over to a new log file every day -->
    <appender name="FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
        <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
            <!-- Output file name pattern for the rolled log files -->
            <FileNamePattern>${LOG_HOME}/ccs.log.%d{yyyy-MM-dd}.log</FileNamePattern>
            <!-- Number of days to keep rolled log files -->
            <MaxHistory>7</MaxHistory>
        </rollingPolicy>
        <encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
            <!-- Output format: %d is the date, %thread the thread name, %-5level the level padded to 5 characters, %msg the log message, %n a newline -->
            <pattern>${LOG_PATTERN}</pattern>
        </encoder>
        <!-- Maximum size of a single log file -->
        <triggeringPolicy class="ch.qos.logback.core.rolling.SizeBasedTriggeringPolicy">
            <MaxFileSize>30mb</MaxFileSize>
        </triggeringPolicy>
    </appender>
    <!-- Console output -->
    <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
        <encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
            <!-- Output format: %d is the date, %thread the thread name, %-5level the level padded to 5 characters, %msg the log message, %n a newline -->
            <pattern>${LOG_PATTERN}</pattern>
        </encoder>
    </appender>
    <!-- MyBatis log configuration -->
    <logger name="org.apache.ibatis" level="DEBUG"/>
    <logger name="java.sql.Connection" level="DEBUG"/>
    <logger name="java.sql.Statement" level="DEBUG"/>
    <logger name="java.sql.PreparedStatement" level="DEBUG"/>
    <logger name="com.baomidou.mybatisplus" level="DEBUG"/>
    <logger name="org.springframework" level="DEBUG"/>
    <logger name="org.typroject" level="DEBUG"/>
    <logger name="com.yeejoin" level="DEBUG"/>
    <!-- Log output level -->
    <root level="INFO">
        <appender-ref ref="FILE" />
        <appender-ref ref="STDOUT" />
    </root>
</configuration>
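A minimal sketch of how application code picks up this Logback configuration through SLF4J; messages go to both the console appender and the daily rolling file log/ccs.log.yyyy-MM-dd.log.

```java
// Minimal sketch: SLF4J logger routed by the Logback configuration above.
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class LoggingDemo {
    private static final Logger log = LoggerFactory.getLogger(LoggingDemo.class);

    public static void main(String[] args) {
        log.info("emitted: the root level is INFO");
        log.debug("suppressed here; DEBUG is only enabled for the listed loggers, e.g. com.yeejoin");
    }
}
```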