This commit is contained in:
caixiang 2022-11-25 15:00:33 +08:00
Parent d6b7d8eabc
Commit 4c6b49c6a2
9 changed files with 135 additions and 63 deletions

View file

@@ -190,12 +190,16 @@
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-validation</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-configuration-processor</artifactId>
<optional>true</optional>
</dependency>
<!-- influx begin -->
<dependency>
<groupId>com.influxdb</groupId>
<artifactId>influxdb-client-java</artifactId>
<version>6.3.0</version>
<version>6.7.0</version>
</dependency>
<!-- influx end -->

View file

@@ -2,16 +2,17 @@ package com.qgs.dc.influx;
//import com.qgs.dc.influx.config.InfluxClient;
import com.influxdb.client.InfluxDBClient;
import com.influxdb.client.InfluxDBClientFactory;
import com.influxdb.client.QueryApi;
import com.influxdb.client.WriteApi;
import com.influxdb.client.*;
import com.influxdb.client.domain.WritePrecision;
import com.influxdb.client.write.Point;
import com.influxdb.query.FluxRecord;
import com.influxdb.query.FluxTable;
import com.qgs.dc.influx.config.InfluxClient;
import com.qgs.dc.influx.param.PageInfo;
import com.qgs.dc.influx.param.QueryDataParam;
import com.qgs.dc.influx.param.Range;
import com.qgs.dc.influx.param.Tag;
import com.qgs.dc.influx.template.Event;
import java.text.SimpleDateFormat;
import java.time.Instant;
@@ -27,13 +28,28 @@ import java.util.List;
*/
public class Main {
public static void main(String[] args) throws InterruptedException {
char[] token = "lkBsC27QZr1W50BSPlGxpTqNNpwuUk5uz1dZZRPSPbCG5VmNDDUo8P3UkZIhGWwfJwkuz6ZGZ7Et4_KBaG3gHw==".toCharArray();
String org = "qgs";
String bucket = "qgs-bucket";
InfluxDBClient influxDBClient = InfluxDBClientFactory.create("http://192.168.0.170:8086", token, org, bucket);
char[] token = "N4yBD2iC0kDkAzlWEQ5koqJNbbd6v9PQsubEAOle1rykOIeM5zckKxUteDUsHm1LFMBnJTtfGNsRT_N54YTLcQ==".toCharArray();
String org = "cigs";
String bucket = "cigs-all";
InfluxDBClient influxDBClient = InfluxDBClientFactory.create("http://172.16.21.164:8086", token, org, bucket);
Point point = Point
.measurement("fortest")
.addTag("host", "host1")
.addField("used_percent", 23.43234543)
.time(Instant.now(), WritePrecision.NS);
WriteApi writeApi = influxDBClient.makeWriteApi();
WriteApiBlocking writeApi = influxDBClient.getWriteApiBlocking();
writeApi.writePoint(bucket, org, point);
Event event = new Event();
event.setTime(Instant.now());
event.setTransationId("asasd11");
event.setArgName("argName11");
event.setBatchNum("12333");
event.setArgValue("900001");
// Point asProcessCompleteEvent = insert(event, "ForTest");
InfluxClient.Client.insert(event,"fortest2");
// InfluxService influxService = new InfluxService();
// Event event = new Event();
@@ -144,30 +160,30 @@ public class Main {
QueryDataParam queryDataParam = new QueryDataParam();
queryDataParam.setBucket("qgs-bucket");
queryDataParam.setRange(new Range(getDate().toInstant(),new Date().toInstant()));
queryDataParam.setMeasurement("Weight");
queryDataParam.setTag(new Tag("argName","LTWeight"));
// queryDataParam.setDropedTagName("transationId");
List<String> dropNames = new ArrayList<>();
dropNames.add("transationId");
dropNames.add("inspectionSheetId");
dropNames.add("batchNum");
queryDataParam.setDropedTagNames(dropNames);
queryDataParam.setPageInfo(new PageInfo(1,10000));
List<FluxTable> tables = query(queryDataParam,influxDBClient);
// List<FluxRecord> records1 = tables.get(0).getRecords();
// List<List<FluxRecord>> lists = Utils.fixedGroup(records1, 10);
for (FluxTable fluxTable : tables) {
List<FluxRecord> records = fluxTable.getRecords();
for (FluxRecord fluxRecord : records) {
Instant timms = fluxRecord.getTime();
System.out.println("time: "+fluxRecord.getTime() +" key:"+fluxRecord.getField()+" value: " + fluxRecord.getValueByKey("_value")+" measurement: " + fluxRecord.getMeasurement());
}
}
// QueryDataParam queryDataParam = new QueryDataParam();
// queryDataParam.setBucket("qgs");
// queryDataParam.setRange(new Range(getDate().toInstant(),new Date().toInstant()));
// queryDataParam.setMeasurement("Weight");
// queryDataParam.setTag(new Tag("argName","LTWeight"));
//// queryDataParam.setDropedTagName("transationId");
// List<String> dropNames = new ArrayList<>();
// dropNames.add("transationId");
// dropNames.add("inspectionSheetId");
// dropNames.add("batchNum");
// queryDataParam.setDropedTagNames(dropNames);
// queryDataParam.setPageInfo(new PageInfo(1,10000));
//
// List<FluxTable> tables = query(queryDataParam,influxDBClient);
//// List<FluxRecord> records1 = tables.get(0).getRecords();
//// List<List<FluxRecord>> lists = Utils.fixedGroup(records1, 10);
//
// for (FluxTable fluxTable : tables) {
// List<FluxRecord> records = fluxTable.getRecords();
// for (FluxRecord fluxRecord : records) {
// Instant timms = fluxRecord.getTime();
// System.out.println("time: "+fluxRecord.getTime() +" key:"+fluxRecord.getField()+" value: " + fluxRecord.getValueByKey("_value")+" measurement: " + fluxRecord.getMeasurement());
// }
// }
influxDBClient.close();
}
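Note on the write API change in Main above: makeWriteApi() returns the asynchronous, batching WriteApi, while getWriteApiBlocking() returns WriteApiBlocking, which sends each point synchronously and reports failures immediately. A minimal sketch contrasting the two (the URL, token, org and bucket below are placeholders, not values from this commit):

import com.influxdb.client.InfluxDBClient;
import com.influxdb.client.InfluxDBClientFactory;
import com.influxdb.client.WriteApi;
import com.influxdb.client.WriteApiBlocking;
import com.influxdb.client.domain.WritePrecision;
import com.influxdb.client.write.Point;
import java.time.Instant;

public class WriteVariants {
    public static void main(String[] args) {
        // Placeholder connection values, assumed for this sketch only.
        InfluxDBClient client = InfluxDBClientFactory.create("http://localhost:8086",
                "my-token".toCharArray(), "my-org", "my-bucket");
        Point point = Point.measurement("fortest")
                .addTag("host", "host1")
                .addField("used_percent", 23.43)
                .time(Instant.now(), WritePrecision.NS);

        // Blocking variant: returns only after the server has accepted the point.
        WriteApiBlocking blocking = client.getWriteApiBlocking();
        blocking.writePoint("my-bucket", "my-org", point);

        // Asynchronous variant: points are buffered and flushed in batches in the
        // background; closing the WriteApi flushes anything still buffered.
        try (WriteApi async = client.makeWriteApi()) {
            async.writePoint("my-bucket", "my-org", point);
        }

        client.close();
    }
}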

View file

@@ -0,0 +1,23 @@
package com.qgs.dc.influx.config;
import org.springframework.stereotype.Component;
import javax.annotation.PostConstruct;
import javax.annotation.Resource;
/**
* @Desc: ""
* @Author: caixiang
* @DATE: 2022/11/23 11:05
*/
@Component
public class ConfigInjector {
@Resource
private InfluxConfig config;
public static InfluxConfig staticConfig;
@PostConstruct
private void postConstruct() {
ConfigInjector.staticConfig = config;
}
}
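The ConfigInjector added above exists so that code not managed by Spring (here the InfluxClient enum) can still read the Spring-bound InfluxConfig: the bean is injected into the component and copied into a static field once @PostConstruct runs. A small illustrative consumer (the class and method names are hypothetical, not part of this commit):

package com.qgs.dc.influx.config;

public class StaticConfigConsumer {
    // Hypothetical helper: reads the statically exposed config from plain (non-Spring) code.
    public static String resolveUrl() {
        InfluxConfig cfg = ConfigInjector.staticConfig;
        // staticConfig is null until ConfigInjector's @PostConstruct has executed.
        return cfg == null ? "http://localhost:8086" : cfg.getUrl();
    }
}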

View file

@@ -12,6 +12,7 @@ import com.qgs.dc.influx.param.QueryDataGroupByTimeParam;
import com.qgs.dc.influx.param.QueryDataParam;
import com.qgs.dc.influx.param.Range;
import com.qgs.dc.influx.template.Event;
import org.springframework.beans.factory.annotation.Autowired;
import java.util.ArrayList;
import java.util.List;
@@ -21,7 +22,7 @@ public enum InfluxClient {
/**
* InfluxDB read/write client; if writes become heavy, consider maintaining a pool of clients later
* */
Client("http://192.168.0.170:8086","lkBsC27QZr1W50BSPlGxpTqNNpwuUk5uz1dZZRPSPbCG5VmNDDUo8P3UkZIhGWwfJwkuz6ZGZ7Et4_KBaG3gHw==","qgs","qgs-bucket2"),
Client(),
;
private String url;
@@ -34,11 +35,15 @@
private QueryApi queryApi;
InfluxClient(String url,String token,String org,String bucket){
this.url = url;
this.token = token;
this.org = org;
this.bucket = bucket;
@Autowired
InfluxConfig config;
InfluxClient(){
this.url = ConfigInjector.staticConfig.getUrl();
this.token = ConfigInjector.staticConfig.getToken();
this.org = ConfigInjector.staticConfig.getOrg();
this.bucket = ConfigInjector.staticConfig.getBucket();
this.influxDBClient = InfluxDBClientFactory.create(this.url, this.token.toCharArray(),this.org,this.bucket);
this.writeApi = influxDBClient.makeWriteApi();
this.queryApi = influxDBClient.getQueryApi();
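One caveat with the rewritten enum constructor: enum constants are instantiated when the class is first loaded, so referencing InfluxClient.Client before ConfigInjector's @PostConstruct has populated staticConfig will fail with a NullPointerException; the @Autowired InfluxConfig field is also never populated, because enum constants are not Spring beans. A fail-fast helper, sketched here purely as an illustration (InfluxConfigGuard and requireConfig are hypothetical names, not part of this commit):

package com.qgs.dc.influx.config;

public final class InfluxConfigGuard {
    private InfluxConfigGuard() {}

    // Returns the injected config or fails with a clear message instead of a bare NPE.
    public static InfluxConfig requireConfig() {
        InfluxConfig cfg = ConfigInjector.staticConfig;
        if (cfg == null) {
            throw new IllegalStateException(
                "InfluxConfig has not been injected yet; do not reference the InfluxClient "
                + "enum before the Spring context has been refreshed.");
        }
        return cfg;
    }
}

The enum constructor could then start with InfluxConfig cfg = InfluxConfigGuard.requireConfig(); and read url, token, org and bucket from cfg.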

View file

@@ -0,0 +1,21 @@
package com.qgs.dc.influx.config;
import lombok.Data;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.context.annotation.Configuration;
/**
* @Desc: ""
* @Author: caixiang
* @DATE: 2022/11/23 10:53
*/
@Data
@Configuration
@ConfigurationProperties(prefix = "influx")
public class InfluxConfig {
private String url;
private String token;
private String org;
private String bucket;
}
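InfluxConfig binds the influx.* keys from application.yml (see the YAML change further down) through @ConfigurationProperties; the property names must match the field names, which is why the new block uses url rather than the old influxUrl key. Since the class is a regular Spring bean, any managed component can also inject it directly instead of going through ConfigInjector; a minimal illustrative sketch (InfluxHealthProbe is a hypothetical class, not part of this commit):

package com.qgs.dc.influx.config;

import org.springframework.stereotype.Service;

@Service
public class InfluxHealthProbe {
    private final InfluxConfig config;

    // Constructor injection of the @ConfigurationProperties bean.
    public InfluxHealthProbe(InfluxConfig config) {
        this.config = config;
    }

    public String describe() {
        return config.getOrg() + "/" + config.getBucket() + " @ " + config.getUrl();
    }
}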

View file

@@ -144,13 +144,13 @@ public class InfluxController {
List<Event> list = new ArrayList<>();
QueryDataParam queryDataParam = new QueryDataParam();
queryDataParam.setBucket("qgs-bucket");
queryDataParam.setMeasurement("ASProcessCompleteEventAS");
queryDataParam.setBucket("qgs");
queryDataParam.setMeasurement("ForTest");
List<String> dropNames = new ArrayList<>();
dropNames.add("transationId");
dropNames.add("inspectionSheetId");
queryDataParam.setDropedTagNames(dropNames);
queryDataParam.setTag(new Tag("argName","arg6"));
queryDataParam.setTag(new Tag("argName","argName11"));
queryDataParam.setRange(new Range(DataUtils.getBeforeDate(10).toInstant(),Instant.now()));
queryDataParam.setPageInfo(new PageInfo(1,10));
List<FluxTable> query = InfluxClient.Client.query(queryDataParam);
@@ -207,9 +207,10 @@ public class InfluxController {
event.setTime(Instant.now());
event.setTransationId("asasd11");
event.setArgName("argName11");
event.setBatchNum("12333");
event.setArgValue("900001");
Point asProcessCompleteEvent = insert(event, "ASProcessCompleteEvent");
InfluxClient.Client.insert(event,"ASProcessCompleteEvent");
// Point asProcessCompleteEvent = insert(event, "ForTest");
InfluxClient.Client.insert(event,"fortest");
}
@PostMapping("/insertEvents")
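For reference, the query that the QueryDataParam above describes (bucket qgs, measurement ForTest, tag argName == argName11, last 10 days, dropped tag columns, first page of 10 rows) corresponds roughly to the raw Flux sketched below. The URL and token are placeholders, and the Flux string is an approximation of what the helper presumably generates, not code from this commit:

import com.influxdb.client.InfluxDBClient;
import com.influxdb.client.InfluxDBClientFactory;
import com.influxdb.query.FluxRecord;
import com.influxdb.query.FluxTable;
import java.util.List;

public class RawFluxQuery {
    public static void main(String[] args) {
        // Placeholder connection values, assumed for this sketch only.
        InfluxDBClient client = InfluxDBClientFactory.create("http://localhost:8086",
                "my-token".toCharArray(), "qgs", "qgs");
        String flux = "from(bucket: \"qgs\")"
                + " |> range(start: -10d)"
                + " |> filter(fn: (r) => r._measurement == \"ForTest\" and r.argName == \"argName11\")"
                + " |> drop(columns: [\"transationId\", \"inspectionSheetId\"])"
                + " |> limit(n: 10)";
        List<FluxTable> tables = client.getQueryApi().query(flux);
        for (FluxTable table : tables) {
            for (FluxRecord record : table.getRecords()) {
                System.out.println(record.getTime() + " " + record.getField()
                        + " = " + record.getValueByKey("_value"));
            }
        }
        client.close();
    }
}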

View file

@@ -22,7 +22,7 @@ public enum DaveArea {
COUNTER200(30), // analog outputs of 200 family
DB(0x84), // Peripheral I/O // DB block
DI(0x85), // DI (instance DB) block
FLAGS(0x83), // M block
FLAGS(0x83), // M block; for M blocks areaNumber is always 0, only byteOffset and bitOffset are configured
INPUTS(0x81), // I block
T(0x1D), // T (timer) block
C(0x1C), // C (counter) block

View file

@@ -8,7 +8,7 @@ server:
spring:
rabbitmq:
# If this is a rabbitmq+haproxy+keepalived cluster, then 192.168.0.176 is the address proxied by haproxy (strictly speaking, the keepalived VIP)
addresses: 192.168.0.170:5672 # newer RabbitMQ version, not yet tested
addresses: 172.16.21.191:5672 # newer RabbitMQ version, not yet tested
#addresses: 172.16.21.133:5672
username: cigs
password: cigs
@@ -34,19 +34,14 @@ spring:
#multiplier: 2 # interval multiplier: interval * multiplier = the next interval; it must not exceed the configured maximum interval
#================ retry mechanism end
#influx:
# influxUrl: 'http://192.168.0.170:8086'
# bucket: 'qgs-bucket'
# org: 'qgs'
# token: 'lkBsC27QZr1W50BSPlGxpTqNNpwuUk5uz1dZZRPSPbCG5VmNDDUo8P3UkZIhGWwfJwkuz6ZGZ7Et4_KBaG3gHw=='
influx:
influxUrl: 'http://192.168.0.170:8086'
bucket: 'qgs-bucket'
org: 'qgs'
token: 'lkBsC27QZr1W50BSPlGxpTqNNpwuUk5uz1dZZRPSPbCG5VmNDDUo8P3UkZIhGWwfJwkuz6ZGZ7Et4_KBaG3gHw=='
# /health endpoint
#management:
# health:
# influxdb:
# enabled: true
influx:
url: http://172.16.21.164:8086
token: N4yBD2iC0kDkAzlWEQ5koqJNbbd6v9PQsubEAOle1rykOIeM5zckKxUteDUsHm1LFMBnJTtfGNsRT_N54YTLcQ==
org: cigs
bucket: cigs-all

View file

@@ -9,10 +9,17 @@
<!-- The value of name is the variable's name, and the value of value is the variable's value. The defined value is inserted into the logger context; once defined, the variable can be referenced with "${}". -->
<property name="logging.pathwork" value="C:/qgs_logger/work" />
<property name="logging.pathopc" value="C:/qgs_logger/opc" />
<property name="logging.pathmq" value="C:/qgs_logger/mq" />
<property name="logging.s7" value="C:/qgs_logger/s7" />
<!-- <property name="logging.pathwork" value="C:/qgs_logger/work" />-->
<!-- <property name="logging.pathopc" value="C:/qgs_logger/opc" />-->
<!-- <property name="logging.pathmq" value="C:/qgs_logger/mq" />-->
<!-- <property name="logging.s7" value="C:/qgs_logger/s7" />-->
<property name="logging.pathwork" value="/usr/local/logger/work" />
<property name="logging.pathopc" value="/usr/local/logger/opc" />
<property name="logging.pathmq" value="/usr/local/logger/mq" />
<property name="logging.s7" value="/usr/local/logger/s7" />
<!-- 0. Log format and color rendering -->
<!-- Renderer classes required for colored logs -->