
mark for pull

master
caixiang 1 year ago
parent
commit
4c6b49c6a2
9 files changed, 137 additions and 65 deletions
  1. pom.xml  (+6 -2)
  2. src/main/java/com/qgs/dc/influx/Main.java  (+51 -35)
  3. src/main/java/com/qgs/dc/influx/config/ConfigInjector.java  (+23 -0)
  4. src/main/java/com/qgs/dc/influx/config/InfluxClient.java  (+11 -6)
  5. src/main/java/com/qgs/dc/influx/config/InfluxConfig.java  (+21 -0)
  6. src/main/java/com/qgs/dc/influx/controller/InfluxController.java  (+6 -5)
  7. src/main/java/com/qgs/dc/s7/my/s7connector/api/DaveArea.java  (+1 -1)
  8. src/main/resources/application.yml  (+7 -12)
  9. src/main/resources/logback.xml  (+11 -4)

+6 -2  pom.xml

@@ -190,12 +190,16 @@
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-validation</artifactId>
</dependency>

<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-configuration-processor</artifactId>
<optional>true</optional>
</dependency>
<!-- influx begin -->
<dependency>
<groupId>com.influxdb</groupId>
<artifactId>influxdb-client-java</artifactId>
<version>6.3.0</version>
<version>6.7.0</version>
</dependency>

<!-- influx end -->


+51 -35  src/main/java/com/qgs/dc/influx/Main.java

@@ -2,16 +2,17 @@ package com.qgs.dc.influx;

//import com.qgs.dc.influx.config.InfluxClient;

import com.influxdb.client.InfluxDBClient;
import com.influxdb.client.InfluxDBClientFactory;
import com.influxdb.client.QueryApi;
import com.influxdb.client.WriteApi;
import com.influxdb.client.*;
import com.influxdb.client.domain.WritePrecision;
import com.influxdb.client.write.Point;
import com.influxdb.query.FluxRecord;
import com.influxdb.query.FluxTable;
import com.qgs.dc.influx.config.InfluxClient;
import com.qgs.dc.influx.param.PageInfo;
import com.qgs.dc.influx.param.QueryDataParam;
import com.qgs.dc.influx.param.Range;
import com.qgs.dc.influx.param.Tag;
import com.qgs.dc.influx.template.Event;

import java.text.SimpleDateFormat;
import java.time.Instant;
@@ -27,13 +28,28 @@ import java.util.List;
*/
public class Main {
public static void main(String[] args) throws InterruptedException {
char[] token = "lkBsC27QZr1W50BSPlGxpTqNNpwuUk5uz1dZZRPSPbCG5VmNDDUo8P3UkZIhGWwfJwkuz6ZGZ7Et4_KBaG3gHw==".toCharArray();
String org = "qgs";
String bucket = "qgs-bucket";
InfluxDBClient influxDBClient = InfluxDBClientFactory.create("http://192.168.0.170:8086", token, org, bucket);


WriteApi writeApi = influxDBClient.makeWriteApi();
char[] token = "N4yBD2iC0kDkAzlWEQ5koqJNbbd6v9PQsubEAOle1rykOIeM5zckKxUteDUsHm1LFMBnJTtfGNsRT_N54YTLcQ==".toCharArray();
String org = "cigs";
String bucket = "cigs-all";
InfluxDBClient influxDBClient = InfluxDBClientFactory.create("http://172.16.21.164:8086", token, org, bucket);

Point point = Point
.measurement("fortest")
.addTag("host", "host1")
.addField("used_percent", 23.43234543)
.time(Instant.now(), WritePrecision.NS);

WriteApiBlocking writeApi = influxDBClient.getWriteApiBlocking();
writeApi.writePoint(bucket, org, point);

Event event = new Event();
event.setTime(Instant.now());
event.setTransationId("asasd11");
event.setArgName("argName11");
event.setBatchNum("12333");
event.setArgValue("900001");
// Point asProcessCompleteEvent = insert(event, "ForTest");
InfluxClient.Client.insert(event,"fortest2");

// InfluxService influxService = new InfluxService();
// Event event = new Event();
@@ -144,30 +160,30 @@ public class Main {



QueryDataParam queryDataParam = new QueryDataParam();
queryDataParam.setBucket("qgs-bucket");
queryDataParam.setRange(new Range(getDate().toInstant(),new Date().toInstant()));
queryDataParam.setMeasurement("Weight");
queryDataParam.setTag(new Tag("argName","LTWeight"));
// queryDataParam.setDropedTagName("transationId");
List<String> dropNames = new ArrayList<>();
dropNames.add("transationId");
dropNames.add("inspectionSheetId");
dropNames.add("batchNum");
queryDataParam.setDropedTagNames(dropNames);
queryDataParam.setPageInfo(new PageInfo(1,10000));
List<FluxTable> tables = query(queryDataParam,influxDBClient);
// List<FluxRecord> records1 = tables.get(0).getRecords();
// List<List<FluxRecord>> lists = Utils.fixedGroup(records1, 10);
for (FluxTable fluxTable : tables) {
List<FluxRecord> records = fluxTable.getRecords();
for (FluxRecord fluxRecord : records) {
Instant timms = fluxRecord.getTime();
System.out.println("time: "+fluxRecord.getTime() +" key:"+fluxRecord.getField()+" value: " + fluxRecord.getValueByKey("_value")+" measurement: " + fluxRecord.getMeasurement());
}
}
// QueryDataParam queryDataParam = new QueryDataParam();
// queryDataParam.setBucket("qgs");
// queryDataParam.setRange(new Range(getDate().toInstant(),new Date().toInstant()));
// queryDataParam.setMeasurement("Weight");
// queryDataParam.setTag(new Tag("argName","LTWeight"));
//// queryDataParam.setDropedTagName("transationId");
// List<String> dropNames = new ArrayList<>();
// dropNames.add("transationId");
// dropNames.add("inspectionSheetId");
// dropNames.add("batchNum");
// queryDataParam.setDropedTagNames(dropNames);
// queryDataParam.setPageInfo(new PageInfo(1,10000));
//
// List<FluxTable> tables = query(queryDataParam,influxDBClient);
//// List<FluxRecord> records1 = tables.get(0).getRecords();
//// List<List<FluxRecord>> lists = Utils.fixedGroup(records1, 10);
//
// for (FluxTable fluxTable : tables) {
// List<FluxRecord> records = fluxTable.getRecords();
// for (FluxRecord fluxRecord : records) {
// Instant timms = fluxRecord.getTime();
// System.out.println("time: "+fluxRecord.getTime() +" key:"+fluxRecord.getField()+" value: " + fluxRecord.getValueByKey("_value")+" measurement: " + fluxRecord.getMeasurement());
// }
// }

influxDBClient.close();
}
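
The query(queryDataParam, influxDBClient) helper is not part of this diff, so the Flux it generates is not visible here. As a rough illustration only, the parameters configured above correspond to a pipeline along the following lines (a minimal sketch assuming the helper maps bucket, range, measurement, tag, dropped tags and page size more or less directly; the real helper and the getDate() start time may differ):

    // Illustrative hand-written Flux for the QueryDataParam above; not the project's actual helper output.
    String flux =
            "from(bucket: \"qgs-bucket\")\n" +
            "  |> range(start: -10d)\n" +                                   // assumed start; Main uses getDate()
            "  |> filter(fn: (r) => r._measurement == \"Weight\")\n" +
            "  |> filter(fn: (r) => r.argName == \"LTWeight\")\n" +
            "  |> drop(columns: [\"transationId\", \"inspectionSheetId\", \"batchNum\"])\n" +
            "  |> limit(n: 10000)";                                         // PageInfo(1, 10000)
    List<FluxTable> tables = influxDBClient.getQueryApi().query(flux);
    for (FluxTable table : tables) {
        for (FluxRecord record : table.getRecords()) {
            System.out.println(record.getTime() + " " + record.getField() + " = " + record.getValueByKey("_value"));
        }
    }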


+23 -0  src/main/java/com/qgs/dc/influx/config/ConfigInjector.java

@@ -0,0 +1,23 @@
package com.qgs.dc.influx.config;

import org.springframework.stereotype.Component;

import javax.annotation.PostConstruct;
import javax.annotation.Resource;

/**
* @Desc: ""
* @Author: caixiang
* @DATE: 2022/11/23 11:05
*/
@Component
public class ConfigInjector {
@Resource
private InfluxConfig config;
public static InfluxConfig staticConfig;

@PostConstruct
private void postConstruct() {
ConfigInjector.staticConfig = config;
}
}

+11 -6  src/main/java/com/qgs/dc/influx/config/InfluxClient.java

@@ -12,6 +12,7 @@ import com.qgs.dc.influx.param.QueryDataGroupByTimeParam;
import com.qgs.dc.influx.param.QueryDataParam;
import com.qgs.dc.influx.param.Range;
import com.qgs.dc.influx.template.Event;
import org.springframework.beans.factory.annotation.Autowired;

import java.util.ArrayList;
import java.util.List;
@@ -21,7 +22,7 @@ public enum InfluxClient {
/**
* InfluxDB read/write client. If writes become busy, consider maintaining a pool of clients later.
* */
Client("http://192.168.0.170:8086","lkBsC27QZr1W50BSPlGxpTqNNpwuUk5uz1dZZRPSPbCG5VmNDDUo8P3UkZIhGWwfJwkuz6ZGZ7Et4_KBaG3gHw==","qgs","qgs-bucket2"),
Client(),

;
private String url;
@@ -34,11 +35,15 @@ public enum InfluxClient {

private QueryApi queryApi;

InfluxClient(String url,String token,String org,String bucket){
this.url = url;
this.token = token;
this.org = org;
this.bucket = bucket;
@Autowired
InfluxConfig config;

InfluxClient(){

this.url = ConfigInjector.staticConfig.getUrl();
this.token = ConfigInjector.staticConfig.getToken();
this.org = ConfigInjector.staticConfig.getOrg();
this.bucket = ConfigInjector.staticConfig.getBucket();
this.influxDBClient = InfluxDBClientFactory.create(this.url, this.token.toCharArray(),this.org,this.bucket);
this.writeApi = influxDBClient.makeWriteApi();
this.queryApi = influxDBClient.getQueryApi();
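
The enum constant used to receive its connection settings as constructor arguments; it now reads them from ConfigInjector.staticConfig, which is only populated after Spring has created ConfigInjector and run its @PostConstruct method. The @Autowired InfluxConfig field declared on the enum is not actually injected by Spring (enum constants are not managed beans), so the static bridge is what carries the configuration. The practical consequence is that InfluxClient.Client must not be touched before the application context is up. A minimal call-site sketch (hypothetical service method, not part of this diff):

    // Hypothetical call site: safe because it runs after the Spring context has initialized ConfigInjector.
    public void writeSample() {
        Event event = new Event();
        event.setTime(Instant.now());
        event.setTransationId("asasd11");
        event.setArgName("argName11");
        event.setBatchNum("12333");
        event.setArgValue("900001");
        InfluxClient.Client.insert(event, "fortest2");   // first access here triggers the enum constructor
    }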


+21 -0  src/main/java/com/qgs/dc/influx/config/InfluxConfig.java

@@ -0,0 +1,21 @@
package com.qgs.dc.influx.config;

import lombok.Data;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.context.annotation.Configuration;

/**
* @Desc: ""
* @Author: caixiang
* @DATE: 2022/11/23 10:53
*/

@Data
@Configuration
@ConfigurationProperties(prefix = "influx")
public class InfluxConfig {
private String url;
private String token;
private String org;
private String bucket;
}
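
@ConfigurationProperties(prefix = "influx") binds this class to the new influx: block added to application.yml below (url, token, org, bucket); the spring-boot-configuration-processor dependency added in pom.xml generates IDE metadata for those keys. Components that are themselves Spring beans can also inject the bound properties directly instead of going through ConfigInjector; a minimal sketch (hypothetical class, for illustration only):

    @Component
    public class InfluxStartupLogger {                 // hypothetical, not part of this commit
        private final InfluxConfig config;

        public InfluxStartupLogger(InfluxConfig config) {
            this.config = config;
        }

        @PostConstruct
        void logTarget() {
            // Lombok @Data on InfluxConfig generates the getters used here.
            System.out.println("InfluxDB target: " + config.getUrl()
                    + " org=" + config.getOrg() + " bucket=" + config.getBucket());
        }
    }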

+6 -5  src/main/java/com/qgs/dc/influx/controller/InfluxController.java

@@ -144,13 +144,13 @@ public class InfluxController {
List<Event> list = new ArrayList<>();

QueryDataParam queryDataParam = new QueryDataParam();
queryDataParam.setBucket("qgs-bucket");
queryDataParam.setMeasurement("ASProcessCompleteEventAS");
queryDataParam.setBucket("qgs");
queryDataParam.setMeasurement("ForTest");
List<String> dropNames = new ArrayList<>();
dropNames.add("transationId");
dropNames.add("inspectionSheetId");
queryDataParam.setDropedTagNames(dropNames);
queryDataParam.setTag(new Tag("argName","arg6"));
queryDataParam.setTag(new Tag("argName","argName11"));
queryDataParam.setRange(new Range(DataUtils.getBeforeDate(10).toInstant(),Instant.now()));
queryDataParam.setPageInfo(new PageInfo(1,10));
List<FluxTable> query = InfluxClient.Client.query(queryDataParam);
@@ -207,9 +207,10 @@ public class InfluxController {
event.setTime(Instant.now());
event.setTransationId("asasd11");
event.setArgName("argName11");
event.setBatchNum("12333");
event.setArgValue("900001");
Point asProcessCompleteEvent = insert(event, "ASProcessCompleteEvent");
InfluxClient.Client.insert(event,"ASProcessCompleteEvent");
// Point asProcessCompleteEvent = insert(event, "ForTest");
InfluxClient.Client.insert(event,"fortest");
}

@PostMapping("/insertEvents")


+1 -1  src/main/java/com/qgs/dc/s7/my/s7connector/api/DaveArea.java

@@ -22,7 +22,7 @@ public enum DaveArea {
COUNTER200(30), // analog outputs of 200 family
DB(0x84), // Peripheral I/O // DB block
DI(0x85), // DI block
FLAGS(0x83), // M block
FLAGS(0x83), // M block (for the M area, areaNumber is always 0; only byteOffset and bitOffset are configured)
INPUTS(0x81), // I block
T(0x1D), // T block
C(0x1C), // C block
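
The expanded comment documents the addressing convention for the M (FLAGS) area: areaNumber is always 0, and a flag is located purely by byteOffset and bitOffset. As a purely illustrative sketch (the read call is left as a comment because the exact method signature in this repo's s7connector fork is not shown in the diff), reading flag bit M10.3 would look roughly like:

    // byte[] data = connector.read(DaveArea.FLAGS, /* areaNumber */ 0, /* length */ 1, /* byteOffset */ 10);  // hypothetical signature
    // boolean m10_3 = ((data[0] >> 3) & 0x01) == 1;   // bitOffset 3 within byte 10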


+7 -12  src/main/resources/application.yml

@@ -8,7 +8,7 @@ server:
spring:
rabbitmq:
# For a rabbitmq+haproxy+keepalived cluster, 192.168.0.176 is the address proxied by haproxy (strictly speaking, the keepalived VIP)
addresses: 192.168.0.170:5672 # newer RabbitMQ versions not yet tested
addresses: 172.16.21.191:5672 # newer RabbitMQ versions not yet tested
#addresses: 172.16.21.133:5672
username: cigs
password: cigs
@@ -34,19 +34,14 @@ spring:
#multiplier: 2 # interval multiplier: interval * multiplier = the next interval, capped at the configured maximum interval
#================ retry mechanism end

#influx:
# influxUrl: 'http://192.168.0.170:8086'
# bucket: 'qgs-bucket'
# org: 'qgs'
# token: 'lkBsC27QZr1W50BSPlGxpTqNNpwuUk5uz1dZZRPSPbCG5VmNDDUo8P3UkZIhGWwfJwkuz6ZGZ7Et4_KBaG3gHw=='
influx:
influxUrl: 'http://192.168.0.170:8086'
bucket: 'qgs-bucket'
org: 'qgs'
token: 'lkBsC27QZr1W50BSPlGxpTqNNpwuUk5uz1dZZRPSPbCG5VmNDDUo8P3UkZIhGWwfJwkuz6ZGZ7Et4_KBaG3gHw=='

# /health point
#management:
# health:
# influxdb:
# enabled: true

influx:
url: http://172.16.21.164:8086
token: N4yBD2iC0kDkAzlWEQ5koqJNbbd6v9PQsubEAOle1rykOIeM5zckKxUteDUsHm1LFMBnJTtfGNsRT_N54YTLcQ==
org: cigs
bucket: cigs-all

+11 -4  src/main/resources/logback.xml

@@ -9,10 +9,17 @@

<!-- name is the variable's name and value is the value it is defined with. Defined values are inserted into the logger context; once defined, a variable can be referenced with ${}. -->

<property name="logging.pathwork" value="C:/qgs_logger/work" />
<property name="logging.pathopc" value="C:/qgs_logger/opc" />
<property name="logging.pathmq" value="C:/qgs_logger/mq" />
<property name="logging.s7" value="C:/qgs_logger/s7" />
<!-- <property name="logging.pathwork" value="C:/qgs_logger/work" />-->
<!-- <property name="logging.pathopc" value="C:/qgs_logger/opc" />-->
<!-- <property name="logging.pathmq" value="C:/qgs_logger/mq" />-->
<!-- <property name="logging.s7" value="C:/qgs_logger/s7" />-->
<property name="logging.pathwork" value="/usr/local/logger/work" />
<property name="logging.pathopc" value="/usr/local/logger/opc" />
<property name="logging.pathmq" value="/usr/local/logger/mq" />
<property name="logging.s7" value="/usr/local/logger/s7" />




<!-- 0. log format and color rendering -->
<!-- renderer classes required for colored log output -->

