mark for pull
@@ -27,7 +27,7 @@ public class Main {

    public static void main(String[] args) throws InterruptedException {
        char[] token = "lkBsC27QZr1W50BSPlGxpTqNNpwuUk5uz1dZZRPSPbCG5VmNDDUo8P3UkZIhGWwfJwkuz6ZGZ7Et4_KBaG3gHw==".toCharArray();
        String org = "qgs";
        String bucket = "mytest";
        String bucket = "qgs-bucket";
        InfluxDBClient influxDBClient = InfluxDBClientFactory.create("http://192.168.0.170:8086", token, org, bucket);

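Editor's note, not part of the commit: InfluxDBClient is AutoCloseable in recent influxdb-client-java releases, so the explicit influxDBClient.close() further down in main() could be replaced by try-with-resources; a minimal sketch under that assumption:

        // Sketch only: the client is closed automatically even if a query throws.
        // Assumes the same URL/token/org/bucket values as above.
        try (InfluxDBClient client = InfluxDBClientFactory.create("http://192.168.0.170:8086", token, org, bucket)) {
            QueryApi queryApi = client.getQueryApi();
            // ... run queries here ...
        } // client.close() runs here automatically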
@@ -105,24 +105,24 @@ public class Main {
//        }

        String flux = "from(bucket:\"mytest\") |> range(start: -6000000000000000m)";
        flux += "|> filter(fn: (r) =>\n" +
                " r._measurement == \"ASProcessCompleteEvent\" and \n" +
//              " r._field == \"type\" and \n" +            // corresponds to the Field key
                " r.argName == \"arg3\"\n" +                 // corresponds to the Tag key (tag information cannot be retrieved from a FluxRecord)
                " )";
        QueryApi queryApi = influxDBClient.getQueryApi();

        List<FluxTable> tables = queryApi.query(flux);
        for (FluxTable fluxTable : tables) {
            List<FluxRecord> records = fluxTable.getRecords();
            for (FluxRecord fluxRecord : records) {
                Double o = (Double) fluxRecord.getValueByKey("_value");
                System.out.println("time: "+fluxRecord.getTime() +" key:"+fluxRecord.getField()+" value: " + fluxRecord.getValueByKey("_value")+" measurement: " + fluxRecord.getMeasurement());
//                System.out.println("time: "+fluxRecord.getTime() +" key:"++" value: " + fluxRecord.getValueByKey("_value")+" measurement: " + fluxRecord.getMeasurement());
            }
        }
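Editor's note, not part of the commit: for larger result sets the same query can be consumed record by record instead of materializing every FluxTable; a sketch assuming the streaming overload query(String, BiConsumer<Cancellable, FluxRecord>) of QueryApi:

        // Sketch only: each FluxRecord is pushed to the callback as it arrives,
        // so the full result set never has to be held in memory at once.
        queryApi.query(flux, (cancellable, record) ->
                System.out.println("time: " + record.getTime()
                        + " key:" + record.getField()
                        + " value: " + record.getValueByKey("_value")
                        + " measurement: " + record.getMeasurement()));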
//        String flux = "from(bucket:\"mytest\") |> range(start: -6000000000000000m)";
//        flux += "|> filter(fn: (r) =>\n" +
//                " r._measurement == \"ASProcessCompleteEvent\" and \n" +
////              " r._field == \"type\" and \n" +           // corresponds to the Field key
//                " r.argName == \"arg3\"\n" +                // corresponds to the Tag key (tag information cannot be retrieved from a FluxRecord)
//                " )";
//        QueryApi queryApi = influxDBClient.getQueryApi();
//
//        List<FluxTable> tables = queryApi.query(flux);
//        for (FluxTable fluxTable : tables) {
//            List<FluxRecord> records = fluxTable.getRecords();
//            for (FluxRecord fluxRecord : records) {
//                Double o = (Double)fluxRecord.getValueByKey("_value");
//                System.out.println("time: "+fluxRecord.getTime() +" key:"+fluxRecord.getField()+" value: " + fluxRecord.getValueByKey("_value")+" measurement: " + fluxRecord.getMeasurement());
////                System.out.println("time: "+fluxRecord.getTime() +" key:"++" value: " + fluxRecord.getValueByKey("_value")+" measurement: " + fluxRecord.getMeasurement());
//
//            }
//        }


//        from(bucket: "mytest")
@@ -133,34 +133,40 @@ public class Main {
//        |> sort(columns: ["_time"], desc: true)
//        take the first 10 records
//        |> limit(n: 10, offset: 0)
//
//        take records 10-20
//        |> limit(n: 10, offset: 10)
//
//        take records 20-30
//        |> limit(n: 10, offset: 20)

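In other words, page P of size S is fetched with limit(n: S, offset: (P - 1) * S), which is exactly how PageInfo is turned into a limit clause further down; a small illustrative helper (the method name is hypothetical, and the PageInfo(current, size) parameter order is inferred from its usage in this commit):

        // Illustrative helper, not part of the commit: offset = (current - 1) * size.
        static String limitClause(PageInfo pageInfo) {
            int offset = (pageInfo.getCurrent() - 1) * pageInfo.getSize();
            return "|> limit(n: " + pageInfo.getSize() + ", offset: " + offset + ")";
        }

        // limitClause(new PageInfo(1, 10))  ->  "|> limit(n: 10, offset: 0)"
        // limitClause(new PageInfo(3, 10))  ->  "|> limit(n: 10, offset: 20)"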
//        QueryDataParam queryDataParam = new QueryDataParam();
//        queryDataParam.setBucket("mytest");
//        queryDataParam.setRange(new Range(getDate().toInstant(),new Date().toInstant()));
//        queryDataParam.setMeasurement("ASProcessCompleteEvent");
//        queryDataParam.setTag(new Tag("argName","arg4"));
        QueryDataParam queryDataParam = new QueryDataParam();
        queryDataParam.setBucket("qgs-bucket");
        queryDataParam.setRange(new Range(getDate().toInstant(),new Date().toInstant()));
        queryDataParam.setMeasurement("WeightHeiHei");
        queryDataParam.setTag(new Tag("argName","LTWeight"));
//        queryDataParam.setDropedTagName("transationId");
//        queryDataParam.setPageInfo(new PageInfo(1,100));
//
//        List<FluxTable> tables = query(queryDataParam,influxDBClient);
//        List<FluxRecord> records1 = tables.get(0).getRecords();
        List<String> dropNames = new ArrayList<>();
        dropNames.add("transationId");
        dropNames.add("inspectionSheetId");
        dropNames.add("batchNum");
        queryDataParam.setDropedTagNames(dropNames);
        queryDataParam.setPageInfo(new PageInfo(1,10000));

        List<FluxTable> tables = query(queryDataParam,influxDBClient);
        List<FluxRecord> records1 = tables.get(0).getRecords();
//        List<List<FluxRecord>> lists = Utils.fixedGroup(records1, 10);

//        for (FluxTable fluxTable : tables) {
//            List<FluxRecord> records = fluxTable.getRecords();
//            for (FluxRecord fluxRecord : records) {
//                System.out.println("time: "+fluxRecord.getTime() +" key:"+fluxRecord.getField()+" value: " + fluxRecord.getValueByKey("_value")+" measurement: " + fluxRecord.getMeasurement());
//
//            }
//        }
        for (FluxTable fluxTable : tables) {
            List<FluxRecord> records = fluxTable.getRecords();
            for (FluxRecord fluxRecord : records) {
                Instant timms = fluxRecord.getTime();
                System.out.println("time: "+fluxRecord.getTime() +" key:"+fluxRecord.getField()+" value: " + fluxRecord.getValueByKey("_value")+" measurement: " + fluxRecord.getMeasurement());
            }
        }

        influxDBClient.close();
    }

@@ -173,7 +179,7 @@ public class Main {
        // both approaches return the date three days ago
//        calendar1.set(Calendar.DAY_OF_YEAR,calendar1.get(Calendar.DAY_OF_YEAR) -3);
        // take the date three days before the current time
        calendar1.add(Calendar.DATE, -3);
        calendar1.add(Calendar.DATE, -1000);
        // add() also accepts Calendar.MONTH to move a number of months backwards or forwards
        //calendar1.add(Calendar.MONTH, -3);
        Date day = calendar1.getTime();
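Editor's note, not part of the commit: the same offsets can be computed with java.time (requires java.time.temporal.ChronoUnit), which avoids mutating a Calendar; a minimal sketch:

        // Sketch only: go back 3 days (or 1000 days) from the current instant.
        Instant threeDaysAgo = Instant.now().minus(3, ChronoUnit.DAYS);
        Instant thousandDaysAgo = Instant.now().minus(1000, ChronoUnit.DAYS);
        Date day = Date.from(thousandDaysAgo);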
@@ -197,6 +203,7 @@ public class Main {
            flux += "|> drop(columns: [\""+ dropName +"\"]) \n";
        }
        flux += "|> sort(columns: [\"_time\"], desc: true) \n";
        flux += "|> window(every: 1y) \n";
        if(pageInfo!=null){
            flux += "|> limit(n: "+pageInfo.getSize()+", offset: "+(pageInfo.getCurrent()-1)* pageInfo.getSize()+")";
        }

@@ -2,6 +2,7 @@ package com.cnbm.influx.config;

import com.cnbm.influx.constant.Constant;
import com.cnbm.influx.param.PageInfo;
import com.cnbm.influx.param.QueryDataGroupByTimeParam;
import com.cnbm.influx.param.QueryDataParam;
import com.cnbm.influx.param.Range;
import com.cnbm.influx.template.Event;
@@ -94,6 +95,7 @@ public enum InfluxClient {
        Point point = Point.measurement(measurement)
                .addTag("transationId", event.getTransationId())
                .addTag("inspectionSheetId", event.getInspectionSheetId())
                .addTag("batchNum", event.getBatchNum().toString())
                .addTag("argName", event.getArgName())
                .addField("argValue", event.getArgValue())
                .time(event.getTime().toEpochMilli(), WritePrecision.MS);
@@ -125,6 +127,47 @@ public enum InfluxClient {
        }


//        List<FluxTable> tables = queryApi.query(flux);
//        for (FluxTable fluxTable : tables) {
//            List<FluxRecord> records = fluxTable.getRecords();
//            for (FluxRecord fluxRecord : records) {
//                System.out.println("time: "+fluxRecord.getTime() +" key:"+fluxRecord.getField()+" value: " + fluxRecord.getValueByKey("_value")+" measurement: " + fluxRecord.getMeasurement());
//
//            }
//        }
        return queryApi.query(flux);
    }

    public List<FluxTable> queryGroupByTime(QueryDataGroupByTimeParam param){
        String measurement = param.getMeasurement();
        List<String> dropedTagNames = param.getDropedTagNames();
        Range range = param.getRange();
        String bucket = param.getBucket();
        String tagName = param.getTag().getTagName();
        String tagValue = param.getTag().getTagValue();
        PageInfo pageInfo = param.getPageInfo();

        String flux = "from(bucket:\""+bucket+"\")";
        flux += "|> range(start: "+range.getBegin()+",stop:"+range.getEnd()+")";
        flux += "|> filter(fn: (r) => r[\"_measurement\"] == \""+measurement+"\")";
        flux += "|> filter(fn: (r) => r[\""+tagName+"\"] == \""+tagValue+"\")";
        for(String dropName:dropedTagNames){
            flux += "|> drop(columns: [\""+dropName+"\"])";
        }
        //|> window(every: 1mo)
        if(param.getTimeType() == 1){
            flux += "|> window(every: 1y)";
        }else if(param.getTimeType() == 2 ){
            flux += "|> window(every: 1mo)";
        }else{
            flux += "|> window(every: 1d)";
        }
        flux += "|> sort(columns: [\"_time\"], desc: true)";
        if(pageInfo!=null){
            flux += "|> limit(n: "+pageInfo.getSize()+", offset: "+(pageInfo.getCurrent()-1)* pageInfo.getSize()+")";
        }


//        List<FluxTable> tables = queryApi.query(flux);
//        for (FluxTable fluxTable : tables) {
//            List<FluxRecord> records = fluxTable.getRecords();

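For reference, not part of the commit: with a hypothetical param (bucket "qgs-bucket", measurement "WeightHeiHei", tag argName == "LTWeight", one dropped tag "transationId", timeType = 2, page 1 of size 10), the string concatenation in queryGroupByTime() produces a query along these lines:

        // Example of the assembled Flux (line breaks added for readability; the
        // real value is a single string):
        //
        //   from(bucket:"qgs-bucket")
        //   |> range(start: <range.getBegin()>,stop:<range.getEnd()>)
        //   |> filter(fn: (r) => r["_measurement"] == "WeightHeiHei")
        //   |> filter(fn: (r) => r["argName"] == "LTWeight")
        //   |> drop(columns: ["transationId"])
        //   |> window(every: 1mo)
        //   |> sort(columns: ["_time"], desc: true)
        //   |> limit(n: 10, offset: 0)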
@@ -18,7 +18,7 @@ public class Constant {
    public static final LogLevel readTimeout = LogLevel.BODY;
    public static final LogLevel writeTimeout = LogLevel.BODY;
    public static final LogLevel connectTimeout = LogLevel.BODY;
    public static final String measurement = "Weight";
    public static final String measurement = "WeightHeiHei";


}

@@ -16,8 +16,11 @@ import org.slf4j.LoggerFactory;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;

import java.text.DateFormat;
import java.time.Instant;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Random;

@@ -49,21 +52,49 @@ public class S7DemoController {
    public void insertBatchJYD() throws InterruptedException {
        List<Event> list = new ArrayList<>();
        Random r = new Random();
        Instant instant = DataUtils.getBeforeDate(400).toInstant();

        for(int j=0;j<10;j++){
            for(int i=0;i<99;i++){
                Thread.sleep(10);
                Event event = new Event();
                event.setTime(Instant.now());
                event.setTime(instant);
                event.setTransationId("asas"+i);
                event.setArgName("LTWeight");
                Double d = r.nextDouble() * 2.5 + 66;
                event.setInspectionSheetId(j+"");
                event.setArgValue(d);
                event.setBatchNum(i);
                list.add(event);
            }
        }
        InfluxClient.Client.batchInsert(list,"Weight");
        InfluxClient.Client.batchInsert(list,"WeightHei");
    }

    @PostMapping("/insertBatchJYDForTest")
    public void insertBatchJYDForTest() throws InterruptedException {
        List<Event> list = new ArrayList<>();
        Random r = new Random();

        for(int i=0;i<999;i++){
            Thread.sleep(10);
            Event event = new Event();
            event.setTime(DataUtils.getAfterDate(i).toInstant());
            event.setTransationId("asas"+i);
            event.setArgName("LostDays");
            int i1 = r.nextInt(10);
            if(i1<4){
                event.setArgValue(new Double(0));
            }else {
                event.setArgValue(new Double(1));
            }

            event.setInspectionSheetId(i+"");

            event.setBatchNum(i);
            list.add(event);
        }
        InfluxClient.Client.batchInsert(list,"WeightHeiHei");
    }

    /**
@@ -139,8 +170,8 @@ public class S7DemoController {
                .addField("argValue", event.getArgValue())
                .time(event.getTime().toEpochMilli(), WritePrecision.MS);
        return point;

    }

    @PostMapping("/insert")
    public void insert() throws InterruptedException {
        Event event = new Event();

@@ -0,0 +1,34 @@
package com.cnbm.influx.param;

import lombok.Data;
import lombok.EqualsAndHashCode;
import lombok.experimental.Accessors;

import java.util.List;

/**
 * @Desc: "builder for influx query conditions"
 * @Author: caixiang
 * @DATE: 2022/6/29 10:17
 *
 * Notes:
 *   required
 *     1. measurement must not be null
 *     2. the time range must not be null
 *     3. bucket must not be null
 *   optional
 *     1. paging info is optional
 *     2. tag
 *
 */
@Data
@EqualsAndHashCode(callSuper = false)
@Accessors(chain = true)
public class QueryDataGroupByTimeParam extends BaseParam{
    private Tag tag;
    // tag columns to drop from the query (transationId is a unique id and would skew the final result set)
    private List<String> dropedTagNames;
    private String bucket;
    // 1 - group by year; 2 - group by month; 3 - group by day
    private Integer timeType;
}
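A usage sketch, not part of the commit: measurement, range and pageInfo are assumed to live on BaseParam, since queryGroupByTime() above reads them via param.getMeasurement(), param.getRange() and param.getPageInfo():

        // Sketch only: query the data inserted elsewhere in this commit, grouped by month.
        QueryDataGroupByTimeParam param = new QueryDataGroupByTimeParam();
        param.setBucket("qgs-bucket");
        param.setMeasurement("WeightHeiHei");                                        // assumed BaseParam field
        param.setRange(new Range(DataUtils.getBeforeDate(400).toInstant(), Instant.now()));  // assumed BaseParam field
        param.setTag(new Tag("argName", "LTWeight"));
        param.setDropedTagNames(List.of("transationId", "inspectionSheetId", "batchNum"));
        param.setTimeType(2);                                                        // 2 = group by month
        param.setPageInfo(new PageInfo(1, 100));                                     // assumed BaseParam field; optional
        List<FluxTable> tables = InfluxClient.Client.queryGroupByTime(param);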
@@ -24,4 +24,7 @@ public class Event {
    private String argName;

    private Double argValue;

    // batch number, optional
    private Integer batchNum;
}
@@ -16,16 +16,18 @@ public class EventForCount {

    private String inspectionSheetId;

    // n = sample size of a batch
    private Integer n;
    private String transationId;

    // failN = number of defective items in a batch
    private String failN;
    private String argName;


    // for the count type: 1 = ok, 2 = nok
    // todo how to estimate the remaining sample size
    private Double argValue;

    // type: 1 = variable (measured) data; 2 = attribute (count) data
    private Integer type;

    // batchNum = batch identifier
    private String batchNum;

    // inspection item name
    private String detectionName;

}