mark for pull

This commit is contained in:
caixiang 2022-08-09 14:57:55 +08:00
parent 314380cd74
commit 01c95bfe1d
29 changed files with 1338 additions and 65 deletions

View File

@ -84,9 +84,7 @@ public class FactoryController {
public Result update(@RequestBody FactoryDTO dto){
// validate the request data
ValidatorUtils.validateEntity(dto, UpdateGroup.class, DefaultGroup.class);
factoryService.update(dto);
return new Result();
}

View File

@ -65,7 +65,7 @@ public class ProductController {
return new Result<ProductDTO>().ok(data);
}
@PostMapping
@PostMapping("save")
@ApiOperation("保存")
@LogOperation("保存")
//@PreAuthorize("@ex.hasAuthority('code:product:save')")
@ -78,7 +78,7 @@ public class ProductController {
return new Result();
}
@PutMapping
@PutMapping("update")
@ApiOperation("修改")
@LogOperation("修改")
//@PreAuthorize("@ex.hasAuthority('code:product:update')")
@ -91,7 +91,7 @@ public class ProductController {
return new Result();
}
@DeleteMapping
@DeleteMapping("delete")
@ApiOperation("删除")
@LogOperation("删除")
//@PreAuthorize("@ex.hasAuthority('code:product:delete')")

View File

@ -23,7 +23,45 @@ public class DataUtils {
}
public List<List<Date>> split(List<Date> value) {
List<List<Date>> result = new ArrayList<>();
// day-of-month of the first date, used as the grouping key (Date#getDate is deprecated but sufficient here)
int day = value.iterator().next().getDate();
List<Date> newListEntry = new ArrayList<>();
for (Date date : value) {
if (date.getDate() == day) {
newListEntry.add(date);
}
else {
day = date.getDate();
result.add(newListEntry);
newListEntry = new ArrayList<>();
newListEntry.add(date);
}
}
result.add(newListEntry);//because the last sublist was not added
return result;
}
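As a quick illustration of split() above: consecutive dates that share the same day-of-month stay in one sublist, and a new sublist starts whenever the day changes. A minimal sketch, assuming this is the com.cnbm.common.spc.util.DataUtils used elsewhere in this commit and that it has a default no-arg constructor; the sample dates are illustrative only.

import java.util.Arrays;
import java.util.Date;
import java.util.List;
import com.cnbm.common.spc.util.DataUtils;

public class SplitDemo {
    public static void main(String[] args) {
        // three timestamps: two on Aug 8, one on Aug 9 (deprecated Date ctor is fine for a quick sketch)
        Date d1 = new Date(122, 7, 8, 9, 0);   // 2022-08-08 09:00
        Date d2 = new Date(122, 7, 8, 10, 0);  // 2022-08-08 10:00
        Date d3 = new Date(122, 7, 9, 9, 0);   // 2022-08-09 09:00
        List<List<Date>> grouped = new DataUtils().split(Arrays.asList(d1, d2, d3));
        System.out.println(grouped.size()); // expected: 2 -> [[d1, d2], [d3]]
    }
}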
public static String splitToNeed(String s,Integer type){
String[] s1 = s.split(" ");
String[] split = s1[0].split("-");
String year = split[0];
String mon = split[1];
String day = split[2];
if(type == 1 ){
// year only
return year;
}else if(type == 2 ){
// year and month
return year+"-"+mon;
}else {
// full date (yyyy-MM-dd)
return s1[0];
}
}
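For orientation, a minimal sketch of what splitToNeed() returns for each type, assuming the input string starts with a yyyy-MM-dd date followed by a space. Note that the callers later in this commit pass Instant.toString(), which contains no space, so in that case the else branch returns the full timestamp unchanged.

import com.cnbm.common.spc.util.DataUtils;

public class SplitToNeedDemo {
    public static void main(String[] args) {
        String ts = "2022-08-04 06:59:55";                // assumed input shape
        System.out.println(DataUtils.splitToNeed(ts, 1)); // "2022"       (group by year)
        System.out.println(DataUtils.splitToNeed(ts, 2)); // "2022-08"    (group by month)
        System.out.println(DataUtils.splitToNeed(ts, 3)); // "2022-08-04" (group by day)
    }
}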
public static Date getBeforeDate(Integer number){
Date date = new Date();// current date
SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMdd");// date formatter
@ -37,6 +75,19 @@ public class DataUtils {
Date day = calendar1.getTime();
return day;
}
public static Date getAfterDate(Integer number){
Date date = new Date();// current date
SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMdd");// date formatter
Calendar calendar1 = Calendar.getInstance();// calendar instance for date arithmetic
//either of the two approaches below can shift the date
// calendar1.set(Calendar.DAY_OF_YEAR,calendar1.get(Calendar.DAY_OF_YEAR) -3);
//shift the current date forward by `number` days
calendar1.add(Calendar.DATE, number);
//add() also accepts Calendar.MONTH to shift by whole months
//calendar1.add(Calendar.MONTH, -3);
Date day = calendar1.getTime();
return day;
}
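A one-line usage sketch of the two date helpers above (output values depend on the current clock, shown for orientation only):

import java.util.Date;
import com.cnbm.common.spc.util.DataUtils;

public class DateShiftDemo {
    public static void main(String[] args) {
        Date sevenDaysAgo = DataUtils.getBeforeDate(7); // now minus 7 days
        Date inSevenDays  = DataUtils.getAfterDate(7);  // now plus 7 days
        System.out.println(sevenDaysAgo + " -> " + inSevenDays);
    }
}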
/**
* 将一组数据固定分组每组n个元素

View File

@ -27,7 +27,7 @@ public class Main {
public static void main(String[] args) throws InterruptedException {
char[] token = "lkBsC27QZr1W50BSPlGxpTqNNpwuUk5uz1dZZRPSPbCG5VmNDDUo8P3UkZIhGWwfJwkuz6ZGZ7Et4_KBaG3gHw==".toCharArray();
String org = "qgs";
String bucket = "mytest";
String bucket = "qgs-bucket";
InfluxDBClient influxDBClient = InfluxDBClientFactory.create("http://192.168.0.170:8086", token, org, bucket);
@ -105,24 +105,24 @@ public class Main {
// }
String flux = "from(bucket:\"mytest\") |> range(start: -6000000000000000m)";
flux += "|> filter(fn: (r) =>\n" +
" r._measurement == \"ASProcessCompleteEvent\" and \n" +
// " r._field == \"type\" and \n" + //对应 Field key
" r.argName == \"arg3\"\n" + //对应 Tags key (Tag 信息无法在FluxRecord 里面获取)
" )";
QueryApi queryApi = influxDBClient.getQueryApi();
List<FluxTable> tables = queryApi.query(flux);
for (FluxTable fluxTable : tables) {
List<FluxRecord> records = fluxTable.getRecords();
for (FluxRecord fluxRecord : records) {
Double o = (Double)fluxRecord.getValueByKey("_value");
System.out.println("time: "+fluxRecord.getTime() +" key:"+fluxRecord.getField()+" value: " + fluxRecord.getValueByKey("_value")+" measurement: " + fluxRecord.getMeasurement());
// System.out.println("time: "+fluxRecord.getTime() +" key:"++" value: " + fluxRecord.getValueByKey("_value")+" measurement: " + fluxRecord.getMeasurement());
}
}
// String flux = "from(bucket:\"mytest\") |> range(start: -6000000000000000m)";
// flux += "|> filter(fn: (r) =>\n" +
// " r._measurement == \"ASProcessCompleteEvent\" and \n" +
//// " r._field == \"type\" and \n" + //对应 Field key
// " r.argName == \"arg3\"\n" + //对应 Tags key (Tag 信息无法在FluxRecord 里面获取)
// " )";
// QueryApi queryApi = influxDBClient.getQueryApi();
//
// List<FluxTable> tables = queryApi.query(flux);
// for (FluxTable fluxTable : tables) {
// List<FluxRecord> records = fluxTable.getRecords();
// for (FluxRecord fluxRecord : records) {
// Double o = (Double)fluxRecord.getValueByKey("_value");
// System.out.println("time: "+fluxRecord.getTime() +" key:"+fluxRecord.getField()+" value: " + fluxRecord.getValueByKey("_value")+" measurement: " + fluxRecord.getMeasurement());
//// System.out.println("time: "+fluxRecord.getTime() +" key:"++" value: " + fluxRecord.getValueByKey("_value")+" measurement: " + fluxRecord.getMeasurement());
//
// }
// }
// from(bucket: "mytest")
@ -133,34 +133,40 @@ public class Main {
// |> sort(columns: ["_time"], desc: true)
// first 10 records
// |> limit(n: 10, offset: 0)
//
// records 10 to 20
// |> limit(n: 10, offset: 10)
//
// records 20 to 30
// |> limit(n: 10, offset: 20)
// QueryDataParam queryDataParam = new QueryDataParam();
// queryDataParam.setBucket("mytest");
// queryDataParam.setRange(new Range(getDate().toInstant(),new Date().toInstant()));
// queryDataParam.setMeasurement("ASProcessCompleteEvent");
// queryDataParam.setTag(new Tag("argName","arg4"));
QueryDataParam queryDataParam = new QueryDataParam();
queryDataParam.setBucket("qgs-bucket");
queryDataParam.setRange(new Range(getDate().toInstant(),new Date().toInstant()));
queryDataParam.setMeasurement("WeightHeiHei");
queryDataParam.setTag(new Tag("argName","LTWeight"));
// queryDataParam.setDropedTagName("transationId");
// queryDataParam.setPageInfo(new PageInfo(1,100));
//
// List<FluxTable> tables = query(queryDataParam,influxDBClient);
// List<FluxRecord> records1 = tables.get(0).getRecords();
List<String> dropNames = new ArrayList<>();
dropNames.add("transationId");
dropNames.add("inspectionSheetId");
dropNames.add("batchNum");
queryDataParam.setDropedTagNames(dropNames);
queryDataParam.setPageInfo(new PageInfo(1,10000));
List<FluxTable> tables = query(queryDataParam,influxDBClient);
List<FluxRecord> records1 = tables.get(0).getRecords();
// List<List<FluxRecord>> lists = Utils.fixedGroup(records1, 10);
// for (FluxTable fluxTable : tables) {
// List<FluxRecord> records = fluxTable.getRecords();
// for (FluxRecord fluxRecord : records) {
// System.out.println("time: "+fluxRecord.getTime() +" key:"+fluxRecord.getField()+" value: " + fluxRecord.getValueByKey("_value")+" measurement: " + fluxRecord.getMeasurement());
//
// }
// }
for (FluxTable fluxTable : tables) {
List<FluxRecord> records = fluxTable.getRecords();
for (FluxRecord fluxRecord : records) {
Instant timms = fluxRecord.getTime();
System.out.println("time: "+fluxRecord.getTime() +" key:"+fluxRecord.getField()+" value: " + fluxRecord.getValueByKey("_value")+" measurement: " + fluxRecord.getMeasurement());
}
}
influxDBClient.close();
}
@ -173,7 +179,7 @@ public class Main {
//either of the two approaches below can shift the date
// calendar1.set(Calendar.DAY_OF_YEAR,calendar1.get(Calendar.DAY_OF_YEAR) -3);
//shift backwards from the current date
calendar1.add(Calendar.DATE, -3);
calendar1.add(Calendar.DATE, -1000);
//add() also accepts Calendar.MONTH to shift by whole months
//calendar1.add(Calendar.MONTH, -3);
Date day = calendar1.getTime();
@ -197,6 +203,7 @@ public class Main {
flux += "|> drop(columns: [\""+ dropName +"\"]) \n";
}
flux += "|> sort(columns: [\"_time\"], desc: true) \n";
flux += "|> window(every: 1y) \n";
if(pageInfo!=null){
flux += "|> limit(n: "+pageInfo.getSize()+", offset: "+(pageInfo.getCurrent()-1)* pageInfo.getSize()+")";
}
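For orientation, with the parameters set in main() above (bucket qgs-bucket, measurement WeightHeiHei, tag argName=LTWeight, three dropped tags, page 1 of 10000), this helper assembles a Flux query roughly like the text below. The range/filter portion sits above the hunk shown here, so this is a sketch; <begin> and <end> are placeholders for the computed instants.

from(bucket:"qgs-bucket")
|> range(start: <begin>, stop: <end>)
|> filter(fn: (r) => r["_measurement"] == "WeightHeiHei")
|> filter(fn: (r) => r["argName"] == "LTWeight")
|> drop(columns: ["transationId"])
|> drop(columns: ["inspectionSheetId"])
|> drop(columns: ["batchNum"])
|> sort(columns: ["_time"], desc: true)
|> window(every: 1y)
|> limit(n: 10000, offset: 0)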

View File

@ -2,6 +2,7 @@ package com.cnbm.influx.config;
import com.cnbm.influx.constant.Constant;
import com.cnbm.influx.param.PageInfo;
import com.cnbm.influx.param.QueryDataGroupByTimeParam;
import com.cnbm.influx.param.QueryDataParam;
import com.cnbm.influx.param.Range;
import com.cnbm.influx.template.Event;
@ -94,6 +95,7 @@ public enum InfluxClient {
Point point = Point.measurement(measurement)
.addTag("transationId", event.getTransationId())
.addTag("inspectionSheetId", event.getInspectionSheetId())
.addTag("batchNum", event.getBatchNum().toString())
.addTag("argName", event.getArgName())
.addField("argValue", event.getArgValue())
.time(event.getTime().toEpochMilli(), WritePrecision.MS);
@ -125,6 +127,47 @@ public enum InfluxClient {
}
// List<FluxTable> tables = queryApi.query(flux);
// for (FluxTable fluxTable : tables) {
// List<FluxRecord> records = fluxTable.getRecords();
// for (FluxRecord fluxRecord : records) {
// System.out.println("time: "+fluxRecord.getTime() +" key:"+fluxRecord.getField()+" value: " + fluxRecord.getValueByKey("_value")+" measurement: " + fluxRecord.getMeasurement());
//
// }
// }
return queryApi.query(flux);
}
public List<FluxTable> queryGroupByTime(QueryDataGroupByTimeParam param){
String measurement = param.getMeasurement();
List<String> dropedTagNames = param.getDropedTagNames();
Range range = param.getRange();
String bucket = param.getBucket();
String tagName = param.getTag().getTagName();
String tagValue = param.getTag().getTagValue();
PageInfo pageInfo = param.getPageInfo();
String flux = "from(bucket:\""+bucket+"\")";
flux += "|> range(start: "+range.getBegin()+",stop:"+range.getEnd()+")";
flux += "|> filter(fn: (r) => r[\"_measurement\"] == \""+measurement+"\")";
flux += "|> filter(fn: (r) => r[\""+tagName+"\"] == \""+tagValue+"\")";
for(String dropName:dropedTagNames){
flux += "|> drop(columns: [\""+dropName+"\"])";
}
//|> window(every: 1mo)
if(param.getTimeType() == 1){
flux += "|> window(every: 1y)";
}else if(param.getTimeType() == 2 ){
flux += "|> window(every: 1mo)";
}else{
flux += "|> window(every: 1d)";
}
flux += "|> sort(columns: [\"_time\"], desc: true)";
if(pageInfo!=null){
flux += "|> limit(n: "+pageInfo.getSize()+", offset: "+(pageInfo.getCurrent()-1)* pageInfo.getSize()+")";
}
// List<FluxTable> tables = queryApi.query(flux);
// for (FluxTable fluxTable : tables) {
// List<FluxRecord> records = fluxTable.getRecords();

View File

@ -18,7 +18,7 @@ public class Constant {
public static final LogLevel readTimeout = LogLevel.BODY;
public static final LogLevel writeTimeout = LogLevel.BODY;
public static final LogLevel connectTimeout = LogLevel.BODY;
public static final String measurement = "Weight";
public static final String measurement = "WeightHeiHei";
}

View File

@ -16,8 +16,11 @@ import org.slf4j.LoggerFactory;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import java.text.DateFormat;
import java.time.Instant;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Random;
@ -49,21 +52,49 @@ public class S7DemoController {
public void insertBatchJYD() throws InterruptedException {
List<Event> list = new ArrayList<>();
Random r = new Random();
Instant instant = DataUtils.getBeforeDate(400).toInstant();
for(int j=0;j<10;j++){
for(int i=0;i<99;i++){
Thread.sleep(10);
Event event = new Event();
event.setTime(Instant.now());
event.setTime(instant);
event.setTransationId("asas"+i);
event.setArgName("LTWeight");
Double d = r.nextDouble() * 2.5 + 66;
event.setInspectionSheetId(j+"");
event.setArgValue(d);
event.setBatchNum(i);
list.add(event);
}
}
InfluxClient.Client.batchInsert(list,"Weight");
InfluxClient.Client.batchInsert(list,"WeightHei");
}
@PostMapping("/insertBatchJYDForTest")
public void insertBatchJYDForTest() throws InterruptedException {
List<Event> list = new ArrayList<>();
Random r = new Random();
for(int i=0;i<999;i++){
Thread.sleep(10);
Event event = new Event();
event.setTime(DataUtils.getAfterDate(i).toInstant());
event.setTransationId("asas"+i);
event.setArgName("LostDays");
int i1 = r.nextInt(10);
if(i1<4){
event.setArgValue(new Double(0));
}else {
event.setArgValue(new Double(1));
}
event.setInspectionSheetId(i+"");
event.setBatchNum(i);
list.add(event);
}
InfluxClient.Client.batchInsert(list,"WeightHeiHei");
}
/**
@ -139,8 +170,8 @@ public class S7DemoController {
.addField("argValue", event.getArgValue())
.time(event.getTime().toEpochMilli(), WritePrecision.MS);
return point;
}
@PostMapping("/insert")
public void insert() throws InterruptedException {
Event event = new Event();

View File

@ -0,0 +1,34 @@
package com.cnbm.influx.param;
import lombok.Data;
import lombok.EqualsAndHashCode;
import lombok.experimental.Accessors;
import java.util.List;
/**
* @Desc: "influx 查询条件构造"
* @Author: caixiang
* @DATE: 2022/6/29 10:17
*
* 注意
* 必填
* measurement 不能为空
* 时间段 不能为空
* bucket 不能为空
* 非必填
* 分页信息可选
* tag
*
*/
@Data
@EqualsAndHashCode(callSuper = false)
@Accessors(chain = true)
public class QueryDataGroupByTimeParam extends BaseParam{
private Tag tag;
//tag columns to ignore in the query (transationId is a unique id and would otherwise affect the final result set)
private List<String> dropedTagNames;
private String bucket;
//1 = group by year; 2 = group by month; 3 = group by day
private Integer timeType;
}
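A minimal construction sketch under the constraints listed in the Javadoc above; the values are illustrative, and the real usage is in the ProcessInspectionController endpoints added in this commit.

import java.time.Instant;
import com.cnbm.influx.constant.Constant;
import com.cnbm.influx.param.QueryDataGroupByTimeParam;
import com.cnbm.influx.param.Range;
import com.cnbm.influx.param.Tag;

public class GroupByTimeParamDemo {
    public static void main(String[] args) {
        QueryDataGroupByTimeParam param = new QueryDataGroupByTimeParam();
        param.setBucket(Constant.bucket);                                               // required
        param.setMeasurement(Constant.measurement);                                     // required
        param.setRange(new Range(Instant.now().minusSeconds(30L * 24 * 3600), Instant.now())); // required time window
        param.setTag(new Tag("argName", "LTWeight"));                                   // optional tag filter
        param.setTimeType(2);                                                           // 1 = year, 2 = month, 3 = day
    }
}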

View File

@ -24,4 +24,7 @@ public class Event {
private String argName;
private Double argValue;
//batch number (optional)
private Integer batchNum;
}

View File

@ -16,16 +16,18 @@ public class EventForCount {
private String inspectionSheetId;
//n = sample size of a batch
private Integer n;
private String transationId;
//failN = number of nonconforming items in a batch
private String failN;
private String argName;
//for count-type data: 1 = OK, 2 = NOK
//TODO: how to estimate the remaining sample size
private Double argValue;
//type: 1 = variable (measured), 2 = attribute (counted)
private Integer type;
//batchNum = batch identifier
private String batchNum;
//inspection name
private String detectionName;
}

View File

@ -0,0 +1,10 @@
package com.cnbm.processInspection.constant;
/**
* @Desc: ""
* @Author: caixiang
* @DATE: 2022/8/3 10:01
*/
public class Constant {
public static final String measureMent = "WeightHeiHei";
}

View File

@ -7,9 +7,14 @@ import com.cnbm.common.spc.math.StandardDiviation;
import com.cnbm.common.spc.util.DataUtils;
import com.cnbm.common.vo.R;
import com.cnbm.influx.constant.Constant;
import com.cnbm.influx.param.QueryDataGroupByTimeParam;
import com.cnbm.influx.param.QueryDataParam;
import com.cnbm.influx.param.Range;
import com.cnbm.processInspection.dto.*;
import com.cnbm.processInspection.graphAnalyzed.forCount.c.CGraph;
import com.cnbm.processInspection.graphAnalyzed.forCount.np.NPGraph;
import com.cnbm.processInspection.graphAnalyzed.forCount.p.PGraph;
import com.cnbm.processInspection.graphAnalyzed.forCount.u.UGraph;
import com.cnbm.processInspection.graphAnalyzed.forMeterage.mr.MeanRGraph;
import com.cnbm.processInspection.graphAnalyzed.forMeterage.ms.MeanStandardDeviationGraph;
import com.cnbm.processInspection.graphAnalyzed.forMeterage.xmr.XMRGraph;
@ -36,9 +41,6 @@ public class ProcessInspectionController {
@PostMapping("/XbarSGraphTest")
public R<XbarSGraphData> xbarSGraphTest() throws Exception {
ProductFeaturesDTO productFeaturesDTO = productFeaturesService.get(new Long(1));
ProductFeatures productFeatures = new ProductFeatures();
productFeatures.setSl(new Float(5));
productFeatures.setUsl(new Float(10));
@ -55,7 +57,7 @@ public class ProcessInspectionController {
QueryDataParam queryDataParam = new QueryDataParam();
queryDataParam.setMeasurement("Weight");
queryDataParam.setMeasurement(Constant.measurement);
queryDataParam.setRange(new Range(DataUtils.getBeforeDate(10).toInstant(), Instant.now()));
meanStandardDeviationGraph.initialDate(queryDataParam);
@ -89,7 +91,7 @@ public class ProcessInspectionController {
QueryDataParam queryDataParam = new QueryDataParam();
queryDataParam.setMeasurement("Weight");
queryDataParam.setMeasurement(Constant.measurement);
queryDataParam.setRange(new Range(DataUtils.getBeforeDate(10).toInstant(), Instant.now()));
meanRGraph.initialDate(queryDataParam);
@ -122,7 +124,7 @@ public class ProcessInspectionController {
QueryDataParam queryDataParam = new QueryDataParam();
queryDataParam.setMeasurement("Weight");
queryDataParam.setMeasurement(Constant.measurement);
queryDataParam.setRange(new Range(DataUtils.getBeforeDate(10).toInstant(), Instant.now()));
xmrGraph.initialDate(queryDataParam);
@ -137,6 +139,109 @@ public class ProcessInspectionController {
return R.ok("成功",xmrGraphData);
}
@PostMapping("/NPGraphTest")
public R<NPGraphData> NPGraphTest() throws Exception {
ProductFeatures productFeatures = new ProductFeatures();
productFeatures.setSl(new Float(5));
productFeatures.setUsl(new Float(10));
productFeatures.setLsl(new Float(1));
productFeatures.setName("LostDays");
NPGraph npGraph = new NPGraph(productFeatures);
QueryDataGroupByTimeParam queryDataParam = new QueryDataGroupByTimeParam();
queryDataParam.setMeasurement(Constant.measurement);
queryDataParam.setRange(new Range( Instant.now() , DataUtils.getAfterDate(999).toInstant() ));
queryDataParam.setTimeType(1);
npGraph.initialDate(queryDataParam);
NPGraphData npGraph1 = new NPGraphData(
npGraph.getList(),
npGraph.getSpecificationLimit(),
npGraph.getArgName()
);
return R.ok("成功",npGraph1);
}
@PostMapping("/PGraphTest")
public R<PGraphData> PGraphTest() throws Exception {
ProductFeatures productFeatures = new ProductFeatures();
productFeatures.setSl(new Float(5));
productFeatures.setUsl(new Float(10));
productFeatures.setLsl(new Float(1));
productFeatures.setName("LostDays");
PGraph pGraph = new PGraph(productFeatures);
QueryDataGroupByTimeParam queryDataParam = new QueryDataGroupByTimeParam();
queryDataParam.setMeasurement(Constant.measurement);
queryDataParam.setRange(new Range( Instant.now() , DataUtils.getAfterDate(999).toInstant() ));
queryDataParam.setTimeType(2);
pGraph.initialDate(queryDataParam);
PGraphData npGraph1 = new PGraphData(
pGraph.getList(),
pGraph.getSpecificationLimit(),
pGraph.getArgName()
);
return R.ok("成功",npGraph1);
}
@PostMapping("/CGraphTest")
public R<CGraphData> CGraphTest() throws Exception {
ProductFeatures productFeatures = new ProductFeatures();
productFeatures.setSl(new Float(5));
productFeatures.setUsl(new Float(10));
productFeatures.setLsl(new Float(1));
productFeatures.setName("LostDays");
CGraph cGraph = new CGraph(productFeatures);
QueryDataGroupByTimeParam queryDataParam = new QueryDataGroupByTimeParam();
queryDataParam.setMeasurement(Constant.measurement);
queryDataParam.setRange(new Range( Instant.now() , DataUtils.getAfterDate(999).toInstant() ));
queryDataParam.setTimeType(2);
cGraph.initialDate(queryDataParam);
CGraphData npGraph1 = new CGraphData(
cGraph.getList(),
cGraph.getSpecificationLimit(),
cGraph.getArgName()
);
return R.ok("成功",npGraph1);
}
@PostMapping("/UGraphTest")
public R<UGraphData> UGraphTest() throws Exception {
ProductFeatures productFeatures = new ProductFeatures();
productFeatures.setSl(new Float(5));
productFeatures.setUsl(new Float(10));
productFeatures.setLsl(new Float(1));
productFeatures.setName("LostDays");
UGraph uGraph = new UGraph(productFeatures);
QueryDataGroupByTimeParam queryDataParam = new QueryDataGroupByTimeParam();
queryDataParam.setMeasurement(Constant.measurement);
queryDataParam.setRange(new Range( Instant.now() , DataUtils.getAfterDate(999).toInstant() ));
queryDataParam.setTimeType(2);
uGraph.initialDate(queryDataParam);
UGraphData npGraph1 = new UGraphData(
uGraph.getList(),
uGraph.getSpecificationLimit(),
uGraph.getArgName()
);
return R.ok("成功",npGraph1);
}
private ProductFeatures setRealSampleSize(GraphArg graphArg){
ProductFeaturesDTO productFeaturesDTO = productFeaturesService.get(graphArg.getProductFeaturesId());
@ -235,4 +340,87 @@ public class ProcessInspectionController {
return R.ok("成功",xmrGraphData);
}
@PostMapping("/NPGraph")
public R<NPGraphData> NPGraph(@RequestBody GraphArg graphArg) throws Exception {
ProductFeatures productFeatures = setRealSampleSize(graphArg);
NPGraph npGraph = new NPGraph(productFeatures);
QueryDataGroupByTimeParam queryDataParam = new QueryDataGroupByTimeParam();
queryDataParam.setMeasurement(Constant.measurement);
queryDataParam.setRange(new Range( graphArg.getBegin().toInstant() , graphArg.getEnd().toInstant() ));
queryDataParam.setTimeType(graphArg.getGroupType());
npGraph.initialDate(queryDataParam);
NPGraphData npGraph1 = new NPGraphData(
npGraph.getList(),
npGraph.getSpecificationLimit(),
npGraph.getArgName()
);
return R.ok("成功",npGraph1);
}
@PostMapping("/PGraph")
public R<PGraphData> PGraph(@RequestBody GraphArg graphArg) throws Exception {
ProductFeatures productFeatures = setRealSampleSize(graphArg);
PGraph pGraph = new PGraph(productFeatures);
QueryDataGroupByTimeParam queryDataParam = new QueryDataGroupByTimeParam();
queryDataParam.setMeasurement(Constant.measurement);
queryDataParam.setRange(new Range( graphArg.getBegin().toInstant() , graphArg.getEnd().toInstant() ));
queryDataParam.setTimeType(graphArg.getGroupType());
pGraph.initialDate(queryDataParam);
PGraphData npGraph1 = new PGraphData(
pGraph.getList(),
pGraph.getSpecificationLimit(),
pGraph.getArgName()
);
return R.ok("成功",npGraph1);
}
@PostMapping("/CGraph")
public R<CGraphData> CGraph(@RequestBody GraphArg graphArg) throws Exception {
ProductFeatures productFeatures = setRealSampleSize(graphArg);
CGraph cGraph = new CGraph(productFeatures);
QueryDataGroupByTimeParam queryDataParam = new QueryDataGroupByTimeParam();
queryDataParam.setMeasurement(Constant.measurement);
queryDataParam.setRange(new Range( graphArg.getBegin().toInstant() , graphArg.getEnd().toInstant() ));
queryDataParam.setTimeType(graphArg.getGroupType());
cGraph.initialDate(queryDataParam);
CGraphData npGraph1 = new CGraphData(
cGraph.getList(),
cGraph.getSpecificationLimit(),
cGraph.getArgName()
);
return R.ok("成功",npGraph1);
}
@PostMapping("/UGraph")
public R<UGraphData> UGraph(@RequestBody GraphArg graphArg) throws Exception {
ProductFeatures productFeatures = setRealSampleSize(graphArg);
UGraph uGraph = new UGraph(productFeatures);
QueryDataGroupByTimeParam queryDataParam = new QueryDataGroupByTimeParam();
queryDataParam.setMeasurement(Constant.measurement);
queryDataParam.setRange(new Range( graphArg.getBegin().toInstant() , graphArg.getEnd().toInstant() ));
queryDataParam.setTimeType(graphArg.getGroupType());
uGraph.initialDate(queryDataParam);
UGraphData uGraphData = new UGraphData(
uGraph.getList(),
uGraph.getSpecificationLimit(),
uGraph.getArgName()
);
return R.ok("成功",uGraphData);
}
}

View File

@ -0,0 +1,32 @@
package com.cnbm.processInspection.dto;
import com.cnbm.qualityPlanning.entity.CPoint;
import com.cnbm.qualityPlanning.entity.PPoint;
import com.cnbm.qualityPlanning.entity.SpecificationLimit;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;
import java.util.List;
/**
* @Desc: ""
* @Author: caixiang
* @DATE: 2022/7/22 14:18
*/
@Data
@ApiModel(value = "C控制图 结果类")
public class CGraphData {
@ApiModelProperty(value = "P控制图list数据")
private List<CPoint> list;
@ApiModelProperty(value = "P控制图 规格线")
private SpecificationLimit specificationLimit;
@ApiModelProperty(value = "P控制图 参数名")
private String argName;
public CGraphData(List<CPoint> list, SpecificationLimit specificationLimit, String argName) {
this.list = list;
this.specificationLimit = specificationLimit;
this.argName = argName;
}
}

View File

@ -32,5 +32,6 @@ public class GraphArg {
@ApiModelProperty(value = "样本大小,不填的话用之前配置的")
private Integer sampleSize;
@ApiModelProperty(value = "分组类别1=年 , 2=月 , 3=日)(用于计数型控制图)")
private Integer groupType;
}

View File

@ -0,0 +1,22 @@
package com.cnbm.processInspection.dto;
import lombok.Data;
/**
* @Desc: ""
* @Author: caixiang
* @DATE: 2022/7/27 15:56
*/
@Data
public class InterpretationListArgForCount {
private Integer number;
private Integer arg;
public InterpretationListArgForCount() {
}
public InterpretationListArgForCount(Integer number, Integer arg) {
this.number = number;
this.arg = arg;
}
}

View File

@ -0,0 +1,35 @@
package com.cnbm.processInspection.dto;
import com.cnbm.common.spc.math.StandardDiviation;
import com.cnbm.processInspection.graphAnalyzed.forMeterage.xmr.XMRGraphEntity;
import com.cnbm.qualityPlanning.entity.ControlLimit;
import com.cnbm.qualityPlanning.entity.NPPoint;
import com.cnbm.qualityPlanning.entity.ProcessCapability;
import com.cnbm.qualityPlanning.entity.SpecificationLimit;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;
import java.util.List;
/**
* @Desc: ""
* @Author: caixiang
* @DATE: 2022/7/22 14:18
*/
@Data
@ApiModel(value = "NP控制图 结果类")
public class NPGraphData {
@ApiModelProperty(value = "NP控制图list数据")
private List<NPPoint> list;
@ApiModelProperty(value = "NP控制图 规格线")
private SpecificationLimit specificationLimit;
@ApiModelProperty(value = "NP控制图 参数名")
private String argName;
public NPGraphData(List<NPPoint> list, SpecificationLimit specificationLimit, String argName) {
this.list = list;
this.specificationLimit = specificationLimit;
this.argName = argName;
}
}

View File

@ -0,0 +1,32 @@
package com.cnbm.processInspection.dto;
import com.cnbm.qualityPlanning.entity.NPPoint;
import com.cnbm.qualityPlanning.entity.PPoint;
import com.cnbm.qualityPlanning.entity.SpecificationLimit;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;
import java.util.List;
/**
* @Desc: ""
* @Author: caixiang
* @DATE: 2022/7/22 14:18
*/
@Data
@ApiModel(value = "P控制图 结果类")
public class PGraphData {
@ApiModelProperty(value = "P控制图list数据")
private List<PPoint> list;
@ApiModelProperty(value = "P控制图 规格线")
private SpecificationLimit specificationLimit;
@ApiModelProperty(value = "P控制图 参数名")
private String argName;
public PGraphData(List<PPoint> list, SpecificationLimit specificationLimit, String argName) {
this.list = list;
this.specificationLimit = specificationLimit;
this.argName = argName;
}
}

View File

@ -0,0 +1,32 @@
package com.cnbm.processInspection.dto;
import com.cnbm.qualityPlanning.entity.CPoint;
import com.cnbm.qualityPlanning.entity.SpecificationLimit;
import com.cnbm.qualityPlanning.entity.UPoint;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;
import java.util.List;
/**
* @Desc: ""
* @Author: caixiang
* @DATE: 2022/7/22 14:18
*/
@Data
@ApiModel(value = "U控制图 结果类")
public class UGraphData {
@ApiModelProperty(value = "U控制图list数据")
private List<UPoint> list;
@ApiModelProperty(value = "U控制图 规格线")
private SpecificationLimit specificationLimit;
@ApiModelProperty(value = "U控制图 参数名")
private String argName;
public UGraphData(List<UPoint> list, SpecificationLimit specificationLimit, String argName) {
this.list = list;
this.specificationLimit = specificationLimit;
this.argName = argName;
}
}

View File

@ -0,0 +1,153 @@
package com.cnbm.processInspection.graphAnalyzed.forCount.c;
import com.cnbm.basic.entity.ProductFeatures;
import com.cnbm.common.spc.util.DataUtils;
import com.cnbm.influx.config.InfluxClient;
import com.cnbm.influx.constant.Constant;
import com.cnbm.influx.param.QueryDataGroupByTimeParam;
import com.cnbm.influx.param.Tag;
import com.cnbm.qualityPlanning.entity.CPoint;
import com.cnbm.qualityPlanning.entity.ControlLimit;
import com.cnbm.qualityPlanning.entity.PPoint;
import com.cnbm.qualityPlanning.entity.SpecificationLimit;
import com.influxdb.query.FluxRecord;
import com.influxdb.query.FluxTable;
import lombok.Data;
import java.util.ArrayList;
import java.util.List;
/**
* @Desc: "均值标准差 控制图 , 计算类"
* @Author: caixiang
* @DATE: 2022/7/20 14:26
* 使用方式 先new MeanStandardDeviationGraph 对象 再initialData 初始化数据 再get 控制限
*
* 步骤
* 先读mysql表查询 product_features 先读到 sample_size样本量
* 再依据 influx.argName == mysql.product_feature.name && 时间段 查询所有的 参数数据
* 拿到参数数据后分组 整合成List<Point>,
* 计算控制限
* 计算 母体 \sigma bar{x}
* 计算CPK CPU CPL这些
* 如果配置了判读方案还要 调用 StatisticalControlledTest Function 检验
*
*/
@Data
public class CGraph {
//Count-type (attribute) charts cannot be validated with an interpretation scheme, because the control limits may differ when the per-subgroup sample size n differs.
// private List<InterpretationListArgForCount> interpretationScheme;
private String argName;
private List<CPoint> list;
private Double cbar;
private SpecificationLimit specificationLimit;
public CGraph(ProductFeatures productFeatures) throws Exception {
this.argName = productFeatures.getName();
list = new ArrayList<>();
this.specificationLimit = new SpecificationLimit(
productFeatures.getUsl()==null?null:productFeatures.getUsl(),
productFeatures.getSl()==null?null:productFeatures.getSl(),
productFeatures.getLsl()==null?null:productFeatures.getLsl()
);
}
private Double[] toDoubleArray(Object[] o){
Double[] res= new Double[o.length];
for(int i=0;i<o.length;i++){
res[i] = (Double) o[i];
}
return res;
}
private Double computeCbar(List<FluxTable> query){
Double totalFailNum = (double)0;
for (FluxTable fluxTable : query) {
List<FluxRecord> records = fluxTable.getRecords();
Integer failNum = 0;
for (FluxRecord fluxRecord : records) {
//the value was written as a Double, so it comes back out as a Double
Double value = Double.parseDouble(fluxRecord.getValueByKey("_value").toString());
if(value.equals((double) 0)){
failNum+=1;
}
}
totalFailNum =totalFailNum + (double)failNum;
}
return totalFailNum/query.size();
}
public static void main(String[] args) {
//2022-08-04 T06:59:55.628Z
String name = "2022-08-04 T06:59:55.628Z";
String[] s = name.split(" ");
String[] split = s[0].split("-");
System.out.println(name);
}
/**
* name : data initialisation method
* desc : reads raw data from influxdb and reshapes it into the structure needed here
* steps
*
* */
public void initialDate(QueryDataGroupByTimeParam queryDataParam){
queryDataParam.setBucket(Constant.bucket);
List<String> dropNames = new ArrayList<>();
dropNames.add("transationId");
dropNames.add("inspectionSheetId");
dropNames.add("batchNum");
queryDataParam.setDropedTagNames(dropNames);
queryDataParam.setTag(new Tag("argName",argName));
List<FluxTable> query = InfluxClient.Client.queryGroupByTime(queryDataParam);
//1. pull the raw data from influxdb
//compute c-bar
this.cbar = computeCbar(query);
//2. compute the per-group statistics
for(int i=0 ;i<query.size();i++){
List<FluxRecord> records = query.get(i).getRecords();
Integer failNum = 0;
String name = DataUtils.splitToNeed(records.get(0).getTime().toString(),queryDataParam.getTimeType());
for (FluxRecord fluxRecord : records) {
//the value was written as a Double, so it comes back out as a Double
Double value = Double.parseDouble(fluxRecord.getValueByKey("_value").toString());
if(value.equals((double) 0)){
failNum+=1;
}
}
list.add(new CPoint(
getCL(),
i,
(double)failNum,
name
));
}
}
/**
* desc: get the control limits of the C chart (the closed-form limits are summarised just after this class)
* Note: this must be called after initialDate() has been executed.
* */
public ControlLimit getCL(){
Double mul = 3 * Math.sqrt( this.cbar );
Double lcl = (this.cbar-mul)<0?0:(this.cbar-mul);
return new ControlLimit(
this.cbar + mul,
this.cbar,
lcl
);
}
}
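For reference, getCL() above implements the standard c-chart control limits around the centre line \bar{c}, with the lower limit clamped at zero:

\mathrm{UCL} = \bar{c} + 3\sqrt{\bar{c}}, \qquad \mathrm{CL} = \bar{c}, \qquad \mathrm{LCL} = \max\bigl(0,\ \bar{c} - 3\sqrt{\bar{c}}\bigr)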

View File

@ -0,0 +1,156 @@
package com.cnbm.processInspection.graphAnalyzed.forCount.np;
import com.cnbm.basic.entity.ProductFeatures;
import com.cnbm.common.spc.math.StandardDiviation;
import com.cnbm.common.spc.util.DataUtils;
import com.cnbm.influx.config.InfluxClient;
import com.cnbm.influx.constant.Constant;
import com.cnbm.influx.param.QueryDataGroupByTimeParam;
import com.cnbm.influx.param.QueryDataParam;
import com.cnbm.influx.param.Tag;
import com.cnbm.processInspection.controlCoefficientConstant.XBarRCoefficients;
import com.cnbm.processInspection.dto.InterpretationListArg;
import com.cnbm.processInspection.dto.InterpretationListArgForCount;
import com.cnbm.qualityPlanning.common.StatisticalControlledTest;
import com.cnbm.qualityPlanning.entity.*;
import com.influxdb.query.FluxRecord;
import com.influxdb.query.FluxTable;
import lombok.Data;
import java.util.ArrayList;
import java.util.List;
/**
* @Desc: "均值标准差 控制图 , 计算类"
* @Author: caixiang
* @DATE: 2022/7/20 14:26
* 使用方式 先new MeanStandardDeviationGraph 对象 再initialData 初始化数据 再get 控制限
*
* 步骤
* 先读mysql表查询 product_features 先读到 sample_size样本量
* 再依据 influx.argName == mysql.product_feature.name && 时间段 查询所有的 参数数据
* 拿到参数数据后分组 整合成List<Point>,
* 计算控制限
* 计算 母体 \sigma bar{x}
* 计算CPK CPU CPL这些
* 如果配置了判读方案还要 调用 StatisticalControlledTest Function 检验
*
*/
@Data
public class NPGraph {
//Count-type (attribute) charts cannot be validated with an interpretation scheme, because the control limits may differ when the per-subgroup sample size n differs.
// private List<InterpretationListArgForCount> interpretationScheme;
private String argName;
private List<NPPoint> list;
private Double pbar;
private SpecificationLimit specificationLimit;
public NPGraph(ProductFeatures productFeatures) throws Exception {
this.argName = productFeatures.getName();
list = new ArrayList<>();
this.specificationLimit = new SpecificationLimit(
productFeatures.getUsl()==null?null:productFeatures.getUsl(),
productFeatures.getSl()==null?null:productFeatures.getSl(),
productFeatures.getLsl()==null?null:productFeatures.getLsl()
);
}
private Double[] toDoubleArray(Object[] o){
Double[] res= new Double[o.length];
for(int i=0;i<o.length;i++){
res[i] = (Double) o[i];
}
return res;
}
private Double computePbar(List<FluxTable> query){
Double totalFailNum = (double)0;
Double totalN = (double)0;
for (FluxTable fluxTable : query) {
List<FluxRecord> records = fluxTable.getRecords();
Integer failNum = 0;
Integer n = records.size();
for (FluxRecord fluxRecord : records) {
//the value was written as a Double, so it comes back out as a Double
Double value = Double.parseDouble(fluxRecord.getValueByKey("_value").toString());
if(value.equals((double) 0)){
failNum+=1;
}
}
totalFailNum =totalFailNum + (double)failNum;
totalN = totalN + (double)n;
}
return totalFailNum/totalN;
}
/**
* name : data initialisation method
* desc : reads raw data from influxdb and reshapes it into the structure needed here
* steps
*
* */
public void initialDate(QueryDataGroupByTimeParam queryDataParam){
queryDataParam.setBucket(Constant.bucket);
List<String> dropNames = new ArrayList<>();
dropNames.add("transationId");
dropNames.add("inspectionSheetId");
dropNames.add("batchNum");
queryDataParam.setDropedTagNames(dropNames);
queryDataParam.setTag(new Tag("argName",argName));
List<FluxTable> query = InfluxClient.Client.queryGroupByTime(queryDataParam);
//1. pull the raw data from influxdb
List<Double> originData = new ArrayList<>();
//compute p-bar
this.pbar = computePbar(query);
//2. compute the per-group statistics
for(int i=0 ;i<query.size();i++){
List<FluxRecord> records = query.get(i).getRecords();
Integer failNum = 0;
Integer n = records.size();
String name = DataUtils.splitToNeed(records.get(0).getTime().toString(),queryDataParam.getTimeType());
for (FluxRecord fluxRecord : records) {
//the value was written as a Double, so it comes back out as a Double
Double value = Double.parseDouble(fluxRecord.getValueByKey("_value").toString());
if(value.equals((double) 0)){
failNum+=1;
}
}
list.add(new NPPoint(
getCL((double)n),
i,
failNum,
name
));
}
}
/**
* desc: get the control limits of the NP chart (the closed-form limits are summarised just after this class)
* Note: this must be called after initialDate() has been executed.
* */
public ControlLimit getCL(Double n){
Double npbar = n * this.pbar;
Double mul = 3 * Math.sqrt(npbar*(1-this.pbar));
Double lcl = (npbar-mul)<0?0:(npbar-mul);
return new ControlLimit(
npbar + mul,
npbar,
lcl
);
}
}
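For reference, getCL(n) above implements the standard np-chart control limits, recomputed for each subgroup's sample size n and clamped at zero:

\mathrm{UCL} = n\bar{p} + 3\sqrt{n\bar{p}(1-\bar{p})}, \qquad \mathrm{CL} = n\bar{p}, \qquad \mathrm{LCL} = \max\bigl(0,\ n\bar{p} - 3\sqrt{n\bar{p}(1-\bar{p})}\bigr)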

View File

@ -0,0 +1,163 @@
package com.cnbm.processInspection.graphAnalyzed.forCount.p;
import com.cnbm.basic.entity.ProductFeatures;
import com.cnbm.common.spc.util.DataUtils;
import com.cnbm.influx.config.InfluxClient;
import com.cnbm.influx.constant.Constant;
import com.cnbm.influx.param.QueryDataGroupByTimeParam;
import com.cnbm.influx.param.Tag;
import com.cnbm.qualityPlanning.entity.ControlLimit;
import com.cnbm.qualityPlanning.entity.NPPoint;
import com.cnbm.qualityPlanning.entity.PPoint;
import com.cnbm.qualityPlanning.entity.SpecificationLimit;
import com.influxdb.query.FluxRecord;
import com.influxdb.query.FluxTable;
import lombok.Data;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
/**
* @Desc: "均值标准差 控制图 , 计算类"
* @Author: caixiang
* @DATE: 2022/7/20 14:26
* 使用方式 先new MeanStandardDeviationGraph 对象 再initialData 初始化数据 再get 控制限
*
* 步骤
* 先读mysql表查询 product_features 先读到 sample_size样本量
* 再依据 influx.argName == mysql.product_feature.name && 时间段 查询所有的 参数数据
* 拿到参数数据后分组 整合成List<Point>,
* 计算控制限
* 计算 母体 \sigma bar{x}
* 计算CPK CPU CPL这些
* 如果配置了判读方案还要 调用 StatisticalControlledTest Function 检验
*
*/
@Data
public class PGraph {
//Count-type (attribute) charts cannot be validated with an interpretation scheme, because the control limits may differ when the per-subgroup sample size n differs.
// private List<InterpretationListArgForCount> interpretationScheme;
private String argName;
private List<PPoint> list;
private Double pbar;
private SpecificationLimit specificationLimit;
public PGraph(ProductFeatures productFeatures) throws Exception {
this.argName = productFeatures.getName();
list = new ArrayList<>();
this.specificationLimit = new SpecificationLimit(
productFeatures.getUsl()==null?null:productFeatures.getUsl(),
productFeatures.getSl()==null?null:productFeatures.getSl(),
productFeatures.getLsl()==null?null:productFeatures.getLsl()
);
}
private Double[] toDoubleArray(Object[] o){
Double[] res= new Double[o.length];
for(int i=0;i<o.length;i++){
res[i] = (Double) o[i];
}
return res;
}
private Double computePbar(List<FluxTable> query){
Double totalFailNum = (double)0;
Double totalN = (double)0;
for (FluxTable fluxTable : query) {
List<FluxRecord> records = fluxTable.getRecords();
Integer failNum = 0;
Integer n = records.size();
for (FluxRecord fluxRecord : records) {
//the value was written as a Double, so it comes back out as a Double
Double value = Double.parseDouble(fluxRecord.getValueByKey("_value").toString());
if(value.equals((double) 0)){
failNum+=1;
}
}
totalFailNum =totalFailNum + (double)failNum;
totalN = totalN + (double)n;
}
return totalFailNum/totalN;
}
public static void main(String[] args) {
//2022-08-04 T06:59:55.628Z
String name = "2022-08-04 T06:59:55.628Z";
String[] s = name.split(" ");
String[] split = s[0].split("-");
System.out.println(name);
}
/**
* name : data initialisation method
* desc : reads raw data from influxdb and reshapes it into the structure needed here
* steps
*
* */
public void initialDate(QueryDataGroupByTimeParam queryDataParam){
queryDataParam.setBucket(Constant.bucket);
List<String> dropNames = new ArrayList<>();
dropNames.add("transationId");
dropNames.add("inspectionSheetId");
dropNames.add("batchNum");
queryDataParam.setDropedTagNames(dropNames);
queryDataParam.setTag(new Tag("argName",argName));
List<FluxTable> query = InfluxClient.Client.queryGroupByTime(queryDataParam);
//1. pull the raw data from influxdb
//compute p-bar
this.pbar = computePbar(query);
//2. compute the per-group statistics
for(int i=0 ;i<query.size();i++){
List<FluxRecord> records = query.get(i).getRecords();
Integer failNum = 0;
Integer n = records.size();
String name = DataUtils.splitToNeed(records.get(0).getTime().toString(),queryDataParam.getTimeType());
for (FluxRecord fluxRecord : records) {
//the value was written as a Double, so it comes back out as a Double
Double value = Double.parseDouble(fluxRecord.getValueByKey("_value").toString());
if(value.equals((double) 0)){
failNum+=1;
}
}
Double pi = (double)failNum / (double)n;
list.add(new PPoint(
getCL((double)n),
i,
pi,
name
));
}
}
/**
* desc: get the control limits of the P chart (the closed-form limits are summarised just after this class)
* Note: this must be called after initialDate() has been executed.
* */
public ControlLimit getCL(Double n){
Double mul = 3 * Math.sqrt( ( this.pbar * (1-this.pbar) ) / n );
Double lcl = (this.pbar-mul)<0?0:(this.pbar-mul);
return new ControlLimit(
this.pbar + mul,
this.pbar,
lcl
);
}
}
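For reference, getCL(n) above implements the standard p-chart control limits, recomputed for each subgroup's sample size n and clamped at zero:

\mathrm{UCL} = \bar{p} + 3\sqrt{\tfrac{\bar{p}(1-\bar{p})}{n}}, \qquad \mathrm{CL} = \bar{p}, \qquad \mathrm{LCL} = \max\bigl(0,\ \bar{p} - 3\sqrt{\tfrac{\bar{p}(1-\bar{p})}{n}}\bigr)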

View File

@ -0,0 +1,161 @@
package com.cnbm.processInspection.graphAnalyzed.forCount.u;
import com.cnbm.basic.entity.ProductFeatures;
import com.cnbm.common.spc.util.DataUtils;
import com.cnbm.influx.config.InfluxClient;
import com.cnbm.influx.constant.Constant;
import com.cnbm.influx.param.QueryDataGroupByTimeParam;
import com.cnbm.influx.param.Tag;
import com.cnbm.qualityPlanning.entity.ControlLimit;
import com.cnbm.qualityPlanning.entity.PPoint;
import com.cnbm.qualityPlanning.entity.SpecificationLimit;
import com.cnbm.qualityPlanning.entity.UPoint;
import com.influxdb.query.FluxRecord;
import com.influxdb.query.FluxTable;
import lombok.Data;
import java.util.ArrayList;
import java.util.List;
/**
* @Desc: "均值标准差 控制图 , 计算类"
* @Author: caixiang
* @DATE: 2022/7/20 14:26
* 使用方式 先new MeanStandardDeviationGraph 对象 再initialData 初始化数据 再get 控制限
*
* 步骤
* 先读mysql表查询 product_features 先读到 sample_size样本量
* 再依据 influx.argName == mysql.product_feature.name && 时间段 查询所有的 参数数据
* 拿到参数数据后分组 整合成List<Point>,
* 计算控制限
* 计算 母体 \sigma bar{x}
* 计算CPK CPU CPL这些
* 如果配置了判读方案还要 调用 StatisticalControlledTest Function 检验
*
*/
@Data
public class UGraph {
//Count-type (attribute) charts cannot be validated with an interpretation scheme, because the control limits may differ when the per-subgroup sample size n differs.
// private List<InterpretationListArgForCount> interpretationScheme;
private String argName;
private List<UPoint> list;
private Double ubar;
private SpecificationLimit specificationLimit;
public UGraph(ProductFeatures productFeatures) throws Exception {
this.argName = productFeatures.getName();
list = new ArrayList<>();
this.specificationLimit = new SpecificationLimit(
productFeatures.getUsl()==null?null:productFeatures.getUsl(),
productFeatures.getSl()==null?null:productFeatures.getSl(),
productFeatures.getLsl()==null?null:productFeatures.getLsl()
);
}
private Double[] toDoubleArray(Object[] o){
Double[] res= new Double[o.length];
for(int i=0;i<o.length;i++){
res[i] = (Double) o[i];
}
return res;
}
private Double computeUbar(List<FluxTable> query){
Double totalFailNum = (double)0;
Double totalN = (double)0;
for (FluxTable fluxTable : query) {
List<FluxRecord> records = fluxTable.getRecords();
Integer failNum = 0;
Integer n = records.size();
for (FluxRecord fluxRecord : records) {
//the value was written as a Double, so it comes back out as a Double
Double value = Double.parseDouble(fluxRecord.getValueByKey("_value").toString());
if(value.equals((double) 0)){
failNum+=1;
}
}
totalFailNum =totalFailNum + (double)failNum;
totalN = totalN + (double)n;
}
return totalFailNum/totalN;
}
public static void main(String[] args) {
//2022-08-04 T06:59:55.628Z
String name = "2022-08-04 T06:59:55.628Z";
String[] s = name.split(" ");
String[] split = s[0].split("-");
System.out.println(name);
}
/**
* name : data initialisation method
* desc : reads raw data from influxdb and reshapes it into the structure needed here
* steps
*
* */
public void initialDate(QueryDataGroupByTimeParam queryDataParam){
queryDataParam.setBucket(Constant.bucket);
List<String> dropNames = new ArrayList<>();
dropNames.add("transationId");
dropNames.add("inspectionSheetId");
dropNames.add("batchNum");
queryDataParam.setDropedTagNames(dropNames);
queryDataParam.setTag(new Tag("argName",argName));
List<FluxTable> query = InfluxClient.Client.queryGroupByTime(queryDataParam);
//1. pull the raw data from influxdb
//compute u-bar
this.ubar = computeUbar(query);
//2. compute the per-group statistics
for(int i=0 ;i<query.size();i++){
List<FluxRecord> records = query.get(i).getRecords();
Integer failNum = 0;
Integer n = records.size();
String name = DataUtils.splitToNeed(records.get(0).getTime().toString(),queryDataParam.getTimeType());
for (FluxRecord fluxRecord : records) {
//the value was written as a Double, so it comes back out as a Double
Double value = Double.parseDouble(fluxRecord.getValueByKey("_value").toString());
if(value.equals((double) 0)){
failNum+=1;
}
}
Double ui = (double)failNum / (double)n;
list.add(new UPoint(
getCL((double)n),
i,
ui,
name
));
}
}
/**
* desc: get the control limits of the U chart (the closed-form limits are summarised just after this class)
* Note: this must be called after initialDate() has been executed.
* */
public ControlLimit getCL(Double n){
Double mul = 3 * Math.sqrt( this.ubar / n );
Double lcl = (this.ubar-mul)<0?0:(this.ubar-mul);
return new ControlLimit(
this.ubar + mul,
this.ubar,
lcl
);
}
}
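For reference, getCL(n) above implements the standard u-chart control limits, recomputed for each subgroup's sample size n and clamped at zero:

\mathrm{UCL} = \bar{u} + 3\sqrt{\bar{u}/n}, \qquad \mathrm{CL} = \bar{u}, \qquad \mathrm{LCL} = \max\bigl(0,\ \bar{u} - 3\sqrt{\bar{u}/n}\bigr)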

View File

@ -75,7 +75,7 @@ public class MeanRGraph {
this.specificationLimit = new SpecificationLimit(
productFeatures.getUsl()==null?null:productFeatures.getUsl(),
productFeatures.getSl()==null?null:productFeatures.getSl(),
productFeatures.getUsl()==null?null:productFeatures.getUsl()
productFeatures.getLsl()==null?null:productFeatures.getLsl()
);
}
@ -102,6 +102,7 @@ public class MeanRGraph {
List<String> dropNames = new ArrayList<>();
dropNames.add("transationId");
dropNames.add("inspectionSheetId");
dropNames.add("batchNum");
queryDataParam.setDropedTagNames(dropNames);
queryDataParam.setTag(new Tag("argName",argName));

View File

@ -72,7 +72,7 @@ public class MeanStandardDeviationGraph {
this.specificationLimit = new SpecificationLimit(
productFeatures.getUsl()==null?null:productFeatures.getUsl(),
productFeatures.getSl()==null?null:productFeatures.getSl(),
productFeatures.getUsl()==null?null:productFeatures.getUsl()
productFeatures.getLsl()==null?null:productFeatures.getLsl()
);
}
@ -99,6 +99,7 @@ public class MeanStandardDeviationGraph {
List<String> dropNames = new ArrayList<>();
dropNames.add("transationId");
dropNames.add("inspectionSheetId");
dropNames.add("batchNum");
queryDataParam.setDropedTagNames(dropNames);
queryDataParam.setTag(new Tag("argName",argName));

View File

@ -69,7 +69,7 @@ public class XMRGraph {
this.specificationLimit = new SpecificationLimit(
productFeatures.getUsl()==null?null:productFeatures.getUsl(),
productFeatures.getSl()==null?null:productFeatures.getSl(),
productFeatures.getUsl()==null?null:productFeatures.getUsl()
productFeatures.getLsl()==null?null:productFeatures.getLsl()
);
}
@ -96,6 +96,7 @@ public class XMRGraph {
List<String> dropNames = new ArrayList<>();
dropNames.add("transationId");
dropNames.add("inspectionSheetId");
dropNames.add("batchNum");
queryDataParam.setDropedTagNames(dropNames);
queryDataParam.setTag(new Tag("argName",argName));

View File

@ -0,0 +1,28 @@
package com.cnbm.qualityPlanning.entity;
import lombok.Data;
/**
* @Desc: "整合,处理好 后的一行数据 (sampleSize 后的数据) "
* @Author: caixiang
* @DATE: 2022/7/21 9:43
*/
@Data
public class CPoint {
private ControlLimit controlLimit;
private Integer position;
//number of nonconforming items
private Double value;
private String name;
public CPoint(ControlLimit controlLimit, Integer position, Double value, String name) {
this.controlLimit = controlLimit;
this.position = position;
this.value = value;
this.name = name;
}
}

View File

@ -0,0 +1,32 @@
package com.cnbm.qualityPlanning.entity;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
/**
* @Desc: "整合,处理好 后的一行数据 (sampleSize 后的数据) "
* @Author: caixiang
* @DATE: 2022/7/21 9:43
*/
@Data
public class NPPoint{
private ControlLimit controlLimit;
private Integer position;
//number of nonconforming items
private Integer value;
private String name;
public NPPoint(ControlLimit controlLimit, Integer position, Integer value,String name) {
this.controlLimit = controlLimit;
this.position = position;
this.value = value;
this.name = name;
}
}

View File

@ -0,0 +1,28 @@
package com.cnbm.qualityPlanning.entity;
import lombok.Data;
/**
* @Desc: "整合,处理好 后的一行数据 (sampleSize 后的数据) "
* @Author: caixiang
* @DATE: 2022/7/21 9:43
*/
@Data
public class PPoint {
private ControlLimit controlLimit;
private Integer position;
//fraction nonconforming (failNum / n)
private Double value;
private String name;
public PPoint(ControlLimit controlLimit, Integer position, Double value,String name) {
this.controlLimit = controlLimit;
this.position = position;
this.value = value;
this.name = name;
}
}

View File

@ -0,0 +1,28 @@
package com.cnbm.qualityPlanning.entity;
import lombok.Data;
/**
* @Desc: "整合,处理好 后的一行数据 (sampleSize 后的数据) "
* @Author: caixiang
* @DATE: 2022/7/21 9:43
*/
@Data
public class UPoint {
private ControlLimit controlLimit;
private Integer position;
//nonconformities per unit (failNum / n)
private Double value;
private String name;
public UPoint(ControlLimit controlLimit, Integer position, Double value, String name) {
this.controlLimit = controlLimit;
this.position = position;
this.value = value;
this.name = name;
}
}