yanyang #8

Merged
yanyang merged 4 commits from yanyang into master 2022-08-09 15:25:37 +08:00
58 changed files with 2158 additions and 130 deletions
Showing only changes of commit 848eb60f43

29
README.md Normal file
View File

@ -0,0 +1,29 @@
**Project description**
- This framework is reworked from the renren framework
- It uses Spring Boot, MyBatis and Spring Security
- A code generator is provided: only about 30% of the code needs to be written by hand, the rest is generated automatically, so development tasks can be finished quickly
- The code generator uses the Velocity template engine; if needed, the .vm templates, package names and other basics can be adjusted in the ym-generator module
<br>
**Notes when adding a business module**
- A new business module must depend on ym-admin, and the ym-gateway module's Maven build must add a dependency on the new module
- Generated controller endpoints all carry authorization annotations; during development they can be commented out first, and once development is finished the permissions are added to the database (see the sketch after this list)
<br>
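A minimal sketch of the generated pattern (the `@ex.hasAuthority` expression and the `page` endpoint shape match the controllers in this PR; the package, controller name and permission code `demo:example:page` are illustrative only):

```java
package com.cnbm.demo.controller; // illustrative package for a new business module

import com.cnbm.common.page.PageData;
import com.cnbm.common.utils.Result;
import org.springframework.security.access.prepost.PreAuthorize;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;

import java.util.Map;

@RestController
@RequestMapping("demo/example")
public class ExampleController {

    // Generated endpoints carry this annotation; comment it out while developing,
    // then register the permission code ("demo:example:page") in the database.
    @GetMapping("page")
    @PreAuthorize("@ex.hasAuthority('demo:example:page')")
    public Result<PageData<Object>> page(@RequestParam Map<String, Object> params) {
        return new Result<PageData<Object>>(); // real code would delegate to the module's service
    }
}
```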
**Technology stack:**
- Core framework: Spring Boot 2.7
- Security framework: Spring Security
- Persistence framework: MyBatis 3.5
- Scheduler: Quartz 2.3
- Database connection pool: HikariCP
- Logging: Log4j2
- Front end: Vue 2.x
<br>
**Software requirements**
- JDK 1.8
- Maven 3.0+
- MySQL 8.0
- Redis 6.0+
<br>

11
pom.xml
View File

@ -35,7 +35,7 @@
<java.version>1.8</java.version>
<junit.version>4.13.2</junit.version>
<jedis.version>4.2.2</jedis.version>
<!-- <druid.version>1.2.9</druid.version>-->
<druid.version>1.2.9</druid.version>
<mybatisplus.version>3.5.1</mybatisplus.version>
<sqlserver.version>4.0</sqlserver.version>
<oracle.version>11.2.0.3</oracle.version>
@ -136,9 +136,14 @@
<artifactId>postgresql</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-data-jdbc</artifactId>
<groupId>com.alibaba</groupId>
<artifactId>druid-spring-boot-starter</artifactId>
<version>${druid.version}</version>
</dependency>
<!-- <dependency>-->
<!-- <groupId>org.springframework.boot</groupId>-->
<!-- <artifactId>spring-boot-starter-data-jdbc</artifactId>-->
<!-- </dependency>-->
<dependency>
<groupId>cn.afterturn</groupId>

View File

@ -15,6 +15,7 @@ CREATE TABLE sys_user (
create_date datetime COMMENT '创建时间',
updater bigint COMMENT '更新者',
update_date datetime COMMENT '更新时间',
valid tinyint COMMENT '删除标识是否有效1可用 0不可用',
primary key (id),
unique key uk_username (username),
key idx_create_date (create_date)
@ -31,6 +32,7 @@ CREATE TABLE sys_dept (
create_date datetime COMMENT '创建时间',
updater bigint COMMENT '更新者',
update_date datetime COMMENT '更新时间',
valid tinyint COMMENT '删除标识是否有效1可用 0不可用',
primary key (id),
key idx_pid (pid),
key idx_sort (sort)
@ -47,6 +49,7 @@ create table sys_role
create_date datetime COMMENT '创建时间',
updater bigint COMMENT '更新者',
update_date datetime COMMENT '更新时间',
valid tinyint COMMENT '删除标识是否有效1可用 0不可用',
primary key (id),
key idx_dept_id (dept_id)
)ENGINE=InnoDB DEFAULT CHARACTER SET utf8mb4 COMMENT='角色管理';
@ -66,6 +69,7 @@ create table sys_menu
create_date datetime COMMENT '创建时间',
updater bigint COMMENT '更新者',
update_date datetime COMMENT '更新时间',
valid tinyint COMMENT '删除标识是否有效1可用 0不可用',
primary key (id),
key idx_pid (pid),
key idx_sort (sort)
@ -79,6 +83,7 @@ create table sys_role_user
user_id bigint COMMENT '用户ID',
creator bigint COMMENT '创建者',
create_date datetime COMMENT '创建时间',
valid tinyint COMMENT '删除标识是否有效1可用 0不可用',
primary key (id),
key idx_role_id (role_id),
key idx_user_id (user_id)
@ -92,6 +97,7 @@ create table sys_role_menu
menu_id bigint COMMENT '菜单ID',
creator bigint COMMENT '创建者',
create_date datetime COMMENT '创建时间',
valid tinyint COMMENT '删除标识是否有效1可用 0不可用',
primary key (id),
key idx_role_id (role_id),
key idx_menu_id (menu_id)
@ -105,6 +111,7 @@ create table sys_role_data_scope
dept_id bigint COMMENT '部门ID',
creator bigint COMMENT '创建者',
create_date datetime COMMENT '创建时间',
valid tinyint COMMENT '删除标识是否有效1可用 0不可用',
primary key (id),
key idx_role_id (role_id)
)ENGINE=InnoDB DEFAULT CHARACTER SET utf8mb4 COMMENT='角色数据权限';
@ -121,6 +128,7 @@ create table sys_params
create_date datetime COMMENT '创建时间',
updater bigint COMMENT '更新者',
update_date datetime COMMENT '更新时间',
valid tinyint COMMENT '删除标识是否有效1可用 0不可用',
primary key (id),
unique key uk_param_code (param_code),
key idx_create_date (create_date)
@ -138,6 +146,7 @@ create table sys_dict_type
create_date datetime COMMENT '创建时间',
updater bigint COMMENT '更新者',
update_date datetime COMMENT '更新时间',
valid tinyint COMMENT '删除标识是否有效1可用 0不可用',
primary key (id),
UNIQUE KEY(dict_type)
)ENGINE=InnoDB DEFAULT CHARACTER SET utf8mb4 COMMENT='字典类型';
@ -155,6 +164,7 @@ create table sys_dict_data
create_date datetime COMMENT '创建时间',
updater bigint COMMENT '更新者',
update_date datetime COMMENT '更新时间',
valid tinyint COMMENT '删除标识是否有效1可用 0不可用',
primary key (id),
unique key uk_dict_type_value (dict_type_id, dict_value),
key idx_sort (sort)
@ -171,6 +181,7 @@ create table sys_log_login
creator_name varchar(50) COMMENT '用户名',
creator bigint COMMENT '创建者',
create_date datetime COMMENT '创建时间',
valid tinyint COMMENT '删除标识是否有效1可用 0不可用',
primary key (id),
key idx_status (status),
key idx_create_date (create_date)
@ -191,6 +202,7 @@ create table sys_log_operation
creator_name varchar(50) COMMENT '用户名',
creator bigint COMMENT '创建者',
create_date datetime COMMENT '创建时间',
valid tinyint COMMENT '删除标识是否有效1可用 0不可用',
primary key (id),
key idx_create_date (create_date)
)ENGINE=InnoDB DEFAULT CHARACTER SET utf8mb4 COMMENT='操作日志';
@ -207,6 +219,7 @@ create table sys_log_error
error_info text COMMENT '异常信息',
creator bigint COMMENT '创建者',
create_date datetime COMMENT '创建时间',
valid tinyint COMMENT '删除标识是否有效1可用 0不可用',
primary key (id),
key idx_create_date (create_date)
)ENGINE=InnoDB DEFAULT CHARACTER SET utf8mb4 COMMENT='异常日志';
@ -218,6 +231,7 @@ CREATE TABLE sys_oss (
url varchar(200) COMMENT 'URL地址',
creator bigint COMMENT '创建者',
create_date datetime COMMENT '创建时间',
valid tinyint COMMENT '删除标识是否有效1可用 0不可用',
PRIMARY KEY (id),
key idx_create_date (create_date)
) ENGINE=InnoDB DEFAULT CHARACTER SET utf8mb4 COMMENT='文件上传';
@ -234,6 +248,7 @@ CREATE TABLE schedule_job (
create_date datetime COMMENT '创建时间',
updater bigint COMMENT '更新者',
update_date datetime COMMENT '更新时间',
valid tinyint COMMENT '删除标识是否有效1可用 0不可用',
PRIMARY KEY (id),
key idx_create_date (create_date)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COMMENT='定时任务';
@ -248,6 +263,7 @@ CREATE TABLE schedule_job_log (
error varchar(2000) DEFAULT NULL COMMENT '失败信息',
times int NOT NULL COMMENT '耗时(单位:毫秒)',
create_date datetime COMMENT '创建时间',
valid tinyint COMMENT '删除标识是否有效1可用 0不可用',
PRIMARY KEY (id),
key idx_job_id (job_id),
key idx_create_date (create_date)
@ -261,6 +277,7 @@ CREATE TABLE sys_user_token (
expire_date datetime COMMENT '过期时间',
update_date datetime COMMENT '更新时间',
create_date datetime COMMENT '创建时间',
valid tinyint COMMENT '删除标识是否有效1可用 0不可用',
PRIMARY KEY (id),
UNIQUE KEY user_id (user_id),
UNIQUE KEY token (token)
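Every table above gains the same `valid` soft-delete flag (1 = usable, 0 = not). In this PR the flag is set explicitly through `WhetherEnum` / `BaseSupportUtils` (shown further down); purely as an assumed alternative, the same column could be driven by MyBatis-Plus logical deletes, sketched below (the entity placement and the `@TableLogic` mapping are not part of this PR):

```java
import com.baomidou.mybatisplus.annotation.TableLogic;
import java.io.Serializable;

// Illustrative base entity; the project's real com.cnbm.common.entity.BaseEntity is not shown here.
public abstract class AuditedEntity implements Serializable {

    // ... existing audit columns (creator, create_date, updater, update_date) omitted

    /**
     * Soft-delete flag added by this script: 1 = valid, 0 = deleted.
     * With @TableLogic, MyBatis-Plus rewrites deleteById() into
     * "UPDATE ... SET valid = 0" and appends "valid = 1" to generated queries.
     */
    @TableLogic(value = "1", delval = "0")
    private Integer valid;
}
```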

View File

@ -21,6 +21,7 @@ import org.springframework.stereotype.Component;
import javax.servlet.http.HttpServletRequest;
import java.lang.reflect.Method;
import java.util.Date;
/**
* @Author weihongyang
@ -78,6 +79,7 @@ public class LogOperationAspect {
LoginUser loginUser = (LoginUser) authentication.getPrincipal();
if(loginUser != null){
log.setCreatorName(loginUser.getUsername());
log.setCreator(loginUser.getSysUserEntity().getCreator());
}
log.setStatus(status);
@ -89,6 +91,7 @@ public class LogOperationAspect {
log.setUserAgent(request.getHeader(HttpHeaders.USER_AGENT));
log.setRequestUri(request.getRequestURI());
log.setRequestMethod(request.getMethod());
log.setCreateDate(new Date());
//请求参数
Object[] args = joinPoint.getArgs();

View File

@ -0,0 +1,29 @@
package com.cnbm.admin.config;
import com.baomidou.mybatisplus.extension.plugins.MybatisPlusInterceptor;
import com.baomidou.mybatisplus.extension.plugins.inner.BlockAttackInnerInterceptor;
import com.baomidou.mybatisplus.extension.plugins.inner.OptimisticLockerInnerInterceptor;
import com.baomidou.mybatisplus.extension.plugins.inner.PaginationInnerInterceptor;
import com.cnbm.common.interceptor.DataFilterInterceptor;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
@Configuration
public class MybatisPlusConfig {
@Bean
public MybatisPlusInterceptor mybatisPlusInterceptor() {
MybatisPlusInterceptor mybatisPlusInterceptor = new MybatisPlusInterceptor();
// 数据权限
mybatisPlusInterceptor.addInnerInterceptor(new DataFilterInterceptor());
// 分页插件
mybatisPlusInterceptor.addInnerInterceptor(new PaginationInnerInterceptor());
// 乐观锁
mybatisPlusInterceptor.addInnerInterceptor(new OptimisticLockerInnerInterceptor());
// 防止全表更新与删除
mybatisPlusInterceptor.addInnerInterceptor(new BlockAttackInnerInterceptor());
return mybatisPlusInterceptor;
}
}
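With this interceptor chain registered, a plain mapper call can be paged and is protected against full-table update/delete; a minimal sketch using the SysLogLoginDao added in this PR (the wrapper class itself is illustrative):

```java
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import com.cnbm.admin.dao.SysLogLoginDao;
import com.cnbm.admin.entity.SysLogLoginEntity;

public class PagingSketch {

    private final SysLogLoginDao sysLogLoginDao;

    public PagingSketch(SysLogLoginDao sysLogLoginDao) {
        this.sysLogLoginDao = sysLogLoginDao;
    }

    // PaginationInnerInterceptor turns this into a LIMIT/OFFSET query plus a COUNT query.
    public IPage<SysLogLoginEntity> firstPage() {
        return sysLogLoginDao.selectPage(new Page<>(1, 10), null);
    }
}
```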

View File

@ -6,6 +6,7 @@ import com.cnbm.admin.service.CaptchaService;
import com.cnbm.admin.service.LoginService;
import com.cnbm.admin.utils.ResponseResult;
import com.cnbm.common.exception.ErrorCode;
import com.cnbm.common.utils.Result;
import com.cnbm.common.validator.AssertUtils;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiImplicitParam;
@ -52,13 +53,16 @@ public class LoginController {
@PostMapping("/login")
@ApiOperation(value = "登录")
public ResponseResult login(HttpServletRequest request, @RequestBody LoginParam loginParam) {
public Result login(HttpServletRequest request, @RequestBody LoginParam loginParam) {
return loginService.login(request,loginParam);
}
@PostMapping("/doLogout")
@PostMapping("logout")
@ApiOperation(value = "退出")
public void logout(){
public Result logout(HttpServletRequest request){
loginService.logout(request);
return new Result();
}
}

View File

@ -40,10 +40,11 @@ public class SysDictDataController {
@ApiImplicitParam(name = Constant.LIMIT, value = "每页显示记录数", paramType = "query",required = true, dataTypeClass=Integer.class) ,
@ApiImplicitParam(name = Constant.ORDER_FIELD, value = "排序字段", paramType = "query", dataTypeClass=String.class) ,
@ApiImplicitParam(name = Constant.ORDER, value = "排序方式,可选值(asc、desc)", paramType = "query", dataTypeClass=String.class) ,
@ApiImplicitParam(name = "dictTypeId", value = "字典类型id", paramType = "query", dataTypeClass = String.class),
@ApiImplicitParam(name = "dictLabel", value = "字典标签", paramType = "query", dataTypeClass=String.class),
@ApiImplicitParam(name = "dictValue", value = "字典值", paramType = "query", dataTypeClass=String.class)
})
@PreAuthorize("@ex.hasAuthority('sys:dept:page')")
@PreAuthorize("@ex.hasAuthority('sys:dict:page')")
public Result<PageData<SysDictDataDTO>> page(@ApiIgnore @RequestParam Map<String, Object> params){
//字典类型
PageData<SysDictDataDTO> page = sysDictDataService.page(params);

View File

@ -0,0 +1,69 @@
package com.cnbm.admin.controller;
import com.cnbm.admin.annotation.LogOperation;
import com.cnbm.admin.dto.SysLogLoginDTO;
import com.cnbm.admin.execl.SysLogLoginExcel;
import com.cnbm.admin.service.SysLogLoginService;
import com.cnbm.common.constant.Constant;
import com.cnbm.common.page.PageData;
import com.cnbm.common.utils.ExcelUtils;
import com.cnbm.common.utils.Result;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiImplicitParam;
import io.swagger.annotations.ApiImplicitParams;
import io.swagger.annotations.ApiOperation;
import lombok.extern.log4j.Log4j2;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.security.access.prepost.PreAuthorize;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
import springfox.documentation.annotations.ApiIgnore;
import javax.servlet.http.HttpServletResponse;
import java.util.List;
import java.util.Map;
@RestController
@RequestMapping("sys/log/login")
@Api(tags="登录日志")
@Log4j2
public class SysLogLoginController {
@Autowired
private SysLogLoginService sysLogLoginService;
@GetMapping("page")
@ApiOperation("分页")
@ApiImplicitParams({
@ApiImplicitParam(name = Constant.PAGE, value = "当前页码从1开始", paramType = "query", required = true, dataTypeClass=Integer.class) ,
@ApiImplicitParam(name = Constant.LIMIT, value = "每页显示记录数", paramType = "query",required = true, dataTypeClass=Integer.class) ,
@ApiImplicitParam(name = Constant.ORDER_FIELD, value = "排序字段", paramType = "query", dataTypeClass=String.class) ,
@ApiImplicitParam(name = Constant.ORDER, value = "排序方式,可选值(asc、desc)", paramType = "query", dataTypeClass=String.class) ,
@ApiImplicitParam(name = "status", value = "状态 0失败 1成功 2账号已锁定", paramType = "query", dataTypeClass=Integer.class),
@ApiImplicitParam(name = "creatorName", value = "用户名", paramType = "query", dataTypeClass=String.class)
})
@PreAuthorize("@ex.hasAuthority('sys:log:login')")
public Result<PageData<SysLogLoginDTO>> page(@ApiIgnore @RequestParam Map<String, Object> params){
PageData<SysLogLoginDTO> page = sysLogLoginService.page(params);
log.info("PageData<SysLogLoginDTO>====={}",page.toString());
return new Result<PageData<SysLogLoginDTO>>().ok(page);
}
@GetMapping("export")
@ApiOperation("导出")
@LogOperation("导出")
@ApiImplicitParams({
@ApiImplicitParam(name = "status", value = "状态 0失败 1成功 2账号已锁定", paramType = "query", dataTypeClass=Integer.class),
@ApiImplicitParam(name = "creatorName", value = "用户名", paramType = "query", dataTypeClass=String.class)
})
@PreAuthorize("@ex.hasAuthority('sys:log:login')")
public void export(@ApiIgnore @RequestParam Map<String, Object> params, HttpServletResponse response) throws Exception {
List<SysLogLoginDTO> list = sysLogLoginService.list(params);
ExcelUtils.exportExcelToTarget(response, null, list, SysLogLoginExcel.class);
}
}

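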
View File

@ -0,0 +1,10 @@
package com.cnbm.admin.dao;
import com.cnbm.admin.entity.SysLogLoginEntity;
import com.cnbm.common.dao.BaseDao;
import org.apache.ibatis.annotations.Mapper;
@Mapper
public interface SysLogLoginDao extends BaseDao<SysLogLoginEntity> {
}

View File

@ -0,0 +1,36 @@
package com.cnbm.admin.dto;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;
import java.io.Serializable;
import java.util.Date;
@Data
@ApiModel(value = "登录日志")
public class SysLogLoginDTO implements Serializable {
private static final long serialVersionUID = 1L;
@ApiModelProperty(value = "id")
private Long id;
@ApiModelProperty(value = "用户操作 0用户登录 1用户退出")
private Integer operation;
@ApiModelProperty(value = "状态 0失败 1成功 2账号已锁定")
private Integer status;
@ApiModelProperty(value = "用户代理")
private String userAgent;
@ApiModelProperty(value = "操作IP")
private String ip;
@ApiModelProperty(value = "用户名")
private String creatorName;
@ApiModelProperty(value = "创建时间")
private Date createDate;
}

View File

@ -1,16 +1,19 @@
package com.cnbm.admin.entity;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import lombok.Data;
import lombok.NoArgsConstructor;
import java.io.Serializable;
import java.util.Collection;
import java.util.List;
import java.util.stream.Collectors;
import org.springframework.security.core.GrantedAuthority;
import org.springframework.security.core.authority.SimpleGrantedAuthority;
import org.springframework.security.core.userdetails.UserDetails;
import java.util.Collection;
import java.util.List;
import java.util.stream.Collectors;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import lombok.Data;
import lombok.NoArgsConstructor;
/**
* @Author weihongyang
@ -20,7 +23,9 @@ import java.util.stream.Collectors;
@Data
@NoArgsConstructor
@JsonIgnoreProperties(ignoreUnknown = true)
public class LoginUser implements UserDetails {
public class LoginUser implements UserDetails,Serializable {
private static final long serialVersionUID = 1L;
private SysUserEntity sysUserEntity;

View File

@ -0,0 +1,35 @@
package com.cnbm.admin.entity;
import com.baomidou.mybatisplus.annotation.TableName;
import com.cnbm.common.entity.BaseEntity;
import lombok.Data;
import lombok.EqualsAndHashCode;
@Data
@EqualsAndHashCode(callSuper=false)
@TableName("sys_log_login")
public class SysLogLoginEntity extends BaseEntity {
private static final long serialVersionUID = 1L;
/**
* 用户操作 0用户登录 1用户退出
*/
private Integer operation;
/**
* 状态 0失败 1成功 2账号已锁定
*/
private Integer status;
/**
* 用户代理
*/
private String userAgent;
/**
* 操作IP
*/
private String ip;
/**
* 用户名
*/
private String creatorName;
}

View File

@ -0,0 +1,22 @@
package com.cnbm.admin.enums;
public enum LoginOperationEnum {
/**
* 用户登录
*/
LOGIN(0),
/**
* 用户退出
*/
LOGOUT(1);
private int value;
LoginOperationEnum(int value) {
this.value = value;
}
public int value() {
return this.value;
}
}

View File

@ -0,0 +1,26 @@
package com.cnbm.admin.enums;
public enum LoginStatusEnum {
/**
* 失败
*/
FAIL(0),
/**
* 成功
*/
SUCCESS(1),
/**
* 账号已锁定
*/
LOCK(2);
private int value;
LoginStatusEnum(int value) {
this.value = value;
}
public int value() {
return this.value;
}
}

View File

@ -0,0 +1,52 @@
package com.cnbm.admin.enums;
/**
* <p>
* 是否有效枚举
* </P>
*
* @author xcc
* @date 2022年7月5日
* @since 1.0
*/
public enum WhetherEnum {
NO(0, ""),
YES(1, "");
private final Integer value;
private final String label;
private final String remark;
WhetherEnum(final int value, final String label) {
this(value, label, null);
}
WhetherEnum(final int value, final String label, final String remark) {
this.value = value;
this.label = label;
this.remark = remark;
}
/**
* @return 数据值
*/
public Integer getValue() {
return value;
}
/**
* @return 标签名
*/
public String getLabel() {
return label;
}
/**
* @return 备注
*/
public String getRemark() {
return remark;
}
}

View File

@ -0,0 +1,24 @@
package com.cnbm.admin.execl;
import cn.afterturn.easypoi.excel.annotation.Excel;
import lombok.Data;
import java.util.Date;
@Data
public class SysLogLoginExcel {
@Excel(name = "用户操作")
private String operation;
@Excel(name = "状态", replace = {"失败_0", "成功_1", "账号已锁定_1"})
private Integer status;
@Excel(name = "User-Agent")
private String userAgent;
@Excel(name = "操作IP")
private String ip;
@Excel(name = "用户名")
private String creatorName;
@Excel(name = "创建时间", format = "yyyy-MM-dd HH:mm:ss")
private Date createDate;
}

View File

@ -1,7 +1,7 @@
package com.cnbm.admin.service;
import com.cnbm.admin.params.LoginParam;
import com.cnbm.admin.utils.ResponseResult;
import com.cnbm.common.utils.Result;
import javax.servlet.http.HttpServletRequest;
@ -12,6 +12,8 @@ import javax.servlet.http.HttpServletRequest;
*/
public interface LoginService {
ResponseResult login(HttpServletRequest request, LoginParam loginParam);
Result login(HttpServletRequest request, LoginParam loginParam);
void logout(HttpServletRequest request);
}

View File

@ -0,0 +1,18 @@
package com.cnbm.admin.service;
import com.cnbm.admin.dto.SysLogLoginDTO;
import com.cnbm.admin.entity.SysLogLoginEntity;
import com.cnbm.common.page.PageData;
import com.cnbm.common.service.BaseService;
import java.util.List;
import java.util.Map;
public interface SysLogLoginService extends BaseService<SysLogLoginEntity> {
PageData<SysLogLoginDTO> page(Map<String, Object> params);
List<SysLogLoginDTO> list(Map<String, Object> params);
void save(SysLogLoginEntity entity);
}

View File

@ -1,14 +1,22 @@
package com.cnbm.admin.service.impl;
import com.cnbm.admin.entity.LoginUser;
import com.cnbm.admin.entity.SysLogLoginEntity;
import com.cnbm.admin.enums.LoginOperationEnum;
import com.cnbm.admin.enums.LoginStatusEnum;
import com.cnbm.admin.params.LoginParam;
import com.cnbm.admin.service.CaptchaService;
import com.cnbm.admin.service.SysLogLoginService;
import com.cnbm.admin.utils.JwtUtil;
import com.cnbm.admin.service.LoginService;
import com.cnbm.admin.utils.ResponseResult;
import com.cnbm.common.exception.ErrorCode;
import com.cnbm.common.utils.IpUtils;
import com.cnbm.common.utils.Result;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.http.HttpHeaders;
import org.springframework.security.authentication.AuthenticationManager;
import org.springframework.security.authentication.UsernamePasswordAuthenticationToken;
import org.springframework.security.core.Authentication;
@ -16,6 +24,7 @@ import org.springframework.security.core.context.SecurityContextHolder;
import org.springframework.stereotype.Service;
import javax.servlet.http.HttpServletRequest;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;
import java.util.Objects;
@ -37,16 +46,30 @@ public class LoginServiceImpl implements LoginService {
@Autowired
private CaptchaService captchaService;
@Autowired
private SysLogLoginService sysLogLoginService;
@Override
public ResponseResult login(HttpServletRequest request, LoginParam loginParam) {
public Result login(HttpServletRequest request, LoginParam loginParam) {
//验证码是否正确
boolean flag = captchaService.validate(loginParam.getUuid(), loginParam.getCaptcha());
if(!flag){
return new ResponseResult(ErrorCode.CAPTCHA_ERROR,"验证码错误");
return new Result<>().error(ErrorCode.CAPTCHA_ERROR, "验证码错误");
}
UsernamePasswordAuthenticationToken authenticationToken = new UsernamePasswordAuthenticationToken(loginParam.getUsername(),loginParam.getPassword());
Authentication authenticate = authenticationManager.authenticate(authenticationToken);
//登录日志
SysLogLoginEntity log = new SysLogLoginEntity();
log.setOperation(LoginOperationEnum.LOGIN.value());
log.setCreateDate(new Date());
log.setIp(IpUtils.getIpAddr(request));
log.setUserAgent(request.getHeader(HttpHeaders.USER_AGENT));
log.setCreatorName(loginParam.getUsername());
if (Objects.isNull(authenticate)) {
log.setStatus(LoginStatusEnum.FAIL.value());
log.setCreatorName(loginParam.getUsername());
sysLogLoginService.save(log);
throw new RuntimeException("登录失败");
}
//如果认证通过了使用userid生成一个jwt jwt存入ResponseResult返回
@ -57,7 +80,33 @@ public class LoginServiceImpl implements LoginService {
map.put("token",jwt);
//把完整的用户信息存入redis userid作为key
redisTemplate.opsForValue().set("login:"+userid,loginUser);
return new ResponseResult(200,"登录成功",map);
//登录成功
log.setStatus(LoginStatusEnum.SUCCESS.value());
log.setCreator(loginUser.getSysUserEntity().getId());
log.setCreatorName(loginUser.getUsername());
sysLogLoginService.save(log);
return new Result<>().ok(map);
}
@Override
public void logout(HttpServletRequest request) {
//获取SecurityContextHolder中的用户id
UsernamePasswordAuthenticationToken authentication = (UsernamePasswordAuthenticationToken) SecurityContextHolder.getContext().getAuthentication();
LoginUser loginUser = (LoginUser) authentication.getPrincipal();
Long userid = loginUser.getSysUserEntity().getId();
//日志
SysLogLoginEntity log = new SysLogLoginEntity();
log.setOperation(LoginOperationEnum.LOGOUT.value());
log.setIp(IpUtils.getIpAddr(request));
log.setUserAgent(request.getHeader(HttpHeaders.USER_AGENT));
log.setStatus(LoginStatusEnum.SUCCESS.value());
log.setCreator(userid);
log.setCreatorName(loginUser.getUsername());
log.setCreateDate(new Date());
sysLogLoginService.save(log);
//删除redis中的值
redisTemplate.delete("login:"+userid);
}
}

View File

@ -0,0 +1,59 @@
package com.cnbm.admin.service.impl;
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.cnbm.admin.dao.SysLogLoginDao;
import com.cnbm.admin.dto.SysLogLoginDTO;
import com.cnbm.admin.entity.SysLogLoginEntity;
import com.cnbm.admin.service.SysLogLoginService;
import com.cnbm.common.constant.Constant;
import com.cnbm.common.page.PageData;
import com.cnbm.common.service.impl.BaseServiceImpl;
import com.cnbm.common.utils.ConvertUtils;
import lombok.extern.log4j.Log4j2;
import org.apache.commons.lang3.StringUtils;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import java.util.List;
import java.util.Map;
@Service
@Log4j2
public class SysLogLoginServiceImpl extends BaseServiceImpl<SysLogLoginDao, SysLogLoginEntity> implements SysLogLoginService {
@Override
public PageData<SysLogLoginDTO> page(Map<String, Object> params) {
IPage<SysLogLoginEntity> page = baseDao.selectPage(
getPage(params, Constant.CREATE_DATE, false),
getWrapper(params)
);
log.info("page======{}",page.toString());
return getPageData(page, SysLogLoginDTO.class);
}
@Override
public List<SysLogLoginDTO> list(Map<String, Object> params) {
List<SysLogLoginEntity> entityList = baseDao.selectList(getWrapper(params));
return ConvertUtils.sourceToTarget(entityList, SysLogLoginDTO.class);
}
private QueryWrapper<SysLogLoginEntity> getWrapper(Map<String, Object> params){
String status = (String) params.get("status");
String creatorName = (String) params.get("creatorName");
QueryWrapper<SysLogLoginEntity> wrapper = new QueryWrapper<>();
wrapper.eq(StringUtils.isNotBlank(status), "status", status);
wrapper.like(StringUtils.isNotBlank(creatorName), "creator_name", creatorName);
return wrapper;
}
@Override
@Transactional(rollbackFor = Exception.class)
public void save(SysLogLoginEntity entity) {
insert(entity);
}
}

View File

@ -5,23 +5,24 @@ import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
import com.cnbm.admin.dto.SysMenuDTO;
import com.cnbm.admin.dto.SysUserDTO;
import com.cnbm.admin.entity.LoginUser;
import com.cnbm.admin.entity.SysLogLoginEntity;
import com.cnbm.admin.entity.SysMenuEntity;
import com.cnbm.admin.entity.SysUserEntity;
import com.cnbm.admin.enums.LoginOperationEnum;
import com.cnbm.admin.exception.LoginStatusException;
import com.cnbm.admin.service.SysMenuService;
import com.cnbm.common.utils.ConvertUtils;
import com.cnbm.common.utils.IpUtils;
import lombok.extern.log4j.Log4j2;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpHeaders;
import org.springframework.security.authentication.DisabledException;
import org.springframework.security.core.userdetails.UserDetails;
import org.springframework.security.core.userdetails.UserDetailsService;
import org.springframework.security.core.userdetails.UsernameNotFoundException;
import org.springframework.stereotype.Service;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
import java.util.Set;
import java.util.*;
import java.util.stream.Collectors;
/**
@ -45,6 +46,7 @@ public class UserDetailsServiceImpl implements UserDetailsService {
LambdaQueryWrapper<SysUserEntity> lambdaQueryWrapper = new LambdaQueryWrapper<>();
lambdaQueryWrapper.eq(SysUserEntity::getUsername,username);
SysUserEntity sysUserEntity = userDao.selectOne(lambdaQueryWrapper);
if (Objects.isNull(sysUserEntity)) {
throw new UsernameNotFoundException("用户名不存在");
}

View File

@ -0,0 +1,136 @@
/*
* Copyright (c) 2018.
* http://www.ulabcare.com
*/
package com.cnbm.admin.utils;
import com.cnbm.admin.entity.LoginUser;
import com.cnbm.admin.entity.SysUserEntity;
import com.cnbm.admin.enums.WhetherEnum;
import lombok.Builder;
import lombok.Data;
import org.springframework.beans.BeanUtils;
import org.springframework.security.authentication.UsernamePasswordAuthenticationToken;
import org.springframework.security.core.context.SecurityContextHolder;
import java.io.Serializable;
import java.time.LocalDateTime;
/**
* 接口支持基类
*
* @author jiff
* @date 2018/11/1
* @since 1.0
*/
public abstract class BaseSupportUtils {
/**
* 获取当前登录用户信息
*
* @return
*/
private static SysUserEntity getLoginUser() {
//登录用户信息
UsernamePasswordAuthenticationToken authentication = (UsernamePasswordAuthenticationToken) SecurityContextHolder.getContext().getAuthentication();
LoginUser loginUser = (LoginUser) authentication.getPrincipal();
return loginUser.getSysUserEntity();
}
/**
* 设置公共字段值一般用于创建新记录包含以下字段
*
* <p>
* {@link CommonField#enabled}<br>
* {@link CommonField#valid}<br>
* {@link CommonField#creatorId}<br>
* {@link CommonField#creatorName}<br>
* {@link CommonField#createTime}<br>
* {@link CommonField#updaterId}<br>
* {@link CommonField#updaterName}<br>
* {@link CommonField#updateTime}<br>
* </p>
*
* @param t 需要设置的对象
* @param ignoreProperties 忽略的字段
* @param <T>
*/
public static <T extends Serializable> T setCommonField(T t, String... ignoreProperties) {
CommonField commonField = CommonField.builder()
.enabled(WhetherEnum.YES.getValue())
.valid(WhetherEnum.YES.getValue())
.createTime(LocalDateTime.now())
.creatorId(getLoginUser().getId())
.creatorName(getLoginUser().getUsername())
.updateTime(LocalDateTime.now())
.updaterId(getLoginUser().getId())
.updaterName(getLoginUser().getUsername())
.build();
BeanUtils.copyProperties(commonField, t, ignoreProperties);
return t;
}
/**
* 设置更新的公共字段值一般用于更新记录包含以下字段
*
* <p>
* {@link CommonField#updaterId}<br>
* {@link CommonField#updaterName}<br>
* {@link CommonField#updateTime}<br>
* </p>
*
* @param t 需要设置的对象
* @param <T>
*/
public static <T extends Serializable> T setUpdateCommonField(T t) {
CommonField commonField = CommonField.builder()
.updaterId(getLoginUser().getId())
.updaterName(getLoginUser().getUsername())
.updateTime(LocalDateTime.now())
.build();
BeanUtils.copyProperties(commonField, t, "enabled", "valid");
return t;
}
@Data
@Builder
private static class CommonField implements Serializable {
/**
* 启用状态:0 停用1启用
*/
private Integer enabled;
/**
* 删除标志是否有效:1 可用 0不可用
*/
private Integer valid;
/**
* 创建人
*/
private Long creatorId;
/**
* 创建人
*/
private String creatorName;
/**
* 创建时间
*/
private LocalDateTime createTime;
/**
* 更新人
*/
private Long updaterId;
/**
* 更新人
*/
private String updaterName;
/**
* 更新时间
*/
private LocalDateTime updateTime;
}
}
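A minimal sketch of how these helpers are meant to be called around a mapper insert/update (the SysLogLoginEntity/SysLogLoginDao pair from this PR is used purely for illustration; any Serializable entity whose property names match CommonField gets the corresponding fields filled):

```java
import com.cnbm.admin.dao.SysLogLoginDao;
import com.cnbm.admin.entity.SysLogLoginEntity;
import com.cnbm.admin.utils.BaseSupportUtils;

public class AuditFieldSketch {

    private final SysLogLoginDao dao;

    public AuditFieldSketch(SysLogLoginDao dao) {
        this.dao = dao;
    }

    // New record: fills valid/enabled plus creator and updater fields from the logged-in user
    // (BeanUtils copies by property name, so only matching properties are actually set).
    public void save(SysLogLoginEntity entity) {
        BaseSupportUtils.setCommonField(entity);
        dao.insert(entity);
    }

    // Existing record: only the updater fields are refreshed.
    public void update(SysLogLoginEntity entity) {
        BaseSupportUtils.setUpdateCommonField(entity);
        dao.updateById(entity);
    }
}
```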

View File

@ -0,0 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN" "http://mybatis.org/dtd/mybatis-3-mapper.dtd">
<mapper namespace="com.cnbm.admin.dao.SysLogLoginDao">
</mapper>

View File

@ -89,9 +89,7 @@ public class FactoryController {
public Result update(@RequestBody FactoryDTO dto){
//效验数据
ValidatorUtils.validateEntity(dto, UpdateGroup.class, DefaultGroup.class);
factoryService.update(dto);
return new Result();
}

View File

@ -70,7 +70,7 @@ public class ProductController {
return new Result<ProductDTO>().ok(data);
}
@PostMapping
@PostMapping("save")
@ApiOperation("保存")
//@LogOperation("保存")
//@PreAuthorize("@ex.hasAuthority('code:product:save')")
@ -83,7 +83,7 @@ public class ProductController {
return new Result();
}
@PutMapping
@PutMapping("update")
@ApiOperation("修改")
//@LogOperation("修改")
//@PreAuthorize("@ex.hasAuthority('code:product:update')")
@ -96,7 +96,7 @@ public class ProductController {
return new Result();
}
@DeleteMapping
@DeleteMapping("delete")
@ApiOperation("删除")
//@LogOperation("删除")
//@PreAuthorize("@ex.hasAuthority('code:product:delete')")

View File

@ -0,0 +1,76 @@
package com.cnbm.common.interceptor;
import cn.hutool.core.util.StrUtil;
import com.baomidou.mybatisplus.core.toolkit.PluginUtils;
import com.baomidou.mybatisplus.extension.plugins.inner.InnerInterceptor;
import net.sf.jsqlparser.JSQLParserException;
import net.sf.jsqlparser.expression.Expression;
import net.sf.jsqlparser.expression.StringValue;
import net.sf.jsqlparser.expression.operators.conditional.AndExpression;
import net.sf.jsqlparser.parser.CCJSqlParserUtil;
import net.sf.jsqlparser.statement.select.PlainSelect;
import net.sf.jsqlparser.statement.select.Select;
import org.apache.ibatis.executor.Executor;
import org.apache.ibatis.mapping.BoundSql;
import org.apache.ibatis.mapping.MappedStatement;
import org.apache.ibatis.session.ResultHandler;
import org.apache.ibatis.session.RowBounds;
import java.util.Map;
public class DataFilterInterceptor implements InnerInterceptor {
@Override
public void beforeQuery(Executor executor, MappedStatement ms, Object parameter, RowBounds rowBounds, ResultHandler resultHandler, BoundSql boundSql) {
DataScope scope = getDataScope(parameter);
// 不进行数据过滤
if(scope == null || StrUtil.isBlank(scope.getSqlFilter())){
return;
}
// 拼接新SQL
String buildSql = getSelect(boundSql.getSql(), scope);
// 重写SQL
PluginUtils.mpBoundSql(boundSql).sql(buildSql);
}
private DataScope getDataScope(Object parameter){
if (parameter == null){
return null;
}
// 判断参数里是否有DataScope对象
if (parameter instanceof Map) {
Map<?, ?> parameterMap = (Map<?, ?>) parameter;
for (Map.Entry entry : parameterMap.entrySet()) {
if (entry.getValue() != null && entry.getValue() instanceof DataScope) {
return (DataScope) entry.getValue();
}
}
} else if (parameter instanceof DataScope) {
return (DataScope) parameter;
}
return null;
}
private String getSelect(String buildSql, DataScope scope){
try {
Select select = (Select) CCJSqlParserUtil.parse(buildSql);
PlainSelect plainSelect = (PlainSelect) select.getSelectBody();
Expression expression = plainSelect.getWhere();
if(expression == null){
plainSelect.setWhere(new StringValue(scope.getSqlFilter()));
}else{
AndExpression andExpression = new AndExpression(expression, new StringValue(scope.getSqlFilter()));
plainSelect.setWhere(andExpression);
}
return select.toString().replaceAll("'", "");
}catch (JSQLParserException e){
return buildSql;
}
}
}

View File

@ -0,0 +1,22 @@
package com.cnbm.common.interceptor;
public class DataScope {
private String sqlFilter;
public DataScope(String sqlFilter) {
this.sqlFilter = sqlFilter;
}
public String getSqlFilter() {
return sqlFilter;
}
public void setSqlFilter(String sqlFilter) {
this.sqlFilter = sqlFilter;
}
@Override
public String toString() {
return this.sqlFilter;
}
}
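DataFilterInterceptor only rewrites a query when it finds a DataScope object among the mapper parameters; a minimal sketch of passing a data-permission filter in (the mapper interface and the filter string are illustrative, not part of this PR):

```java
import com.cnbm.common.interceptor.DataScope;
import org.apache.ibatis.annotations.Mapper;
import org.apache.ibatis.annotations.Param;
import org.apache.ibatis.annotations.Select;

import java.util.List;
import java.util.Map;

@Mapper
public interface DeptScopedUserDao {

    // Before this query runs, DataFilterInterceptor finds the DataScope value in the
    // parameter map and injects its sqlFilter into the WHERE clause, so the statement
    // becomes: SELECT * FROM sys_user WHERE dept_id IN (1, 2, 3)
    @Select("SELECT * FROM sys_user")
    List<Map<String, Object>> listVisibleUsers(@Param("dataScope") DataScope dataScope);
}

// Usage inside a service: dao.listVisibleUsers(new DataScope("dept_id IN (1, 2, 3)"));
```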

View File

@ -23,7 +23,45 @@ public class DataUtils {
}
public List<List<Date>> split(List<Date> value) {
List<List<Date>> result = new ArrayList<>();
int day = value.iterator().next().getDate();
List<Date> newListEntry = new ArrayList<>();
for (Date date : value) {
if (date.getDate() == day) {
newListEntry.add(date);
}
else {
day = date.getDate();
result.add(newListEntry);
newListEntry = new ArrayList<>();
newListEntry.add(date);
}
}
result.add(newListEntry);//because the last sublist was not added
return result;
}
public static String splitToNeed(String s,Integer type){
String[] s1 = s.split(" ");
String[] split = s1[0].split("-");
String year = split[0];
String mon = split[1];
String day = split[2];
if(type == 1 ){
// type 1: year only, e.g. "2022"
return year;
}else if(type == 2 ){
// type 2: year-month, e.g. "2022-08"
return year+"-"+mon;
}else {
// otherwise: the full date part, e.g. "2022-08-09"
return s1[0];
}
}
public static Date getBeforeDate(Integer number){
Date date = new Date();//获取当前日期
SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMdd");//格式化一下
@ -37,6 +75,19 @@ public class DataUtils {
Date day = calendar1.getTime();
return day;
}
public static Date getAfterDate(Integer number){
Date date = new Date();//获取当前日期
SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMdd");//格式化一下
Calendar calendar1 = Calendar.getInstance();//获取对日期操作的类对象
//两种写法都可以获取到前三天的日期
// calendar1.set(Calendar.DAY_OF_YEAR,calendar1.get(Calendar.DAY_OF_YEAR) -3);
//在当前时间的基础上获取前三天的日期
calendar1.add(Calendar.DATE, 0+number);
//add方法 参数也可传入 月份获取的是前几月或后几月的日期
//calendar1.add(Calendar.MONTH, -3);
Date day = calendar1.getTime();
return day;
}
/**
* 将一组数据固定分组每组n个元素

View File

@ -20,10 +20,36 @@ spring:
# url: jdbc:postgresql://192.168.10.10:5432/postgres
# username: postgres
# password: 123456
hikari:
pool-name: GrowUpHikariCP
minimum-idle: 1
maximum-pool-size: 10
initial-size: 10
max-active: 100
min-idle: 10
max-wait: 60000
pool-prepared-statements: true
max-pool-prepared-statement-per-connection-size: 20
time-between-eviction-runs-millis: 60000
min-evictable-idle-time-millis: 300000
#Oracle需要打开注释
#validation-query: SELECT 1 FROM DUAL
test-while-idle: true
test-on-borrow: false
test-on-return: false
stat-view-servlet:
enabled: true
url-pattern: /druid/*
#login-username: admin
#login-password: admin
filter:
stat:
log-slow-sql: true
slow-sql-millis: 1000
merge-sql: false
wall:
config:
multi-statement-allow: true
# hikari:
# pool-name: GrowUpHikariCP
# minimum-idle: 1
# maximum-pool-size: 10
##多数据源的配置需要引用renren-dynamic-datasource
#dynamic:

View File

@ -0,0 +1,21 @@
<?xml version="1.0" encoding="UTF-8"?>
<configuration>
<include resource="org/springframework/boot/logging/logback/base.xml" />
<logger name="org.springframework.web" level="INFO"/>
<logger name="org.springboot.sample" level="TRACE" />
<!-- 开发、测试环境 -->
<springProfile name="dev,test">
<logger name="org.springframework.web" level="INFO"/>
<logger name="org.springboot.sample" level="INFO" />
<logger name="com.cnbm" level="DEBUG" />
</springProfile>
<!-- 生产环境 -->
<springProfile name="prod">
<logger name="org.springframework.web" level="ERROR"/>
<logger name="org.springboot.sample" level="ERROR" />
<logger name="com.cnbm" level="ERROR" />
</springProfile>
</configuration>

View File

@ -68,26 +68,26 @@ public class ${table.controllerName} {
@ApiOperation("保存")
@LogOperation("保存")
@PreAuthorize("@ex.hasAuthority('${package.ModuleName}:${table.entityPath}:save')")
public Result save(@RequestBody ${entity}DTO dto){
public Result<Long> save(@RequestBody ${entity}DTO dto){
//效验数据
ValidatorUtils.validateEntity(dto, AddGroup.class, DefaultGroup.class);
${table.entityPath}Service.save(dto);
return new Result();
return new Result<Long>().ok(dto.getId());
}
@PutMapping
@ApiOperation("修改")
@LogOperation("修改")
@PreAuthorize("@ex.hasAuthority('${package.ModuleName}:${table.entityPath}:update')")
public Result update(@RequestBody ${entity}DTO dto){
public Result<Long> update(@RequestBody ${entity}DTO dto){
//效验数据
ValidatorUtils.validateEntity(dto, UpdateGroup.class, DefaultGroup.class);
${table.entityPath}Service.update(dto);
return new Result();
return new Result<Long>().ok(dto.getId());
}
@DeleteMapping

View File

@ -1,39 +1,10 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN" "http://mybatis.org/dtd/mybatis-3-mapper.dtd">
<mapper namespace="${package.Mapper}.${table.mapperName}">
#if(${enableCache})
<!-- 开启二级缓存 -->
<cache type="${cacheClassName}"/>
#end
#if(${baseResultMap})
<!-- 通用查询映射结果 -->
<resultMap id="BaseResultMap" type="${package.Entity}.${entity}">
#foreach($field in ${table.fields})
#if(${field.keyFlag})##生成主键排在第一位
<id column="${field.name}" property="${field.propertyName}" />
#end
#end
#foreach($field in ${table.commonFields})##生成公共字段
<result column="${field.name}" property="${field.propertyName}" />
#end
#foreach($field in ${table.fields})
#if(!${field.keyFlag})##生成普通字段
<result column="${field.name}" property="${field.propertyName}" />
#end
#end
<resultMap type="${package.Entity}.${table.entityName}" id="${table.entityName}Map">
#foreach($field in ${table.fields})
<id column="${field.name}" property="${field.propertyName}" />
#end
</resultMap>
#end
#if(${baseColumnList})
<!-- 通用查询结果列 -->
<sql id="Base_Column_List">
#foreach($field in ${table.commonFields})
${field.columnName},
#end
${table.fieldNames}
</sql>
#end
</mapper>

View File

@ -27,7 +27,7 @@ public class Main {
public static void main(String[] args) throws InterruptedException {
char[] token = "lkBsC27QZr1W50BSPlGxpTqNNpwuUk5uz1dZZRPSPbCG5VmNDDUo8P3UkZIhGWwfJwkuz6ZGZ7Et4_KBaG3gHw==".toCharArray();
String org = "qgs";
String bucket = "mytest";
String bucket = "qgs-bucket";
InfluxDBClient influxDBClient = InfluxDBClientFactory.create("http://192.168.0.170:8086", token, org, bucket);
@ -105,24 +105,24 @@ public class Main {
// }
String flux = "from(bucket:\"mytest\") |> range(start: -6000000000000000m)";
flux += "|> filter(fn: (r) =>\n" +
" r._measurement == \"ASProcessCompleteEvent\" and \n" +
// " r._field == \"type\" and \n" + //对应 Field key
" r.argName == \"arg3\"\n" + //对应 Tags key (Tag 信息无法在FluxRecord 里面获取)
" )";
QueryApi queryApi = influxDBClient.getQueryApi();
List<FluxTable> tables = queryApi.query(flux);
for (FluxTable fluxTable : tables) {
List<FluxRecord> records = fluxTable.getRecords();
for (FluxRecord fluxRecord : records) {
Double o = (Double)fluxRecord.getValueByKey("_value");
System.out.println("time: "+fluxRecord.getTime() +" key:"+fluxRecord.getField()+" value: " + fluxRecord.getValueByKey("_value")+" measurement: " + fluxRecord.getMeasurement());
// System.out.println("time: "+fluxRecord.getTime() +" key:"++" value: " + fluxRecord.getValueByKey("_value")+" measurement: " + fluxRecord.getMeasurement());
}
}
// String flux = "from(bucket:\"mytest\") |> range(start: -6000000000000000m)";
// flux += "|> filter(fn: (r) =>\n" +
// " r._measurement == \"ASProcessCompleteEvent\" and \n" +
//// " r._field == \"type\" and \n" + //对应 Field key
// " r.argName == \"arg3\"\n" + //对应 Tags key (Tag 信息无法在FluxRecord 里面获取)
// " )";
// QueryApi queryApi = influxDBClient.getQueryApi();
//
// List<FluxTable> tables = queryApi.query(flux);
// for (FluxTable fluxTable : tables) {
// List<FluxRecord> records = fluxTable.getRecords();
// for (FluxRecord fluxRecord : records) {
// Double o = (Double)fluxRecord.getValueByKey("_value");
// System.out.println("time: "+fluxRecord.getTime() +" key:"+fluxRecord.getField()+" value: " + fluxRecord.getValueByKey("_value")+" measurement: " + fluxRecord.getMeasurement());
//// System.out.println("time: "+fluxRecord.getTime() +" key:"++" value: " + fluxRecord.getValueByKey("_value")+" measurement: " + fluxRecord.getMeasurement());
//
// }
// }
// from(bucket: "mytest")
@ -133,34 +133,40 @@ public class Main {
// |> sort(columns: ["_time"], desc: true)
// 取前10条数据
// |> limit(n: 10, offset: 0)
//
// 10-20 条数据
// |> limit(n: 10, offset: 10)
//
// 20-30 条数据
// |> limit(n: 10, offset: 20)
// QueryDataParam queryDataParam = new QueryDataParam();
// queryDataParam.setBucket("mytest");
// queryDataParam.setRange(new Range(getDate().toInstant(),new Date().toInstant()));
// queryDataParam.setMeasurement("ASProcessCompleteEvent");
// queryDataParam.setTag(new Tag("argName","arg4"));
QueryDataParam queryDataParam = new QueryDataParam();
queryDataParam.setBucket("qgs-bucket");
queryDataParam.setRange(new Range(getDate().toInstant(),new Date().toInstant()));
queryDataParam.setMeasurement("WeightHeiHei");
queryDataParam.setTag(new Tag("argName","LTWeight"));
// queryDataParam.setDropedTagName("transationId");
// queryDataParam.setPageInfo(new PageInfo(1,100));
//
// List<FluxTable> tables = query(queryDataParam,influxDBClient);
// List<FluxRecord> records1 = tables.get(0).getRecords();
List<String> dropNames = new ArrayList<>();
dropNames.add("transationId");
dropNames.add("inspectionSheetId");
dropNames.add("batchNum");
queryDataParam.setDropedTagNames(dropNames);
queryDataParam.setPageInfo(new PageInfo(1,10000));
List<FluxTable> tables = query(queryDataParam,influxDBClient);
List<FluxRecord> records1 = tables.get(0).getRecords();
// List<List<FluxRecord>> lists = Utils.fixedGroup(records1, 10);
// for (FluxTable fluxTable : tables) {
// List<FluxRecord> records = fluxTable.getRecords();
// for (FluxRecord fluxRecord : records) {
// System.out.println("time: "+fluxRecord.getTime() +" key:"+fluxRecord.getField()+" value: " + fluxRecord.getValueByKey("_value")+" measurement: " + fluxRecord.getMeasurement());
//
// }
// }
for (FluxTable fluxTable : tables) {
List<FluxRecord> records = fluxTable.getRecords();
for (FluxRecord fluxRecord : records) {
Instant timms = fluxRecord.getTime();
System.out.println("time: "+fluxRecord.getTime() +" key:"+fluxRecord.getField()+" value: " + fluxRecord.getValueByKey("_value")+" measurement: " + fluxRecord.getMeasurement());
}
}
influxDBClient.close();
}
@ -173,7 +179,7 @@ public class Main {
//两种写法都可以获取到前三天的日期
// calendar1.set(Calendar.DAY_OF_YEAR,calendar1.get(Calendar.DAY_OF_YEAR) -3);
//在当前时间的基础上获取前三天的日期
calendar1.add(Calendar.DATE, -3);
calendar1.add(Calendar.DATE, -1000);
//add方法 参数也可传入 月份获取的是前几月或后几月的日期
//calendar1.add(Calendar.MONTH, -3);
Date day = calendar1.getTime();
@ -197,6 +203,7 @@ public class Main {
flux += "|> drop(columns: [\""+ dropName +"\"]) \n";
}
flux += "|> sort(columns: [\"_time\"], desc: true) \n";
flux += "|> window(every: 1y) \n";
if(pageInfo!=null){
flux += "|> limit(n: "+pageInfo.getSize()+", offset: "+(pageInfo.getCurrent()-1)* pageInfo.getSize()+")";
}

View File

@ -2,6 +2,7 @@ package com.cnbm.influx.config;
import com.cnbm.influx.constant.Constant;
import com.cnbm.influx.param.PageInfo;
import com.cnbm.influx.param.QueryDataGroupByTimeParam;
import com.cnbm.influx.param.QueryDataParam;
import com.cnbm.influx.param.Range;
import com.cnbm.influx.template.Event;
@ -94,6 +95,7 @@ public enum InfluxClient {
Point point = Point.measurement(measurement)
.addTag("transationId", event.getTransationId())
.addTag("inspectionSheetId", event.getInspectionSheetId())
.addTag("batchNum", event.getBatchNum().toString())
.addTag("argName", event.getArgName())
.addField("argValue", event.getArgValue())
.time(event.getTime().toEpochMilli(), WritePrecision.MS);
@ -125,6 +127,47 @@ public enum InfluxClient {
}
// List<FluxTable> tables = queryApi.query(flux);
// for (FluxTable fluxTable : tables) {
// List<FluxRecord> records = fluxTable.getRecords();
// for (FluxRecord fluxRecord : records) {
// System.out.println("time: "+fluxRecord.getTime() +" key:"+fluxRecord.getField()+" value: " + fluxRecord.getValueByKey("_value")+" measurement: " + fluxRecord.getMeasurement());
//
// }
// }
return queryApi.query(flux);
}
public List<FluxTable> queryGroupByTime(QueryDataGroupByTimeParam param){
String measurement = param.getMeasurement();
List<String> dropedTagNames = param.getDropedTagNames();
Range range = param.getRange();
String bucket = param.getBucket();
String tagName = param.getTag().getTagName();
String tagValue = param.getTag().getTagValue();
PageInfo pageInfo = param.getPageInfo();
String flux = "from(bucket:\""+bucket+"\")";
flux += "|> range(start: "+range.getBegin()+",stop:"+range.getEnd()+")";
flux += "|> filter(fn: (r) => r[\"_measurement\"] == \""+measurement+"\")";
flux += "|> filter(fn: (r) => r[\""+tagName+"\"] == \""+tagValue+"\")";
for(String dropName:dropedTagNames){
flux += "|> drop(columns: [\""+dropName+"\"])";
}
//|> window(every: 1mo)
if(param.getTimeType() == 1){
flux += "|> window(every: 1y)";
}else if(param.getTimeType() == 2 ){
flux += "|> window(every: 1mo)";
}else{
flux += "|> window(every: 1d)";
}
flux += "|> sort(columns: [\"_time\"], desc: true)";
if(pageInfo!=null){
flux += "|> limit(n: "+pageInfo.getSize()+", offset: "+(pageInfo.getCurrent()-1)* pageInfo.getSize()+")";
}
// List<FluxTable> tables = queryApi.query(flux);
// for (FluxTable fluxTable : tables) {
// List<FluxRecord> records = fluxTable.getRecords();
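For reference, with timeType == 1 (group by year) queryGroupByTime assembles a Flux pipeline like the one below; this sketch just rebuilds and prints the string so the filter/drop/window/sort/limit stages are visible (bucket, tag and time-range values are illustrative):

```java
public class FluxPreviewSketch {
    public static void main(String[] args) {
        // Mirrors the string assembly in InfluxClient.queryGroupByTime for timeType == 1.
        String flux = "from(bucket:\"qgs-bucket\")"
                + "|> range(start: 2022-01-01T00:00:00Z,stop:2025-01-01T00:00:00Z)"
                + "|> filter(fn: (r) => r[\"_measurement\"] == \"WeightHeiHei\")"
                + "|> filter(fn: (r) => r[\"argName\"] == \"LostDays\")"
                + "|> drop(columns: [\"transationId\"])"    // dropedTagNames
                + "|> window(every: 1y)"                     // one table per year
                + "|> sort(columns: [\"_time\"], desc: true)"
                + "|> limit(n: 10000, offset: 0)";           // PageInfo(1, 10000)
        System.out.println(flux);
    }
}
```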

View File

@ -18,7 +18,7 @@ public class Constant {
public static final LogLevel readTimeout = LogLevel.BODY;
public static final LogLevel writeTimeout = LogLevel.BODY;
public static final LogLevel connectTimeout = LogLevel.BODY;
public static final String measurement = "Weight";
public static final String measurement = "WeightHeiHei";
}

View File

@ -16,8 +16,11 @@ import org.slf4j.LoggerFactory;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import java.text.DateFormat;
import java.time.Instant;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Random;
@ -49,21 +52,49 @@ public class S7DemoController {
public void insertBatchJYD() throws InterruptedException {
List<Event> list = new ArrayList<>();
Random r = new Random();
Instant instant = DataUtils.getBeforeDate(400).toInstant();
for(int j=0;j<10;j++){
for(int i=0;i<99;i++){
Thread.sleep(10);
Event event = new Event();
event.setTime(Instant.now());
event.setTime(instant);
event.setTransationId("asas"+i);
event.setArgName("LTWeight");
Double d = r.nextDouble() * 2.5 + 66;
event.setInspectionSheetId(j+"");
event.setArgValue(d);
event.setBatchNum(i);
list.add(event);
}
}
InfluxClient.Client.batchInsert(list,"Weight");
InfluxClient.Client.batchInsert(list,"WeightHei");
}
@PostMapping("/insertBatchJYDForTest")
public void insertBatchJYDForTest() throws InterruptedException {
List<Event> list = new ArrayList<>();
Random r = new Random();
for(int i=0;i<999;i++){
Thread.sleep(10);
Event event = new Event();
event.setTime(DataUtils.getAfterDate(i).toInstant());
event.setTransationId("asas"+i);
event.setArgName("LostDays");
int i1 = r.nextInt(10);
if(i1<4){
event.setArgValue(new Double(0));
}else {
event.setArgValue(new Double(1));
}
event.setInspectionSheetId(i+"");
event.setBatchNum(i);
list.add(event);
}
InfluxClient.Client.batchInsert(list,"WeightHeiHei");
}
/**
@ -139,8 +170,8 @@ public class S7DemoController {
.addField("argValue", event.getArgValue())
.time(event.getTime().toEpochMilli(), WritePrecision.MS);
return point;
}
@PostMapping("/insert")
public void insert() throws InterruptedException {
Event event = new Event();

View File

@ -0,0 +1,34 @@
package com.cnbm.influx.param;
import lombok.Data;
import lombok.EqualsAndHashCode;
import lombok.experimental.Accessors;
import java.util.List;
/**
* @Desc: "influx 查询条件构造"
* @Author: caixiang
* @DATE: 2022/6/29 10:17
*
* 注意
* 必填
* measurement 不能为空
* 时间段 不能为空
* bucket 不能为空
* 非必填
* 分页信息可选
* tag
*
*/
@Data
@EqualsAndHashCode(callSuper = false)
@Accessors(chain = true)
public class QueryDataGroupByTimeParam extends BaseParam{
private Tag tag;
//查询的时候需要忽略的字段(transationId是唯一标识会对 最终的查询结果集产生影响)
private List<String> dropedTagNames;
private String bucket;
//1-按年分组; 2-按月分组; 3-按日分组
private Integer timeType;
}

View File

@ -24,4 +24,7 @@ public class Event {
private String argName;
private Double argValue;
//批次号可选的
private Integer batchNum;
}

View File

@ -16,16 +16,18 @@ public class EventForCount {
private String inspectionSheetId;
//n = 某个批次的样本数
private Integer n;
private String transationId;
//failN = 某个批次不合格品数
private String failN;
private String argName;
//如果是计数类型1 = 代表ok 2 = nok
//todo 剩下样本量怎么估算
private Double argValue;
//类型 1 计量型 2.计数型
private Integer type;
//batchNum = 某个批次
private String batchNum;
//检测名
private String detectionName;
}

View File

@ -0,0 +1,10 @@
package com.cnbm.processInspection.constant;
/**
* @Desc: ""
* @Author: caixiang
* @DATE: 2022/8/3 10:01
*/
public class Constant {
public String measureMent = "WeightHeiHei";
}

View File

@ -7,9 +7,14 @@ import com.cnbm.common.spc.math.StandardDiviation;
import com.cnbm.common.spc.util.DataUtils;
import com.cnbm.common.vo.R;
import com.cnbm.influx.constant.Constant;
import com.cnbm.influx.param.QueryDataGroupByTimeParam;
import com.cnbm.influx.param.QueryDataParam;
import com.cnbm.influx.param.Range;
import com.cnbm.processInspection.dto.*;
import com.cnbm.processInspection.graphAnalyzed.forCount.c.CGraph;
import com.cnbm.processInspection.graphAnalyzed.forCount.np.NPGraph;
import com.cnbm.processInspection.graphAnalyzed.forCount.p.PGraph;
import com.cnbm.processInspection.graphAnalyzed.forCount.u.UGraph;
import com.cnbm.processInspection.graphAnalyzed.forMeterage.mr.MeanRGraph;
import com.cnbm.processInspection.graphAnalyzed.forMeterage.ms.MeanStandardDeviationGraph;
import com.cnbm.processInspection.graphAnalyzed.forMeterage.xmr.XMRGraph;
@ -36,9 +41,6 @@ public class ProcessInspectionController {
@PostMapping("/XbarSGraphTest")
public R<XbarSGraphData> xbarSGraphTest() throws Exception {
ProductFeaturesDTO productFeaturesDTO = productFeaturesService.get(new Long(1));
ProductFeatures productFeatures = new ProductFeatures();
productFeatures.setSl(new Float(5));
productFeatures.setUsl(new Float(10));
@ -55,7 +57,7 @@ public class ProcessInspectionController {
QueryDataParam queryDataParam = new QueryDataParam();
queryDataParam.setMeasurement("Weight");
queryDataParam.setMeasurement(Constant.measurement);
queryDataParam.setRange(new Range(DataUtils.getBeforeDate(10).toInstant(), Instant.now()));
meanStandardDeviationGraph.initialDate(queryDataParam);
@ -89,7 +91,7 @@ public class ProcessInspectionController {
QueryDataParam queryDataParam = new QueryDataParam();
queryDataParam.setMeasurement("Weight");
queryDataParam.setMeasurement(Constant.measurement);
queryDataParam.setRange(new Range(DataUtils.getBeforeDate(10).toInstant(), Instant.now()));
meanRGraph.initialDate(queryDataParam);
@ -122,7 +124,7 @@ public class ProcessInspectionController {
QueryDataParam queryDataParam = new QueryDataParam();
queryDataParam.setMeasurement("Weight");
queryDataParam.setMeasurement(Constant.measurement);
queryDataParam.setRange(new Range(DataUtils.getBeforeDate(10).toInstant(), Instant.now()));
xmrGraph.initialDate(queryDataParam);
@ -137,6 +139,109 @@ public class ProcessInspectionController {
return R.ok("成功",xmrGraphData);
}
@PostMapping("/NPGraphTest")
public R<NPGraphData> NPGraphTest() throws Exception {
ProductFeatures productFeatures = new ProductFeatures();
productFeatures.setSl(new Float(5));
productFeatures.setUsl(new Float(10));
productFeatures.setLsl(new Float(1));
productFeatures.setName("LostDays");
NPGraph npGraph = new NPGraph(productFeatures);
QueryDataGroupByTimeParam queryDataParam = new QueryDataGroupByTimeParam();
queryDataParam.setMeasurement(Constant.measurement);
queryDataParam.setRange(new Range( Instant.now() , DataUtils.getAfterDate(999).toInstant() ));
queryDataParam.setTimeType(1);
npGraph.initialDate(queryDataParam);
NPGraphData npGraph1 = new NPGraphData(
npGraph.getList(),
npGraph.getSpecificationLimit(),
npGraph.getArgName()
);
return R.ok("成功",npGraph1);
}
@PostMapping("/PGraphTest")
public R<PGraphData> PGraphTest() throws Exception {
ProductFeatures productFeatures = new ProductFeatures();
productFeatures.setSl(new Float(5));
productFeatures.setUsl(new Float(10));
productFeatures.setLsl(new Float(1));
productFeatures.setName("LostDays");
PGraph pGraph = new PGraph(productFeatures);
QueryDataGroupByTimeParam queryDataParam = new QueryDataGroupByTimeParam();
queryDataParam.setMeasurement(Constant.measurement);
queryDataParam.setRange(new Range( Instant.now() , DataUtils.getAfterDate(999).toInstant() ));
queryDataParam.setTimeType(2);
pGraph.initialDate(queryDataParam);
PGraphData npGraph1 = new PGraphData(
pGraph.getList(),
pGraph.getSpecificationLimit(),
pGraph.getArgName()
);
return R.ok("成功",npGraph1);
}
@PostMapping("/CGraphTest")
public R<CGraphData> CGraphTest() throws Exception {
ProductFeatures productFeatures = new ProductFeatures();
productFeatures.setSl(new Float(5));
productFeatures.setUsl(new Float(10));
productFeatures.setLsl(new Float(1));
productFeatures.setName("LostDays");
CGraph cGraph = new CGraph(productFeatures);
QueryDataGroupByTimeParam queryDataParam = new QueryDataGroupByTimeParam();
queryDataParam.setMeasurement(Constant.measurement);
queryDataParam.setRange(new Range( Instant.now() , DataUtils.getAfterDate(999).toInstant() ));
queryDataParam.setTimeType(2);
cGraph.initialDate(queryDataParam);
CGraphData npGraph1 = new CGraphData(
cGraph.getList(),
cGraph.getSpecificationLimit(),
cGraph.getArgName()
);
return R.ok("成功",npGraph1);
}
@PostMapping("/UGraphTest")
public R<UGraphData> UGraphTest() throws Exception {
ProductFeatures productFeatures = new ProductFeatures();
productFeatures.setSl(new Float(5));
productFeatures.setUsl(new Float(10));
productFeatures.setLsl(new Float(1));
productFeatures.setName("LostDays");
UGraph uGraph = new UGraph(productFeatures);
QueryDataGroupByTimeParam queryDataParam = new QueryDataGroupByTimeParam();
queryDataParam.setMeasurement(Constant.measurement);
queryDataParam.setRange(new Range( Instant.now() , DataUtils.getAfterDate(999).toInstant() ));
queryDataParam.setTimeType(2);
uGraph.initialDate(queryDataParam);
UGraphData npGraph1 = new UGraphData(
uGraph.getList(),
uGraph.getSpecificationLimit(),
uGraph.getArgName()
);
return R.ok("成功",npGraph1);
}
private ProductFeatures setRealSampleSize(GraphArg graphArg){
ProductFeaturesDTO productFeaturesDTO = productFeaturesService.get(graphArg.getProductFeaturesId());
@ -235,4 +340,87 @@ public class ProcessInspectionController {
return R.ok("成功",xmrGraphData);
}
@PostMapping("/NPGraph")
public R<NPGraphData> NPGraph(@RequestBody GraphArg graphArg) throws Exception {
ProductFeatures productFeatures = setRealSampleSize(graphArg);
NPGraph npGraph = new NPGraph(productFeatures);
QueryDataGroupByTimeParam queryDataParam = new QueryDataGroupByTimeParam();
queryDataParam.setMeasurement(Constant.measurement);
queryDataParam.setRange(new Range( graphArg.getBegin().toInstant() , graphArg.getEnd().toInstant() ));
queryDataParam.setTimeType(graphArg.getGroupType());
npGraph.initialDate(queryDataParam);
NPGraphData npGraph1 = new NPGraphData(
npGraph.getList(),
npGraph.getSpecificationLimit(),
npGraph.getArgName()
);
return R.ok("成功",npGraph1);
}
@PostMapping("/PGraph")
public R<PGraphData> PGraph(@RequestBody GraphArg graphArg) throws Exception {
ProductFeatures productFeatures = setRealSampleSize(graphArg);
PGraph pGraph = new PGraph(productFeatures);
QueryDataGroupByTimeParam queryDataParam = new QueryDataGroupByTimeParam();
queryDataParam.setMeasurement(Constant.measurement);
queryDataParam.setRange(new Range( graphArg.getBegin().toInstant() , graphArg.getEnd().toInstant() ));
queryDataParam.setTimeType(graphArg.getGroupType());
pGraph.initialDate(queryDataParam);
PGraphData npGraph1 = new PGraphData(
pGraph.getList(),
pGraph.getSpecificationLimit(),
pGraph.getArgName()
);
return R.ok("成功",npGraph1);
}
@PostMapping("/CGraph")
public R<CGraphData> CGraph(@RequestBody GraphArg graphArg) throws Exception {
ProductFeatures productFeatures = setRealSampleSize(graphArg);
CGraph cGraph = new CGraph(productFeatures);
QueryDataGroupByTimeParam queryDataParam = new QueryDataGroupByTimeParam();
queryDataParam.setMeasurement(Constant.measurement);
queryDataParam.setRange(new Range( graphArg.getBegin().toInstant() , graphArg.getEnd().toInstant() ));
queryDataParam.setTimeType(graphArg.getGroupType());
cGraph.initialDate(queryDataParam);
CGraphData cGraphData = new CGraphData(
cGraph.getList(),
cGraph.getSpecificationLimit(),
cGraph.getArgName()
);
return R.ok("成功",cGraphData);
}
@PostMapping("/UGraph")
public R<UGraphData> UGraph(@RequestBody GraphArg graphArg) throws Exception {
ProductFeatures productFeatures = setRealSampleSize(graphArg);
UGraph uGraph = new UGraph(productFeatures);
QueryDataGroupByTimeParam queryDataParam = new QueryDataGroupByTimeParam();
queryDataParam.setMeasurement(Constant.measurement);
queryDataParam.setRange(new Range( graphArg.getBegin().toInstant() , graphArg.getEnd().toInstant() ));
queryDataParam.setTimeType(graphArg.getGroupType());
uGraph.initialDate(queryDataParam);
UGraphData uGraphData = new UGraphData(
uGraph.getList(),
uGraph.getSpecificationLimit(),
uGraph.getArgName()
);
return R.ok("成功",uGraphData);
}
}
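A minimal caller-side sketch of how the new count-chart endpoints might be exercised. GraphArg's field types (Long productFeaturesId, java.util.Date begin/end), its package, and its Lombok-generated setters are assumptions inferred from how the controller reads the object; they are not confirmed by this diff.

import com.cnbm.processInspection.dto.GraphArg; // assumed package, same as the other DTOs in this commit

import java.util.Date;

public class CountChartRequestSketch {
    // Hypothetical helper: builds the body that would be serialised to JSON and
    // POSTed to /NPGraph, /PGraph, /CGraph or /UGraph.
    public static GraphArg buildArg(Long productFeaturesId, Date begin, Date end) {
        GraphArg arg = new GraphArg();
        arg.setProductFeaturesId(productFeaturesId); // feature whose measurements are charted
        arg.setBegin(begin);                         // start of the query window
        arg.setEnd(end);                             // end of the query window
        arg.setGroupType(2);                         // 1 = year, 2 = month, 3 = day (grouping for count charts)
        return arg;
    }
}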

View File

@ -0,0 +1,32 @@
package com.cnbm.processInspection.dto;
import com.cnbm.qualityPlanning.entity.CPoint;
import com.cnbm.qualityPlanning.entity.SpecificationLimit;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;
import java.util.List;
/**
* @Desc: ""
* @Author: caixiang
* @DATE: 2022/7/22 14:18
*/
@Data
@ApiModel(value = "C控制图 结果类")
public class CGraphData {
@ApiModelProperty(value = "P控制图list数据")
private List<CPoint> list;
@ApiModelProperty(value = "P控制图 规格线")
private SpecificationLimit specificationLimit;
@ApiModelProperty(value = "P控制图 参数名")
private String argName;
public CGraphData(List<CPoint> list, SpecificationLimit specificationLimit, String argName) {
this.list = list;
this.specificationLimit = specificationLimit;
this.argName = argName;
}
}

View File

@ -32,5 +32,6 @@ public class GraphArg {
@ApiModelProperty(value = "样本大小,不填的话用之前配置的")
private Integer sampleSize;
@ApiModelProperty(value = "分组类别1=年 , 2=月 , 3=日)(用于计数型控制图)")
private Integer groupType;
}

View File

@ -0,0 +1,22 @@
package com.cnbm.processInspection.dto;
import lombok.Data;
/**
* @Desc: ""
* @Author: caixiang
* @DATE: 2022/7/27 15:56
*/
@Data
public class InterpretationListArgForCount {
private Integer number;
private Integer arg;
public InterpretationListArgForCount() {
}
public InterpretationListArgForCount(Integer number, Integer arg) {
this.number = number;
this.arg = arg;
}
}

View File

@ -0,0 +1,35 @@
package com.cnbm.processInspection.dto;
import com.cnbm.qualityPlanning.entity.NPPoint;
import com.cnbm.qualityPlanning.entity.SpecificationLimit;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;
import java.util.List;
/**
* @Desc: ""
* @Author: caixiang
* @DATE: 2022/7/22 14:18
*/
@Data
@ApiModel(value = "NP控制图 结果类")
public class NPGraphData {
@ApiModelProperty(value = "NP控制图list数据")
private List<NPPoint> list;
@ApiModelProperty(value = "NP控制图 规格线")
private SpecificationLimit specificationLimit;
@ApiModelProperty(value = "NP控制图 参数名")
private String argName;
public NPGraphData(List<NPPoint> list, SpecificationLimit specificationLimit, String argName) {
this.list = list;
this.specificationLimit = specificationLimit;
this.argName = argName;
}
}

View File

@ -0,0 +1,32 @@
package com.cnbm.processInspection.dto;
import com.cnbm.qualityPlanning.entity.PPoint;
import com.cnbm.qualityPlanning.entity.SpecificationLimit;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;
import java.util.List;
/**
* @Desc: ""
* @Author: caixiang
* @DATE: 2022/7/22 14:18
*/
@Data
@ApiModel(value = "P控制图 结果类")
public class PGraphData {
@ApiModelProperty(value = "P控制图list数据")
private List<PPoint> list;
@ApiModelProperty(value = "P控制图 规格线")
private SpecificationLimit specificationLimit;
@ApiModelProperty(value = "P控制图 参数名")
private String argName;
public PGraphData(List<PPoint> list, SpecificationLimit specificationLimit, String argName) {
this.list = list;
this.specificationLimit = specificationLimit;
this.argName = argName;
}
}

View File

@ -0,0 +1,32 @@
package com.cnbm.processInspection.dto;
import com.cnbm.qualityPlanning.entity.SpecificationLimit;
import com.cnbm.qualityPlanning.entity.UPoint;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;
import java.util.List;
/**
* @Desc: ""
* @Author: caixiang
* @DATE: 2022/7/22 14:18
*/
@Data
@ApiModel(value = "U控制图 结果类")
public class UGraphData {
@ApiModelProperty(value = "U控制图list数据")
private List<UPoint> list;
@ApiModelProperty(value = "U控制图 规格线")
private SpecificationLimit specificationLimit;
@ApiModelProperty(value = "U控制图 参数名")
private String argName;
public UGraphData(List<UPoint> list, SpecificationLimit specificationLimit, String argName) {
this.list = list;
this.specificationLimit = specificationLimit;
this.argName = argName;
}
}

View File

@ -0,0 +1,153 @@
package com.cnbm.processInspection.graphAnalyzed.forCount.c;
import com.cnbm.basic.entity.ProductFeatures;
import com.cnbm.common.spc.util.DataUtils;
import com.cnbm.influx.config.InfluxClient;
import com.cnbm.influx.constant.Constant;
import com.cnbm.influx.param.QueryDataGroupByTimeParam;
import com.cnbm.influx.param.Tag;
import com.cnbm.qualityPlanning.entity.CPoint;
import com.cnbm.qualityPlanning.entity.ControlLimit;
import com.cnbm.qualityPlanning.entity.SpecificationLimit;
import com.influxdb.query.FluxRecord;
import com.influxdb.query.FluxTable;
import lombok.Data;
import java.util.ArrayList;
import java.util.List;
/**
 * @Desc: "c chart (count of nonconformities), calculation class"
 * @Author: caixiang
 * @DATE: 2022/7/20 14:26
 * Usage: first new a CGraph object, then call initialDate to load the data, then read the control limits.
 *
 * Steps:
 *      read the product_features row from mysql for the charted feature,
 *      query influx for all points where influx.argName == mysql.product_feature.name within the requested time range,
 *      group the returned points by time and assemble them into List<CPoint>,
 *      compute the control limits from the average nonconformity count per group (cbar).
 *      (Count-type charts are not run through the interpretation-scheme check.)
 *
 */
@Data
public class CGraph {
//count-type charts cannot be checked against an interpretation scheme: when the sample size n differs per group, the control limits are not necessarily the same either
// private List<InterpretationListArgForCount> interpretationScheme;
private String argName;
private List<CPoint> list;
private Double cbar;
private SpecificationLimit specificationLimit;
public CGraph(ProductFeatures productFeatures) throws Exception {
this.argName = productFeatures.getName();
list = new ArrayList<>();
this.specificationLimit = new SpecificationLimit(
productFeatures.getUsl()==null?null:productFeatures.getUsl(),
productFeatures.getSl()==null?null:productFeatures.getSl(),
productFeatures.getLsl()==null?null:productFeatures.getLsl()
);
}
private Double[] toDoubleArray(Object[] o){
Double[] res= new Double[o.length];
for(int i=0;i<o.length;i++){
res[i] = (Double) o[i];
}
return res;
}
private Double computeCbar(List<FluxTable> query){
Double totalFailNum = (double)0;
for (FluxTable fluxTable : query) {
List<FluxRecord> records = fluxTable.getRecords();
Integer failNum = 0;
for (FluxRecord fluxRecord : records) {
//the values were written to influx as Double, so they come back out as Double
Double value = Double.parseDouble(fluxRecord.getValueByKey("_value").toString());
if(value.equals((double) 0)){
failNum+=1;
}
}
totalFailNum =totalFailNum + (double)failNum;
}
return totalFailNum/query.size();
}
public static void main(String[] args) {
//2022-08-04 T06:59:55.628Z
String name = "2022-08-04 T06:59:55.628Z";
String[] s = name.split(" ");
String[] split = s[0].split("-");
System.out.println(name);
}
/**
 * name : data initialisation
 * desc : read the raw data from influxdb and transform it into the point list this chart needs
 * */
public void initialDate(QueryDataGroupByTimeParam queryDataParam){
queryDataParam.setBucket(Constant.bucket);
List<String> dropNames = new ArrayList<>();
dropNames.add("transationId");
dropNames.add("inspectionSheetId");
dropNames.add("batchNum");
queryDataParam.setDropedTagNames(dropNames);
queryDataParam.setTag(new Tag("argName",argName));
List<FluxTable> query = InfluxClient.Client.queryGroupByTime(queryDataParam);
//1. pull the raw data out of influxdb
//compute c bar
this.cbar = computeCbar(query);
//2. compute each time group
for(int i=0 ;i<query.size();i++){
List<FluxRecord> records = query.get(i).getRecords();
Integer failNum = 0;
String name = DataUtils.splitToNeed(records.get(0).getTime().toString(),queryDataParam.getTimeType());
for (FluxRecord fluxRecord : records) {
//the values were written to influx as Double, so they come back out as Double
Double value = Double.parseDouble(fluxRecord.getValueByKey("_value").toString());
if(value.equals((double) 0)){
failNum+=1;
}
}
list.add(new CPoint(
getCL(),
i,
(double)failNum,
name
));
}
}
/**
 * desc: get the control limits of the c chart
 * note: this must only be called after initialDate() has run
 * */
public ControlLimit getCL(){
Double mul = 3 * Math.sqrt( this.cbar );
Double lcl = (this.cbar-mul)<0?0:(this.cbar-mul);
return new ControlLimit(
this.cbar + mul,
this.cbar,
lcl
);
}
}
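For reference, CGraph.getCL() above produces the standard c-chart limits, with the lower limit clamped at zero exactly as the code does; \bar{c} is the average nonconformity count per group returned by computeCbar():

UCL_c = \bar{c} + 3\sqrt{\bar{c}}, \qquad CL_c = \bar{c}, \qquad LCL_c = \max\!\left(0,\; \bar{c} - 3\sqrt{\bar{c}}\right)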

View File

@ -0,0 +1,156 @@
package com.cnbm.processInspection.graphAnalyzed.forCount.np;
import com.cnbm.basic.entity.ProductFeatures;
import com.cnbm.common.spc.util.DataUtils;
import com.cnbm.influx.config.InfluxClient;
import com.cnbm.influx.constant.Constant;
import com.cnbm.influx.param.QueryDataGroupByTimeParam;
import com.cnbm.influx.param.Tag;
import com.cnbm.qualityPlanning.entity.*;
import com.influxdb.query.FluxRecord;
import com.influxdb.query.FluxTable;
import lombok.Data;
import java.util.ArrayList;
import java.util.List;
/**
 * @Desc: "np chart (number of nonconforming units), calculation class"
 * @Author: caixiang
 * @DATE: 2022/7/20 14:26
 * Usage: first new an NPGraph object, then call initialDate to load the data, then read the control limits.
 *
 * Steps:
 *      read the product_features row from mysql for the charted feature,
 *      query influx for all points where influx.argName == mysql.product_feature.name within the requested time range,
 *      group the returned points by time and assemble them into List<NPPoint>,
 *      compute the control limits from the pooled fraction nonconforming (pbar).
 *      (Count-type charts are not run through the interpretation-scheme check.)
 *
 */
@Data
public class NPGraph {
//count-type charts cannot be checked against an interpretation scheme: when the sample size n differs per group, the control limits are not necessarily the same either
// private List<InterpretationListArgForCount> interpretationScheme;
private String argName;
private List<NPPoint> list;
private Double pbar;
private SpecificationLimit specificationLimit;
public NPGraph(ProductFeatures productFeatures) throws Exception {
this.argName = productFeatures.getName();
list = new ArrayList<>();
this.specificationLimit = new SpecificationLimit(
productFeatures.getUsl()==null?null:productFeatures.getUsl(),
productFeatures.getSl()==null?null:productFeatures.getSl(),
productFeatures.getLsl()==null?null:productFeatures.getLsl()
);
}
private Double[] toDoubleArray(Object[] o){
Double[] res= new Double[o.length];
for(int i=0;i<o.length;i++){
res[i] = (Double) o[i];
}
return res;
}
private Double computePbar(List<FluxTable> query){
Double totalFailNum = (double)0;
Double totalN = (double)0;
for (FluxTable fluxTable : query) {
List<FluxRecord> records = fluxTable.getRecords();
Integer failNum = 0;
Integer n = records.size();
for (FluxRecord fluxRecord : records) {
//the values were written to influx as Double, so they come back out as Double
Double value = Double.parseDouble(fluxRecord.getValueByKey("_value").toString());
if(value.equals((double) 0)){
failNum+=1;
}
}
totalFailNum =totalFailNum + (double)failNum;
totalN = totalN + (double)n;
}
return totalFailNum/totalN;
}
/**
 * name : data initialisation
 * desc : read the raw data from influxdb and transform it into the point list this chart needs
 * */
public void initialDate(QueryDataGroupByTimeParam queryDataParam){
queryDataParam.setBucket(Constant.bucket);
List<String> dropNames = new ArrayList<>();
dropNames.add("transationId");
dropNames.add("inspectionSheetId");
dropNames.add("batchNum");
queryDataParam.setDropedTagNames(dropNames);
queryDataParam.setTag(new Tag("argName",argName));
List<FluxTable> query = InfluxClient.Client.queryGroupByTime(queryDataParam);
//1. pull the raw data out of influxdb
//compute p bar
this.pbar = computePbar(query);
//2. compute each time group
for(int i=0 ;i<query.size();i++){
List<FluxRecord> records = query.get(i).getRecords();
Integer failNum = 0;
Integer n = records.size();
String name = DataUtils.splitToNeed(records.get(0).getTime().toString(),queryDataParam.getTimeType());
for (FluxRecord fluxRecord : records) {
//the values were written to influx as Double, so they come back out as Double
Double value = Double.parseDouble(fluxRecord.getValueByKey("_value").toString());
if(value.equals((double) 0)){
failNum+=1;
}
}
list.add(new NPPoint(
getCL((double)n),
i,
failNum,
name
));
}
}
/**
 * desc: get the control limits of the np chart for a group of size n
 * note: this must only be called after initialDate() has run
 * */
public ControlLimit getCL(Double n){
Double npbar = n * this.pbar;
Double mul = 3 * Math.sqrt(npbar*(1-this.pbar));
Double lcl = (npbar-mul)<0?0:(npbar-mul);
return new ControlLimit(
npbar + mul,
npbar,
lcl
);
}
}
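For reference, NPGraph.getCL(n) above implements the standard np-chart limits for a group of size n, with the lower limit clamped at zero as in the code; \bar{p} is the pooled fraction nonconforming returned by computePbar():

UCL_{np} = n\bar{p} + 3\sqrt{n\bar{p}(1-\bar{p})}, \qquad CL_{np} = n\bar{p}, \qquad LCL_{np} = \max\!\left(0,\; n\bar{p} - 3\sqrt{n\bar{p}(1-\bar{p})}\right)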

View File

@ -0,0 +1,163 @@
package com.cnbm.processInspection.graphAnalyzed.forCount.p;
import com.cnbm.basic.entity.ProductFeatures;
import com.cnbm.common.spc.util.DataUtils;
import com.cnbm.influx.config.InfluxClient;
import com.cnbm.influx.constant.Constant;
import com.cnbm.influx.param.QueryDataGroupByTimeParam;
import com.cnbm.influx.param.Tag;
import com.cnbm.qualityPlanning.entity.ControlLimit;
import com.cnbm.qualityPlanning.entity.PPoint;
import com.cnbm.qualityPlanning.entity.SpecificationLimit;
import com.influxdb.query.FluxRecord;
import com.influxdb.query.FluxTable;
import lombok.Data;
import java.util.ArrayList;
import java.util.List;
/**
 * @Desc: "p chart (fraction nonconforming), calculation class"
 * @Author: caixiang
 * @DATE: 2022/7/20 14:26
 * Usage: first new a PGraph object, then call initialDate to load the data, then read the control limits.
 *
 * Steps:
 *      read the product_features row from mysql for the charted feature,
 *      query influx for all points where influx.argName == mysql.product_feature.name within the requested time range,
 *      group the returned points by time and assemble them into List<PPoint>,
 *      compute the control limits from the pooled fraction nonconforming (pbar).
 *      (Count-type charts are not run through the interpretation-scheme check.)
 *
 */
@Data
public class PGraph {
//count-type charts cannot be checked against an interpretation scheme: when the sample size n differs per group, the control limits are not necessarily the same either
// private List<InterpretationListArgForCount> interpretationScheme;
private String argName;
private List<PPoint> list;
private Double pbar;
private SpecificationLimit specificationLimit;
public PGraph(ProductFeatures productFeatures) throws Exception {
this.argName = productFeatures.getName();
list = new ArrayList<>();
this.specificationLimit = new SpecificationLimit(
productFeatures.getUsl()==null?null:productFeatures.getUsl(),
productFeatures.getSl()==null?null:productFeatures.getSl(),
productFeatures.getLsl()==null?null:productFeatures.getLsl()
);
}
private Double[] toDoubleArray(Object[] o){
Double[] res= new Double[o.length];
for(int i=0;i<o.length;i++){
res[i] = (Double) o[i];
}
return res;
}
private Double computePbar(List<FluxTable> query){
Double totalFailNum = (double)0;
Double totalN = (double)0;
for (FluxTable fluxTable : query) {
List<FluxRecord> records = fluxTable.getRecords();
Integer failNum = 0;
Integer n = records.size();
for (FluxRecord fluxRecord : records) {
//the values were written to influx as Double, so they come back out as Double
Double value = Double.parseDouble(fluxRecord.getValueByKey("_value").toString());
if(value.equals((double) 0)){
failNum+=1;
}
}
totalFailNum =totalFailNum + (double)failNum;
totalN = totalN + (double)n;
}
return totalFailNum/totalN;
}
public static void main(String[] args) {
//2022-08-04 T06:59:55.628Z
String name = "2022-08-04 T06:59:55.628Z";
String[] s = name.split(" ");
String[] split = s[0].split("-");
System.out.println(name);
}
/**
 * name : data initialisation
 * desc : read the raw data from influxdb and transform it into the point list this chart needs
 * */
public void initialDate(QueryDataGroupByTimeParam queryDataParam){
queryDataParam.setBucket(Constant.bucket);
List<String> dropNames = new ArrayList<>();
dropNames.add("transationId");
dropNames.add("inspectionSheetId");
dropNames.add("batchNum");
queryDataParam.setDropedTagNames(dropNames);
queryDataParam.setTag(new Tag("argName",argName));
List<FluxTable> query = InfluxClient.Client.queryGroupByTime(queryDataParam);
//1. pull the raw data out of influxdb
//compute p bar
this.pbar = computePbar(query);
//2. compute each time group
for(int i=0 ;i<query.size();i++){
List<FluxRecord> records = query.get(i).getRecords();
Integer failNum = 0;
Integer n = records.size();
String name = DataUtils.splitToNeed(records.get(0).getTime().toString(),queryDataParam.getTimeType());
for (FluxRecord fluxRecord : records) {
//the values were written to influx as Double, so they come back out as Double
Double value = Double.parseDouble(fluxRecord.getValueByKey("_value").toString());
if(value.equals((double) 0)){
failNum+=1;
}
}
Double pi = (double)failNum / (double)n;
list.add(new PPoint(
getCL((double)n),
i,
pi,
name
));
}
}
/**
 * desc: get the control limits of the p chart for a group of size n
 * note: this must only be called after initialDate() has run
 * */
public ControlLimit getCL(Double n){
Double mul = 3 * Math.sqrt( ( this.pbar * (1-this.pbar) ) / n );
Double lcl = (this.pbar-mul)<0?0:(this.pbar-mul);
return new ControlLimit(
this.pbar + mul,
this.pbar,
lcl
);
}
}
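For reference, PGraph.getCL(n) above implements the standard p-chart limits for a group of size n, with the lower limit clamped at zero as in the code; \bar{p} is the pooled fraction nonconforming returned by computePbar():

UCL_p = \bar{p} + 3\sqrt{\frac{\bar{p}(1-\bar{p})}{n}}, \qquad CL_p = \bar{p}, \qquad LCL_p = \max\!\left(0,\; \bar{p} - 3\sqrt{\frac{\bar{p}(1-\bar{p})}{n}}\right)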

View File

@ -0,0 +1,161 @@
package com.cnbm.processInspection.graphAnalyzed.forCount.u;
import com.cnbm.basic.entity.ProductFeatures;
import com.cnbm.common.spc.util.DataUtils;
import com.cnbm.influx.config.InfluxClient;
import com.cnbm.influx.constant.Constant;
import com.cnbm.influx.param.QueryDataGroupByTimeParam;
import com.cnbm.influx.param.Tag;
import com.cnbm.qualityPlanning.entity.ControlLimit;
import com.cnbm.qualityPlanning.entity.SpecificationLimit;
import com.cnbm.qualityPlanning.entity.UPoint;
import com.influxdb.query.FluxRecord;
import com.influxdb.query.FluxTable;
import lombok.Data;
import java.util.ArrayList;
import java.util.List;
/**
 * @Desc: "u chart (nonconformities per unit), calculation class"
 * @Author: caixiang
 * @DATE: 2022/7/20 14:26
 * Usage: first new a UGraph object, then call initialDate to load the data, then read the control limits.
 *
 * Steps:
 *      read the product_features row from mysql for the charted feature,
 *      query influx for all points where influx.argName == mysql.product_feature.name within the requested time range,
 *      group the returned points by time and assemble them into List<UPoint>,
 *      compute the control limits from the pooled nonconformities-per-unit rate (ubar).
 *      (Count-type charts are not run through the interpretation-scheme check.)
 *
 */
@Data
public class UGraph {
//count-type charts cannot be checked against an interpretation scheme: when the sample size n differs per group, the control limits are not necessarily the same either
// private List<InterpretationListArgForCount> interpretationScheme;
private String argName;
private List<UPoint> list;
private Double ubar;
private SpecificationLimit specificationLimit;
public UGraph(ProductFeatures productFeatures) throws Exception {
this.argName = productFeatures.getName();
list = new ArrayList<>();
this.specificationLimit = new SpecificationLimit(
productFeatures.getUsl()==null?null:productFeatures.getUsl(),
productFeatures.getSl()==null?null:productFeatures.getSl(),
productFeatures.getLsl()==null?null:productFeatures.getLsl()
);
}
private Double[] toDoubleArray(Object[] o){
Double[] res= new Double[o.length];
for(int i=0;i<o.length;i++){
res[i] = (Double) o[i];
}
return res;
}
private Double computeUbar(List<FluxTable> query){
Double totalFailNum = (double)0;
Double totalN = (double)0;
for (FluxTable fluxTable : query) {
List<FluxRecord> records = fluxTable.getRecords();
Integer failNum = 0;
Integer n = records.size();
for (FluxRecord fluxRecord : records) {
//the values were written to influx as Double, so they come back out as Double
Double value = Double.parseDouble(fluxRecord.getValueByKey("_value").toString());
if(value.equals((double) 0)){
failNum+=1;
}
}
totalFailNum =totalFailNum + (double)failNum;
totalN = totalN + (double)n;
}
return totalFailNum/totalN;
}
public static void main(String[] args) {
//2022-08-04 T06:59:55.628Z
String name = "2022-08-04 T06:59:55.628Z";
String[] s = name.split(" ");
String[] split = s[0].split("-");
System.out.println(name);
}
/**
 * name : data initialisation
 * desc : read the raw data from influxdb and transform it into the point list this chart needs
 * */
public void initialDate(QueryDataGroupByTimeParam queryDataParam){
queryDataParam.setBucket(Constant.bucket);
List<String> dropNames = new ArrayList<>();
dropNames.add("transationId");
dropNames.add("inspectionSheetId");
dropNames.add("batchNum");
queryDataParam.setDropedTagNames(dropNames);
queryDataParam.setTag(new Tag("argName",argName));
List<FluxTable> query = InfluxClient.Client.queryGroupByTime(queryDataParam);
//1. pull the raw data out of influxdb
//compute u bar
this.ubar = computeUbar(query);
//2. compute each time group
for(int i=0 ;i<query.size();i++){
List<FluxRecord> records = query.get(i).getRecords();
Integer failNum = 0;
Integer n = records.size();
String name = DataUtils.splitToNeed(records.get(0).getTime().toString(),queryDataParam.getTimeType());
for (FluxRecord fluxRecord : records) {
//the values were written to influx as Double, so they come back out as Double
Double value = Double.parseDouble(fluxRecord.getValueByKey("_value").toString());
if(value.equals((double) 0)){
failNum+=1;
}
}
Double ui = (double)failNum / (double)n;
list.add(new UPoint(
getCL((double)n),
i,
ui,
name
));
}
}
/**
 * desc: get the control limits of the u chart for a group of size n
 * note: this must only be called after initialDate() has run
 * */
public ControlLimit getCL(Double n){
Double mul = 3 * Math.sqrt( this.ubar / n );
Double lcl = (this.ubar-mul)<0?0:(this.ubar-mul);
return new ControlLimit(
this.ubar + mul,
this.ubar,
lcl
);
}
}
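For reference, UGraph.getCL(n) above implements the standard u-chart limits for a group of size n, with the lower limit clamped at zero as in the code; \bar{u} is the pooled nonconformities-per-unit rate returned by computeUbar():

UCL_u = \bar{u} + 3\sqrt{\frac{\bar{u}}{n}}, \qquad CL_u = \bar{u}, \qquad LCL_u = \max\!\left(0,\; \bar{u} - 3\sqrt{\frac{\bar{u}}{n}}\right)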

View File

@ -75,7 +75,7 @@ public class MeanRGraph {
this.specificationLimit = new SpecificationLimit(
productFeatures.getUsl()==null?null:productFeatures.getUsl(),
productFeatures.getSl()==null?null:productFeatures.getSl(),
productFeatures.getUsl()==null?null:productFeatures.getUsl()
productFeatures.getLsl()==null?null:productFeatures.getLsl()
);
}
@ -102,6 +102,7 @@ public class MeanRGraph {
List<String> dropNames = new ArrayList<>();
dropNames.add("transationId");
dropNames.add("inspectionSheetId");
dropNames.add("batchNum");
queryDataParam.setDropedTagNames(dropNames);
queryDataParam.setTag(new Tag("argName",argName));

View File

@ -72,7 +72,7 @@ public class MeanStandardDeviationGraph {
this.specificationLimit = new SpecificationLimit(
productFeatures.getUsl()==null?null:productFeatures.getUsl(),
productFeatures.getSl()==null?null:productFeatures.getSl(),
productFeatures.getUsl()==null?null:productFeatures.getUsl()
productFeatures.getLsl()==null?null:productFeatures.getLsl()
);
}
@ -99,6 +99,7 @@ public class MeanStandardDeviationGraph {
List<String> dropNames = new ArrayList<>();
dropNames.add("transationId");
dropNames.add("inspectionSheetId");
dropNames.add("batchNum");
queryDataParam.setDropedTagNames(dropNames);
queryDataParam.setTag(new Tag("argName",argName));

View File

@ -69,7 +69,7 @@ public class XMRGraph {
this.specificationLimit = new SpecificationLimit(
productFeatures.getUsl()==null?null:productFeatures.getUsl(),
productFeatures.getSl()==null?null:productFeatures.getSl(),
productFeatures.getUsl()==null?null:productFeatures.getUsl()
productFeatures.getLsl()==null?null:productFeatures.getLsl()
);
}
@ -96,6 +96,7 @@ public class XMRGraph {
List<String> dropNames = new ArrayList<>();
dropNames.add("transationId");
dropNames.add("inspectionSheetId");
dropNames.add("batchNum");
queryDataParam.setDropedTagNames(dropNames);
queryDataParam.setTag(new Tag("argName",argName));

View File

@ -0,0 +1,28 @@
package com.cnbm.qualityPlanning.entity;
import lombok.Data;
/**
* @Desc: "整合,处理好 后的一行数据 (sampleSize 后的数据) "
* @Author: caixiang
* @DATE: 2022/7/21 9:43
*/
@Data
public class CPoint {
private ControlLimit controlLimit;
private Integer position;
//count of nonconformities (failures) in the group
private Double value;
private String name;
public CPoint(ControlLimit controlLimit, Integer position, Double value, String name) {
this.controlLimit = controlLimit;
this.position = position;
this.value = value;
this.name = name;
}
}

View File

@ -0,0 +1,32 @@
package com.cnbm.qualityPlanning.entity;
import lombok.Data;
/**
* @Desc: "整合,处理好 后的一行数据 (sampleSize 后的数据) "
* @Author: caixiang
* @DATE: 2022/7/21 9:43
*/
@Data
public class NPPoint{
private ControlLimit controlLimit;
private Integer position;
//number of nonconforming units in the group
private Integer value;
private String name;
public NPPoint(ControlLimit controlLimit, Integer position, Integer value,String name) {
this.controlLimit = controlLimit;
this.position = position;
this.value = value;
this.name = name;
}
}

View File

@ -0,0 +1,28 @@
package com.cnbm.qualityPlanning.entity;
import lombok.Data;
/**
* @Desc: "整合,处理好 后的一行数据 (sampleSize 后的数据) "
* @Author: caixiang
* @DATE: 2022/7/21 9:43
*/
@Data
public class PPoint {
private ControlLimit controlLimit;
private Integer position;
//fraction nonconforming of the group (failNum / n)
private Double value;
private String name;
public PPoint(ControlLimit controlLimit, Integer position, Double value,String name) {
this.controlLimit = controlLimit;
this.position = position;
this.value = value;
this.name = name;
}
}

View File

@ -0,0 +1,28 @@
package com.cnbm.qualityPlanning.entity;
import lombok.Data;
/**
* @Desc: "整合,处理好 后的一行数据 (sampleSize 后的数据) "
* @Author: caixiang
* @DATE: 2022/7/21 9:43
*/
@Data
public class UPoint {
private ControlLimit controlLimit;
private Integer position;
//nonconformities per unit for the group (failNum / n)
private Double value;
private String name;
public UPoint(ControlLimit controlLimit, Integer position, Double value, String name) {
this.controlLimit = controlLimit;
this.position = position;
this.value = value;
this.name = name;
}
}