Commit f84acfe5 authored by 赵啸非

init commit
*.iml
node_modules/
.idea/
*.class
target/
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>com.mortals.xhx</groupId>
<artifactId>common-lib</artifactId>
<version>0.0.1-SNAPSHOT</version>
<packaging>jar</packaging>
<name>common-lib</name>
<description>公共依赖库</description>
<parent>
<groupId>com.mortals.xhx</groupId>
<artifactId>device-platform</artifactId>
<version>1.0.0-SNAPSHOT</version>
</parent>
<properties>
<rabbitmq.version>4.8.0</rabbitmq.version>
</properties>
<dependencies>
<dependency>
<groupId>com.mortals.framework</groupId>
<artifactId>mortals-framework-springcloud</artifactId>
</dependency>
<dependency>
<groupId>com.alibaba</groupId>
<artifactId>fastjson</artifactId>
</dependency>
<dependency>
<groupId>org.projectlombok</groupId>
<artifactId>lombok</artifactId>
</dependency>
<dependency>
<groupId>io.netty</groupId>
<artifactId>netty-all</artifactId>
</dependency>
<dependency>
<groupId>org.apache.kafka</groupId>
<artifactId>kafka-clients</artifactId>
<version>2.6.0</version>
</dependency>
<dependency>
<groupId>com.rabbitmq</groupId>
<artifactId>amqp-client</artifactId>
<version>${rabbitmq.version}</version>
</dependency>
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-pool2</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-configuration-processor</artifactId>
<optional>true</optional>
</dependency>
<dependency>
<groupId>cn.hutool</groupId>
<artifactId>hutool-all</artifactId>
</dependency>
<dependency>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
</dependency>
<dependency>
<groupId>io.swagger</groupId>
<artifactId>swagger-annotations</artifactId>
</dependency>
<dependency>
<groupId>com.google.zxing</groupId>
<artifactId>javase</artifactId>
</dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<scope>test</scope>
</dependency>
</dependencies>
</project>
package com.mortals.xhx.common;
/**
* @author: zxfei
* @date: 2021/11/22 11:17
* @description:
**/
public class BrokerConfig {
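// Intentionally empty for now; broker connection settings (host, port, credentials, etc.)
// are assumed to be filled in here by the concrete queue implementations.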
}
package com.mortals.xhx.common.code;
import com.mortals.framework.ap.SysConstains;
import com.mortals.framework.common.IBaseEnum;
import java.util.LinkedHashMap;
import java.util.Map;
/**
* Approval status
* @author
*
*/
public enum ApproveStateEnum implements IBaseEnum {
UN_AUDIT(1, "未审核", SysConstains.STYLE_DEFAULT),
AUDITING(2, "审核中", SysConstains.STYLE_PRIMARY),
AUDIT_FIN(3, "审核完成", SysConstains.STYLE_PRIMARY);
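// e.g. getByValue(2) returns AUDITING; getEnumMap(3) returns {"1" -> 未审核, "2" -> 审核中} (value 3 excluded).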
private int value;
private String desc;
private String style;
ApproveStateEnum(int value, String desc, String style) {
this.value = value;
this.desc = desc;
this.style = style;
}
@Override
public int getValue() {
return this.value;
}
public String getDesc() {
return desc;
}
public String getStyle()
{
return style;
}
public static ApproveStateEnum getByValue(int value) {
for (ApproveStateEnum examStatus : ApproveStateEnum.values()) {
if (examStatus.getValue() == value) {
return examStatus;
}
}
return null;
}
/**
* 获取Map集合
* @param eItem 不包含项
* @return
*/
public static Map<String,String> getEnumMap(int... eItem) {
Map<String,String> resultMap= new LinkedHashMap<String,String>();
for (ApproveStateEnum item : ApproveStateEnum.values()) {
try{
boolean hasE = false;
for (int e : eItem){
if(item.getValue()==e){
hasE = true;
break;
}
}
if(!hasE){
resultMap.put(item.getValue()+"", item.getDesc());
}
}catch(Exception ex){
}
}
return resultMap;
}
}
package com.mortals.xhx.common.code;
import com.mortals.framework.ap.SysConstains;
import com.mortals.framework.common.IBaseEnum;
import java.util.LinkedHashMap;
import java.util.Map;
/**
* Area level
*
* @author yiqimin
* @create 2018/09/26
*/
public enum AreaLevelEnum implements IBaseEnum {
PROVINCE(1, "省", SysConstains.STYLE_DEFAULT),
CITY(2, "市", SysConstains.STYLE_DEFAULT),
AREA(3, "区/县", SysConstains.STYLE_DEFAULT),
TOWN(4, "镇", SysConstains.STYLE_DEFAULT);
private int value;
private String desc;
private String style;
AreaLevelEnum(int value, String desc, String style) {
this.value = value;
this.desc = desc;
this.style = style;
}
@Override
public int getValue() {
return this.value;
}
public String getDesc() {
return desc;
}
public String getStyle()
{
return style;
}
public static AreaLevelEnum getByValue(int value) {
for (AreaLevelEnum e : AreaLevelEnum.values()) {
if (e.getValue() == value) {
return e;
}
}
return null;
}
public static Map<String,String> getEnumMap(int... eItem) {
Map<String,String> resultMap= new LinkedHashMap<String,String>();
for (AreaLevelEnum item : AreaLevelEnum.values()) {
try{
boolean hasE = false;
for (int e : eItem){
if(item.getValue()==e){
hasE = true;
break;
}
}
if(!hasE){
resultMap.put(item.getValue()+"", item.getDesc());
}
}catch(Exception ex){
}
}
return resultMap;
}
}
package com.mortals.xhx.common.code;
import com.mortals.framework.ap.SysConstains;
import com.mortals.framework.common.IBaseEnum;
import java.util.LinkedHashMap;
import java.util.Map;
/**
* Access authorization type
* @author
*
*/
public enum AuthType implements IBaseEnum {
UNLIMITED(0, "无限制", SysConstains.STYLE_DEFAULT),
UNLOGIN(1, "无需登录查看", SysConstains.STYLE_DEFAULT),
LOGIN(2, "需要登录查看", SysConstains.STYLE_DEFAULT),
AUTH(3, "需要角色权限查看", SysConstains.STYLE_PRIMARY);
//0:无限制,1:无需登录查看,2:需要登录查看,3:需要角色权限查看,默认3
private int value;
private String desc;
private String style;
AuthType(int value, String desc, String style) {
this.value = value;
this.desc = desc;
this.style = style;
}
@Override
public int getValue() {
return this.value;
}
public String getDesc() {
return desc;
}
public String getStyle()
{
return style;
}
public static AuthType getByValue(int value) {
for (AuthType examStatus : AuthType.values()) {
if (examStatus.getValue() == value) {
return examStatus;
}
}
return null;
}
/**
* 获取Map集合
* @param eItem 不包含项
* @return
*/
public static Map<String,String> getEnumMap(int... eItem) {
Map<String,String> resultMap= new LinkedHashMap<String,String>();
for (AuthType item : AuthType.values()) {
try{
boolean hasE = false;
for (int e : eItem){
if(item.getValue()==e){
hasE = true;
break;
}
}
if(!hasE){
resultMap.put(item.getValue()+"", item.getDesc());
}
}catch(Exception ex){
}
}
return resultMap;
}
}
package com.mortals.xhx.common.code;
import java.io.Serializable;
/**
* Approval action type
*
* @author: zxfei
* @date: 2021/8/26 9:58
*/
public enum CommentTypeEnum implements Serializable {
SP("审批"),
QS("签收"),
FQS("反签收"),
BH("驳回"),
CH("撤回"),
ZC("暂存"),
XTZX("系统执行"),
TJ("提交"),
CXTJ("重新提交"),
SPJS("审批结束"),
LCZZ("流程终止"),
WP("委派"),
ZH("知会"),
ZY("转阅"),
YY("已阅"),
ZB("转办"),
SQ("授权"),
SPBJQ("审批并加签"),
HJQ("后加签"),
QJQ("前加签"),
CFTG("重复跳过"),
XT("协同"),
PS("评审");
private String name;//名称
/**
* 通过type获取Msg
*
* @param type
* @return
* @Description:
*/
public static String getEnumMsgByType(String type) {
for (CommentTypeEnum e : CommentTypeEnum.values()) {
if (e.toString().equals(type)) {
return e.name;
}
}
return "";
}
CommentTypeEnum(String name) {
this.name = name;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
}
package com.mortals.xhx.common.code;
import com.mortals.framework.common.IBaseEnum;
import java.util.LinkedHashMap;
import java.util.Map;
public enum CompareTypeEnum implements IBaseEnum {
DAY(1, "按日"),
MONTH(2, "按月");
private int value;
private String desc;
CompareTypeEnum(int value, String desc) {
this.value = value;
this.desc = desc;
}
@Override
public int getValue() {
return this.value;
}
@Override
public String getDesc() {
return this.desc;
}
public static Map<String,String> getEnumMap() {
Map<String, String> resultMap= new LinkedHashMap<String, String>();
for (CompareTypeEnum item : CompareTypeEnum.values()) {
resultMap.put(item.getValue() + "", item.getDesc());
}
return resultMap;
}
}
package com.mortals.xhx.common.code;
import com.mortals.framework.ap.SysConstains;
import com.mortals.framework.common.IBaseEnum;
import java.util.LinkedHashMap;
import java.util.Map;
/**
* Data status
* @author
*
*/
public enum DataSatus implements IBaseEnum {
DISENABLE(0, "禁用", SysConstains.STYLE_DEFAULT),
ENABLE(1, "启用", SysConstains.STYLE_DEFAULT),
CLOSE(2, "注销", SysConstains.STYLE_DEFAULT),
DELETE(3, "已删除", SysConstains.STYLE_DEFAULT),
OVERDUE(4, "过期", SysConstains.STYLE_DEFAULT),
USEOUT(5, "用完", SysConstains.STYLE_DEFAULT);
private int value;
private String desc;
private String style;
DataSatus(int value, String desc, String style) {
this.value = value;
this.desc = desc;
this.style = style;
}
@Override
public int getValue() {
return this.value;
}
public String getDesc() {
return desc;
}
public String getStyle()
{
return style;
}
public static DataSatus getByValue(int value) {
for (DataSatus examStatus : DataSatus.values()) {
if (examStatus.getValue() == value) {
return examStatus;
}
}
return null;
}
/**
* 获取Map集合
* @param eItem 不包含项
* @return
*/
public static Map<String,String> getEnumMap(int... eItem) {
Map<String,String> resultMap= new LinkedHashMap<String,String>();
for (DataSatus item : DataSatus.values()) {
try{
boolean hasE = false;
for (int e : eItem){
if(item.getValue()==e){
hasE = true;
break;
}
}
if(!hasE){
resultMap.put(item.getValue()+"", item.getDesc());
}
}catch(Exception ex){
}
}
return resultMap;
}
}
package com.mortals.xhx.common.code;
import java.util.LinkedHashMap;
import java.util.Map;
/**
* Data status
* @author
*
*/
public enum DataSatusEnum {
DISENABLE(0, "禁用"),
ENABLE(1, "启用"),
CLOSE(2, "注销"),
DELETE(3, "已删除"),
OVERDUE(4, "过期"),
USEOUT(5, "用完");
private int value;
private String desc;
DataSatusEnum(int value, String desc) {
this.value = value;
this.desc = desc;
}
public int getValue() {
return this.value;
}
public String getDesc() {
return desc;
}
public static DataSatusEnum getByValue(int value) {
for (DataSatusEnum examStatus : DataSatusEnum.values()) {
if (examStatus.getValue() == value) {
return examStatus;
}
}
return null;
}
/**
* 获取Map集合
* @param eItem 不包含项
* @return
*/
public static Map<String,String> getEnumMap(int... eItem) {
Map<String,String> resultMap= new LinkedHashMap<String,String>();
for (DataSatusEnum item : DataSatusEnum.values()) {
try{
boolean hasE = false;
for (int e : eItem){
if(item.getValue()==e){
hasE = true;
break;
}
}
if(!hasE){
resultMap.put(item.getValue()+"", item.getDesc());
}
}catch(Exception ex){
}
}
return resultMap;
}
}
package com.mortals.xhx.common.code;
import com.mortals.framework.ap.SysConstains;
import com.mortals.framework.common.IBaseEnum;
import java.util.LinkedHashMap;
import java.util.Map;
/**
* Flow state
* @author
*
*/
public enum FlowStateType implements IBaseEnum {
/** 激活 */
active(1, "激活", SysConstains.STYLE_DEFAULT),
/** 挂起 */
suspend(2, "挂起", SysConstains.STYLE_PRIMARY);
private int value;
private String desc;
private String style;
FlowStateType(int value, String desc, String style) {
this.value = value;
this.desc = desc;
this.style = style;
}
@Override
public int getValue() {
return this.value;
}
public String getDesc() {
return desc;
}
public String getStyle()
{
return style;
}
public static FlowStateType getByValue(int value) {
for (FlowStateType examStatus : FlowStateType.values()) {
if (examStatus.getValue() == value) {
return examStatus;
}
}
return null;
}
/**
* 获取Map集合
* @param eItem 不包含项
* @return
*/
public static Map<String,String> getEnumMap(int... eItem) {
Map<String,String> resultMap= new LinkedHashMap<String,String>();
for (FlowStateType item : FlowStateType.values()) {
try{
boolean hasE = false;
for (int e : eItem){
if(item.getValue()==e){
hasE = true;
break;
}
}
if(!hasE){
resultMap.put(item.getValue()+"", item.getDesc());
}
}catch(Exception ex){
}
}
return resultMap;
}
}
package com.mortals.xhx.common.code;
import com.mortals.framework.ap.SysConstains;
import com.mortals.framework.common.IBaseEnum;
import java.util.LinkedHashMap;
import java.util.Map;
/**
* Flow notification type
* @author
*
*/
public enum FlowStatusNotifyType implements IBaseEnum {
FLOW_START(0, "流程启动", SysConstains.STYLE_DEFAULT),
FLOW_END(1, "流程结束", SysConstains.STYLE_PRIMARY),
TASK_CREATE(2, "任务创建", SysConstains.STYLE_PRIMARY),
TASK_COMPLETE(3, "任务结束", SysConstains.STYLE_PRIMARY);
private int value;
private String desc;
private String style;
FlowStatusNotifyType(int value, String desc, String style) {
this.value = value;
this.desc = desc;
this.style = style;
}
@Override
public int getValue() {
return this.value;
}
public String getDesc() {
return desc;
}
public String getStyle()
{
return style;
}
public static FlowStatusNotifyType getByValue(int value) {
for (FlowStatusNotifyType examStatus : FlowStatusNotifyType.values()) {
if (examStatus.getValue() == value) {
return examStatus;
}
}
return null;
}
/**
* 获取Map集合
* @param eItem 不包含项
* @return
*/
public static Map<String,String> getEnumMap(int... eItem) {
Map<String,String> resultMap= new LinkedHashMap<String,String>();
for (FlowStatusNotifyType item : FlowStatusNotifyType.values()) {
try{
boolean hasE = false;
for (int e : eItem){
if(item.getValue()==e){
hasE = true;
break;
}
}
if(!hasE){
resultMap.put(item.getValue()+"", item.getDesc());
}
}catch(Exception ex){
}
}
return resultMap;
}
}
package com.mortals.xhx.common.code;
import com.mortals.framework.ap.SysConstains;
import com.mortals.framework.common.IBaseEnum;
import java.util.LinkedHashMap;
import java.util.Map;
public enum MenuAuthType implements IBaseEnum{
/** 无限制 */
UNLIMIT(0, "无限制", SysConstains.STYLE_DEFAULT),
/** 无需登录查看 */
WITHOUTLOGIN(1, "无需登录查看", SysConstains.STYLE_DEFAULT),
/** 需要登录查看 */
WITHLOGIN(2, "需要登录查看", SysConstains.STYLE_DEFAULT),
/** 需要角色权限查看 */
WITHAUTHORITY(3, "需要角色权限查看", SysConstains.STYLE_DEFAULT);
private int value;
private String desc;
private String style;
MenuAuthType(int value, String desc, String style) {
this.value = value;
this.desc = desc;
this.style = style;
}
@Override
public int getValue() {
return this.value;
}
public String getDesc() {
return desc;
}
public String getStyle()
{
return style;
}
public static MenuAuthType getByValue(int value) {
for (MenuAuthType menuAuthType : MenuAuthType.values()) {
if (menuAuthType.getValue() == value) {
return menuAuthType;
}
}
return null;
}
/**
* 获取Map集合
* @param eItem 不包含项
* @return
*/
public static Map<String,String> getEnumMap(int... eItem) {
Map<String,String> resultMap= new LinkedHashMap<String,String>();
for (MenuAuthType item : MenuAuthType.values()) {
try{
boolean hasE = false;
for (int e : eItem){
if(item.getValue()==e){
hasE = true;
break;
}
}
if(!hasE){
resultMap.put(item.getValue()+"", item.getDesc());
}
}catch(Exception ex){
}
}
return resultMap;
}
}
package com.mortals.xhx.common.code;
import com.mortals.framework.ap.SysConstains;
import com.mortals.framework.common.IBaseEnum;
import java.util.LinkedHashMap;
import java.util.Map;
public enum MenuComm implements IBaseEnum{
/** 非常用 */
UNCOMMON(0, "非常用", SysConstains.STYLE_DEFAULT),
/** 常用 */
COMMON(1, "常用", SysConstains.STYLE_DEFAULT);
private int value;
private String desc;
private String style;
MenuComm(int value, String desc, String style) {
this.value = value;
this.desc = desc;
this.style = style;
}
@Override
public int getValue() {
return this.value;
}
public String getDesc() {
return desc;
}
public String getStyle()
{
return style;
}
public static MenuComm getByValue(int value) {
for (MenuComm menuComm : MenuComm.values()) {
if (menuComm.getValue() == value) {
return menuComm;
}
}
return null;
}
/**
* 获取Map集合
* @param eItem 不包含项
* @return
*/
public static Map<String,String> getEnumMap(int... eItem) {
Map<String,String> resultMap= new LinkedHashMap<String,String>();
for (MenuComm item : MenuComm.values()) {
try{
boolean hasE = false;
for (int e : eItem){
if(item.getValue()==e){
hasE = true;
break;
}
}
if(!hasE){
resultMap.put(item.getValue()+"", item.getDesc());
}
}catch(Exception ex){
}
}
return resultMap;
}
}
package com.mortals.xhx.common.code;
import com.mortals.framework.ap.SysConstains;
import com.mortals.framework.common.IBaseEnum;
import java.util.LinkedHashMap;
import java.util.Map;
public enum MenuLinkType implements IBaseEnum{
/** 普通 */
NORMAL(0, "普通", SysConstains.STYLE_DEFAULT),
/** 弹出 */
POP(1, "弹出", SysConstains.STYLE_DEFAULT),
/** 脚本(JavaScript) */
SCRIPT(2, "脚本", SysConstains.STYLE_DEFAULT);
private int value;
private String desc;
private String style;
MenuLinkType(int value, String desc, String style) {
this.value = value;
this.desc = desc;
this.style = style;
}
@Override
public int getValue() {
return this.value;
}
public String getDesc() {
return desc;
}
public String getStyle()
{
return style;
}
public static MenuLinkType getByValue(int value) {
for (MenuLinkType menuLinkType : MenuLinkType.values()) {
if (menuLinkType.getValue() == value) {
return menuLinkType;
}
}
return null;
}
/**
* 获取Map集合
* @param eItem 不包含项
* @return
*/
public static Map<String,String> getEnumMap(int... eItem) {
Map<String,String> resultMap= new LinkedHashMap<String,String>();
for (MenuLinkType item : MenuLinkType.values()) {
try{
boolean hasE = false;
for (int e : eItem){
if(item.getValue()==e){
hasE = true;
break;
}
}
if(!hasE){
resultMap.put(item.getValue()+"", item.getDesc());
}
}catch(Exception ex){
}
}
return resultMap;
}
}
package com.mortals.xhx.common.code;
import com.mortals.framework.ap.SysConstains;
import com.mortals.framework.common.IBaseEnum;
import java.util.LinkedHashMap;
import java.util.Map;
public enum MenuType implements IBaseEnum{
/** 主菜单 */
MAIN(0, "主菜单", SysConstains.STYLE_DEFAULT),
/** 非主菜单 */
UNMAIN(1, "非主菜单", SysConstains.STYLE_DEFAULT);
private int value;
private String desc;
private String style;
MenuType(int value, String desc, String style) {
this.value = value;
this.desc = desc;
this.style = style;
}
@Override
public int getValue() {
return this.value;
}
public String getDesc() {
return desc;
}
public String getStyle()
{
return style;
}
public static MenuType getByValue(int value) {
for (MenuType menuType : MenuType.values()) {
if (menuType.getValue() == value) {
return menuType;
}
}
return null;
}
/**
* 获取Map集合
* @param eItem 不包含项
* @return
*/
public static Map<String,String> getEnumMap(int... eItem) {
Map<String,String> resultMap= new LinkedHashMap<String,String>();
for (MenuType item : MenuType.values()) {
try{
boolean hasE = false;
for (int e : eItem){
if(item.getValue()==e){
hasE = true;
break;
}
}
if(!hasE){
resultMap.put(item.getValue()+"", item.getDesc());
}
}catch(Exception ex){
}
}
return resultMap;
}
}
package com.mortals.xhx.common.code;
import java.util.LinkedHashMap;
import java.util.Map;
/**
* Parameter modification status
* @author
*
*/
public enum ModStatusEnum {
/** 页面隐藏 */
HIDDEN(0, "隐藏"),
/** 页面可查看 */
VIEW(1, "查看"),
/** 页面可修改 */
EDIT(2, "修改"),
/** 页面可删除 */
DELETE(3, "删除"),
/** 页面可修改删除 */
EDITANDDELETE(4, "修改删除");
private int value;
private String desc;
ModStatusEnum(int value, String desc) {
this.value = value;
this.desc = desc;
}
public int getValue() {
return this.value;
}
public String getDesc() {
return desc;
}
public static ModStatusEnum getByValue(int value) {
for (ModStatusEnum modStatus : ModStatusEnum.values()) {
if (modStatus.getValue() == value) {
return modStatus;
}
}
return null;
}
/**
* 获取Map集合
* @param eItem 不包含项
* @return
*/
public static Map<String,String> getEnumMap(int... eItem) {
Map<String,String> resultMap= new LinkedHashMap<String,String>();
for (ModStatusEnum item : ModStatusEnum.values()) {
try{
boolean hasE = false;
for (int e : eItem){
if(item.getValue()==e){
hasE = true;
break;
}
}
if(!hasE){
resultMap.put(item.getValue()+"", item.getDesc());
}
}catch(Exception ex){
}
}
return resultMap;
}
}
package com.mortals.xhx.common.code;
import java.util.HashMap;
import java.util.Map;
/**
* Created by chendilin on 2018/3/7.
*/
public enum OperTypeEnum {
SAVE(0,"添加"),
UPDATE(1,"更新"),
DELETE(2,"删除"),
OTHER(-1,"其它");
private int value;
private String msg;
private OperTypeEnum(int value,String msg) {
this.value = value;
this.msg = msg;
}
public int getValue() {
return this.value;
}
public static Map<String,String> getEnumMap(){
Map<String,String> resultMap = new HashMap<>();
OperTypeEnum[] operTypeEnum = OperTypeEnum.values();
for (OperTypeEnum typeEnum : operTypeEnum) {
resultMap.put(String.valueOf(typeEnum.value),typeEnum.msg);
}
return resultMap;
}
}
package com.mortals.xhx.common.code;
import java.io.Serializable;
/**
* Process status
*
* @author: zxfei
* @date: 2021/8/26 9:58
*/
public enum ProcessStatusEnum implements Serializable {
SPZ("审批中"),
BH("驳回"),
CH("撤回"),
JQ("加签"),
ZC("暂存"),
ZB("转办"),
BJ("办结"),
ZZ("终止");
private String msg;
private String type;
ProcessStatusEnum(String msg) {
this.msg = msg;
}
/**
* 通过type获取Msg
*
* @param type
* @return
* @Description:
*/
public static String getEnumMsgByType(String type) {
for (ProcessStatusEnum e : ProcessStatusEnum.values()) {
if (e.toString().equals(type)) {
return e.msg;
}
}
return "";
}
public String getMsg() {
return msg;
}
public void setMsg(String msg) {
this.msg = msg;
}
public String getType() {
return type;
}
public void setType(String type) {
this.type = type;
}
}
package com.mortals.xhx.common.code;
/**
* Unified business result codes
* @author linlc
*
*/
public enum ResultCodeEnum {
CODE_001(156001,"参数类型错误"),
CODE_002(156002,"部分参数为空"),
CODE_003(156003,"参数长度超过限制"),
CODE_004(156004,"日期格式错误"),
CODE_005(156005,"时间区间不合法"),
CODE_006(156006,"远端服务不可用"),
CODE_007(156007,"无效的签名"),
CODE_008(156008,"不合法的 AppID ,请开发者检查 AppID 的正确性,避免异常字符,注意大小写"),
CODE_009(156009,"请求来源地址不合法"),
CODE_010(156010,"没有相应的用户"),
CODE_011(156011,"不合法的文件类型"),
CODE_012(156012,"不合法的文件大小"),
CODE_013(156013,"上传文件缺失"),
CODE_014(156014,"不支持的图片格式"),
CODE_015(156015,"无效的url"),
CODE_016(156016,"设备编号不合法"),
CODE_017(356017,"设备编号不存在"),
CODE_018(356018,"设备当前在线状态为“离线”,无法使用!"),
CODE_019(356019,"设备当前投放状态为“待重投”,无法使用!"),
CODE_020(356020,"设备当前投放状态为“已报废”,无法使用!"),
CODE_021(356021,"设备当前投放状态为“初始化”,无法使用!"),
CODE_022(356022,"此设备当前启用状态为“禁用”,无法使用!"),
CODE_023(356023,"API 调用太频繁,请稍候再试"),
CODE_024(256024,"用户未授权该 api"),
CODE_025(156025,"解析 JSON/XML 内容错误"),
CODE_FAILUER(-1,"失败");
private int value;
private String desc;
ResultCodeEnum(int value, String desc) {
this.value = value;
this.desc = desc;
}
public int getValue() {
return value;
}
public String getDesc() {
return desc;
}
}
package com.mortals.xhx.common.code;
import com.mortals.framework.ap.SysConstains;
import com.mortals.framework.common.IBaseEnum;
import java.util.LinkedHashMap;
import java.util.Map;
/**
* Role type
* @author
*
*/
public enum RoleType implements IBaseEnum {
/** 系统内置角色 */
BUILT_IN(0, "系统内置角色", SysConstains.STYLE_DEFAULT),
/** 默认系统角色 */
SYSTEM_DEFAULT(1, "默认系统角色", SysConstains.STYLE_INFO),
/** 企业用户角色 */
CUSTOMER_DEFAULT(2, "企业用户角色", SysConstains.STYLE_PRIMARY),
/** 普通角色 */
NORMAL(4, "普通角色 ", SysConstains.STYLE_SUCCESS),;
//0:系统内置角色(不可删除),1:默认系统角色,2:默认新闻用户角色,3:默认资讯用户角色,4:普通角色,默认4
private int value;
private String desc;
private String style;
RoleType(int value, String desc, String style) {
this.value = value;
this.desc = desc;
this.style = style;
}
@Override
public int getValue() {
return this.value;
}
public String getDesc() {
return desc;
}
public String getStyle()
{
return style;
}
public static RoleType getByValue(int value) {
for (RoleType examStatus : RoleType.values()) {
if (examStatus.getValue() == value) {
return examStatus;
}
}
return null;
}
/**
* 获取Map集合
* @param eItem 不包含项
* @return
*/
public static Map<String,String> getEnumMap(int... eItem) {
Map<String,String> resultMap= new LinkedHashMap<String,String>();
for (RoleType item : RoleType.values()) {
try{
boolean hasE = false;
for (int e : eItem){
if(item.getValue()==e){
hasE = true;
break;
}
}
if(!hasE){
resultMap.put(item.getValue()+"", item.getDesc());
}
}catch(Exception ex){
}
}
return resultMap;
}
}
package com.mortals.xhx.common.code;
import com.mortals.framework.ap.SysConstains;
import com.mortals.framework.common.IBaseEnum;
import java.util.LinkedHashMap;
import java.util.Map;
/**
* Resource type
* @author
*
*/
public enum SourceType implements IBaseEnum {
/** 系统资源 */
SYSTEM(0, "系统资源", SysConstains.STYLE_DEFAULT),
/** 开放资源 */
OPEN(1, "开放资源", SysConstains.STYLE_PRIMARY);
private int value;
private String desc;
private String style;
SourceType(int value, String desc, String style) {
this.value = value;
this.desc = desc;
this.style = style;
}
@Override
public int getValue() {
return this.value;
}
public String getDesc() {
return desc;
}
public String getStyle()
{
return style;
}
public static SourceType getByValue(int value) {
for (SourceType examStatus : SourceType.values()) {
if (examStatus.getValue() == value) {
return examStatus;
}
}
return null;
}
/**
* 获取Map集合
* @param eItem 不包含项
* @return
*/
public static Map<String,String> getEnumMap(int... eItem) {
Map<String,String> resultMap= new LinkedHashMap<String,String>();
for (SourceType item : SourceType.values()) {
try{
boolean hasE = false;
for (int e : eItem){
if(item.getValue()==e){
hasE = true;
break;
}
}
if(!hasE){
resultMap.put(item.getValue()+"", item.getDesc());
}
}catch(Exception ex){
}
}
return resultMap;
}
}
package com.mortals.xhx.common.code;
import java.io.Serializable;
/**
* Process start variables
*
* @author: zxfei
* @date: 2021/8/26 9:59
*/
public enum StartVariableEnum implements Serializable {
USER("user", "人员信息"),
UDEPT("udept", "人员部门变量"),
UCOMPANY("ucompany", "人员公司变量"),
COMPANY_ROLE("company", "公司角色层级审批领导"),
MATRIX_COMPANY_ROLE("mcompany", "矩阵公司角色"),
MATRIX_DEPT_ROLE("mdept", "矩阵部门角色"),
FORM("form", "表单"),
LCDB("lcdb", "流程底表"),
LINE("line", "汇报线");
private String code;
private String msg;
StartVariableEnum(String code, String msg) {
this.msg = msg;
this.code = code;
}
/**
* 通过code获取Msg
*
* @param code
* @return
* @Description:
*/
public static String getEnumMsgByCode(String code) {
for (StartVariableEnum e : StartVariableEnum.values()) {
if (e.getCode().equals(code)) {
return e.msg;
}
}
return "";
}
public String getMsg() {
return msg;
}
public void setMsg(String msg) {
this.msg = msg;
}
public String getCode() {
return code;
}
public void setCode(String code) {
this.code = code;
}
}
package com.mortals.xhx.common.code;
import java.util.LinkedHashMap;
import java.util.Map;
/**
* @author
* @create 2018/01/12
*/
public enum TaskExcuteStatusEnum {
WAIT_RUN(0, "待执行"),
RUNNING(1, "执行中"),
SUCCESS_RUN(2, "执行成功"),
FAIL_RUN(3, "执行失败"),
CANCEL(4, "取消");
private int value;
private String desc;
TaskExcuteStatusEnum(int value, String desc) {
this.value = value;
this.desc = desc;
}
public int getValue() {
return this.value;
}
public String getDesc() {
return this.desc;
}
public static TaskExcuteStatusEnum getByValue(int value) {
for (TaskExcuteStatusEnum taskExcuteStatusEnum : TaskExcuteStatusEnum.values()) {
if (taskExcuteStatusEnum.getValue() == value) {
return taskExcuteStatusEnum;
}
}
return null;
}
/**
* 获取Map集合
* @param eItem 不包含项
* @return
*/
public static Map<String,String> getEnumMap(int... eItem) {
Map<String,String> resultMap= new LinkedHashMap<String,String>();
for (TaskExcuteStatusEnum item : TaskExcuteStatusEnum.values()) {
try{
boolean hasE = false;
for (int e : eItem){
if(item.getValue()==e){
hasE = true;
break;
}
}
if(!hasE){
resultMap.put(item.getValue()+"", item.getDesc());
}
}catch(Exception ex){
}
}
return resultMap;
}
}
package com.mortals.xhx.common.code;
import java.util.LinkedHashMap;
import java.util.Map;
public enum TaskExcuteStrategyEnum{
/** 按日 */
DAY(1, "按日"),
/** 按周 */
WEEK(2, "按周"),
/** 按月 */
MONTH(3, "按月"),
/** 按间隔时间 */
INTERVAL(4, "按间隔时间");
private int value;
private String desc;
TaskExcuteStrategyEnum(int value, String desc) {
this.value = value;
this.desc = desc;
}
public int getValue() {
return this.value;
}
public String getDesc() {
return desc;
}
public static TaskExcuteStrategyEnum getByValue(int value) {
for (TaskExcuteStrategyEnum taskExcuteStrategy : TaskExcuteStrategyEnum.values()) {
if (taskExcuteStrategy.getValue() == value) {
return taskExcuteStrategy;
}
}
return null;
}
/**
* 获取Map集合
* @param eItem 不包含项
* @return
*/
public static Map<String,String> getEnumMap(int... eItem) {
Map<String,String> resultMap= new LinkedHashMap<String,String>();
for (TaskExcuteStrategyEnum item : TaskExcuteStrategyEnum.values()) {
try{
boolean hasE = false;
for (int e : eItem){
if(item.getValue()==e){
hasE = true;
break;
}
}
if(!hasE){
resultMap.put(item.getValue()+"", item.getDesc());
}
}catch(Exception ex){
}
}
return resultMap;
}
}
package com.mortals.xhx.common.code;
import java.util.LinkedHashMap;
import java.util.Map;
public enum TaskInterimExcuteStatusEnum{
/** 未启用 */
UNUSE(0, "未启用"),
/** 立即执行并保留 */
IMMEDIATE_EXECUTION(1, "立即执行并保留"),
/** 立即执行并删除 */
IMMEDIATE_EXECUTION_BEFORE_DELETE(2, "立即执行并删除");
private int value;
private String desc;
TaskInterimExcuteStatusEnum(int value, String desc) {
this.value = value;
this.desc = desc;
}
public int getValue() {
return this.value;
}
public String getDesc() {
return desc;
}
public static TaskInterimExcuteStatusEnum getByValue(int value) {
for (TaskInterimExcuteStatusEnum taskInterimExcuteStatus : TaskInterimExcuteStatusEnum.values()) {
if (taskInterimExcuteStatus.getValue() == value) {
return taskInterimExcuteStatus;
}
}
return null;
}
/**
* 获取Map集合
* @param eItem 不包含项
* @return
*/
public static Map<String,String> getEnumMap(int... eItem) {
Map<String,String> resultMap= new LinkedHashMap<String,String>();
for (TaskInterimExcuteStatusEnum item : TaskInterimExcuteStatusEnum.values()) {
try{
boolean hasE = false;
for (int e : eItem){
if(item.getValue()==e){
hasE = true;
break;
}
}
if(!hasE){
resultMap.put(item.getValue()+"", item.getDesc());
}
}catch(Exception ex){
}
}
return resultMap;
}
}
package com.mortals.xhx.common.code;
import com.mortals.framework.common.IBaseEnum;
/**
* Upload file type enum
*
* @author pengziyuan
*/
public enum UploadFileType implements IBaseEnum {
/** 表格 */
EXCEL(1, "表格", 1024 * 1024 * 100),
/** 图片 */
IMG(2, "图片", 1024 * 1024 * 10),
/** 压缩文件 */
ZIP(3, "压缩文件", 1024 * 1024 * 100),
/** PDF */
PDF(4, "PDF", 1024 * 1024 * 100),
/** 其他 */
OTHER(99, "其他", 1024 * 1024 * 100);
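// maxSize is expressed in bytes: roughly 100 MB for EXCEL/ZIP/PDF/OTHER and 10 MB for IMG.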
private int value;
private String desc;
private int maxSize;
UploadFileType(int value, String desc, int maxSize) {
this.value = value;
this.desc = desc;
this.maxSize = maxSize;
}
@Override
public int getValue() {
return this.value;
}
@Override
public String getDesc() {
return desc;
}
public int getMaxSize() {
return maxSize;
}
public static UploadFileType getFileType(String extension) {
if ("xls".equalsIgnoreCase(extension) || "xlsx".equalsIgnoreCase(extension)) {
return EXCEL;
}
if ("jpg".equalsIgnoreCase(extension) || "jpeg".equalsIgnoreCase(extension) || "png".equalsIgnoreCase(extension)
|| "gif".equalsIgnoreCase(extension)) {
return IMG;
}
if ("zip".equalsIgnoreCase(extension)) {
return ZIP;
}
if ("pdf".equalsIgnoreCase(extension)) {
return PDF;
}
return OTHER;
}
}
package com.mortals.xhx.common.code;
import com.mortals.framework.ap.SysConstains;
import com.mortals.framework.common.IBaseEnum;
import java.util.LinkedHashMap;
import java.util.Map;
public enum ValidCodeType implements IBaseEnum{
IMAGE(0, "图片校验", SysConstains.STYLE_DEFAULT),
MOBILE(1, "手机校验", SysConstains.STYLE_DEFAULT),
EMAIL(2, "邮箱校验", SysConstains.STYLE_DEFAULT);
private int value;
private String desc;
private String style;
ValidCodeType(int value, String desc, String style) {
this.value = value;
this.desc = desc;
this.style = style;
}
@Override
public int getValue() {
return this.value;
}
public String getDesc() {
return desc;
}
public String getStyle()
{
return style;
}
public static ValidCodeType getByValue(int value) {
for (ValidCodeType validCodeType : ValidCodeType.values()) {
if (validCodeType.getValue() == value) {
return validCodeType;
}
}
return null;
}
/**
* 获取Map集合
* @param eItem 不包含项
* @return
*/
public static Map<String,String> getEnumMap(int... eItem) {
Map<String,String> resultMap= new LinkedHashMap<String,String>();
for (ValidCodeType item : ValidCodeType.values()) {
try{
boolean hasE = false;
for (int e : eItem){
if(item.getValue()==e){
hasE = true;
break;
}
}
if(!hasE){
resultMap.put(item.getValue()+"", item.getDesc());
}
}catch(Exception ex){
}
}
return resultMap;
}
}
package com.mortals.xhx.common.code;
import com.mortals.framework.common.IBaseEnum;
import java.util.LinkedHashMap;
import java.util.Map;
public enum YesNoEnum implements IBaseEnum{
NO(0, "否"),
YES(1, "是");
private int value;
private String desc;
YesNoEnum(int value, String desc) {
this.value = value;
this.desc = desc;
}
@Override
public int getValue() {
return this.value;
}
public String getDesc() {
return desc;
}
public static YesNoEnum getByValue(int value) {
for (YesNoEnum YesNoEnum : YesNoEnum.values()) {
if (YesNoEnum.getValue() == value) {
return YesNoEnum;
}
}
return null;
}
/**
* 获取Map集合
* @param eItem 不包含项
* @return
*/
public static Map<String,String> getEnumMap(int... eItem) {
Map<String,String> resultMap= new LinkedHashMap<String,String>();
for (YesNoEnum item : YesNoEnum.values()) {
try{
boolean hasE = false;
for (int e : eItem){
if(item.getValue()==e){
hasE = true;
break;
}
}
if(!hasE){
resultMap.put(item.getValue()+"", item.getDesc());
}
}catch(Exception ex){
}
}
return resultMap;
}
}
package com.mortals.xhx.common.keys;
import java.math.BigDecimal;
/**
* Business parameter holder; every member is a plain public field that can be read and written directly from outside.
*
* @author karlhoo
*/
public final class BusizParams {
public static int WORK_ID;
/**
* 无限常量:-1
*/
public static int NO_LIMIT = -1;
/**
* 一天的分钟数
*/
public static long DAY_MINUTES = 24 * 60;
/**
* 现金类押金支付金额默认折扣
*/
public static BigDecimal DEFAULT_CASH_DEPOSIT_DISCOUNT = new BigDecimal("1.0");
/**
* 押金默认计费上限费用
*/
public static Long DEFAULT_BILL_LIMIT_FEE = 20000L;
/**
* 现金押金默认计费上限费用
*/
public static Long DEFAULT_CASH_BILL_LIMIT_FEE = 8000L;
/**
* 用户冻结的默认延时
*/
public static Integer DEFAULT_CUSTOMER_FREEZE_DELAY = 121;
/**
* 用户冻结的默认时长
*/
public static Integer DEFAULT_CUSTOMER_FREEZE_LENGHT = 1440;
/**
* 系统参数:按摩椅下发命令间隔时间(单位:秒,默认值:10秒)
*/
public static String SYS_PARAM_CHAIR_ISCMDRSP_INTERVAL = "chair_iscmdrsp_interval";
/**
* 系统参数:闪充待支付超时时间(单位:秒,默认值:600秒)
*/
public static String SYS_PARAM_WIRE_WAITLOAN_TIMEOUT = "wire_waitloan_timeout";
/**
* 系统参数:filetoken过期时间(单位:天,默认值:1天)
*/
public static String SYS_PARAM_PRINT_FILETOKEN_TIMEOUT = "print_filetoken_timeout";
/**
* 系统参数:打印机保存文件过期时间(单位:月,默认值:3)
*/
public static String SYS_PARAM_PRINT_FILE_TIMEOUT = "print_orderfile_timeout";
/**
* 系统参数:打印机保存文件存储使用方式(0:本地存储,1:fastdfs存储)
*/
public static String SYS_PARAM_PRINT_FILE_STORE_MODE = "print_file_store_mode";
/**
* 系统参数:word文件转换方式
*/
public static String SYS_PARAM_PRINT_WORD_CONVERT_METHOD = "print_word_convert_method";
/**
* 系统参数:文件访问域
*/
public static String SYS_PARAM_FILE_DOMAIN = "file_domain";
/**
* 流程参数
*/
public static String SYS_PARAM_FLOW_KEY = "oneThingKey";
}
package com.mortals.xhx.common.keys;
/**
* Names of loggers used to cache data when a critical operation fails
*
* @author karlhoo
*/
public interface CacheLoggers {
}
package com.mortals.xhx.common.keys;
/**
* Redis cache parameter keys
*
* @author karlhoo
*/
public final class RedisCacheKeys {
/**
* @return the Redis key for the uploaded-file access URL
*/
public static String getFileUrlKey() {
return "params:file:upload";
}
public static String getCoopsDistributedLockKey() {
return "coops:distributed:lock";
}
}
package com.mortals.xhx.common.keys;
/**
* Service invocation response constants
*
* @author karlhoo
*/
public interface ResponseConstants {
/**
* Key of the result code in a response
*/
String KEY_RESULT_CODE = "code";
/**
* Key of the result message in a response
*/
String KEY_RESULT_MSG = "msg";
/**
* Result code value: success
*/
int RESULT_VALUE_SUCCESS = 1;
/**
* Result code value: failure
*/
int RESULT_VALUE_FAILURE = -1;
}
package com.mortals.xhx.common.model;
import com.mortals.xhx.queue.TbQueueMsgHeaders;
import java.util.HashMap;
import java.util.Map;
/**
* Default message headers
*
* @author: zxfei
* @date: 2021/11/22 11:14
*/
public class DefaultTbQueueMsgHeaders implements TbQueueMsgHeaders {
protected final Map<String, byte[]> data = new HashMap<>();
public DefaultTbQueueMsgHeaders() {
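// Pre-populate the headers every message is expected to carry:
// an empty topic and QoS 0 ("at most once" in MQTT terms).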
data.put(MessageHeader.TOPIC, new byte[]{});
data.put(MessageHeader.QOS, new byte[]{(byte) 0});
}
@Override
public byte[] put(String key, byte[] value) {
return data.put(key, value);
}
@Override
public byte[] get(String key) {
return data.get(key);
}
@Override
public Map<String, byte[]> getData() {
return data;
}
}
package com.mortals.xhx.common.model;
/**
* Message header keys
*
* @author: zxfei
* @date: 2021/11/22 11:15
*/
public class MessageHeader {
/**
* Client id
*/
public static final String CLIENTID = "clientId";
public static final String MESSAGEPROTOCOL = "protocol";
public static final String TIMESTAMP = "timestamp";
public static final String MESSAGETYPE = "messageType";
/**
* topic
*/
public static final String TOPIC = "topic";
public static final String QOS = "qos";
public static final String RETAIN = "retain";
public static final String WILL = "will";
public static final String DUP = "dup";
public static final String productKey = "productKey";
}
package com.mortals.xhx.queue;
import com.alibaba.fastjson.JSONObject;
import lombok.Getter;
import lombok.extern.apachecommons.CommonsLog;
import org.slf4j.Logger;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
/**
* Server-side message consuming service
*
* @author: zxfei
* @date: 2021/11/22 11:31
*/
@CommonsLog
public class ConsumerService {
private long pollDuration;
protected volatile ExecutorService consumersExecutor;
@Getter
private TbQueueConsumer<TbQueueMsg> mainConsumer;
private String currentIp;
protected volatile boolean stopped = false;
public void init(TbQueueConsumer<TbQueueMsg> mainConsumer) {
this.consumersExecutor = Executors.newCachedThreadPool();
this.mainConsumer = mainConsumer;
launchMainConsumers();
this.mainConsumer.subscribe();
}
public ConsumerService(long pollDuration, String currentIp) {
this.pollDuration = pollDuration;
this.currentIp = currentIp;
}
/**
* Main consumer loop
*/
protected void launchMainConsumers() {
consumersExecutor.submit(() -> {
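// Long-running poll loop: keeps pulling batches until destroy() flips the stopped flag.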
while (!stopped) {
try {
//todo
List<TbQueueMsg> poll = mainConsumer.poll(pollDuration);
List<TbQueueMsg> msgs = poll;
if (msgs.isEmpty()) {
continue;
}
for (TbQueueMsg item : msgs) {
//todo
// Message innerMsg = new Message();
// TbQueueMsgHeaders headMap = item.getHeaders();
// int messageProtocl = headMap.get(MessageHeader.MESSAGEPROTOCOL)[0];
// Map<String, Object> headers = new HashMap<>();
// innerMsg.setPayload(item.getData());
// headers.put(MessageHeader.MESSAGEPROTOCOL, messageProtocl);
// innerMsg.setStoreTime(DateUtils.getCurrDate().getTime());
// if(messageProtocl== MsgTypeEnum.MSG_SYSTEM.getValue()){
// innerMsg.setHeaders(headers);
// sendSysMessage2Cluster(innerMsg);
// }else{
// String clientId = MixAll.convertByteS2Str(headMap.get(MessageHeader.CLIENTID));
// int messageType = headMap.get(MessageHeader.MESSAGETYPE)[0];
// String topic = MixAll.convertByteS2Str(headMap.get(MessageHeader.TOPIC));
// int qos = headMap.get(MessageHeader.QOS)[0];
//
// innerMsg.setClientId(clientId);
// innerMsg.setType(Message.Type.valueOf(messageType));
// headers.put(MessageHeader.TOPIC, topic);
// headers.put(MessageHeader.QOS, qos);
// headers.put(MessageHeader.RETAIN, false);
// headers.put(MessageHeader.DUP, false);
// innerMsg.setHeaders(headers);
// sendContrlMessage2Cluster(innerMsg);
// }
}
} catch (Exception e) {
log.error("Exception", e);
}
}
log.info("Queue Consumer stopped.");
});
}
public void destroy() {
if (!stopped) {
stopMainConsumers();
}
stopped = true;
if (consumersExecutor != null) {
consumersExecutor.shutdownNow();
}
}
protected void stopMainConsumers() {
if (mainConsumer != null) {
mainConsumer.unsubscribe();
}
}
}
package com.mortals.xhx.queue;
import com.mortals.xhx.queue.processing.AbstractConsumerService;
import com.mortals.xhx.queue.provider.TbCoreQueueFactory;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.context.event.ApplicationReadyEvent;
import org.springframework.context.ApplicationEvent;
import org.springframework.context.event.EventListener;
import org.springframework.core.annotation.Order;
import org.springframework.stereotype.Service;
import javax.annotation.PostConstruct;
import javax.annotation.PreDestroy;
import java.util.List;
@Service
@Slf4j
public class DefaultTbCoreConsumerService extends AbstractConsumerService<TbQueueMsg> implements TbCoreConsumerService {
@Value("${queue.core.poll-interval}")
private long pollDuration;
@Value("${queue.core.pack-processing-timeout}")
private long packProcessingTimeout;
private TbQueueConsumer<TbQueueMsg> mainConsumer;
public DefaultTbCoreConsumerService(TbCoreQueueFactory tbCoreQueueFactory) {
this.mainConsumer = tbCoreQueueFactory.createMsgConsumer();
}
@PostConstruct
public void init() {
log.info("初始化消费服务线程");
super.init("core-consumer");
// super.init("tb-core-consumer", "tb-core-notifications-consumer");
}
@PreDestroy
public void destroy() {
super.destroy();
}
@EventListener(ApplicationReadyEvent.class)
@Order(value = 2)
public void onApplicationEvent(ApplicationReadyEvent event) {
super.onApplicationEvent(event);
}
@Override
protected void launchMainConsumers() {
log.info("启动消费线程!");
consumersExecutor.submit(() -> {
while (!stopped) {
try {
List<TbQueueMsg> msgs = mainConsumer.poll(pollDuration);
if (msgs.isEmpty()) {
continue;
}
for (TbQueueMsg item:msgs){
log.info("msg:"+item.toString());
}
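// Acknowledge the whole batch only after every message in it has been handled.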
mainConsumer.commit();
} catch (Exception e) {
if (!stopped) {
log.warn("Failed to obtain messages from queue.", e);
try {
Thread.sleep(pollDuration);
} catch (InterruptedException e2) {
log.trace("Failed to wait until the server has capacity to handle new requests", e2);
}
}
}
}
log.info(" Core Consumer stopped.");
});
}
@Override
public void onApplicationEvent(ApplicationEvent applicationEvent) {
}
private static class PendingMsgHolder {
// @Getter
// @Setter
// private volatile ToCoreMsg toCoreMsg;
}
@Override
protected void stopMainConsumers() {
if (mainConsumer != null) {
mainConsumer.unsubscribe();
}
}
}
package com.mortals.xhx.queue;
import com.mortals.xhx.common.model.DefaultTbQueueMsgHeaders;
import lombok.AllArgsConstructor;
import lombok.Data;
import java.util.UUID;
/**
* Default queue message
*
* @author: zxfei
* @date: 2021/11/22 10:59
*/
@Data
@AllArgsConstructor
public class DefaultTbQueueMsg implements TbQueueMsg {
private final UUID key;
private final byte[] data;
private final TbQueueMsgHeaders headers;
public DefaultTbQueueMsg(TbQueueMsg msg) {
this.key = msg.getKey();
this.data = msg.getData();
TbQueueMsgHeaders headers = new DefaultTbQueueMsgHeaders();
msg.getHeaders().getData().forEach(headers::put);
this.headers = headers;
}
}
package com.mortals.xhx.queue;
import com.mortals.xhx.common.BrokerConfig;
/**
* Message queue factory; creates producers and consumers for the configured MQ type (kafka, rabbitMQ, memory)
*
* @author: zxfei
* @date: 2021/11/22 10:57
*/
public interface MessageQueueFactory {
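// Illustrative wiring (assumed usage, not defined in this module):
//   TbQueueProducer<TbQueueMsg> producer = factory.createMsgProducer(brokerConfig);
//   TbQueueConsumer<TbQueueMsg> consumer = factory.createMsgConsumer(brokerConfig);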
/**
* Create a message producer
* @return
*/
TbQueueProducer<TbQueueMsg> createMsgProducer(BrokerConfig brokerConfig);
/**
* Create a message consumer
* @return
*/
TbQueueConsumer<TbQueueMsg> createMsgConsumer(BrokerConfig brokerConfig);
}
package com.mortals.xhx.queue;
import org.springframework.context.ApplicationListener;
public interface TbCoreConsumerService extends ApplicationListener {
}
package com.mortals.xhx.queue;
/**
* Queue send callback
*
* @author: zxfei
* @date: 2021/11/22 10:57
*/
public interface TbQueueCallback {
void onSuccess(TbQueueMsgMetadata metadata);
void onFailure(Throwable t);
}
package com.mortals.xhx.queue;
import java.util.List;
import java.util.Set;
/**
* Queue message consumer interface
*
* @author: zxfei
* @date: 2021/11/22 10:57
*/
public interface TbQueueConsumer<T extends TbQueueMsg> {
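// Typical lifecycle: subscribe() or subscribe(partitions) -> poll(durationInMillis) in a loop -> commit() -> unsubscribe().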
/**
* Get the subscribed topic
* @return
*/
String getTopic();
/**
* Subscribe to the whole topic
*/
void subscribe();
/**
* Subscribe to specific partitions
* @param partitions
*/
void subscribe(Set<TopicPartitionInfo> partitions);
/**
* Unsubscribe
*/
void unsubscribe();
/**
* Poll messages, waiting up to the given duration
* @param durationInMillis
* @return
*/
List<T> poll(long durationInMillis);
/**
* Commit the consumed offsets
*/
void commit();
}
package com.mortals.xhx.queue;
import lombok.Data;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;
@Data
@Component
public class TbQueueCoreSettings {
@Value("${queue.core.topic}")
private String topic;
@Value("${queue.core.partitions}")
private int partitions;
}
package com.mortals.xhx.queue;
import java.util.UUID;
/**
* Queue message
*
* @author: zxfei
* @date: 2021/11/22 10:56
*/
public interface TbQueueMsg {
UUID getKey();
TbQueueMsgHeaders getHeaders();
byte[] getData();
}
package com.mortals.xhx.queue;
public interface TbQueueMsgDecoder<T extends TbQueueMsg> {
T decode(TbQueueMsg msg);
}
package com.mortals.xhx.queue;
import java.util.Map;
/**
* Message headers container
*
* @author: zxfei
* @date: 2021/11/22 10:56
*/
public interface TbQueueMsgHeaders {
byte[] put(String key, byte[] value);
byte[] get(String key);
Map<String, byte[]> getData();
}
package com.mortals.xhx.queue;
/**
* Queue message metadata
*
* @author: zxfei
* @date: 2021/11/22 10:56
*/
public interface TbQueueMsgMetadata {
}
package com.mortals.xhx.queue;
/**
* Queue message producer
*
* @author: zxfei
* @date: 2021/11/22 10:55
*/
public interface TbQueueProducer<T extends TbQueueMsg> {
void init();
String getDefaultTopic();
// Send a message to the given topic/partition; callback may be null and is invoked on completion
void send(TopicPartitionInfo tpi, T msg, TbQueueCallback callback);
void stop();
}
package com.mortals.xhx.queue;
import lombok.Builder;
import lombok.Data;
import java.util.Objects;
import java.util.Optional;
@Data
public class TopicPartitionInfo {
private String topic;
private Integer partition;
private String fullTopicName;
@Builder
public TopicPartitionInfo(String topic, Integer partition) {
this.topic = topic;
this.partition = partition;
String tmp = topic;
if (partition != null) {
tmp += "." + partition;
}
this.fullTopicName = tmp;
}
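// e.g. a topic "core.msg" (illustrative name) with partition 3 yields fullTopicName "core.msg.3"; with a null partition it stays "core.msg".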
public TopicPartitionInfo newByTopic(String topic) {
return new TopicPartitionInfo(topic, this.partition);
}
public String getTopic() {
return topic;
}
public Optional<Integer> getPartition() {
return Optional.ofNullable(partition);
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
TopicPartitionInfo that = (TopicPartitionInfo) o;
return topic.equals(that.topic) &&
Objects.equals(partition, that.partition) &&
fullTopicName.equals(that.fullTopicName);
}
@Override
public int hashCode() {
return Objects.hash(fullTopicName);
}
}
package com.mortals.xhx.queue.kafka;
import com.mortals.xhx.queue.TbQueueConsumer;
import com.mortals.xhx.queue.TbQueueMsg;
import com.mortals.xhx.queue.TopicPartitionInfo;
import lombok.Getter;
import lombok.extern.slf4j.Slf4j;
import java.io.IOException;
import java.util.*;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.ReentrantLock;
import java.util.stream.Collectors;
import static java.util.Collections.emptyList;
/**
* Abstract queue consumer template
*
* @author: zxfei
* @date: 2021/11/22 11:12
*/
@Slf4j
public abstract class AbstractTbQueueConsumerTemplate<R, T extends TbQueueMsg> implements TbQueueConsumer<T> {
private volatile boolean subscribed;
protected volatile boolean stopped = false;
protected volatile Set<TopicPartitionInfo> partitions;
protected final ReentrantLock consumerLock = new ReentrantLock();
final Queue<Set<TopicPartitionInfo>> subscribeQueue = new ConcurrentLinkedQueue<>();
@Getter
private final String topic;
public AbstractTbQueueConsumerTemplate(String topic) {
this.topic = topic;
}
@Override
public void subscribe() {
log.info("enqueue topic subscribe {} ", topic);
if (stopped) {
log.error("trying subscribe, but consumer stopped for topic {}", topic);
return;
}
subscribeQueue.add(Collections.singleton(new TopicPartitionInfo(topic, null)));
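// A null partition means "the whole topic"; the actual (re)subscription is applied lazily inside poll().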
}
@Override
public void subscribe(Set<TopicPartitionInfo> partitions) {
log.info("enqueue topics subscribe {} ", partitions);
if (stopped) {
log.error("trying subscribe, but consumer stopped for topic {}", topic);
return;
}
subscribeQueue.add(partitions);
}
@Override
public List<T> poll(long durationInMillis) {
List<R> records;
long startNanos = System.nanoTime();
if (stopped) {
return errorAndReturnEmpty();
}
if (!subscribed && partitions == null && subscribeQueue.isEmpty()) {
return sleepAndReturnEmpty(startNanos, durationInMillis);
}
if (consumerLock.isLocked()) {
log.error("poll. consumerLock is locked. will wait with no timeout. it looks like a race conditions or deadlock", new RuntimeException("stacktrace"));
}
consumerLock.lock();
try {
// Drain pending subscription requests; only the latest partition set wins
while (!subscribeQueue.isEmpty()) {
subscribed = false;
partitions = subscribeQueue.poll();
}
if (!subscribed) {
List<String> topicNames = partitions.stream().map(TopicPartitionInfo::getFullTopicName).collect(Collectors.toList());
doSubscribe(topicNames);
subscribed = true;
}
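// Only poll the underlying client once at least one partition is assigned.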
records = partitions.isEmpty() ? emptyList() : doPoll(durationInMillis);
} finally {
consumerLock.unlock();
}
if (records.isEmpty()) {
return sleepAndReturnEmpty(startNanos, durationInMillis);
}
return decodeRecords(records);
}
List<T> decodeRecords(List<R> records) {
List<T> result = new ArrayList<>(records.size());
records.forEach(record -> {
try {
if (record != null) {
result.add(decode(record));
}
} catch (IOException e) {
log.error("Failed decode record: [{}]", record);
throw new RuntimeException("Failed to decode record: ", e);
}
});
return result;
}
List<T> errorAndReturnEmpty() {
log.error("poll invoked but consumer stopped for topic:" + topic, new RuntimeException("stacktrace"));
return emptyList();
}
List<T> sleepAndReturnEmpty(final long startNanos, final long durationInMillis) {
long durationNanos = TimeUnit.MILLISECONDS.toNanos(durationInMillis);
long spentNanos = System.nanoTime() - startNanos;
if (spentNanos < durationNanos) {
try {
Thread.sleep(Math.max(TimeUnit.NANOSECONDS.toMillis(durationNanos - spentNanos), 1));
} catch (InterruptedException e) {
if (!stopped) {
log.error("Failed to wait", e);
}
}
}
return emptyList();
}
@Override
public void commit() {
if (consumerLock.isLocked()) {
log.error("commit. consumerLock is locked. will wait with no timeout. it looks like a race conditions or deadlock", new RuntimeException("stacktrace"));
}
consumerLock.lock();
try {
doCommit();
} finally {
consumerLock.unlock();
}
}
@Override
public void unsubscribe() {
log.info("unsubscribe topic and stop consumer {}", getTopic());
stopped = true;
consumerLock.lock();
try {
doUnsubscribe();
} finally {
consumerLock.unlock();
}
}
abstract protected List<R> doPoll(long durationInMillis);
abstract protected T decode(R record) throws IOException;
abstract protected void doSubscribe(List<String> topicNames);
abstract protected void doCommit();
abstract protected void doUnsubscribe();
}
package com.mortals.xhx.queue.kafka;
import com.mortals.xhx.common.model.DefaultTbQueueMsgHeaders;
import com.mortals.xhx.queue.TbQueueMsg;
import com.mortals.xhx.queue.TbQueueMsgHeaders;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import java.util.UUID;
public class KafkaTbQueueMsg implements TbQueueMsg {
private final UUID key;
private final TbQueueMsgHeaders headers;
private final byte[] data;
public KafkaTbQueueMsg(ConsumerRecord<String, byte[]> record) {
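// The record key is expected to be a UUID string (as written by the producer template); UUID.fromString will throw otherwise.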
this.key = UUID.fromString(record.key());
TbQueueMsgHeaders headers = new DefaultTbQueueMsgHeaders();
record.headers().forEach(header -> {
headers.put(header.key(), header.value());
});
this.headers = headers;
this.data = record.value();
}
@Override
public UUID getKey() {
return key;
}
@Override
public TbQueueMsgHeaders getHeaders() {
return headers;
}
@Override
public byte[] getData() {
return data;
}
}
package com.mortals.xhx.queue.kafka;
import com.mortals.xhx.queue.TbQueueMsgMetadata;
import lombok.AllArgsConstructor;
import lombok.Data;
import org.apache.kafka.clients.producer.RecordMetadata;
/**
* Kafka queue message metadata
*
* @author: zxfei
* @date: 2021/11/22 14:40
*/
@Data
@AllArgsConstructor
public class KafkaTbQueueMsgMetadata implements TbQueueMsgMetadata {
private RecordMetadata metadata;
}
package com.mortals.xhx.queue.kafka;
import com.mortals.xhx.queue.TbQueueMsg;
import lombok.Builder;
import lombok.extern.apachecommons.CommonsLog;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.slf4j.Logger;
import java.io.IOException;
import java.time.Duration;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Properties;
/**
* Kafka consumer template
*
* @author: zxfei
* @date: 2021/11/22 11:21
*/
@Slf4j
public class TbKafkaConsumerTemplate<T extends TbQueueMsg> extends AbstractTbQueueConsumerTemplate<ConsumerRecord<String, byte[]>, T> {
private final KafkaConsumer<String, byte[]> consumer;
private final String groupId;
private final TbKafkaDecoder<T> decoder;
@Builder
private TbKafkaConsumerTemplate(TbKafkaSettings settings, String clientId, TbKafkaDecoder<T> decoder, String groupId, String topic) {
//default topic
super(topic);
Properties props = settings.toConsumerProps();
//required when there are multiple input sources
//props.put(ConsumerConfig.CLIENT_ID_CONFIG, clientId);
if (groupId != null) {
props.put(ConsumerConfig.GROUP_ID_CONFIG, groupId);
}
this.groupId = groupId;
this.consumer = new KafkaConsumer<>(props);
this.decoder = decoder;
}
@Override
protected void doSubscribe(List<String> topicNames) {
if (!topicNames.isEmpty()) {
// topicNames.forEach(admin::createTopicIfNotExists);
log.info("subscribe topics {}", topicNames);
consumer.subscribe(topicNames);
} else {
log.info("unsubscribe due to empty topic list");
consumer.unsubscribe();
}
}
@Override
protected List<ConsumerRecord<String, byte[]>> doPoll(long durationInMillis) {
ConsumerRecords<String, byte[]> records = consumer.poll(Duration.ofMillis(durationInMillis));
records.forEach(record -> {
System.out.printf("topic = %s ,partition = %d,offset = %d, key = %s, value = %s%n", record.topic(), record.partition(),
record.offset(), record.key(), record.value());
});
if (records.isEmpty()) {
return Collections.emptyList();
} else {
List<ConsumerRecord<String, byte[]>> recordList = new ArrayList<>(256);
records.forEach(recordList::add);
return recordList;
}
}
@Override
public T decode(ConsumerRecord<String, byte[]> record) throws IOException {
return decoder.decode(new KafkaTbQueueMsg(record));
}
@Override
protected void doCommit() {
//asynchronous commit of the current batch of offsets; it does not block the poll thread
consumer.commitAsync();
}
@Override
protected void doUnsubscribe() {
log.info("unsubscribe topic and close consumer for topic {}", getTopic());
if (consumer != null) {
//consumer.unsubscribe();
consumer.close();
}
}
public static void main(String[] args) {
// TbKafkaConsumerTemplate.builder().
}
}
package com.mortals.xhx.queue.kafka;
import com.mortals.xhx.queue.TbQueueMsg;
import java.io.IOException;
/**
* Queue message decoder
*
* @author: zxfei
* @date: 2021/11/22 11:22
*/
public interface TbKafkaDecoder<T extends TbQueueMsg> {
T decode(TbQueueMsg msg) throws IOException;
}
package com.mortals.xhx.queue.kafka;
import com.mortals.xhx.queue.TbQueueCallback;
import com.mortals.xhx.queue.TbQueueMsg;
import com.mortals.xhx.queue.TbQueueProducer;
import com.mortals.xhx.queue.TopicPartitionInfo;
import lombok.Builder;
import lombok.Data;
import lombok.Getter;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.header.Header;
import org.apache.kafka.common.header.internals.RecordHeader;
import java.util.Properties;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.stream.Collectors;
/**
* Kafka producer template
*
* @author: zxfei
* @date: 2021/11/22 11:23
*/
@Data
@Slf4j
public class TbKafkaProducerTemplate<T extends TbQueueMsg> implements TbQueueProducer<T> {
/**
* Underlying Kafka producer
*/
private KafkaProducer<String, byte[]> producer;
/**
* Kafka settings
*/
private TbKafkaSettings settings;
private String defaultTopic;
/**
* Topics in use
*/
private Set<TopicPartitionInfo> topics;
@Builder
private TbKafkaProducerTemplate(TbKafkaSettings settings,String defaultTopic) {
this.settings = settings;
//create the underlying KafkaProducer from the producer properties
this.producer = new KafkaProducer<>(settings.toProducerProps());
this.defaultTopic = defaultTopic;
topics = ConcurrentHashMap.newKeySet();
}
@Override
public void init() {
}
@Override
public void send(TopicPartitionInfo tpi, T msg, TbQueueCallback callback) {
String key = msg.getKey().toString();
byte[] data = msg.getData();
ProducerRecord<String, byte[]> record;
if (tpi.getTopic() == null) {
tpi.setTopic(this.defaultTopic);
}
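// Copy the queue-message headers onto Kafka record headers; the record partition is left null,
// so Kafka's default partitioner picks a partition from the record key.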
Iterable<Header> headers = msg.getHeaders().getData().entrySet().stream().map(e -> new RecordHeader(e.getKey(), e.getValue())).collect(Collectors.toList());
record = new ProducerRecord<>(tpi.getTopic(), null, key, data, headers);
producer.send(record, (metadata, exception) -> {
if (exception == null) {
if (callback != null) {
callback.onSuccess(new KafkaTbQueueMsgMetadata(metadata));
}
} else {
if (callback != null) {
callback.onFailure(exception);
} else {
log.warn("Producer template failure: {}", exception.getMessage(), exception);
}
}
});
}
@Override
public void stop() {
if (producer != null) {
producer.close();
}
}
}
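A minimal usage sketch of the producer template, assuming an injected TbKafkaSettings, a no-arg TopicPartitionInfo constructor, and an already prepared TbQueueMsg; the topic name "device_core" is a placeholder. When the TopicPartitionInfo carries no topic, send() falls back to the configured default topic.
        TbKafkaProducerTemplate.TbKafkaProducerTemplateBuilder<TbQueueMsg> builder = TbKafkaProducerTemplate.builder();
        builder.settings(kafkaSettings);      // injected TbKafkaSettings
        builder.defaultTopic("device_core");  // placeholder topic name
        TbQueueProducer<TbQueueMsg> producer = builder.build();
        TopicPartitionInfo tpi = new TopicPartitionInfo(); // assumed no-arg constructor; topic left null so defaultTopic is used
        producer.send(tpi, msg, null);        // msg is any prepared TbQueueMsg; a null callback means fire-and-forget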
package com.mortals.xhx.queue.kafka;
import lombok.Data;
/**
 * Extra key/value configuration entry (additional client properties)
 *
 * @author: zxfei
 * @date: 2021/11/22 13:31
 */
@Data
public class TbKafkaProperty {
private String key;
private String value;
}
package com.mortals.xhx.queue.kafka;
import lombok.Getter;
import lombok.Setter;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.CommonClientConfigs;
import org.apache.kafka.clients.admin.AdminClientConfig;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.serialization.ByteArrayDeserializer;
import org.apache.kafka.common.serialization.ByteArraySerializer;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.apache.kafka.common.serialization.StringSerializer;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.stereotype.Component;
import java.util.List;
import java.util.Properties;
/**
 * Kafka configuration properties
 *
 * @author: zxfei
 * @date: 2021/11/22 13:30
 */
@Slf4j
@ConditionalOnProperty(prefix = "queue", value = "type", havingValue = "kafka")
@ConfigurationProperties(prefix = "queue.kafka")
@Component
public class TbKafkaSettings {
@Value("${queue.kafka.bootstrap.servers}")
private String servers;
@Value("${queue.kafka.acks}")
private String acks;
@Value("${queue.kafka.retries}")
private int retries;
@Value("${queue.kafka.batch.size}")
private int batchSize;
@Value("${queue.kafka.linger.ms}")
private long lingerMs;
@Value("${queue.kafka.buffer.memory}")
private long bufferMemory;
@Value("${queue.kafka.replication_factor}")
@Getter
private short replicationFactor;
@Value("${queue.kafka.max_poll_records:8192}")
private int maxPollRecords;
@Value("${queue.kafka.max_poll_interval_ms:300000}")
private int maxPollIntervalMs;
@Value("${queue.kafka.max_partition_fetch_bytes:16777216}")
private int maxPartitionFetchBytes;
@Value("${queue.kafka.fetch_max_bytes:134217728}")
private int fetchMaxBytes;
@Setter
private List<TbKafkaProperty> other;
    /**
     * Admin client properties
     * @return
     */
public Properties toAdminProps() {
Properties props = toProps();
props.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, servers);
props.put(AdminClientConfig.RETRIES_CONFIG, retries);
return props;
}
    /**
     * Consumer properties
     *
     * @return
     */
public Properties toConsumerProps() {
Properties props = toProps();
props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, servers);
props.put(ConsumerConfig.MAX_POLL_RECORDS_CONFIG, maxPollRecords);
props.put(ConsumerConfig.MAX_PARTITION_FETCH_BYTES_CONFIG, maxPartitionFetchBytes);
props.put(ConsumerConfig.FETCH_MAX_BYTES_CONFIG, fetchMaxBytes);
props.put(ConsumerConfig.MAX_POLL_INTERVAL_MS_CONFIG, maxPollIntervalMs);
props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, ByteArrayDeserializer.class);
return props;
}
    /**
     * Producer properties
     *
     * @return
     */
public Properties toProducerProps() {
Properties props = toProps();
props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, servers);
props.put(ProducerConfig.RETRIES_CONFIG, retries);
props.put(ProducerConfig.ACKS_CONFIG, acks);
props.put(ProducerConfig.BATCH_SIZE_CONFIG, batchSize);
props.put(ProducerConfig.LINGER_MS_CONFIG, lingerMs);
props.put(ProducerConfig.BUFFER_MEMORY_CONFIG, bufferMemory);
props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, ByteArraySerializer.class);
return props;
}
private Properties toProps() {
Properties props = new Properties();
        // apply any extra key/value properties first
if (other != null) {
other.forEach(kv -> props.put(kv.getKey(), kv.getValue()));
}
return props;
}
}
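For reference, a minimal properties-style sketch matching the @Value keys above; all values are placeholders, and the optional queue.kafka.other list is bound into TbKafkaProperty entries through the @ConfigurationProperties prefix.
queue.type=kafka
queue.kafka.bootstrap.servers=localhost:9092
queue.kafka.acks=all
queue.kafka.retries=1
queue.kafka.batch.size=16384
queue.kafka.linger.ms=1
queue.kafka.buffer.memory=33554432
queue.kafka.replication_factor=1
# optional extra client properties, appended by toProps()
queue.kafka.other[0].key=session.timeout.ms
queue.kafka.other[0].value=10000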
package com.mortals.xhx.queue.processing;
import com.mortals.xhx.queue.TbQueueMsg;
import com.mortals.xhx.utils.IotThreadFactory;
import lombok.extern.slf4j.Slf4j;
import org.springframework.boot.context.event.ApplicationReadyEvent;
import org.springframework.context.event.EventListener;
import org.springframework.core.annotation.Order;
import javax.annotation.PreDestroy;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
@Slf4j
public abstract class AbstractConsumerService<N extends TbQueueMsg> {
protected volatile ExecutorService consumersExecutor;
protected volatile boolean stopped = false;
public void init(String mainConsumerThreadName) {
this.consumersExecutor = Executors.newCachedThreadPool(IotThreadFactory.forName(mainConsumerThreadName));
}
@EventListener(ApplicationReadyEvent.class)
@Order(value = 2)
public void onApplicationEvent(ApplicationReadyEvent event) {
launchMainConsumers();
}
    /**
     * Start the main consumer threads
     */
    protected abstract void launchMainConsumers();
    /**
     * Stop the main consumer threads
     */
    protected abstract void stopMainConsumers();
@PreDestroy
public void destroy() {
stopped = true;
stopMainConsumers();
if (consumersExecutor != null) {
consumersExecutor.shutdownNow();
}
}
}
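A sketch of a concrete subclass, assuming the TbQueueConsumer interface exposes subscribe()/poll(long)/commit()/unsubscribe() (only their do* counterparts are visible in this commit) and that a TbCoreQueueFactory (defined below) supplies the consumer; the class name and message handling are illustrative only.
package com.mortals.xhx.queue.processing;
import com.mortals.xhx.queue.TbQueueConsumer;
import com.mortals.xhx.queue.TbQueueMsg;
import com.mortals.xhx.queue.provider.TbCoreQueueFactory;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import javax.annotation.PostConstruct;
import java.util.List;
@Slf4j
//@Service // enable only if this sketch is really wired into the context
public class DefaultTbCoreConsumerService extends AbstractConsumerService<TbQueueMsg> {
    @Autowired
    private TbCoreQueueFactory queueFactory;
    private TbQueueConsumer<TbQueueMsg> consumer;
    @PostConstruct
    public void initService() {
        super.init("tb-core-consumer");
        this.consumer = queueFactory.createMsgConsumer();
    }
    @Override
    protected void launchMainConsumers() {
        consumersExecutor.submit(() -> {
            consumer.subscribe();                            // assumed no-arg subscribe using the topic set at construction time
            while (!stopped) {
                List<TbQueueMsg> msgs = consumer.poll(1000); // assumed poll(durationInMillis)
                msgs.forEach(msg -> log.info("received msg {}", msg.getKey()));
                consumer.commit();                           // assumed commit()
            }
        });
    }
    @Override
    protected void stopMainConsumers() {
        if (consumer != null) {
            consumer.unsubscribe();                          // assumed unsubscribe()
        }
    }
}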
package com.mortals.xhx.queue.provider;
import com.mortals.xhx.queue.TbQueueConsumer;
import com.mortals.xhx.queue.TbQueueMsg;
import com.mortals.xhx.queue.TbQueueProducer;
import com.mortals.xhx.queue.kafka.TbKafkaConsumerTemplate;
import com.mortals.xhx.queue.kafka.TbKafkaProducerTemplate;
import com.mortals.xhx.queue.kafka.TbKafkaSettings;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.autoconfigure.condition.ConditionalOnExpression;
import org.springframework.stereotype.Component;
/**
 * Kafka message queue factory
 *
 * @author: zxfei
 * @date: 2021/11/22 15:00
 */
@Component
@ConditionalOnExpression("'${queue.type:null}'=='kafka'")
public class KafkaTbCoreQueueFactory implements TbCoreQueueFactory {
@Autowired
private TbKafkaSettings kafkaSettings;
    /**
     * Create and initialize the message producer
     *
     * @return
     */
@Override
public TbQueueProducer<TbQueueMsg> createMsgProducer() {
TbKafkaProducerTemplate.TbKafkaProducerTemplateBuilder<TbQueueMsg> builder = TbKafkaProducerTemplate.builder();
builder.settings(kafkaSettings);
return builder.build();
}
    /**
     * Create and initialize the message consumer
     *
     * @return
     */
@Override
public TbQueueConsumer<TbQueueMsg> createMsgConsumer() {
        TbKafkaConsumerTemplate.TbKafkaConsumerTemplateBuilder<TbQueueMsg> consumerBuilder = TbKafkaConsumerTemplate.builder();
        consumerBuilder.settings(kafkaSettings);
        return consumerBuilder.build();
}
}
package com.mortals.xhx.queue.provider;
import com.mortals.xhx.queue.*;
import com.mortals.xhx.queue.rabbitmq.TbRabbitMqConsumerTemplate;
import com.mortals.xhx.queue.rabbitmq.TbRabbitMqProducerTemplate;
import com.mortals.xhx.queue.rabbitmq.TbRabbitMqSettings;
import org.springframework.boot.autoconfigure.condition.ConditionalOnExpression;
import org.springframework.stereotype.Component;
import java.util.UUID;
@Component
@ConditionalOnExpression("'${queue.type:null}'=='rabbitmq'")
public class RabbitMqTbCoreQueueFactory implements TbCoreQueueFactory {
private final TbRabbitMqSettings rabbitMqSettings;
private final TbQueueCoreSettings coreSettings;
public RabbitMqTbCoreQueueFactory(TbRabbitMqSettings rabbitMqSettings, TbQueueCoreSettings coreSettings) {
this.rabbitMqSettings = rabbitMqSettings;
this.coreSettings = coreSettings;
}
@Override
public TbQueueProducer<TbQueueMsg> createMsgProducer() {
return new TbRabbitMqProducerTemplate<>(rabbitMqSettings, coreSettings.getTopic());
}
@Override
public TbQueueConsumer<TbQueueMsg> createMsgConsumer() {
return new TbRabbitMqConsumerTemplate<>(rabbitMqSettings, coreSettings.getTopic(), msg -> new TbQueueMsg() {
@Override
public UUID getKey() {
return msg.getKey();
}
@Override
public TbQueueMsgHeaders getHeaders() {
return msg.getHeaders();
}
@Override
public byte[] getData() {
return msg.getData();
}
});
}
}
package com.mortals.xhx.queue.provider;
import com.mortals.xhx.queue.TbQueueConsumer;
import com.mortals.xhx.queue.TbQueueMsg;
import com.mortals.xhx.queue.TbQueueProducer;
public interface TbCoreQueueFactory {
    /**
     * Message producer
     * @return
     */
    TbQueueProducer<TbQueueMsg> createMsgProducer();
    /**
     * Message consumer
     * @return
     */
    TbQueueConsumer<TbQueueMsg> createMsgConsumer();
}
package com.mortals.xhx.queue.provider;
import com.mortals.xhx.queue.TbQueueMsg;
import com.mortals.xhx.queue.TbQueueProducer;
import lombok.extern.apachecommons.CommonsLog;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import javax.annotation.PostConstruct;
/**
 * Initializes the message producer service
 */
@CommonsLog
@Service
public class TbCoreQueueProducerProvider implements TbQueueProducerProvider {
    /**
     * Message queue factory
     */
    @Autowired
    private TbCoreQueueFactory tbQueueProvider;
    /**
     * Message queue producer
     */
    private TbQueueProducer<TbQueueMsg> queueProducer;
// public TbCoreQueueProducerProvider(TbCoreQueueFactory tbQueueProvider) {
// this.tbQueueProvider = tbQueueProvider;
// }
//
@PostConstruct
public void init() {
log.info("消息队列生产服务开始...");
this.queueProducer = tbQueueProvider.createMsgProducer();
}
@Override
public TbQueueProducer<TbQueueMsg> getTbCoreMsgProducer() {
return queueProducer;
}
}
package com.mortals.xhx.queue.provider;
import com.mortals.xhx.queue.TbQueueMsg;
import com.mortals.xhx.queue.TbQueueProducer;
/**
 * Provider interface that exposes the message queue producer
 *
 * @author: zxfei
 * @date: 2021/11/22 14:59
 */
public interface TbQueueProducerProvider {
    /**
     * Message producer
     * @return
     */
    TbQueueProducer<TbQueueMsg> getTbCoreMsgProducer();
}
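A sketch of how a business service would obtain the producer from the provider and publish a message; the TopicPartitionInfo construction is assumed and the callback is omitted (both templates accept null).
    @Autowired
    private TbQueueProducerProvider producerProvider;
    public void publish(TbQueueMsg msg) {
        TopicPartitionInfo tpi = new TopicPartitionInfo(); // assumed no-arg constructor; the template resolves the topic
        producerProvider.getTbCoreMsgProducer().send(tpi, msg, null);
    }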
package com.mortals.xhx.queue.rabbitmq;
import com.alibaba.fastjson.JSON;
import com.mortals.xhx.queue.DefaultTbQueueMsg;
import com.mortals.xhx.queue.TbQueueMsg;
import com.mortals.xhx.queue.TbQueueMsgDecoder;
import com.mortals.xhx.queue.TopicPartitionInfo;
import com.mortals.xhx.queue.kafka.AbstractTbQueueConsumerTemplate;
import com.rabbitmq.client.Channel;
import com.rabbitmq.client.Connection;
import com.rabbitmq.client.GetResponse;
import lombok.extern.slf4j.Slf4j;
import java.io.IOException;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
import java.util.Set;
import java.util.concurrent.TimeoutException;
import java.util.stream.Collectors;
@Slf4j
public class TbRabbitMqConsumerTemplate<T extends TbQueueMsg> extends AbstractTbQueueConsumerTemplate<GetResponse, T> {
private final TbQueueMsgDecoder<T> decoder;
private Channel channel;
private Connection connection;
private volatile Set<String> queues;
public TbRabbitMqConsumerTemplate(TbRabbitMqSettings rabbitMqSettings, String topic, TbQueueMsgDecoder<T> decoder) {
super(topic);
this.decoder = decoder;
try {
connection = rabbitMqSettings.getConnectionFactory().newConnection();
channel = connection.createChannel();
} catch (IOException | TimeoutException e) {
log.error("Failed to create connection." , e);
// throw new RuntimeException("Failed to create connection." , e);
}
stopped = false;
}
@Override
protected List<GetResponse> doPoll(long durationInMillis) {
List<GetResponse> result = queues.stream()
.map(queue -> {
try {
return channel.basicGet(queue, false);
} catch (IOException e) {
log.error("Failed to get messages from queue: [{}]" , queue);
return null;
// throw new RuntimeException("Failed to get messages from queue." , e);
}
}).filter(Objects::nonNull).collect(Collectors.toList());
if (result.size() > 0) {
return result;
} else {
return Collections.emptyList();
}
}
@Override
protected void doSubscribe(List<String> topicNames) {
queues = partitions.stream()
.map(TopicPartitionInfo::getFullTopicName)
.collect(Collectors.toSet());
//queues.forEach(admin::createTopicIfNotExists);
}
@Override
protected void doCommit() {
try {
channel.basicAck(0, true);
} catch (IOException e) {
log.error("Failed to ack messages." , e);
}
}
@Override
protected void doUnsubscribe() {
if (channel != null) {
try {
channel.close();
} catch (IOException | TimeoutException e) {
log.error("Failed to close the channel.");
}
}
if (connection != null) {
try {
connection.close();
} catch (IOException e) {
log.error("Failed to close the connection.");
}
}
}
public T decode(GetResponse message) {
DefaultTbQueueMsg msg = JSON.parseObject(new String(message.getBody()), DefaultTbQueueMsg.class);
return decoder.decode(msg);
}
}
package com.mortals.xhx.queue.rabbitmq;
import com.alibaba.fastjson.JSON;
import com.google.common.util.concurrent.ListeningExecutorService;
import com.google.common.util.concurrent.MoreExecutors;
import com.mortals.xhx.queue.*;
import com.rabbitmq.client.AMQP;
import com.rabbitmq.client.Channel;
import com.rabbitmq.client.Connection;
import lombok.extern.slf4j.Slf4j;
import java.io.IOException;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeoutException;
@Slf4j
public class TbRabbitMqProducerTemplate<T extends TbQueueMsg> implements TbQueueProducer<T> {
private String defaultTopic;
private TbRabbitMqSettings rabbitMqSettings;
private ListeningExecutorService producerExecutor;
private Channel channel;
private Connection connection;
private final Set<TopicPartitionInfo> topics = ConcurrentHashMap.newKeySet();
public TbRabbitMqProducerTemplate(TbRabbitMqSettings rabbitMqSettings, String defaultTopic) {
this.defaultTopic = defaultTopic;
this.rabbitMqSettings = rabbitMqSettings;
producerExecutor = MoreExecutors.listeningDecorator(Executors.newCachedThreadPool());
try {
connection = rabbitMqSettings.getConnectionFactory().newConnection();
channel = connection.createChannel();
} catch (IOException | TimeoutException e) {
log.error("Failed to create connection." , e);
// throw new RuntimeException("Failed to create connection." , e);
}
}
@Override
public void init() {
}
@Override
public String getDefaultTopic() {
return defaultTopic;
}
@Override
public void send(TopicPartitionInfo tpi, T msg, TbQueueCallback callback) {
createTopicIfNotExist(tpi);
AMQP.BasicProperties properties = new AMQP.BasicProperties();
try {
channel.basicPublish(rabbitMqSettings.getExchangeName(), tpi.getFullTopicName(), properties, JSON.toJSONString(new DefaultTbQueueMsg(msg)).getBytes());
if (callback != null) {
callback.onSuccess(null);
}
} catch (IOException e) {
log.error("Failed publish message: [{}]." , msg, e);
if (callback != null) {
callback.onFailure(e);
}
}
}
@Override
public void stop() {
if (producerExecutor != null) {
producerExecutor.shutdownNow();
}
if (channel != null) {
try {
channel.close();
} catch (IOException | TimeoutException e) {
log.error("Failed to close the channel.");
}
}
if (connection != null) {
try {
connection.close();
} catch (IOException e) {
log.error("Failed to close the connection.");
}
}
}
private void createTopicIfNotExist(TopicPartitionInfo tpi) {
if (topics.contains(tpi)) {
return;
}
// admin.createTopicIfNotExists(tpi.getFullTopicName());
topics.add(tpi);
}
}
package com.mortals.xhx.queue.rabbitmq;
import lombok.Getter;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.autoconfigure.condition.ConditionalOnExpression;
import org.springframework.stereotype.Component;
import javax.annotation.PostConstruct;
import java.util.HashMap;
import java.util.Map;
import java.util.regex.Pattern;
//@Component
//@ConditionalOnExpression("'${queue.type:null}'=='rabbitmq'")
public class TbRabbitMqQueueArguments {
@Value("${queue.rabbitmq.queue-properties.core}")
private String coreProperties;
@Value("${queue.rabbitmq.queue-properties.rule-engine}")
private String ruleEngineProperties;
@Value("${queue.rabbitmq.queue-properties.transport-api}")
private String transportApiProperties;
@Value("${queue.rabbitmq.queue-properties.notifications}")
private String notificationsProperties;
@Value("${queue.rabbitmq.queue-properties.js-executor}")
private String jsExecutorProperties;
@Getter
private Map<String, Object> coreArgs;
@Getter
private Map<String, Object> ruleEngineArgs;
@Getter
private Map<String, Object> transportApiArgs;
@Getter
private Map<String, Object> notificationsArgs;
@Getter
private Map<String, Object> jsExecutorArgs;
@PostConstruct
private void init() {
coreArgs = getArgs(coreProperties);
ruleEngineArgs = getArgs(ruleEngineProperties);
transportApiArgs = getArgs(transportApiProperties);
notificationsArgs = getArgs(notificationsProperties);
jsExecutorArgs = getArgs(jsExecutorProperties);
}
private Map<String, Object> getArgs(String properties) {
Map<String, Object> configs = new HashMap<>();
for (String property : properties.split(";")) {
int delimiterPosition = property.indexOf(":");
String key = property.substring(0, delimiterPosition);
String strValue = property.substring(delimiterPosition + 1);
configs.put(key, getObjectValue(strValue));
}
return configs;
}
private Object getObjectValue(String str) {
if (str.equalsIgnoreCase("true") || str.equalsIgnoreCase("false")) {
return Boolean.valueOf(str);
} else if (isNumeric(str)) {
return getNumericValue(str);
}
return str;
}
private Object getNumericValue(String str) {
if (str.contains(".")) {
return Double.valueOf(str);
} else {
return Long.valueOf(str);
}
}
private static final Pattern PATTERN = Pattern.compile("-?\\d+(\\.\\d+)?");
public boolean isNumeric(String strNum) {
if (strNum == null) {
return false;
}
return PATTERN.matcher(strNum).matches();
}
}
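For reference, getArgs() expects semicolon-separated key:value pairs, with numeric and boolean values converted by getObjectValue(); the keys below are typical RabbitMQ queue arguments and serve only as an illustration.
// queue.rabbitmq.queue-properties.core=x-max-length:1000;x-message-ttl:60000;x-single-active-consumer:true
// parsed result: {x-max-length=1000L, x-message-ttl=60000L, x-single-active-consumer=true}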
package com.mortals.xhx.queue.rabbitmq;
import com.rabbitmq.client.ConnectionFactory;
import lombok.Data;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.autoconfigure.condition.ConditionalOnExpression;
import org.springframework.stereotype.Component;
import javax.annotation.PostConstruct;
@Slf4j
@ConditionalOnExpression("'${queue.type:null}'=='rabbitmq'")
@Component
@Data
public class TbRabbitMqSettings {
@Value("${queue.rabbitmq.exchange_name:}")
private String exchangeName;
@Value("${queue.rabbitmq.host:}")
private String host;
@Value("${queue.rabbitmq.port:}")
private int port;
@Value("${queue.rabbitmq.virtual_host:}")
private String virtualHost;
@Value("${queue.rabbitmq.username:}")
private String username;
@Value("${queue.rabbitmq.password:}")
private String password;
@Value("${queue.rabbitmq.automatic_recovery_enabled:}")
private boolean automaticRecoveryEnabled;
@Value("${queue.rabbitmq.connection_timeout:}")
private int connectionTimeout;
@Value("${queue.rabbitmq.handshake_timeout:}")
private int handshakeTimeout;
private ConnectionFactory connectionFactory;
@PostConstruct
private void init() {
connectionFactory = new ConnectionFactory();
connectionFactory.setHost(host);
connectionFactory.setPort(port);
connectionFactory.setVirtualHost(virtualHost);
connectionFactory.setUsername(username);
connectionFactory.setPassword(password);
connectionFactory.setAutomaticRecoveryEnabled(automaticRecoveryEnabled);
connectionFactory.setConnectionTimeout(connectionTimeout);
connectionFactory.setHandshakeTimeout(handshakeTimeout);
}
}
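And the matching properties-style sketch for the RabbitMQ variant; all values are placeholders (an empty exchange_name means the default exchange is used).
queue.type=rabbitmq
queue.rabbitmq.exchange_name=
queue.rabbitmq.host=localhost
queue.rabbitmq.port=5672
queue.rabbitmq.virtual_host=/
queue.rabbitmq.username=guest
queue.rabbitmq.password=guest
queue.rabbitmq.automatic_recovery_enabled=true
queue.rabbitmq.connection_timeout=60000
queue.rabbitmq.handshake_timeout=10000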
//package com.mortals.pstation.tools.lock.config;
//
//import lombok.Data;
//import org.springframework.beans.factory.annotation.Value;
//import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
//import org.springframework.context.annotation.Configuration;
//
///**
// * @author karlhoo
// */
//@Data
//@Configuration
//@ConditionalOnProperty("application.zookeeper.nodes")
//public class ZookeeperConfiguration {
//    /**
//     * ZooKeeper node addresses
//     */
//    @Value("${application.zookeeper.nodes}")
//    private String zookeeperNodes;
//    /**
//     * Session timeout (ms)
//     */
//    @Value("${application.zookeeper.session-timeout-ms:60000}")
//    private Integer sessionTimeoutMs;
//    /**
//     * Connection timeout (ms)
//     */
//    @Value("${application.zookeeper.connection-timeout-ms:5000}")
//    private Integer connectionTimeoutMs;
//    /**
//     * Maximum number of retries after a failed connection
//     */
//    @Value("${application.zookeeper.max-retry:3}")
//    private Integer maxRetry;
//    /**
//     * Interval between connection retries (ms)
//     */
//    @Value("${application.zookeeper.retry-interval-ms:1000}")
//    private Integer retryIntervalMs;
//    /**
//     * Lock acquisition timeout (sec)
//     */
//    @Value("${application.zookeeper.lock-timeout-sec:5}")
//    private Integer lockTimeoutSec;
//    /**
//     * Owner/user identifier
//     */
//    @Value("${application.user.mark}")
//    private String userMark;
//    /**
//     * Config profile identifier
//     */
//    @Value("${spring.cloud.config.profile}")
//    private String configProfile;
//}
package com.mortals.xhx.tools.lock.service;
import com.mortals.framework.exception.AppException;
import org.springframework.validation.annotation.Validated;
/**
 * @author karlhoo
 * <p>
 * Acquire and release a distributed lock
 */
@Validated
public interface ILockService {
    /**
     * Distributed lock - acquire
     *
     * @param lockKey
     * @return true on success
     */
    boolean lock(String lockKey);
    /**
     * Distributed lock - acquire
     *
     * @param lockKey
     * @param unlockSec automatic release timeout, in seconds
     * @return true on success
     */
    boolean lock(String lockKey, Long unlockSec);
    /**
     * Distributed lock - release (must be paired with lock: whoever acquires the lock must release it)
     * @return
     * @throws AppException
     */
    boolean unlock() throws AppException;
}
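A usage sketch of the lock interface, assuming the Redis implementation below is injected; the key and timeout are placeholders. The second lock overload passes an auto-release timeout so a crashed holder cannot block other nodes forever, and unlock() is always called in finally, as the javadoc requires.
    @Autowired
    private ILockService lockService; // e.g. the redisLockService implementation below
    public void rebuildDeviceCache() {
        // placeholder key; auto-release after 30 seconds in case this node dies while holding the lock
        if (!lockService.lock("device:cache:rebuild", 30L)) {
            return; // another node already holds the lock
        }
        try {
            // ... critical section ...
        } finally {
            lockService.unlock();
        }
    }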
//package com.mortals.pstation.tools.lock.service.impl;
//
//import com.mortals.pstation.tools.lock.config.ZookeeperConfiguration;
//import com.mortals.pstation.tools.lock.service.ILockService;
//import lombok.extern.apachecommons.CommonsLog;
//import org.apache.commons.lang3.StringUtils;
//import org.apache.curator.framework.CuratorFramework;
//import org.apache.curator.framework.CuratorFrameworkFactory;
//import org.apache.curator.framework.recipes.locks.InterProcessMutex;
//import org.apache.curator.retry.RetryNTimes;
//import org.springframework.beans.factory.annotation.Autowired;
//import org.springframework.boot.autoconfigure.condition.ConditionalOnBean;
//import org.springframework.stereotype.Service;
//
//import javax.annotation.PostConstruct;
//import java.util.concurrent.TimeUnit;
//
///**
// * @author karlhoo
// */
//@CommonsLog
//@Service("defaultLockService")
//@ConditionalOnBean(ZookeeperConfiguration.class)
//public class DefaultLockServiceImpl implements ILockService {
// @Autowired
// private ZookeeperConfiguration zookeeperConfiguration;
// private CuratorFramework curatorClient;
// private String LOCK_KEY_PREFIX;
// private ThreadLocal<InterProcessMutex> interProcessMutexThreadLocal = new ThreadLocal<>();
//
// @Override
// public boolean lock(String lockKey) {
// InterProcessMutex lock = new InterProcessMutex(getCuratorClient(), LOCK_KEY_PREFIX + (StringUtils.isBlank(lockKey) ? "/" : lockKey));
// try {
// if (lock.acquire(zookeeperConfiguration.getLockTimeoutSec(), TimeUnit.SECONDS)) {
// interProcessMutexThreadLocal.set(lock);
// return true;
// }
// } catch (Exception e) {
// log.warn("获取分布式锁报错", e);
// }
// return false;
// }
//
// @Override
// public void unlock() {
// try {
// interProcessMutexThreadLocal.get().release();
// } catch (Exception e) {
// log.warn("释放分布式锁报错", e);
// } finally {
// interProcessMutexThreadLocal.remove();
// }
// }
//
// private CuratorFramework getCuratorClient() {
// if (curatorClient == null) {
// this.curatorClient = CuratorFrameworkFactory.newClient(
// zookeeperConfiguration.getZookeeperNodes(), zookeeperConfiguration.getSessionTimeoutMs(),
// zookeeperConfiguration.getConnectionTimeoutMs(), new RetryNTimes(zookeeperConfiguration.getMaxRetry(), zookeeperConfiguration.getRetryIntervalMs()));
// this.curatorClient.start();
// }
// return this.curatorClient;
// }
//
// @PostConstruct
// public void init() {
// StringBuilder lockKeyPrefixBuilder = new StringBuilder("/sms/lock/");
// if (StringUtils.isNotEmpty(zookeeperConfiguration.getUserMark())) {
// lockKeyPrefixBuilder.append(zookeeperConfiguration.getUserMark()).append("/");
// }
// if (StringUtils.isNotEmpty(zookeeperConfiguration.getConfigProfile())) {
// lockKeyPrefixBuilder.append(zookeeperConfiguration.getConfigProfile()).append("/");
// }
// LOCK_KEY_PREFIX = lockKeyPrefixBuilder.toString();
// }
//}
//
package com.mortals.xhx.tools.lock.service.impl;
import com.mortals.framework.service.ICacheService;
import com.mortals.xhx.common.keys.RedisCacheKeys;
import com.mortals.xhx.tools.lock.service.ILockService;
import lombok.extern.apachecommons.CommonsLog;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.util.ObjectUtils;
/**
 * Redis-based implementation of the distributed lock
 * @author zxfei
 */
@CommonsLog
@Service("redisLockService")
//@ConditionalOnProperty("application.cache.redis")
public class RedisLockServiceImpl implements ILockService {
private static ThreadLocal<String> threadLocal = new ThreadLocal<>();
@Autowired
private ICacheService cacheService;
@Override
public boolean lock(String lockKey) {
return lock(lockKey, null);
}
@Override
public boolean lock(String lockKey, Long unlockSec) {
try {
boolean ret;
if (ObjectUtils.isEmpty(unlockSec)) {
ret = cacheService.hincrBy(RedisCacheKeys.getCoopsDistributedLockKey(), lockKey, 1) == 1;
} else {
ret = cacheService.hincrByForTime(RedisCacheKeys.getCoopsDistributedLockKey(), lockKey, 1, unlockSec) == 1;
}
if (ret) {
threadLocal.set(lockKey);
}
return ret;
} catch (Exception e) {
log.error(String.format("分布式加锁出错 lockKey:%s%s", lockKey, ObjectUtils.isEmpty(unlockSec) ? "" : ",unlockSec:" + unlockSec), e);
return false;
}
}
@Override
public boolean unlock() {
try {
cacheService.hdel(RedisCacheKeys.getCoopsDistributedLockKey(), threadLocal.get());
threadLocal.remove();
return true;
} catch(Exception e) {
log.error(String.format("分布式解锁出错 lockKey:%s", threadLocal.get()), e);
return false;
}
}
}
package com.mortals.xhx.tools.uid;
public interface ISeqGeneratorService {
    /**
     * Distributed order id (generated locally)
     * <p>
     * A platform-wide unique id, returned downstream for order-receipt correlation and reconciliation,
     * and also used as part of the order number handed to upstream systems
     *
     * @param workerId worker/host id (0 ~ 1023)
     * @return an 18-digit number
     */
    long nextOrderId(int workerId);
    /**
     * Parse a distributed order id back into its parts
     *
     * @param orderId
     * @return [0]: generation timestamp (epoch milliseconds), [1]: worker id, [2]: sequence
     */
    Long[] parserOrderId(long orderId);
}
package com.mortals.xhx.tools.uid.impl;
import com.mortals.xhx.tools.uid.ISeqGeneratorService;
import org.springframework.stereotype.Service;
@Service("seqGeneratorService")
public class DefaultSeqGeneratorService implements ISeqGeneratorService {
@Override
public long nextOrderId(int workerId) {
return SeqGenerator.nextId(workerId);
}
@Override
public Long[] parserOrderId(long orderId) {
return SeqGenerator.parserId(orderId);
}
}
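A short usage sketch; the workerId must be unique per node (0 ~ 1023) and the value 1 below is just an example.
    @Autowired
    private ISeqGeneratorService seqGeneratorService;
    public long newOrderId() {
        long orderId = seqGeneratorService.nextOrderId(1);         // example workerId
        Long[] parts = seqGeneratorService.parserOrderId(orderId); // [0]=timestamp, [1]=workerId, [2]=sequence
        return orderId;
    }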
package com.mortals.xhx.tools.uid.impl;
public class SeqGenerator {
    /**
     * Custom epoch the timestamps are measured from (2017-01-01, UTC+8)
     */
    private static final long twepoch = 1483200000000L;
    /**
     * Number of bits used by the timestamp
     */
    private static final long timestampBits = 41L;
    /**
     * Number of bits used by the worker id
     */
    private static final long workerIdBits = 10L;
    /**
     * Maximum supported worker id, 1023 for 10 bits (the shift trick quickly yields the largest value representable by n bits)
     */
    private static final long maxWorkerId = -1L ^ (-1L << workerIdBits);
    /**
     * Number of bits used by the per-millisecond sequence
     */
    private static final long sequenceBits = 12L;
    /**
     * Worker id is shifted left by the 12 sequence bits
     */
    private static final long workerIdShift = sequenceBits;
    /**
     * Timestamp is shifted left by 22 bits (10 + 12)
     */
    private static final long timestampLeftShift = sequenceBits + workerIdBits;
    /**
     * Sequence mask, 4095 (0b111111111111 = 0xfff)
     */
    private static final long sequenceMask = -1L ^ (-1L << sequenceBits);
    /**
     * Sequence within the current millisecond (0 ~ 4095)
     */
    private static long sequence = 0L;
    /**
     * Timestamp of the last generated id
     */
    private static long lastTimestamp = -1L;
    /**
     * Distributed order id.<br>
     * A 64-bit long with the following layout (parts separated by -):<br>
     * 0 - 0000000000 0000000000 0000000000 0000000000 0 - 0000000000 - 000000000000 <br>
     * 1 sign bit: Java longs are signed and ids must stay positive, so the highest bit is always 0.<br>
     * 41 bits of timestamp (milliseconds). This is not the absolute time but the offset from the custom
     * epoch (current time - twepoch), i.e. the time the generator was put into service (the twepoch field above).
     * 41 bits of milliseconds cover about 69 years: (1L << 41) / (1000L * 60 * 60 * 24 * 365) = 69.<br>
     * 10 bits of worker id, allowing up to 1024 nodes.<br>
     * 12 bits of sequence, counting ids within one millisecond: up to 4096 ids per node per millisecond.
     *
     * @param workerId worker/host id (0 ~ 1023)
     */
public static synchronized long nextId(int workerId) {
if (workerId > maxWorkerId || workerId < 0) {
throw new IllegalArgumentException(String.format("worker Id can't be greater than %d or less than 0", maxWorkerId));
}
long timestamp = timeGen();
        // if the current time is before the last generation timestamp, the system clock has moved backwards; fail fast
        if (timestamp < lastTimestamp) {
            throw new RuntimeException(
                    String.format("Clock moved backwards. Refusing to generate id for %d milliseconds", lastTimestamp - timestamp));
        }
        // same millisecond as the previous id: advance the in-millisecond sequence
        if (lastTimestamp == timestamp) {
            sequence = (sequence + 1) & sequenceMask;
            // sequence overflowed within this millisecond
            if (sequence == 0) {
                // spin until the next millisecond to get a fresh timestamp
                timestamp = tilNextMillis(lastTimestamp);
            }
        }
        // timestamp changed: reset the in-millisecond sequence
        else {
            sequence = 0L;
        }
        // remember the timestamp of this id
        lastTimestamp = timestamp;
        // shift the parts into place and OR them together into the 64-bit id
// System.out.println(String.format("timestamp:%d, workerId:%d, sequence:%d", timestamp - twepoch, workerId, sequence));
return ((timestamp - twepoch) << timestampLeftShift)
| (workerId << workerIdShift)
| sequence;
}
public static synchronized Long[] parserId(long id) {
long timestamp = id >>> timestampLeftShift;
long workerId = id << timestampBits + 1;
workerId = workerId >>> timestampBits + sequenceBits + 1;
long sequence = id << timestampBits + workerIdBits + 1;
sequence = sequence >>> timestampBits + workerIdBits + 1;
// System.out.println(String.format("timestamp:%d, workerId:%d, sequence:%d", timestamp, workerId, sequence));
return new Long[]{timestamp + twepoch, workerId, sequence};
}
    /**
     * Spin until the next millisecond, i.e. until a timestamp newer than the last one is obtained
     *
     * @param lastTimestamp timestamp of the last generated id
     * @return current timestamp
     */
private static long tilNextMillis(long lastTimestamp) {
long timestamp = timeGen();
while (timestamp <= lastTimestamp) {
timestamp = timeGen();
}
return timestamp;
}
    /**
     * Returns the current time in milliseconds
     *
     * @return current time (milliseconds)
     */
private static long timeGen() {
return System.currentTimeMillis();
}
public static void main(String[] args) {
long id = nextId(1000);
System.out.println(id);
parserId(id);
}
}
\ No newline at end of file
package com.mortals.xhx.utils;
import org.springframework.beans.BeanWrapper;
import org.springframework.beans.BeanWrapperImpl;
import java.util.HashSet;
import java.util.Set;
public class BeanUtil
{
    /**
     *
     * @Title: getNullPropertyNames
     * @Description: returns the names of all properties of the given object whose value is null
     * @param source
     * @return
     */
public static String[] getNullPropertyNames (Object source) {
final BeanWrapper src = new BeanWrapperImpl(source);
java.beans.PropertyDescriptor[] pds = src.getPropertyDescriptors();
Set<String> emptyNames = new HashSet<String>();
for(java.beans.PropertyDescriptor pd : pds) {
Object srcValue = src.getPropertyValue(pd.getName());
if (srcValue == null) emptyNames.add(pd.getName());
}
String[] result = new String[emptyNames.size()];
return emptyNames.toArray(result);
}
}
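The typical use of getNullPropertyNames is to copy only the non-null properties of a source bean onto a target, for example when applying a partial update; a minimal sketch using Spring's BeanUtils (the helper class name is illustrative):
import org.springframework.beans.BeanUtils;
public class PartialUpdateUtil {
    /**
     * Copies only the properties of source that are non-null, leaving the rest of target untouched.
     */
    public static void mergeNonNull(Object source, Object target) {
        BeanUtils.copyProperties(source, target, BeanUtil.getNullPropertyNames(source));
    }
}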
package com.mortals.xhx.utils;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.atomic.AtomicInteger;
public class IotThreadFactory implements ThreadFactory {
private static final AtomicInteger poolNumber = new AtomicInteger(1);
private final ThreadGroup group;
private final AtomicInteger threadNumber = new AtomicInteger(1);
private final String namePrefix;
public static IotThreadFactory forName(String name) {
return new IotThreadFactory(name);
}
private IotThreadFactory(String name) {
SecurityManager s = System.getSecurityManager();
group = (s != null) ? s.getThreadGroup() :
Thread.currentThread().getThreadGroup();
namePrefix = name + "-" +
poolNumber.getAndIncrement() +
"-thread-";
}
@Override
public Thread newThread(Runnable r) {
Thread t = new Thread(group, r,
namePrefix + threadNumber.getAndIncrement(),
0);
if (t.isDaemon())
t.setDaemon(false);
if (t.getPriority() != Thread.NORM_PRIORITY)
t.setPriority(Thread.NORM_PRIORITY);
return t;
}
}
package com.mortals.xhx.utils;//package com.mortals.coops.utils;
//
//import javax.validation.Constraint;
//import javax.validation.ConstraintValidator;
//import javax.validation.ConstraintValidatorContext;
//import javax.validation.Payload;
//import java.lang.annotation.*;
//import java.time.LocalDate;
//
//@Target({ElementType.FIELD})
//@Retention(RetentionPolicy.RUNTIME)
//@Constraint(validatedBy = PastLocalDate.PastValidator.class)
//@Documented
//public @interface PastLocalDate {
// String message() default "{javax.validation.constraints.Past.message}";
//
// Class<?>[] groups() default {};
//
// Class<? extends Payload>[] payload() default {};
//
// class PastValidator implements ConstraintValidator<PastLocalDate,
// LocalDate> {
// public void initialize(PastLocalDate past) {
// }
//
// public boolean isValid(LocalDate localDate,
// ConstraintValidatorContext context) {
// return localDate == null || localDate.isBefore(LocalDate.now());
// }
// }
//}
package com.mortals.xhx.utils;
import org.springframework.beans.BeansException;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;
import org.springframework.stereotype.Component;
/**
 * @ClassName SpringUtils
 * @Description static accessor for the Spring ApplicationContext
 * @Author finegirl
 * @Date 2020/4/24 15:32
 **/
@Component
public class SpringUtils implements ApplicationContextAware {
private static ApplicationContext applicationContext;
@Override
public void setApplicationContext(ApplicationContext applicationContext) throws BeansException {
SpringUtils.applicationContext = applicationContext;
}
public static ApplicationContext getApplicationContext() {
return applicationContext;
}
    /**
     * Use when the managed bean is registered under an explicit name/alias
     * @param name
     * @param <T>
     * @return
     * @throws BeansException
     */
@SuppressWarnings("unchecked")
public static <T> T getBean(String name) throws BeansException {
return (T) applicationContext.getBean(name);
}
    /**
     * Use when no alias is declared on the bean, i.e. lookup by type
     */
public static <T> T getBean(Class<T> clazz) {
return applicationContext.getBean(clazz);
}
}
package com.mortals.xhx.utils;
import com.mortals.framework.util.DateUtils;
import java.text.SimpleDateFormat;
import java.util.*;
public class TimeUtil {
    public static List<String> findDaysStr(String beginTime, String endTime) {
        SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd");
        Date dBegin = null;
        Date dEnd = null;
        try {
            dBegin = sdf.parse(beginTime);
            dEnd = sdf.parse(endTime);
        } catch (Exception e) {
            e.printStackTrace();
        }
        // list holding the String for every day in the range
        List<String> daysStrList = new ArrayList<String>();
        // add the start day first
        daysStrList.add(sdf.format(dBegin));
        Calendar calBegin = Calendar.getInstance();
        // set this Calendar to the given start Date
        calBegin.setTime(dBegin);
        Calendar calEnd = Calendar.getInstance();
        // set this Calendar to the given end Date
        calEnd.setTime(dEnd);
        // keep going while the end date is still after the current date
        while (dEnd.after(calBegin.getTime())) {
            // advance the calendar by one day
            calBegin.add(Calendar.DAY_OF_MONTH, 1);
            String dayStr = sdf.format(calBegin.getTime());
            daysStrList.add(dayStr);
        }
        return daysStrList;
}
    public static List<String> findMonthsStr(String beginTime, String endTime) {
        SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM");
        SimpleDateFormat sdf1 = new SimpleDateFormat("yyyy-MM-dd");
        Date dBegin = null;
        Date dEnd = null;
        try {
            dBegin = sdf.parse(beginTime);
            dEnd = sdf.parse(endTime);
        } catch (Exception e) {
            e.printStackTrace();
        }
        // list holding one String per month in the range (each rendered as the first day of the month)
        List<String> monthsStrList = new ArrayList<String>();
        // add the starting month first
        monthsStrList.add(sdf1.format(dBegin));
        Calendar calBegin = Calendar.getInstance();
        // set this Calendar to the given start Date
        calBegin.setTime(dBegin);
        Calendar calEnd = Calendar.getInstance();
        // set this Calendar to the given end Date
        calEnd.setTime(dEnd);
        // keep going while the end date is still after the current date
        while (dEnd.after(calBegin.getTime())) {
            // advance the calendar by one month
            calBegin.add(Calendar.MONTH, 1);
            String dayStr = sdf1.format(calBegin.getTime());
            monthsStrList.add(dayStr);
        }
        return monthsStrList;
}
    public static List<String> findYearsStr(String beginTime, String endTime) {
        SimpleDateFormat sdf = new SimpleDateFormat("yyyy");
        Date dBegin = null;
        Date dEnd = null;
        try {
            dBegin = sdf.parse(beginTime);
            dEnd = sdf.parse(endTime);
        } catch (Exception e) {
            e.printStackTrace();
        }
        List<String> yearsStrList = new ArrayList<String>();
        yearsStrList.add(sdf.format(dBegin));
        Calendar calBegin = Calendar.getInstance();
        // set this Calendar to the given start Date
        calBegin.setTime(dBegin);
        Calendar calEnd = Calendar.getInstance();
        // set this Calendar to the given end Date
        calEnd.setTime(dEnd);
        // keep going while the end date is still after the current date
        while (dEnd.after(calBegin.getTime())) {
            // advance the calendar by one year
            calBegin.add(Calendar.YEAR, 1);
            String dayStr = sdf.format(calBegin.getTime());
            yearsStrList.add(dayStr);
        }
return yearsStrList;
}
public static void main(String[] args) {
/* String begintTime = "2017";
String endTime = "2018";
// TimeUtil.findDaysStr(begintTime,endTime).stream().forEach(f->System.out.println(f));
//TimeUtil.findMonthsStr(begintTime,endTime).stream().forEach(f->System.out.println(f));
TimeUtil.findYearsStr(begintTime,endTime).stream().forEach(f->System.out.println(f));*/
/* String begintTime = "2017-03-01";
;
String yyyy = DateUtils.getDateTimeStr(DateUtils.StrToDateTime(begintTime, "yyyy"), DateUtils.P_yyyy_MM_dd);
System.out.println(yyyy);*/
String startTime="2019-03-18";
System.out.println(DateUtils.StrToDateTime(startTime,DateUtils.P_yyyy_MM_dd).getTime());
}
}
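For reference, the expected behaviour of these helpers (both endpoints are included; months and years are rendered with the formats shown above):
// findDaysStr("2021-11-01", "2021-11-03") -> ["2021-11-01", "2021-11-02", "2021-11-03"]
// findMonthsStr("2021-01", "2021-03")     -> ["2021-01-01", "2021-02-01", "2021-03-01"]
// findYearsStr("2019", "2021")            -> ["2019", "2020", "2021"]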
package com.mortals.xhx.utils.stream.messaging;//package com.mortals.xhx.utils.stream.messaging;
//
///**
// * @author karlhoo
// */
//public interface ProcessTaskProcessor extends ProcessTaskSink, ProcessTaskSource {
//}
package com.mortals.xhx.utils.stream.service.impl;//package com.mortals.xhx.utils.stream.service.impl;
//
//import com.mortals.xhx.utils.stream.service.IMessageService;
//import lombok.extern.apachecommons.CommonsLog;
//import org.springframework.beans.factory.annotation.Autowired;
//import org.springframework.cloud.stream.annotation.EnableBinding;
//import org.springframework.kafka.support.KafkaHeaders;
//import org.springframework.messaging.Message;
//import org.springframework.messaging.MessageChannel;
//import org.springframework.messaging.support.MessageBuilder;
//import org.springframework.stereotype.Component;
//
///**
// * @author karlhoo
// */
//@CommonsLog
//@Component
//public class DefaultMessageServiceImpl implements IMessageService {
//
// @Override
// public boolean sendMessage(MessageChannel messageChannel, String message) {
// return sendMessage(messageChannel, message, null);
// }
//
// @Override
// public boolean sendMessage(MessageChannel messageChannel, String message, String messageKey) {
// return sendMessage(messageChannel, MessageBuilder.withPayload(message).setHeader(KafkaHeaders.MESSAGE_KEY, messageKey == null ? messageKey : messageKey.getBytes()).build());
// }
//
// private boolean sendMessage(MessageChannel messageChannel, Message message) {
// try {
// return messageChannel.send(message);
// } catch (Exception e) {
// log.error(String.format("提交消息出错 messageChannel: %s, message: %s", messageChannel.toString(), message.getPayload()), e);
// return false;
// }
// }
//
//}
# Development environment configuration
NODE_ENV = development
# API base URL
VUE_APP_BASE_API =http://plm.testnew.com:8082/m
# WebSocket address
VUE_APP_WEBSOCKET_API =127.0.0.1:18221/m
# Production environment configuration
NODE_ENV = production
# API base URL
VUE_APP_BASE_API = http://192.168.0.26:9005/m
# WebSocket address
VUE_APP_WEBSOCKET_API =192.168.0.26:18221/m
# Test environment configuration
NODE_ENV = 'production'
# API base URL
VUE_APP_BASE_API = http://192.168.0.98:11021/m
# WebSocket address
VUE_APP_WEBSOCKET_API =192.168.0.98:18221/m
*.iml
node_modules/
.idea/
*.class
target/
\ No newline at end of file