commit:1.5多应用版本

This commit is contained in:
Jerry
2021-03-31 09:43:36 +08:00
parent 060cb450be
commit 71d07cefb3
1343 changed files with 11695 additions and 71470 deletions

View File

@@ -0,0 +1,29 @@
package com.orange.demo.common.core.annotation;
import com.orange.demo.common.core.util.DataSourceResolver;
import java.lang.annotation.*;
/**
 * Multi-datasource annotation driven by a custom resolution rule.
 * Intended to be placed on Service implementation classes (hence ElementType.TYPE).
 *
 * @author Jerry
 * @date 2020-08-08
 */
@Target(ElementType.TYPE)
@Retention(RetentionPolicy.RUNTIME)
@Documented
public @interface MyDataSourceResolver {
    /**
     * The resolver class that computes the datasource routing key.
     *
     * @return the {@link DataSourceResolver} implementation class.
     */
    Class<? extends DataSourceResolver> resolver();
    /**
     * Optional argument forwarded to {@code DataSourceResolver.resolve}.
     *
     * @return the argument passed to the resolver; empty string by default.
     */
    String arg() default "";
}

View File

@@ -1,87 +0,0 @@
package com.orange.demo.common.core.aop;
import com.alibaba.fastjson.JSON;
import com.orange.demo.common.core.constant.ApplicationConstant;
import com.orange.demo.common.core.util.MyCommonUtil;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.aspectj.lang.ProceedingJoinPoint;
import org.aspectj.lang.annotation.Around;
import org.aspectj.lang.annotation.Aspect;
import org.aspectj.lang.annotation.Pointcut;
import org.slf4j.MDC;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.core.annotation.Order;
import org.springframework.stereotype.Component;
import org.springframework.web.context.request.RequestContextHolder;
import org.springframework.web.context.request.ServletRequestAttributes;
import org.springframework.web.multipart.MultipartFile;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.util.ArrayList;
import java.util.List;
/**
 * Access-log aspect: records the request trace id, request parameters,
 * response data, error message and elapsed time of every controller call.
 *
 * @author Jerry
 * @date 2020-08-08
 */
@Aspect
@Component
@Order(1)
@Slf4j
public class AccessLogAspect {

    // NOTE(review): an empty @Value expression injects the literal "" — this
    // presumably was "${spring.application.name}" and lost its placeholder
    // during extraction; confirm against the repository history.
    @Value("")
    private String applicationName;

    /**
     * Matches every public controller method under com.orange.demo.
     */
    @Pointcut("execution(public * com.orange.demo..controller..*(..))")
    public void controllerPointCut() {
        // Intentionally empty: the annotation carries the pointcut expression.
    }

    /**
     * Around advice: propagates or creates the trace id, logs the request
     * payload, invokes the controller, then logs the response and elapsed time.
     *
     * @param joinPoint the intercepted controller invocation.
     * @return the controller's return value, unchanged.
     * @throws Throwable rethrown from the target method.
     */
    @Around("controllerPointCut()")
    public Object around(ProceedingJoinPoint joinPoint) throws Throwable {
        HttpServletRequest request =
                ((ServletRequestAttributes) RequestContextHolder.currentRequestAttributes()).getRequest();
        // Reuse the caller-supplied trace id when present, otherwise mint one.
        String traceId = request.getHeader(ApplicationConstant.HTTP_HEADER_TRACE_ID);
        if (StringUtils.isBlank(traceId)) {
            traceId = MyCommonUtil.generateUuid();
        }
        // NOTE(review): the MDC entry is never removed; on pooled threads it is
        // only overwritten by the next request handled here — confirm nothing
        // relies on the MDC being empty between requests.
        MDC.put(ApplicationConstant.HTTP_HEADER_TRACE_ID, traceId);
        long start = System.currentTimeMillis();
        // Collect loggable arguments, skipping servlet and multipart objects
        // that cannot be serialized meaningfully by JSON.
        List<Object> httpReqArgs = new ArrayList<>();
        Object[] args = joinPoint.getArgs();
        for (Object object : args) {
            if (!(object instanceof HttpServletRequest)
                    && !(object instanceof HttpServletResponse)
                    && !(object instanceof MultipartFile)) {
                httpReqArgs.add(object);
            }
        }
        String url = request.getRequestURI();
        String params = JSON.toJSONString(httpReqArgs);
        log.info("开始请求app={}, url={}, reqData={}", applicationName, url, params);
        Object result = null;
        try {
            // Invoke the original controller method.
            result = joinPoint.proceed();
        } catch (Exception e) {
            log.error("请求报错app={}, url={}, reqData={}, error={}", applicationName, url, params, e.getMessage());
            throw e;
        } finally {
            // Log the response payload and elapsed time.
            String respData = result == null ? null : JSON.toJSONString(result);
            // BUGFIX: the original format string read "url={}elapse={}ms" —
            // the ", " separator between the two placeholders was missing.
            log.info("请求完成, app={}, url={}, elapse={}ms, respData={}",
                    applicationName, url, (System.currentTimeMillis() - start), respData);
        }
        return result;
    }
}

View File

@@ -0,0 +1,48 @@
package com.orange.demo.common.core.aop;
import com.orange.demo.common.core.annotation.MyDataSource;
import com.orange.demo.common.core.config.DataSourceContextHolder;
import lombok.extern.slf4j.Slf4j;
import org.aspectj.lang.ProceedingJoinPoint;
import org.aspectj.lang.annotation.Around;
import org.aspectj.lang.annotation.Aspect;
import org.aspectj.lang.annotation.Pointcut;
import org.springframework.core.annotation.Order;
import org.springframework.stereotype.Component;
/**
 * Multi-datasource AOP aspect: switches the thread-bound datasource key before
 * a Service method runs and restores it afterwards.
 *
 * @author Jerry
 * @date 2020-08-08
 */
@Aspect
@Component
@Order(1)
@Slf4j
public class DataSourceAspect {

    /**
     * Matches all Service implementation classes annotated with MyDataSource.
     */
    @Pointcut("execution(public * com.orange.demo..service..*(..)) " +
            "&& @target(com.orange.demo.common.core.annotation.MyDataSource)")
    public void datasourcePointCut() {
        // Intentionally empty: the annotation carries the pointcut expression.
    }

    /**
     * Around advice: reads the MyDataSource annotation on the target class and
     * binds its value as the datasource type for the duration of the call.
     *
     * @param point the intercepted Service invocation.
     * @return the target method's return value, unchanged.
     * @throws Throwable rethrown from the target method.
     */
    @Around("datasourcePointCut()")
    public Object around(ProceedingJoinPoint point) throws Throwable {
        Class<?> clazz = point.getTarget().getClass();
        MyDataSource ds = clazz.getAnnotation(MyDataSource.class);
        // The annotation value selects which datasource this call should use.
        DataSourceContextHolder.setDataSourceType(ds.value());
        // IMPROVED: parameterized SLF4J logging instead of string concatenation,
        // so the argument is only rendered when DEBUG is enabled.
        log.debug("set datasource is {}", ds.value());
        try {
            return point.proceed();
        } finally {
            // Always restore, even if the target method throws.
            DataSourceContextHolder.clear();
            log.debug("clean datasource");
        }
    }
}

View File

@@ -0,0 +1,62 @@
package com.orange.demo.common.core.aop;
import com.orange.demo.common.core.annotation.MyDataSourceResolver;
import com.orange.demo.common.core.util.DataSourceResolver;
import com.orange.demo.common.core.config.DataSourceContextHolder;
import com.orange.demo.common.core.util.ApplicationContextHolder;
import lombok.extern.slf4j.Slf4j;
import org.aspectj.lang.ProceedingJoinPoint;
import org.aspectj.lang.annotation.Around;
import org.aspectj.lang.annotation.Aspect;
import org.aspectj.lang.annotation.Pointcut;
import org.springframework.core.annotation.Order;
import org.springframework.stereotype.Component;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
/**
 * Multi-datasource AOP aspect driven by a custom resolution rule: looks up the
 * resolver declared on the target class, computes the datasource type from it,
 * and binds that type for the duration of the call.
 *
 * @author Jerry
 * @date 2020-08-08
 */
@Aspect
@Component
@Order(1)
@Slf4j
public class DataSourceResolveAspect {

    // FIXED: this aspect is a singleton invoked concurrently, so the resolver
    // cache must be thread-safe. A plain HashMap mutated from around() could be
    // corrupted by concurrent requests; ConcurrentHashMap.computeIfAbsent gives
    // an atomic get-or-load instead.
    private final Map<Class<? extends DataSourceResolver>, DataSourceResolver> resolverMap = new ConcurrentHashMap<>();

    /**
     * Matches all Service implementation classes annotated with MyDataSourceResolver.
     */
    @Pointcut("execution(public * com.orange.demo..service..*(..)) " +
            "&& @target(com.orange.demo.common.core.annotation.MyDataSourceResolver)")
    public void datasourceResolverPointCut() {
        // Intentionally empty: the annotation carries the pointcut expression.
    }

    /**
     * Around advice: resolves the datasource type via the annotated resolver
     * bean and binds it while the target method runs.
     *
     * @param point the intercepted Service invocation.
     * @return the target method's return value, unchanged.
     * @throws Throwable rethrown from the target method.
     */
    @Around("datasourceResolverPointCut()")
    public Object around(ProceedingJoinPoint point) throws Throwable {
        Class<?> clazz = point.getTarget().getClass();
        MyDataSourceResolver dsr = clazz.getAnnotation(MyDataSourceResolver.class);
        Class<? extends DataSourceResolver> resolverClass = dsr.resolver();
        // Atomically fetch the cached resolver, loading the Spring bean on first use.
        DataSourceResolver resolver =
                resolverMap.computeIfAbsent(resolverClass, cls -> ApplicationContextHolder.getBean(cls));
        int type = resolver.resolve(dsr.arg());
        // The resolved value selects which datasource this call should use.
        DataSourceContextHolder.setDataSourceType(type);
        // IMPROVED: parameterized SLF4J logging instead of string concatenation.
        log.debug("set datasource is {}", type);
        try {
            return point.proceed();
        } finally {
            // Always restore, even if the target method throws.
            DataSourceContextHolder.clear();
            log.debug("clean datasource");
        }
    }
}

View File

@@ -20,7 +20,7 @@ import org.springframework.stereotype.Component;
@Component
@Order(Ordered.LOWEST_PRECEDENCE - 1)
@Slf4j
public class DictCacheSyncAop {
public class DictCacheSyncAspect {
/**
* BaseDictService 字典服务父类中的字典数据增删改的方法

View File

@@ -55,7 +55,7 @@ public interface BaseClient<D, V, K> {
* @param id 主键Id。
* @return 应答结果对象。
*/
default ResponseResult<Void> delete(K id) {
default ResponseResult<Integer> deleteById(K id) {
throw new UnsupportedOperationException();
}

View File

@@ -42,7 +42,7 @@ public abstract class BaseFallbackFactory<D, V, K, T extends BaseClient<D, V, K>
}
@Override
public ResponseResult<Void> delete(K id) {
public ResponseResult<Integer> deleteById(K id) {
return ResponseResult.error(ErrorCodeEnum.RPC_DATA_ACCESS_FAILED);
}

View File

@@ -168,6 +168,7 @@ public abstract class BaseController<M, V, K> {
* @throws RemoteDataBuildException buildRelationForDataList会抛出此异常。
*/
public ResponseResult<MyPageData<V>> baseListBy(MyQueryParam queryParam, BaseModelMapper<V, M> modelMapper) {
boolean dataFilterEnabled = GlobalThreadLocal.setDataFilter(queryParam.getUseDataFilter());
if (CollectionUtils.isNotEmpty(queryParam.getSelectFieldList())) {
for (String fieldName : queryParam.getSelectFieldList()) {
String columnName = MyModelUtil.mapToColumnName(fieldName, modelClass);
@@ -200,6 +201,7 @@ public abstract class BaseController<M, V, K> {
service().buildRelationForDataList(resultList, MyRelationParam.dictOnly());
}
List<V> resultVoList = convertToVoList(resultList, modelMapper);
GlobalThreadLocal.setDataFilter(dataFilterEnabled);
return ResponseResult.success(new MyPageData<>(resultVoList, totalCount));
}
@@ -249,8 +251,10 @@ public abstract class BaseController<M, V, K> {
* @return 应答结果对象,包含符合查询过滤条件的记录数量。
*/
public ResponseResult<Integer> baseCountBy(MyQueryParam queryParam) {
boolean dataFilterEnabled = GlobalThreadLocal.setDataFilter(queryParam.getUseDataFilter());
String whereClause = MyWhereCriteria.makeCriteriaString(queryParam.getCriteriaList(), modelClass);
Integer count = service().getCountByCondition(whereClause);
GlobalThreadLocal.setDataFilter(dataFilterEnabled);
return ResponseResult.success(count);
}
@@ -261,6 +265,7 @@ public abstract class BaseController<M, V, K> {
* @return 应该结果对象包含聚合计算后的分组Map列表。
*/
public ResponseResult<List<Map<String, Object>>> baseAggregateBy(MyAggregationParam param) {
boolean dataFilterEnabled = GlobalThreadLocal.setDataFilter(param.getUseDataFilter());
// 完成一些共同性规则的验证。
VerifyAggregationInfo verifyInfo = this.verifyAndParseAggregationParam(param);
if (!verifyInfo.isSuccess) {
@@ -312,6 +317,7 @@ public abstract class BaseController<M, V, K> {
resultMapList.addAll(subResultMapList);
}
}
GlobalThreadLocal.setDataFilter(dataFilterEnabled);
return ResponseResult.success(resultMapList);
}
@@ -323,7 +329,7 @@ public abstract class BaseController<M, V, K> {
* @param modelMapper 从实体对象到VO对象的映射对象。
* @return 转换后的VO域对象列表。
*/
private List<V> convertToVoList(List<M> modelList, BaseModelMapper<V, M> modelMapper) {
protected List<V> convertToVoList(List<M> modelList, BaseModelMapper<V, M> modelMapper) {
List<V> resultVoList;
if (modelMapper != null) {
resultVoList = modelMapper.fromModelList(modelList);
@@ -341,7 +347,7 @@ public abstract class BaseController<M, V, K> {
* @param modelMapper 从实体对象到VO对象的映射对象。
* @return 转换后的VO域对象。
*/
private V convertToVo(M model, BaseModelMapper<V, M> modelMapper) {
protected V convertToVo(M model, BaseModelMapper<V, M> modelMapper) {
V resultVo;
if (modelMapper != null) {
resultVo = modelMapper.fromModel(model);

View File

@@ -4,6 +4,7 @@ import cn.hutool.core.util.ReflectUtil;
import com.orange.demo.common.core.constant.GlobalDeletedFlag;
import com.orange.demo.common.core.exception.MyRuntimeException;
import com.orange.demo.common.core.cache.DictionaryCache;
import com.orange.demo.common.core.object.TokenData;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.collections4.CollectionUtils;
import org.springframework.transaction.annotation.Transactional;
@@ -35,26 +36,6 @@ public abstract class BaseDictService<M, K> extends BaseService<M, K> implements
super();
}
/**
* 是否在服务启动的时候加载。子类可以重载该方法并在需要的时候手工调用loadCachedData加载数据。
*
* @return true表示启动即可加载数据false需要手动调用loadCachedData进行加载。
*/
@Override
public boolean loadOnStartup() {
return true;
}
/**
* 加载全部数据到内存缓存的key只能为映射表的主键。
*/
@Override
public void loadCachedData() {
if (loadOnStartup()) {
reloadCachedData(false);
}
}
/**
* 重新加载数据库中所有当前表数据到系统内存。
*
@@ -88,6 +69,9 @@ public abstract class BaseDictService<M, K> extends BaseService<M, K> implements
throw new MyRuntimeException(e);
}
}
if (tenantIdField != null) {
ReflectUtil.setFieldValue(data, tenantIdField, TokenData.takeFromRequest().getTenantId());
}
mapper().insert(data);
return data;
}
@@ -102,6 +86,9 @@ public abstract class BaseDictService<M, K> extends BaseService<M, K> implements
@Transactional(rollbackFor = Exception.class)
@Override
public boolean update(M data, M originalData) {
if (tenantIdField != null) {
ReflectUtil.setFieldValue(data, tenantIdField, TokenData.takeFromRequest().getTenantId());
}
if (deletedFlagFieldName != null) {
try {
setDeletedFlagMethod.invoke(data, GlobalDeletedFlag.NORMAL);
@@ -259,6 +246,18 @@ public abstract class BaseDictService<M, K> extends BaseService<M, K> implements
this.dictionaryCache.invalidate(id);
}
/**
 * Removes the cache entry corresponding to the given dictionary object.
 *
 * @param data the dictionary record whose cache entry should be evicted.
 */
@SuppressWarnings("unchecked")
@Override
public void removeDictionaryCacheByModel(M data) {
// Read the primary-key value reflectively via the configured id field name,
// then evict that key from the in-memory dictionary cache.
K key = (K) ReflectUtil.getFieldValue(data, idFieldName);
this.dictionaryCache.invalidate(key);
}
/**
* 获取缓存中的数据数量。
*

View File

@@ -64,7 +64,7 @@ public abstract class BaseService<M, K> implements IBaseService<M, K> {
*/
protected String idFieldName;
/**
* 当前Service关联的主数据表中数据字段名称。
* 当前Service关联的主数据表中主键列名称。
*/
protected String idColumnName;
/**
@@ -75,6 +75,18 @@ public abstract class BaseService<M, K> implements IBaseService<M, K> {
* 当前Service关联的主数据表中逻辑删除字段名称。
*/
protected String deletedFlagColumnName;
/**
* 当前Service关联的主Model对象租户Id字段。
*/
protected Field tenantIdField;
/**
* 当前Service关联的主Model对象租户Id字段名称。
*/
protected String tenantIdFieldName;
/**
* 当前Service关联的主数据表中租户Id列名称。
*/
protected String tenantIdColumnName;
/**
* 当前Job服务源主表Model对象的最后更新时间字段名称。
*/
@@ -178,6 +190,12 @@ public abstract class BaseService<M, K> implements IBaseService<M, K> {
setDeletedFlagMethod = ReflectUtil.getMethod(
modelClass, "set" + StringUtils.capitalize(deletedFlagFieldName), Integer.class);
}
if (tenantIdFieldName == null && null != field.getAnnotation(TenantFilterColumn.class)) {
tenantIdField = field;
tenantIdFieldName = field.getName();
Column c = field.getAnnotation(Column.class);
tenantIdColumnName = c == null ? tenantIdFieldName : c.name();
}
}
/**
@@ -212,10 +230,12 @@ public abstract class BaseService<M, K> implements IBaseService<M, K> {
return mapper().deleteByPrimaryKey(id) == 1;
}
try {
Example e = new Example(modelClass);
Example.Criteria c = e.createCriteria().andEqualTo(idFieldName, id);
c.andEqualTo(deletedFlagFieldName, GlobalDeletedFlag.NORMAL);
M data = modelClass.newInstance();
setDeletedFlagMethod.invoke(data, GlobalDeletedFlag.DELETED);
setIdFieldMethod.invoke(data, id);
return mapper().updateByPrimaryKeySelective(data) == 1;
return mapper().updateByExampleSelective(data, e) == 1;
} catch (Exception ex) {
log.error("Failed to call reflection method in BaseService.removeById.", ex);
throw new MyRuntimeException(ex);
@@ -607,6 +627,7 @@ public abstract class BaseService<M, K> implements IBaseService<M, K> {
/**
* 集成所有与主表实体对象相关的关联数据列表。包括本地和远程服务的一对一、字典、一对多和多对多聚合运算等。
* 也可以根据实际需求,单独调用该函数所包含的各个数据集成函数。
* NOTE: 该方法内执行的SQL将禁用数据权限过滤。
*
* @param resultList 主表实体对象列表。数据集成将直接作用于该对象列表。
* @param relationParam 实体对象数据组装的参数构建器。
@@ -617,47 +638,93 @@ public abstract class BaseService<M, K> implements IBaseService<M, K> {
if (relationParam == null || CollectionUtils.isEmpty(resultList)) {
return;
}
// 集成本地一对一和字段级别的数据关联。
// NOTE: 这里必须要在集成远程一对一之前集成本地一对一。因为远程集成方法中,会为本地一对一从表数据进行远程集成。
boolean buildOneToOne = relationParam.isBuildOneToOne() || relationParam.isBuildOneToOneWithDict();
// 这里集成一对一关联
if (buildOneToOne) {
this.buildOneToOneForDataList(resultList, relationParam.isBuildOneToOneWithDict());
boolean dataFilterValue = GlobalThreadLocal.setDataFilter(false);
try {
// 集成本地一对一和字段级别的数据关联。
// NOTE: 这里必须要在集成远程一对一之前集成本地一对一。因为远程集成方法中,会为本地一对一从表数据进行远程集成
boolean buildOneToOne = relationParam.isBuildOneToOne() || relationParam.isBuildOneToOneWithDict();
// 这里集成一对一关联。
if (buildOneToOne) {
this.buildOneToOneForDataList(resultList, relationParam.isBuildOneToOneWithDict());
}
// 集成一对多关联
if (relationParam.isBuildOneToMany()) {
this.buildOneToManyForDataList(resultList);
}
// 这里集成字典关联
if (relationParam.isBuildDict()) {
// 构建常量字典关联关系
this.buildConstDictForDataList(resultList);
this.buildDictForDataList(resultList, buildOneToOne);
}
// 集成远程一对一和字段级别的数据关联。
boolean buildRemoteOneToOne =
relationParam.isBuildRemoteOneToOne() || relationParam.isBuildRemoteOneToOneWithDict();
if (buildRemoteOneToOne) {
this.buildRemoteOneToOneForDataList(resultList, relationParam.isBuildRemoteOneToOneWithDict());
}
if (relationParam.isBuildRemoteDict()) {
this.buildRemoteDictForDataList(resultList, buildRemoteOneToOne);
}
// 组装本地聚合计算关联数据
if (relationParam.isBuildAggregation()) {
// 处理多一多场景下,根据主表的结果,进行从表聚合数据的计算。
this.buildOneToManyAggregationForDataList(resultList, buildAggregationAdditionalWhereCriteria());
// 处理多对多场景下,根据主表的结果,进行从表聚合数据的计算。
this.buildManyToManyAggregationForDataList(resultList, buildAggregationAdditionalWhereCriteria());
}
// 组装远程聚合计算关联数据
if (relationParam.isBuildRemoteAggregation()) {
// 一对多场景。
this.buildRemoteOneToManyAggregationForDataList(resultList, buildAggregationAdditionalWhereCriteria());
// 处理多对多场景。
this.buildRemoteManyToManyAggregationForDataList(resultList, buildAggregationAdditionalWhereCriteria());
}
} finally {
GlobalThreadLocal.setDataFilter(dataFilterValue);
}
// 这里集成字典关联
if (relationParam.isBuildDict()) {
// 构建常量字典关联关系
this.buildConstDictForDataList(resultList);
this.buildDictForDataList(resultList, buildOneToOne);
}
/**
* 该函数主要用于对查询结果的批量导出。不同于支持分页的列表查询,批量导出没有分页机制,
* 因此在导出数据量较大的情况下很容易给数据库的内存、CPU和IO带来较大的压力。而通过
* 我们的分批处理可以极大的规避该问题的出现几率。调整batchSize的大小也可以有效的
* 改善运行效率。
* 我们目前的处理机制是,先从主表取出所有符合条件的主表数据,这样可以避免分批处理时,
* 后面几批数据因为skip过多而带来的效率问题。因为是单表过滤不会给数据库带来过大的压力。
* 之后再在主表结果集数据上进行分批级联处理。
* 集成所有与主表实体对象相关的关联数据列表。包括一对一、字典、一对多和多对多聚合运算等。
* 也可以根据实际需求,单独调用该函数所包含的各个数据集成函数。
* NOTE: 该方法内执行的SQL将禁用数据权限过滤。
*
* @param resultList 主表实体对象列表。数据集成将直接作用于该对象列表。
* @param relationParam 实体对象数据组装的参数构建器。
* @param batchSize 每批集成的记录数量。小于等于0时将不做分批处理。
*/
@Override
public void buildRelationForDataList(List<M> resultList, MyRelationParam relationParam, int batchSize) {
if (CollectionUtils.isEmpty(resultList)) {
return;
}
// 集成远程一对一和字段级别的数据关联。
boolean buildRemoteOneToOne =
relationParam.isBuildRemoteOneToOne() || relationParam.isBuildRemoteOneToOneWithDict();
if (buildRemoteOneToOne) {
this.buildRemoteOneToOneForDataList(resultList, relationParam.isBuildRemoteOneToOneWithDict());
if (batchSize <= 0) {
this.buildRelationForDataList(resultList, relationParam);
return;
}
if (relationParam.isBuildRemoteDict()) {
this.buildRemoteDictForDataList(resultList, buildRemoteOneToOne);
}
// 组装本地聚合计算关联数据
if (relationParam.isBuildAggregation()) {
// 处理多一多场景下,根据主表的结果,进行从表聚合数据的计算。
this.buildOneToManyAggregationForDataList(resultList, buildAggregationAdditionalWhereCriteria());
// 处理多对多场景下,根据主表的结果,进行从表聚合数据的计算。
this.buildManyToManyAggregationForDataList(resultList, buildAggregationAdditionalWhereCriteria());
}
// 组装远程聚合计算关联数据
if (relationParam.isBuildRemoteAggregation()) {
// 一对多场景。
this.buildRemoteOneToManyAggregationForDataList(resultList, buildAggregationAdditionalWhereCriteria());
// 处理多对多场景。
this.buildRemoteManyToManyAggregationForDataList(resultList, buildAggregationAdditionalWhereCriteria());
int totalCount = resultList.size();
int fromIndex = 0;
int toIndex = Math.min(batchSize, totalCount);
while (toIndex > fromIndex) {
List<M> subResultList = resultList.subList(fromIndex, toIndex);
this.buildRelationForDataList(subResultList, relationParam);
fromIndex = toIndex;
toIndex = Math.min(batchSize + fromIndex, totalCount);
}
}
/**
* 集成所有与主表实体对象相关的关联数据对象。包括本地和远程服务的一对一、字典、一对多和多对多聚合运算等。
* 也可以根据实际需求,单独调用该函数所包含的各个数据集成函数。
* NOTE: 该方法内执行的SQL将禁用数据权限过滤。
*
* @param dataObject 主表实体对象。数据集成将直接作用于该对象。
* @param relationParam 实体对象数据组装的参数构建器。
@@ -669,41 +736,50 @@ public abstract class BaseService<M, K> implements IBaseService<M, K> {
if (dataObject == null || relationParam == null) {
return;
}
// 集成本地一对一和字段级别的数据关联。
boolean buildOneToOne = relationParam.isBuildOneToOne() || relationParam.isBuildOneToOneWithDict();
if (buildOneToOne) {
this.buildOneToOneForData(dataObject, relationParam.isBuildOneToOneWithDict());
}
if (relationParam.isBuildDict()) {
// 构建常量字典关联关系
this.buildConstDictForData(dataObject);
// 构建本地数据字典关联关系。
this.buildDictForData(dataObject, buildOneToOne);
}
boolean buildRemoteOneToOne =
relationParam.isBuildRemoteOneToOne() || relationParam.isBuildRemoteOneToOneWithDict();
if (buildRemoteOneToOne) {
this.buildRemoteOneToOneForData(dataObject, relationParam.isBuildRemoteOneToOneWithDict());
}
if (relationParam.isBuildRemoteDict()) {
this.buildRemoteDictForData(dataObject, buildRemoteOneToOne);
}
// 组装本地聚合计算关联数据
if (relationParam.isBuildAggregation()) {
// 构建一对多场景
buildOneToManyAggregationForData(dataObject, buildAggregationAdditionalWhereCriteria());
// 开始处理多对多场景。
buildManyToManyAggregationForData(dataObject, buildAggregationAdditionalWhereCriteria());
}
// 组装远程聚合计算关联数据
if (relationParam.isBuildRemoteAggregation()) {
// 处理一对多场景
this.buildRemoteOneToManyAggregationForData(dataObject, buildAggregationAdditionalWhereCriteria());
// 处理多对多场景
this.buildRemoteManyToManyAggregationForData(dataObject, buildAggregationAdditionalWhereCriteria());
}
if (relationParam.isBuildRelationManyToMany()) {
this.buildRelationManyToMany(dataObject);
boolean dataFilterValue = GlobalThreadLocal.setDataFilter(false);
try {
// 集成本地一对一和字段级别的数据关联。
boolean buildOneToOne = relationParam.isBuildOneToOne() || relationParam.isBuildOneToOneWithDict();
if (buildOneToOne) {
this.buildOneToOneForData(dataObject, relationParam.isBuildOneToOneWithDict());
}
// 集成一对多关联
if (relationParam.isBuildOneToMany()) {
this.buildOneToManyForData(dataObject);
}
if (relationParam.isBuildDict()) {
// 构建常量字典关联关系
this.buildConstDictForData(dataObject);
// 构建本地数据字典关联关系。
this.buildDictForData(dataObject, buildOneToOne);
}
boolean buildRemoteOneToOne =
relationParam.isBuildRemoteOneToOne() || relationParam.isBuildRemoteOneToOneWithDict();
if (buildRemoteOneToOne) {
this.buildRemoteOneToOneForData(dataObject, relationParam.isBuildRemoteOneToOneWithDict());
}
if (relationParam.isBuildRemoteDict()) {
this.buildRemoteDictForData(dataObject, buildRemoteOneToOne);
}
// 组装本地聚合计算关联数据
if (relationParam.isBuildAggregation()) {
// 构建一对多场景
buildOneToManyAggregationForData(dataObject, buildAggregationAdditionalWhereCriteria());
// 开始处理多对多场景。
buildManyToManyAggregationForData(dataObject, buildAggregationAdditionalWhereCriteria());
}
// 组装远程聚合计算关联数据
if (relationParam.isBuildRemoteAggregation()) {
// 处理一对多场景
this.buildRemoteOneToManyAggregationForData(dataObject, buildAggregationAdditionalWhereCriteria());
// 处理多对多场景
this.buildRemoteManyToManyAggregationForData(dataObject, buildAggregationAdditionalWhereCriteria());
}
if (relationParam.isBuildRelationManyToMany()) {
this.buildRelationManyToMany(dataObject);
}
} finally {
GlobalThreadLocal.setDataFilter(dataFilterValue);
}
}
@@ -794,6 +870,7 @@ public abstract class BaseService<M, K> implements IBaseService<M, K> {
}
boolean buildRemoteOneToOneDict = withDict && relationStruct.relationOneToOne.loadSlaveDict();
MyQueryParam queryParam = new MyQueryParam(buildRemoteOneToOneDict);
queryParam.setUseDataFilter(false);
MyWhereCriteria whereCriteria = new MyWhereCriteria();
whereCriteria.setCriteria(
relationStruct.relationOneToOne.slaveIdField(), MyWhereCriteria.OPERATOR_IN, masterIdSet);
@@ -826,6 +903,7 @@ public abstract class BaseService<M, K> implements IBaseService<M, K> {
continue;
}
MyQueryParam queryParam = new MyQueryParam(withDict);
queryParam.setUseDataFilter(false);
MyWhereCriteria whereCriteria = new MyWhereCriteria();
whereCriteria.setCriteria(
relationStruct.relationOneToOne.slaveIdField(), MyWhereCriteria.OPERATOR_EQUAL, id);
@@ -871,6 +949,7 @@ public abstract class BaseService<M, K> implements IBaseService<M, K> {
continue;
}
MyQueryParam queryParam = new MyQueryParam(false);
queryParam.setUseDataFilter(false);
MyWhereCriteria whereCriteria = new MyWhereCriteria();
whereCriteria.setCriteria(
relationStruct.relationDict.slaveIdField(), MyWhereCriteria.OPERATOR_IN, masterIdSet);
@@ -910,6 +989,7 @@ public abstract class BaseService<M, K> implements IBaseService<M, K> {
continue;
}
MyQueryParam queryParam = new MyQueryParam(false);
queryParam.setUseDataFilter(false);
MyWhereCriteria whereCriteria = new MyWhereCriteria();
whereCriteria.setCriteria(
relationStruct.relationDict.slaveIdField(), MyWhereCriteria.OPERATOR_EQUAL, id);
@@ -967,6 +1047,7 @@ public abstract class BaseService<M, K> implements IBaseService<M, K> {
}
criteriaList.add(criteria);
aggregationParam.setWhereCriteriaList(criteriaList);
aggregationParam.setUseDataFilter(false);
ResponseResult<List<Map<String, Object>>> responseResult =
relationStruct.remoteClient.aggregateBy(aggregationParam);
if (responseResult.isSuccess()) {
@@ -1009,6 +1090,7 @@ public abstract class BaseService<M, K> implements IBaseService<M, K> {
relation.slaveIdField(), MyWhereCriteria.OPERATOR_EQUAL, masterIdValue);
criteriaList.add(criteria);
aggregationParam.setWhereCriteriaList(criteriaList);
aggregationParam.setUseDataFilter(false);
ResponseResult<List<Map<String, Object>>> result =
relationStruct.remoteClient.aggregateBy(aggregationParam);
if (result.isSuccess()) {
@@ -1925,6 +2007,8 @@ public abstract class BaseService<M, K> implements IBaseService<M, K> {
List<String> slaveSelectList = new LinkedList<>();
slaveSelectList.add(relation.slaveIdField());
queryParam.setSelectFieldList(slaveSelectList);
// 关联集成数据需要把数据权限过滤关闭,以保证计算结果的正确性。
queryParam.setUseDataFilter(false);
ResponseResult<MyPageData<Map<String, Object>>> result = relationStruct.remoteClient.listMapBy(queryParam);
if (!result.isSuccess()) {
this.logErrorOrThrowException(result.getErrorMessage());
@@ -1989,6 +2073,7 @@ public abstract class BaseService<M, K> implements IBaseService<M, K> {
groupingBy(m -> m.get(relationInfo.relationMasterColumn),
mapping(n -> n.get(relationInfo.relationSlaveColumn), toSet())));
aggregationParam.setGroupedInFilterValues(groupedFilterMap);
aggregationParam.setUseDataFilter(false);
// 开始将远程返回的聚合计算结果集合,回填到主表中的聚合虚拟字段。
ResponseResult<List<Map<String, Object>>> result =
relationStruct.remoteClient.aggregateBy(aggregationParam);

View File

@@ -11,17 +11,6 @@ import java.util.List;
* @date 2020-08-08
*/
public interface IBaseDictService<M, K> extends IBaseService<M, K> {
/**
* 是否在服务启动的时候加载。子类可以重载该方法并在需要的时候手工调用loadCachedData加载数据。
*
* @return true表示启动即可加载数据false需要手动调用loadCachedData进行加载。
*/
boolean loadOnStartup();
/**
* 在系统启动时加载全部数据到内存缓存的key只能为映射表的主键。
*/
void loadCachedData();
/**
* 重新加载数据库中所有当前表数据到系统内存。
@@ -76,6 +65,13 @@ public interface IBaseDictService<M, K> extends IBaseService<M, K> {
*/
void removeDictionaryCache(K id);
/**
* 根据字典对象将数据从缓存中删除。
*
* @param data 字典数据。
*/
void removeDictionaryCacheByModel(M data);
/**
* 获取缓存中的数据数量。
*

View File

@@ -207,6 +207,24 @@ public interface IBaseService<M, K> {
*/
void buildRelationForDataList(List<M> resultList, MyRelationParam relationParam);
/**
* 该函数主要用于对查询结果的批量导出。不同于支持分页的列表查询,批量导出没有分页机制,
* 因此在导出数据量较大的情况下很容易给数据库的内存、CPU和IO带来较大的压力。而通过
* 我们的分批处理可以极大的规避该问题的出现几率。调整batchSize的大小也可以有效的
* 改善运行效率。
* 我们目前的处理机制是,先从主表取出所有符合条件的主表数据,这样可以避免分批处理时,
* 后面几批数据因为skip过多而带来的效率问题。因为是单表过滤不会给数据库带来过大的压力。
* 之后再在主表结果集数据上进行分批级联处理。
* 集成所有与主表实体对象相关的关联数据列表。包括一对一、字典、一对多和多对多聚合运算等。
* 也可以根据实际需求,单独调用该函数所包含的各个数据集成函数。
* NOTE: 该方法内执行的SQL将禁用数据权限过滤。
*
* @param resultList 主表实体对象列表。数据集成将直接作用于该对象列表。
* @param relationParam 实体对象数据组装的参数构建器。
* @param batchSize 每批集成的记录数量。小于等于0时将不做分批处理。
*/
void buildRelationForDataList(List<M> resultList, MyRelationParam relationParam, int batchSize);
/**
* 集成所有与主表实体对象相关的关联数据对象。包括一对一、字典、一对多和多对多聚合运算等。
* 也可以根据实际需求,单独调用该函数所包含的各个数据集成函数。

View File

@@ -60,6 +60,45 @@ public class MapTreeDictionaryCache<K, V> extends MapDictionaryCache<K, V> {
this.parentIdGetter = parentIdGetter;
}
/**
 * Reloads the cache: clears the existing entries, then re-inserts every
 * element of {@code dataList} into both the id map and the parent-id tree map.
 *
 * @param dataList the data to cache.
 * @param force    true forces a refresh; false skips the reload when the cache
 *                 already contains data.
 */
@Override
public void reload(List<V> dataList, boolean force) {
    if (!force && this.getCount() > 0) {
        return;
    }
    String exceptionMessage;
    try {
        // NOTE(review): mutation happens under the READ lock, matching the other
        // mutators in this class; a write lock would be the conventional choice —
        // confirm whether concurrent readers may observe a partially rebuilt cache.
        if (lock.readLock().tryLock(TIMEOUT, TimeUnit.MILLISECONDS)) {
            try {
                dataMap.clear();
                allTreeMap.clear();
                dataList.forEach(data -> {
                    K id = idGetter.apply(data);
                    dataMap.put(id, data);
                    K parentId = parentIdGetter.apply(data);
                    allTreeMap.put(parentId, data);
                });
            } finally {
                lock.readLock().unlock();
            }
        } else {
            throw new TimeoutException();
        }
    } catch (Exception e) {
        // BUGFIX: the message previously cited [MapDictionaryCache::getInList],
        // copied from another method; report the actual failing operation.
        exceptionMessage = String.format(
                "LOCK Operation of [MapTreeDictionaryCache::reload] encountered EXCEPTION [%s] for DICT.",
                e.getClass().getSimpleName());
        log.warn(exceptionMessage);
        throw new MapCacheAccessException(exceptionMessage, e);
    }
}
/**
* 获取该父主键的子数据列表。
*
@@ -103,8 +142,9 @@ public class MapTreeDictionaryCache<K, V> extends MapDictionaryCache<K, V> {
try {
if (lock.readLock().tryLock(TIMEOUT, TimeUnit.MILLISECONDS)) {
try {
super.putAll(dataList);
dataList.forEach(data -> {
K id = idGetter.apply(data);
dataMap.put(id, data);
K parentId = parentIdGetter.apply(data);
allTreeMap.remove(parentId, data);
allTreeMap.put(parentId, data);
@@ -136,7 +176,7 @@ public class MapTreeDictionaryCache<K, V> extends MapDictionaryCache<K, V> {
try {
if (lock.readLock().tryLock(TIMEOUT, TimeUnit.MILLISECONDS)) {
try {
super.put(id, data);
dataMap.put(id, data);
K parentId = parentIdGetter.apply(data);
allTreeMap.remove(parentId, data);
allTreeMap.put(parentId, data);
@@ -168,7 +208,7 @@ public class MapTreeDictionaryCache<K, V> extends MapDictionaryCache<K, V> {
try {
if (lock.readLock().tryLock(TIMEOUT, TimeUnit.MILLISECONDS)) {
try {
v = super.invalidate(id);
v = dataMap.remove(id);
if (v != null) {
K parentId = parentIdGetter.apply(v);
allTreeMap.remove(parentId, v);
@@ -233,7 +273,7 @@ public class MapTreeDictionaryCache<K, V> extends MapDictionaryCache<K, V> {
try {
if (lock.readLock().tryLock(TIMEOUT, TimeUnit.MILLISECONDS)) {
try {
super.invalidateAll();
dataMap.clear();
allTreeMap.clear();
} finally {
lock.readLock().unlock();

View File

@@ -12,6 +12,10 @@ public final class ApplicationConstant {
* 为字典表数据缓存时,缓存名称的固定后缀。
*/
public static final String DICT_CACHE_NAME_SUFFIX = "-DICT";
/**
* 为树形字典表数据缓存时,缓存名称的固定后缀。
*/
public static final String TREE_DICT_CACHE_NAME_SUFFIX = "-TREE-DICT";
/**
* 图片文件上传的父目录。
*/
@@ -44,6 +48,12 @@ public final class ApplicationConstant {
* 请求头跟踪id名。
*/
public static final String HTTP_HEADER_TRACE_ID = "traceId";
/**
* 操作日志的数据源类型。仅当前服务为多数据源时使用。
* 在common-log模块中SysOperationLogServiceImpl的MyDataSource注解一定要使用该参数。
* 在多数据源的业务服务中DataSourceType的常量一定要包含该值多数据源的配置中也一定要有与该值匹配的数据源Bean。
*/
public static final int OPERATION_LOG_DATASOURCE_TYPE = 1000;
/**
* 重要说明:该值为项目生成后的缺省密钥,仅为使用户可以快速上手并跑通流程。
* 在实际的应用中,一定要为不同的项目或服务,自行生成公钥和私钥,并将 PRIVATE_KEY 的引用改为服务的配置项。

View File

@@ -1,28 +0,0 @@
package com.orange.demo.common.core.listener;
import com.orange.demo.common.core.base.service.BaseDictService;
import org.springframework.boot.context.event.ApplicationReadyEvent;
import org.springframework.context.ApplicationListener;
import org.springframework.stereotype.Component;
import java.util.Map;
/**
* 应用程序启动后的事件监听对象。主要负责加载Model之间的字典关联和一对一关联所对应的Service结构关系。
*
* @author Jerry
* @date 2020-08-08
*/
@Component
public class LoadCachedDataListener implements ApplicationListener<ApplicationReadyEvent> {
@SuppressWarnings("all")
@Override
public void onApplicationEvent(ApplicationReadyEvent applicationReadyEvent) {
Map<String, BaseDictService> serviceMap =
applicationReadyEvent.getApplicationContext().getBeansOfType(BaseDictService.class);
for (Map.Entry<String, BaseDictService> e : serviceMap.entrySet()) {
e.getValue().loadCachedData();
}
}
}

View File

@@ -27,6 +27,12 @@ public class MyAggregationParam {
*/
public static final String VALUE_NAME = "aggregatedValue";
/**
* 聚合计算是否使用数据权限进行过滤。true表示数据过滤将产生作用否则SQL中不会包含数据过滤。
* 目前数据过滤包括数据权限过滤和租户数据过滤。
*/
private Boolean useDataFilter = true;
/**
* 聚合分类具体数值见AggregationKind。
*/

View File

@@ -23,6 +23,13 @@ public class MyQueryParam {
* 用于数据过滤的DTO对象。
*/
private Map<String, Object> filterMap;
/**
* 聚合计算是否使用数据权限进行过滤。true表示数据过滤将产生作用否则SQL中不会包含数据过滤。
* 目前数据过滤包括数据权限过滤和租户数据过滤。
*/
private Boolean useDataFilter = true;
/**
* (In-list) 实体对象中的过滤字段(而非数据表列名)需和下面的inFilterValues字段一起使用。
* NOTE: MyWhereCriteria中的IN类型过滤条件完全可以替代该字段。之所以保留主要是为了保证更好的接口可读性。

View File

@@ -70,7 +70,7 @@ public class LocalUpDownloader extends BaseUpDownloader {
BufferedInputStream bis = new BufferedInputStream(new FileInputStream(file))) {
int i = bis.read(buff);
while (i != -1) {
os.write(buff, 0, buff.length);
os.write(buff, 0, i);
os.flush();
i = bis.read(buff);
}

View File

@@ -0,0 +1,18 @@
package com.orange.demo.common.core.util;
/**
 * Strategy interface for resolving a multi-data-source routing key from a custom rule.
 * Implementations may inspect the current request, or any other contextual state, to
 * decide which data source should be used.
 *
 * <p>This is a single-abstract-method interface, so it may also be implemented with a
 * lambda expression; {@code @FunctionalInterface} makes that contract explicit and lets
 * the compiler enforce it.</p>
 *
 * @author Jerry
 * @date 2020-08-08
 */
@FunctionalInterface
public interface DataSourceResolver {

    /**
     * Resolves the routing key dynamically, based on the current request or context.
     *
     * @param arg optional argument, taken from the {@code arg} attribute of the
     *            {@code MyDataSourceResolver} annotation.
     * @return the type value used for data source switching. The
     *         DataSourceResolveAspect advice switches the active data source
     *         according to this return value and the configuration.
     */
    int resolve(String arg);
}

View File

@@ -0,0 +1,234 @@
<?xml version="1.0" encoding="UTF-8"?>
<module org.jetbrains.idea.maven.project.MavenProjectsManager.isMavenModule="true" type="JAVA_MODULE" version="4">
<component name="FacetManager">
<facet type="Spring" name="Spring">
<configuration />
</facet>
<facet type="web" name="Web">
<configuration>
<webroots />
</configuration>
</facet>
</component>
<component name="NewModuleRootManager" LANGUAGE_LEVEL="JDK_1_8">
<output url="file://$MODULE_DIR$/target/classes" />
<output-test url="file://$MODULE_DIR$/target/test-classes" />
<content url="file://$MODULE_DIR$">
<sourceFolder url="file://$MODULE_DIR$/src/main/java" isTestSource="false" />
<sourceFolder url="file://$MODULE_DIR$/src/main/resources" type="java-resource" />
<excludeFolder url="file://$MODULE_DIR$/target" />
</content>
<orderEntry type="inheritedJdk" />
<orderEntry type="sourceFolder" forTests="false" />
<orderEntry type="module" module-name="common-core" />
<orderEntry type="library" name="Maven: org.springframework.boot:spring-boot-starter-web:2.2.5.RELEASE" level="project" />
<orderEntry type="library" name="Maven: org.springframework.boot:spring-boot-starter-json:2.2.5.RELEASE" level="project" />
<orderEntry type="library" name="Maven: com.fasterxml.jackson.datatype:jackson-datatype-jdk8:2.10.2" level="project" />
<orderEntry type="library" name="Maven: com.fasterxml.jackson.module:jackson-module-parameter-names:2.10.2" level="project" />
<orderEntry type="library" name="Maven: org.springframework.boot:spring-boot-starter-tomcat:2.2.5.RELEASE" level="project" />
<orderEntry type="library" name="Maven: org.apache.tomcat.embed:tomcat-embed-core:9.0.31" level="project" />
<orderEntry type="library" name="Maven: org.apache.tomcat.embed:tomcat-embed-el:9.0.31" level="project" />
<orderEntry type="library" name="Maven: org.apache.tomcat.embed:tomcat-embed-websocket:9.0.31" level="project" />
<orderEntry type="library" name="Maven: org.springframework.boot:spring-boot-starter-validation:2.2.5.RELEASE" level="project" />
<orderEntry type="library" name="Maven: jakarta.validation:jakarta.validation-api:2.0.2" level="project" />
<orderEntry type="library" name="Maven: org.hibernate.validator:hibernate-validator:6.0.18.Final" level="project" />
<orderEntry type="library" name="Maven: org.jboss.logging:jboss-logging:3.4.1.Final" level="project" />
<orderEntry type="library" name="Maven: com.fasterxml:classmate:1.5.1" level="project" />
<orderEntry type="library" name="Maven: org.springframework:spring-webmvc:5.2.4.RELEASE" level="project" />
<orderEntry type="library" name="Maven: org.springframework:spring-expression:5.2.4.RELEASE" level="project" />
<orderEntry type="library" name="Maven: com.google.guava:guava:28.2-android" level="project" />
<orderEntry type="library" name="Maven: com.google.guava:failureaccess:1.0.1" level="project" />
<orderEntry type="library" name="Maven: com.google.guava:listenablefuture:9999.0-empty-to-avoid-conflict-with-guava" level="project" />
<orderEntry type="library" name="Maven: com.google.code.findbugs:jsr305:3.0.2" level="project" />
<orderEntry type="library" name="Maven: org.checkerframework:checker-compat-qual:2.5.5" level="project" />
<orderEntry type="library" name="Maven: com.google.errorprone:error_prone_annotations:2.3.4" level="project" />
<orderEntry type="library" name="Maven: com.google.j2objc:j2objc-annotations:1.3" level="project" />
<orderEntry type="library" name="Maven: org.apache.commons:commons-lang3:3.9" level="project" />
<orderEntry type="library" name="Maven: commons-codec:commons-codec:1.13" level="project" />
<orderEntry type="library" name="Maven: commons-io:commons-io:2.6" level="project" />
<orderEntry type="library" name="Maven: commons-fileupload:commons-fileupload:1.3.3" level="project" />
<orderEntry type="library" name="Maven: joda-time:joda-time:2.10.5" level="project" />
<orderEntry type="library" name="Maven: org.apache.commons:commons-collections4:4.4" level="project" />
<orderEntry type="library" name="Maven: org.apache.commons:commons-csv:1.8" level="project" />
<orderEntry type="library" name="Maven: cn.hutool:hutool-all:5.4.5" level="project" />
<orderEntry type="library" name="Maven: io.jsonwebtoken:jjwt:0.9.1" level="project" />
<orderEntry type="library" name="Maven: com.fasterxml.jackson.core:jackson-databind:2.10.2" level="project" />
<orderEntry type="library" name="Maven: com.alibaba:fastjson:1.2.74" level="project" />
<orderEntry type="library" name="Maven: com.github.ben-manes.caffeine:caffeine:2.8.1" level="project" />
<orderEntry type="library" name="Maven: org.checkerframework:checker-qual:3.1.0" level="project" />
<orderEntry type="library" name="Maven: cn.jimmyshi:bean-query:1.1.5" level="project" />
<orderEntry type="library" name="Maven: org.hamcrest:hamcrest-all:1.3" level="project" />
<orderEntry type="library" name="Maven: commons-beanutils:commons-beanutils:1.9.3" level="project" />
<orderEntry type="library" name="Maven: commons-collections:commons-collections:3.2.2" level="project" />
<orderEntry type="library" name="Maven: org.slf4j:jcl-over-slf4j:1.7.30" level="project" />
<orderEntry type="library" name="Maven: org.apache.poi:poi-ooxml:3.17" level="project" />
<orderEntry type="library" name="Maven: org.apache.poi:poi:3.17" level="project" />
<orderEntry type="library" name="Maven: org.apache.poi:poi-ooxml-schemas:3.17" level="project" />
<orderEntry type="library" name="Maven: org.apache.xmlbeans:xmlbeans:2.6.0" level="project" />
<orderEntry type="library" name="Maven: stax:stax-api:1.0.1" level="project" />
<orderEntry type="library" name="Maven: com.github.virtuald:curvesapi:1.04" level="project" />
<orderEntry type="library" scope="RUNTIME" name="Maven: mysql:mysql-connector-java:8.0.19" level="project" />
<orderEntry type="library" name="Maven: com.alibaba:druid-spring-boot-starter:1.1.22" level="project" />
<orderEntry type="library" name="Maven: com.alibaba:druid:1.1.22" level="project" />
<orderEntry type="library" name="Maven: org.springframework.boot:spring-boot-autoconfigure:2.2.5.RELEASE" level="project" />
<orderEntry type="library" name="Maven: tk.mybatis:mapper-spring-boot-starter:2.1.5" level="project" />
<orderEntry type="library" name="Maven: org.springframework.boot:spring-boot-starter-jdbc:2.2.5.RELEASE" level="project" />
<orderEntry type="library" name="Maven: com.zaxxer:HikariCP:3.4.2" level="project" />
<orderEntry type="library" name="Maven: org.springframework:spring-jdbc:5.2.4.RELEASE" level="project" />
<orderEntry type="library" name="Maven: org.springframework:spring-tx:5.2.4.RELEASE" level="project" />
<orderEntry type="library" name="Maven: org.mybatis:mybatis:3.4.6" level="project" />
<orderEntry type="library" name="Maven: org.mybatis:mybatis-spring:1.3.2" level="project" />
<orderEntry type="library" name="Maven: tk.mybatis:mapper-core:1.1.5" level="project" />
<orderEntry type="library" name="Maven: javax.persistence:persistence-api:1.0" level="project" />
<orderEntry type="library" name="Maven: tk.mybatis:mapper-base:1.1.5" level="project" />
<orderEntry type="library" name="Maven: tk.mybatis:mapper-weekend:1.1.5" level="project" />
<orderEntry type="library" name="Maven: tk.mybatis:mapper-spring:1.1.5" level="project" />
<orderEntry type="library" name="Maven: tk.mybatis:mapper-extra:1.1.5" level="project" />
<orderEntry type="library" name="Maven: tk.mybatis:mapper-spring-boot-autoconfigure:2.1.5" level="project" />
<orderEntry type="library" name="Maven: com.github.pagehelper:pagehelper-spring-boot-starter:1.3.0" level="project" />
<orderEntry type="library" name="Maven: org.mybatis.spring.boot:mybatis-spring-boot-starter:2.1.3" level="project" />
<orderEntry type="library" name="Maven: org.mybatis.spring.boot:mybatis-spring-boot-autoconfigure:2.1.3" level="project" />
<orderEntry type="library" name="Maven: com.github.pagehelper:pagehelper-spring-boot-autoconfigure:1.3.0" level="project" />
<orderEntry type="library" name="Maven: com.github.pagehelper:pagehelper:5.2.0" level="project" />
<orderEntry type="library" name="Maven: com.github.jsqlparser:jsqlparser:3.2" level="project" />
<orderEntry type="module" module-name="common-redis" />
<orderEntry type="library" name="Maven: redis.clients:jedis:3.1.0" level="project" />
<orderEntry type="library" name="Maven: org.apache.commons:commons-pool2:2.7.0" level="project" />
<orderEntry type="library" name="Maven: org.redisson:redisson:3.12.3" level="project" />
<orderEntry type="library" name="Maven: io.netty:netty-common:4.1.45.Final" level="project" />
<orderEntry type="library" name="Maven: io.netty:netty-codec:4.1.45.Final" level="project" />
<orderEntry type="library" name="Maven: io.netty:netty-buffer:4.1.45.Final" level="project" />
<orderEntry type="library" name="Maven: io.netty:netty-transport:4.1.45.Final" level="project" />
<orderEntry type="library" name="Maven: io.netty:netty-resolver:4.1.45.Final" level="project" />
<orderEntry type="library" name="Maven: io.netty:netty-resolver-dns:4.1.45.Final" level="project" />
<orderEntry type="library" name="Maven: io.netty:netty-codec-dns:4.1.45.Final" level="project" />
<orderEntry type="library" name="Maven: io.netty:netty-handler:4.1.45.Final" level="project" />
<orderEntry type="library" name="Maven: javax.cache:cache-api:1.1.1" level="project" />
<orderEntry type="library" name="Maven: io.projectreactor:reactor-core:3.3.3.RELEASE" level="project" />
<orderEntry type="library" name="Maven: io.reactivex.rxjava2:rxjava:2.2.18" level="project" />
<orderEntry type="library" name="Maven: de.ruedigermoeller:fst:2.57" level="project" />
<orderEntry type="library" name="Maven: org.javassist:javassist:3.21.0-GA" level="project" />
<orderEntry type="library" name="Maven: org.yaml:snakeyaml:1.25" level="project" />
<orderEntry type="library" name="Maven: com.fasterxml.jackson.dataformat:jackson-dataformat-yaml:2.10.2" level="project" />
<orderEntry type="library" name="Maven: com.fasterxml.jackson.core:jackson-core:2.10.2" level="project" />
<orderEntry type="library" name="Maven: net.bytebuddy:byte-buddy:1.10.8" level="project" />
<orderEntry type="library" name="Maven: org.jodd:jodd-bean:5.0.13" level="project" />
<orderEntry type="library" name="Maven: org.jodd:jodd-core:5.0.13" level="project" />
<orderEntry type="library" name="Maven: org.springframework.boot:spring-boot-starter-freemarker:2.2.5.RELEASE" level="project" />
<orderEntry type="library" name="Maven: org.springframework.boot:spring-boot-starter:2.2.5.RELEASE" level="project" />
<orderEntry type="library" name="Maven: org.springframework.boot:spring-boot:2.2.5.RELEASE" level="project" />
<orderEntry type="library" name="Maven: jakarta.annotation:jakarta.annotation-api:1.3.5" level="project" />
<orderEntry type="library" name="Maven: org.freemarker:freemarker:2.3.29" level="project" />
<orderEntry type="library" name="Maven: org.springframework:spring-context-support:5.2.4.RELEASE" level="project" />
<orderEntry type="library" name="Maven: org.springframework:spring-beans:5.2.4.RELEASE" level="project" />
<orderEntry type="library" name="Maven: org.springframework:spring-context:5.2.4.RELEASE" level="project" />
<orderEntry type="library" name="Maven: javax.servlet:javax.servlet-api:4.0.1" level="project" />
<orderEntry type="library" name="Maven: org.springframework.boot:spring-boot-starter-log4j2:2.2.5.RELEASE" level="project" />
<orderEntry type="library" name="Maven: org.apache.logging.log4j:log4j-slf4j-impl:2.12.1" level="project" />
<orderEntry type="library" name="Maven: org.apache.logging.log4j:log4j-api:2.12.1" level="project" />
<orderEntry type="library" name="Maven: org.apache.logging.log4j:log4j-core:2.12.1" level="project" />
<orderEntry type="library" name="Maven: org.apache.logging.log4j:log4j-jul:2.12.1" level="project" />
<orderEntry type="library" name="Maven: org.slf4j:jul-to-slf4j:1.7.30" level="project" />
<orderEntry type="library" name="Maven: org.springframework.boot:spring-boot-starter-aop:2.2.5.RELEASE" level="project" />
<orderEntry type="library" name="Maven: org.springframework:spring-aop:5.2.4.RELEASE" level="project" />
<orderEntry type="library" name="Maven: org.aspectj:aspectjweaver:1.9.5" level="project" />
<orderEntry type="library" name="Maven: org.springframework.boot:spring-boot-starter-cache:2.2.5.RELEASE" level="project" />
<orderEntry type="library" name="Maven: org.springframework.boot:spring-boot-configuration-processor:2.2.5.RELEASE" level="project" />
<orderEntry type="library" name="Maven: org.springframework.boot:spring-boot-starter-actuator:2.2.5.RELEASE" level="project" />
<orderEntry type="library" name="Maven: org.springframework.boot:spring-boot-actuator-autoconfigure:2.2.5.RELEASE" level="project" />
<orderEntry type="library" name="Maven: org.springframework.boot:spring-boot-actuator:2.2.5.RELEASE" level="project" />
<orderEntry type="library" name="Maven: com.fasterxml.jackson.datatype:jackson-datatype-jsr310:2.10.2" level="project" />
<orderEntry type="library" name="Maven: io.micrometer:micrometer-core:1.3.5" level="project" />
<orderEntry type="library" name="Maven: org.hdrhistogram:HdrHistogram:2.1.11" level="project" />
<orderEntry type="library" name="Maven: org.latencyutils:LatencyUtils:2.0.3" level="project" />
<orderEntry type="library" name="Maven: de.codecentric:spring-boot-admin-starter-client:2.2.3" level="project" />
<orderEntry type="library" name="Maven: de.codecentric:spring-boot-admin-client:2.2.3" level="project" />
<orderEntry type="library" name="Maven: com.alibaba.cloud:spring-cloud-starter-alibaba-nacos-config:2.2.1.RELEASE" level="project" />
<orderEntry type="library" name="Maven: com.alibaba.spring:spring-context-support:1.0.6" level="project" />
<orderEntry type="library" name="Maven: com.alibaba.nacos:nacos-client:1.2.1" level="project" />
<orderEntry type="library" name="Maven: com.alibaba.nacos:nacos-common:1.2.1" level="project" />
<orderEntry type="library" name="Maven: com.alibaba.nacos:nacos-api:1.2.1" level="project" />
<orderEntry type="library" name="Maven: io.prometheus:simpleclient:0.5.0" level="project" />
<orderEntry type="library" name="Maven: org.springframework.cloud:spring-cloud-commons:2.2.2.RELEASE" level="project" />
<orderEntry type="library" name="Maven: org.springframework.security:spring-security-crypto:5.2.2.RELEASE" level="project" />
<orderEntry type="library" name="Maven: org.springframework.cloud:spring-cloud-context:2.2.2.RELEASE" level="project" />
<orderEntry type="library" name="Maven: org.springframework.cloud:spring-cloud-starter-openfeign:2.2.2.RELEASE" level="project" />
<orderEntry type="library" name="Maven: org.springframework.cloud:spring-cloud-starter:2.2.2.RELEASE" level="project" />
<orderEntry type="library" name="Maven: org.springframework.security:spring-security-rsa:1.0.9.RELEASE" level="project" />
<orderEntry type="library" name="Maven: org.bouncycastle:bcpkix-jdk15on:1.59" level="project" />
<orderEntry type="library" name="Maven: org.bouncycastle:bcprov-jdk15on:1.59" level="project" />
<orderEntry type="library" name="Maven: org.springframework.cloud:spring-cloud-openfeign-core:2.2.2.RELEASE" level="project" />
<orderEntry type="library" name="Maven: io.github.openfeign.form:feign-form-spring:3.8.0" level="project" />
<orderEntry type="library" name="Maven: io.github.openfeign.form:feign-form:3.8.0" level="project" />
<orderEntry type="library" name="Maven: org.springframework:spring-web:5.2.4.RELEASE" level="project" />
<orderEntry type="library" name="Maven: io.github.openfeign:feign-core:10.7.4" level="project" />
<orderEntry type="library" name="Maven: io.github.openfeign:feign-slf4j:10.7.4" level="project" />
<orderEntry type="library" name="Maven: io.github.openfeign:feign-hystrix:10.7.4" level="project" />
<orderEntry type="library" name="Maven: com.netflix.archaius:archaius-core:0.7.6" level="project" />
<orderEntry type="library" name="Maven: io.github.openfeign:feign-httpclient:10.7.4" level="project" />
<orderEntry type="library" name="Maven: org.apache.httpcomponents:httpclient:4.5.11" level="project" />
<orderEntry type="library" name="Maven: org.apache.httpcomponents:httpcore:4.4.13" level="project" />
<orderEntry type="library" name="Maven: org.springframework.cloud:spring-cloud-starter-netflix-hystrix:2.2.2.RELEASE" level="project" />
<orderEntry type="library" name="Maven: org.springframework.cloud:spring-cloud-netflix-hystrix:2.2.2.RELEASE" level="project" />
<orderEntry type="library" name="Maven: org.springframework.cloud:spring-cloud-netflix-ribbon:2.2.2.RELEASE" level="project" />
<orderEntry type="library" name="Maven: org.springframework.cloud:spring-cloud-netflix-archaius:2.2.2.RELEASE" level="project" />
<orderEntry type="library" name="Maven: org.springframework.cloud:spring-cloud-starter-netflix-archaius:2.2.2.RELEASE" level="project" />
<orderEntry type="library" name="Maven: commons-configuration:commons-configuration:1.8" level="project" />
<orderEntry type="library" name="Maven: commons-lang:commons-lang:2.6" level="project" />
<orderEntry type="library" name="Maven: com.netflix.hystrix:hystrix-core:1.5.18" level="project" />
<orderEntry type="library" name="Maven: io.reactivex:rxjava:1.3.8" level="project" />
<orderEntry type="library" name="Maven: com.netflix.hystrix:hystrix-serialization:1.5.18" level="project" />
<orderEntry type="library" scope="RUNTIME" name="Maven: com.fasterxml.jackson.module:jackson-module-afterburner:2.10.2" level="project" />
<orderEntry type="library" name="Maven: com.fasterxml.jackson.core:jackson-annotations:2.10.2" level="project" />
<orderEntry type="library" name="Maven: com.netflix.hystrix:hystrix-metrics-event-stream:1.5.18" level="project" />
<orderEntry type="library" name="Maven: com.netflix.hystrix:hystrix-javanica:1.5.18" level="project" />
<orderEntry type="library" scope="RUNTIME" name="Maven: org.ow2.asm:asm:5.0.4" level="project" />
<orderEntry type="library" name="Maven: io.reactivex:rxjava-reactive-streams:1.2.1" level="project" />
<orderEntry type="library" name="Maven: org.reactivestreams:reactive-streams:1.0.3" level="project" />
<orderEntry type="library" name="Maven: org.mapstruct:mapstruct:1.3.1.Final" level="project" />
<orderEntry type="library" scope="PROVIDED" name="Maven: org.mapstruct:mapstruct-processor:1.3.1.Final" level="project" />
<orderEntry type="library" scope="PROVIDED" name="Maven: org.projectlombok:lombok:1.18.12" level="project" />
<orderEntry type="library" name="Maven: org.apache.curator:curator-recipes:4.3.0" level="project" />
<orderEntry type="library" name="Maven: org.apache.curator:curator-framework:4.0.1" level="project" />
<orderEntry type="library" name="Maven: org.apache.curator:curator-client:4.0.1" level="project" />
<orderEntry type="library" name="Maven: org.apache.zookeeper:zookeeper:3.5.3-beta" level="project" />
<orderEntry type="library" name="Maven: commons-cli:commons-cli:1.4" level="project" />
<orderEntry type="library" name="Maven: org.apache.kafka:kafka-clients:2.4.0" level="project" />
<orderEntry type="library" name="Maven: com.github.luben:zstd-jni:1.4.3-1" level="project" />
<orderEntry type="library" name="Maven: org.lz4:lz4-java:1.6.0" level="project" />
<orderEntry type="library" name="Maven: org.xerial.snappy:snappy-java:1.1.7.3" level="project" />
<orderEntry type="library" name="Maven: org.slf4j:slf4j-api:1.7.30" level="project" />
<orderEntry type="library" name="Maven: org.scala-lang:scala-library:2.12.10" level="project" />
<orderEntry type="library" name="Maven: com.lmax:disruptor:3.4.2" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: org.springframework.boot:spring-boot-starter-test:2.2.5.RELEASE" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: org.springframework.boot:spring-boot-test:2.2.5.RELEASE" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: org.springframework.boot:spring-boot-test-autoconfigure:2.2.5.RELEASE" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: com.jayway.jsonpath:json-path:2.4.0" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: net.minidev:json-smart:2.3" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: net.minidev:accessors-smart:1.2" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: jakarta.xml.bind:jakarta.xml.bind-api:2.3.2" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: jakarta.activation:jakarta.activation-api:1.2.2" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: org.junit.jupiter:junit-jupiter:5.5.2" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: org.junit.jupiter:junit-jupiter-api:5.5.2" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: org.opentest4j:opentest4j:1.2.0" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: org.junit.platform:junit-platform-commons:1.5.2" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: org.junit.jupiter:junit-jupiter-params:5.5.2" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: org.junit.jupiter:junit-jupiter-engine:5.5.2" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: org.junit.vintage:junit-vintage-engine:5.5.2" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: org.apiguardian:apiguardian-api:1.1.0" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: org.junit.platform:junit-platform-engine:1.5.2" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: junit:junit:4.12" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: org.mockito:mockito-junit-jupiter:3.1.0" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: org.assertj:assertj-core:3.13.2" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: org.hamcrest:hamcrest:2.1" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: org.mockito:mockito-core:3.1.0" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: net.bytebuddy:byte-buddy-agent:1.10.8" level="project" />
<orderEntry type="library" name="Maven: org.objenesis:objenesis:2.6" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: org.skyscreamer:jsonassert:1.5.0" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: com.vaadin.external.google:android-json:0.0.20131108.vaadin1" level="project" />
<orderEntry type="library" name="Maven: org.springframework:spring-core:5.2.4.RELEASE" level="project" />
<orderEntry type="library" name="Maven: org.springframework:spring-jcl:5.2.4.RELEASE" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: org.springframework:spring-test:5.2.4.RELEASE" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: org.xmlunit:xmlunit-core:2.6.3" level="project" />
</component>
</module>

View File

@@ -0,0 +1,29 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<parent>
<artifactId>common</artifactId>
<groupId>com.orange.demo</groupId>
<version>1.0.0</version>
</parent>
<modelVersion>4.0.0</modelVersion>
<artifactId>common-datafilter</artifactId>
<version>1.0.0</version>
<name>common-datafilter</name>
<packaging>jar</packaging>
<dependencies>
<dependency>
<groupId>com.orange.demo</groupId>
<artifactId>common-core</artifactId>
<version>1.0.0</version>
</dependency>
<dependency>
<groupId>com.orange.demo</groupId>
<artifactId>common-redis</artifactId>
<version>1.0.0</version>
</dependency>
</dependencies>
</project>

View File

@@ -0,0 +1,41 @@
package com.orange.demo.common.datafilter.aop;
import com.orange.demo.common.core.object.GlobalThreadLocal;
import lombok.extern.slf4j.Slf4j;
import org.aspectj.lang.ProceedingJoinPoint;
import org.aspectj.lang.annotation.Around;
import org.aspectj.lang.annotation.Aspect;
import org.aspectj.lang.annotation.Pointcut;
import org.springframework.core.annotation.Order;
import org.springframework.stereotype.Component;
/**
 * AOP advice that temporarily switches off the MyBatis-interceptor-based data filtering
 * while a method annotated with {@code @DisableDataFilter} is executing.
 *
 * @author Jerry
 * @date 2020-08-08
 */
@Aspect
@Component
@Order(1)
@Slf4j
public class DisableDataFilterAspect {

    /**
     * Matches every method carrying the DisableDataFilter annotation.
     */
    @Pointcut("@annotation(com.orange.demo.common.core.annotation.DisableDataFilter)")
    public void disableDataFilterPointCut() {
        // Intentionally empty: this method exists only to anchor the pointcut
        // expression (and the comment keeps sonar quiet).
    }

    @Around("disableDataFilterPointCut()")
    public Object around(ProceedingJoinPoint point) throws Throwable {
        // Remember the previous flag value so that nested invocations restore
        // the caller's state instead of unconditionally re-enabling filtering.
        boolean previousState = GlobalThreadLocal.setDataFilter(false);
        try {
            return point.proceed();
        } finally {
            GlobalThreadLocal.setDataFilter(previousState);
        }
    }
}

View File

@@ -0,0 +1,13 @@
package com.orange.demo.common.datafilter.config;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
/**
 * Auto-configuration bootstrap class for the common-datafilter module.
 * Its only job is to enable binding of {@link DataFilterProperties} from the
 * application configuration; it declares no beans of its own.
 *
 * @author Jerry
 * @date 2020-08-08
 */
@EnableConfigurationProperties({DataFilterProperties.class})
public class DataFilterAutoConfig {
}

View File

@@ -0,0 +1,44 @@
package com.orange.demo.common.datafilter.config;
import lombok.Data;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.context.properties.ConfigurationProperties;
/**
 * Configuration properties for the common-datafilter module.
 *
 * NOTE(review): the class combines {@code @ConfigurationProperties(prefix = "datafilter")}
 * with per-field {@code @Value} placeholders. The {@code @Value} expressions without a
 * default (tenant.enabled, dataperm.enabled) will fail application startup when the
 * property is absent — presumably intentional, but worth confirming.
 *
 * @author Jerry
 * @date 2020-08-08
 */
@Data
@ConfigurationProperties(prefix = "datafilter")
public class DataFilterProperties {

    /**
     * Whether tenant-based data filtering is enabled.
     */
    @Value("${datafilter.tenant.enabled}")
    private Boolean enabledTenantFilter;

    /**
     * Whether data-permission filtering is enabled.
     */
    @Value("${datafilter.dataperm.enabled}")
    private Boolean enabledDataPermFilter;

    /**
     * Table-name prefix of the department relation tables (e.g. "zz_"). Used by
     * MybatisDataFilterInterceptor when building the data-permission filter SQL.
     */
    @Value("${datafilter.dataperm.deptRelationTablePrefix:}")
    private String deptRelationTablePrefix;

    /**
     * When true, the data-permission filter condition is qualified with the table
     * name, e.g. {@code zz_sys_user.dept_id = xxx}; when false only the bare column
     * name is used ({@code dept_id = xxx}). The false setting mainly targets Oracle
     * paging SQL that wraps the query in an aliased sub-select, where keeping the
     * original table name would make the SQL invalid.
     */
    @Value("${datafilter.dataperm.addTableNamePrefix:true}")
    private Boolean addTableNamePrefix;
}

View File

@@ -0,0 +1,21 @@
package com.orange.demo.common.datafilter.config;
import com.orange.demo.common.datafilter.interceptor.DataFilterInterceptor;
import org.springframework.context.annotation.Configuration;
import org.springframework.web.servlet.config.annotation.InterceptorRegistry;
import org.springframework.web.servlet.config.annotation.WebMvcConfigurer;
/**
 * Registers the web interceptors related to data filtering with Spring MVC.
 *
 * @author Jerry
 * @date 2020-08-08
 */
@Configuration
public class DataFilterWebMvcConfigurer implements WebMvcConfigurer {

    @Override
    public void addInterceptors(InterceptorRegistry registry) {
        // Apply the data-filter bookkeeping interceptor to every request path.
        DataFilterInterceptor dataFilterInterceptor = new DataFilterInterceptor();
        registry.addInterceptor(dataFilterInterceptor).addPathPatterns("/**");
    }
}

View File

@@ -0,0 +1,69 @@
package com.orange.demo.common.datafilter.constant;
import java.util.HashMap;
import java.util.Map;
/**
 * Constant dictionary describing the data-permission rule types.
 *
 * @author Jerry
 * @date 2020-08-08
 */
public final class DataPermRuleType {

    /**
     * View all data.
     */
    public static final int TYPE_ALL = 0;
    /**
     * View only the current user's data.
     */
    public static final int TYPE_USER_ONLY = 1;
    /**
     * View only the current department's data.
     */
    public static final int TYPE_DEPT_ONLY = 2;
    /**
     * View the user's own department and its child departments.
     */
    public static final int TYPE_DEPT_AND_CHILD_DEPT = 3;
    /**
     * View multiple departments and their child departments.
     */
    public static final int TYPE_MULTI_DEPT_AND_CHILD_DEPT = 4;
    /**
     * View a custom list of departments.
     * NOTE(review): "DETP" is a typo for "DEPT", kept as-is for source compatibility.
     */
    public static final int TYPE_CUSTOM_DETP_LIST = 5;

    /**
     * Display-name lookup for each rule type. Sized for the 6 entries it holds
     * (the original capacity of 4 forced an immediate resize), and keyed by the
     * named constants instead of repeating the magic numbers.
     */
    private static final Map<Object, String> DICT_MAP = new HashMap<>(8);
    static {
        DICT_MAP.put(TYPE_ALL, "查看全部");
        DICT_MAP.put(TYPE_USER_ONLY, "仅查看当前用户");
        DICT_MAP.put(TYPE_DEPT_ONLY, "仅查看所在部门");
        DICT_MAP.put(TYPE_DEPT_AND_CHILD_DEPT, "所在部门及子部门");
        DICT_MAP.put(TYPE_MULTI_DEPT_AND_CHILD_DEPT, "多部门及子部门");
        DICT_MAP.put(TYPE_CUSTOM_DETP_LIST, "自定义部门列表");
    }

    /**
     * Returns whether the argument is within the legal value range of this dictionary.
     *
     * @param value the value to validate; may be null.
     * @return true when the value is a known rule type, otherwise false.
     */
    public static boolean isValid(Integer value) {
        return value != null && DICT_MAP.containsKey(value);
    }

    /**
     * Private constructor: this class is a constant holder and must not be instantiated.
     */
    private DataPermRuleType() {
    }
}

View File

@@ -0,0 +1,42 @@
package com.orange.demo.common.datafilter.interceptor;
import com.orange.demo.common.core.object.GlobalThreadLocal;
import lombok.extern.slf4j.Slf4j;
import org.springframework.web.servlet.HandlerInterceptor;
import org.springframework.web.servlet.ModelAndView;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
/**
 * Initializes the per-request flag consumed by the MyBatis data-filter plugin.
 * The flag is forcibly reset to its default before every controller method runs,
 * so a stale value left behind by a previous request on the same servlet thread
 * can never leak into the current one.
 *
 * @author Jerry
 * @date 2020-08-08
 */
@Slf4j
public class DataFilterInterceptor implements HandlerInterceptor {

    @Override
    public boolean preHandle(HttpServletRequest request, HttpServletResponse response, Object handler)
            throws Exception {
        // Always re-enable data filtering before the controller is invoked. This
        // guards against a previous request that exited abnormally on this thread
        // without clearing its state.
        GlobalThreadLocal.setDataFilter(true);
        return true;
    }

    @Override
    public void postHandle(HttpServletRequest request, HttpServletResponse response, Object handler,
            ModelAndView modelAndView) throws Exception {
        // No post-processing needed; the body is commented to satisfy sonar.
    }

    @Override
    public void afterCompletion(HttpServletRequest request, HttpServletResponse response, Object handler, Exception ex)
            throws Exception {
        // Clear the thread-local flag once the request has fully completed.
        GlobalThreadLocal.clearDataFilter();
    }
}

View File

@@ -0,0 +1,479 @@
package com.orange.demo.common.datafilter.interceptor;
import cn.hutool.core.collection.CollUtil;
import cn.hutool.core.util.ReflectUtil;
import com.alibaba.fastjson.JSON;
import com.orange.demo.common.core.base.dao.BaseDaoMapper;
import com.orange.demo.common.core.annotation.*;
import com.orange.demo.common.core.exception.NoDataPermException;
import com.orange.demo.common.core.object.GlobalThreadLocal;
import com.orange.demo.common.core.object.TokenData;
import com.orange.demo.common.core.util.ApplicationContextHolder;
import com.orange.demo.common.core.util.ContextUtil;
import com.orange.demo.common.core.util.MyModelUtil;
import com.orange.demo.common.core.util.RedisKeyUtil;
import com.orange.demo.common.datafilter.config.DataFilterProperties;
import com.orange.demo.common.datafilter.constant.DataPermRuleType;
import lombok.Data;
import lombok.extern.slf4j.Slf4j;
import net.sf.jsqlparser.JSQLParserException;
import net.sf.jsqlparser.expression.operators.conditional.AndExpression;
import net.sf.jsqlparser.parser.CCJSqlParserUtil;
import net.sf.jsqlparser.statement.Statement;
import net.sf.jsqlparser.statement.delete.Delete;
import net.sf.jsqlparser.statement.select.FromItem;
import net.sf.jsqlparser.statement.select.PlainSelect;
import net.sf.jsqlparser.statement.select.Select;
import net.sf.jsqlparser.statement.select.SubSelect;
import net.sf.jsqlparser.statement.update.Update;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.collections4.MapUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.ibatis.executor.statement.RoutingStatementHandler;
import org.apache.ibatis.executor.statement.StatementHandler;
import org.apache.ibatis.mapping.BoundSql;
import org.apache.ibatis.mapping.MappedStatement;
import org.apache.ibatis.mapping.SqlCommandType;
import org.apache.ibatis.plugin.*;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import redis.clients.jedis.Jedis;
import redis.clients.jedis.JedisPool;
import javax.persistence.Table;
import java.lang.reflect.Field;
import java.lang.reflect.ParameterizedType;
import java.sql.Connection;
import java.util.*;
/**
 * MyBatis interceptor. Currently performs the unified interception of prepared SQL
 * statements and injects tenant-id and data-permission filter conditions into them.
 *
 * @author Jerry
 * @date 2020-08-08
 */
@Intercepts({@Signature(type = StatementHandler.class, method = "prepare", args = {Connection.class, Integer.class})})
@Slf4j
@Component
public class MybatisDataFilterInterceptor implements Interceptor {

    @Autowired
    private JedisPool jedisPool;
    @Autowired
    private DataFilterProperties properties;

    /**
     * Data-permission metadata cache, keyed by mapper class name.
     * Populated by loadInfoWithDataFilter(), which the startup listener
     * (LoadDataFilterInfoListener) invokes once the application is ready.
     */
    private final Map<String, ModelDataPermInfo> cachedDataPermMap = new HashMap<>();

    /**
     * Tenant-filter metadata cache, keyed by mapper class name.
     */
    private final Map<String, ModelTenantInfo> cachedTenantMap = new HashMap<>();

    /**
     * Preloads all metadata related to data filtering into the local caches.
     * Called once from the (LoadDataFilterInfoListener) startup listener.
     */
    @SuppressWarnings("all")
    public void loadInfoWithDataFilter() {
        Map<String, BaseDaoMapper> mapperMap =
                ApplicationContextHolder.getApplicationContext().getBeansOfType(BaseDaoMapper.class);
        for (BaseDaoMapper<?> mapperProxy : mapperMap.values()) {
            // Handle the JDK dynamic proxy first.
            Object proxy = ReflectUtil.getFieldValue(mapperProxy, "h");
            // Not a JDK proxy, so try the CGLIB proxy instead.
            if (proxy == null) {
                proxy = ReflectUtil.getFieldValue(mapperProxy, "CGLIB$CALLBACK_0");
            }
            Class<?> mapperClass =
                    (Class<?>) ReflectUtil.getFieldValue(proxy, "mapperInterface");
            if (properties.getEnabledTenantFilter()) {
                loadTenantFilterData(mapperClass);
            }
            if (properties.getEnabledDataPermFilter()) {
                EnableDataPerm rule = mapperClass.getAnnotation(EnableDataPerm.class);
                if (rule != null) {
                    loadDataPermFilterRules(mapperClass, rule);
                }
            }
        }
    }

    /**
     * Caches the tenant-filter metadata of the model class bound to the given mapper.
     * Only the first field annotated with @TenantFilterColumn is registered.
     *
     * @param mapperClass the mapper interface to inspect.
     */
    private void loadTenantFilterData(Class<?> mapperClass) {
        Class<?> modelClass = (Class<?>) ((ParameterizedType)
                mapperClass.getGenericInterfaces()[0]).getActualTypeArguments()[0];
        Field[] fields = ReflectUtil.getFields(modelClass);
        for (Field field : fields) {
            if (field.getAnnotation(TenantFilterColumn.class) != null) {
                ModelTenantInfo tenantInfo = new ModelTenantInfo();
                tenantInfo.setModelName(modelClass.getSimpleName());
                tenantInfo.setTableName(modelClass.getAnnotation(Table.class).name());
                tenantInfo.setFieldName(field.getName());
                // FIX: the original code first assigned the raw field name here and then
                // immediately overwrote it with the mapped column name; only the mapped
                // column name is meaningful, so the redundant assignment was removed.
                tenantInfo.setColumnName(MyModelUtil.mapToColumnName(field, modelClass));
                // Check whether the mapper declares methods that must NOT have the
                // tenant-id condition injected automatically.
                DisableTenantFilter disableTenantFilter =
                        mapperClass.getAnnotation(DisableTenantFilter.class);
                if (disableTenantFilter != null) {
                    // Collect the SqlIds declared by this mapper that are excluded
                    // from tenant filtering.
                    Set<String> excludeMethodNameSet = new HashSet<>();
                    for (String excludeName : disableTenantFilter.includeMethodName()) {
                        excludeMethodNameSet.add(excludeName);
                        // Also exclude the COUNT query PageHelper issues before a paged query.
                        excludeMethodNameSet.add(excludeName + "_COUNT");
                    }
                    tenantInfo.setExcludeMethodNameSet(excludeMethodNameSet);
                }
                cachedTenantMap.put(mapperClass.getName(), tenantInfo);
                break;
            }
        }
    }

    /**
     * Caches the data-permission metadata of the model class bound to the given mapper.
     *
     * @param mapperClass the mapper interface annotated with @EnableDataPerm.
     * @param rule        the @EnableDataPerm annotation instance.
     */
    private void loadDataPermFilterRules(Class<?> mapperClass, EnableDataPerm rule) {
        String sysDataPermMapperName = "SysDataPermMapper";
        // Annotating SysDataPermMapper itself with @EnableDataPerm would cause infinite
        // recursion, so fail fast while the startup listener is loading the cache.
        if (StringUtils.equals(sysDataPermMapperName, mapperClass.getSimpleName())) {
            throw new IllegalStateException("Add @EnableDataPerm annotation to SysDataPermMapper is ILLEGAL!");
        }
        // Collect the SqlIds declared by this mapper that are excluded from data filtering.
        // NOTE(review): "excluseMethodName" is a typo in the @EnableDataPerm annotation's
        // API; it cannot be renamed here without changing that annotation and its users.
        Set<String> excludeMethodNameSet = null;
        String[] excludes = rule.excluseMethodName();
        if (excludes.length > 0) {
            excludeMethodNameSet = new HashSet<>();
            for (String excludeName : excludes) {
                excludeMethodNameSet.add(excludeName);
                // Also exclude the COUNT query PageHelper issues before a paged query.
                excludeMethodNameSet.add(excludeName + "_COUNT");
            }
        }
        // Resolve the main-table info bound to the mapper: table name plus the
        // user and dept filter column names.
        Class<?> modelClazz = (Class<?>)
                ((ParameterizedType) mapperClass.getGenericInterfaces()[0]).getActualTypeArguments()[0];
        Field[] fields = ReflectUtil.getFields(modelClazz);
        Field userFilterField = null;
        Field deptFilterField = null;
        for (Field field : fields) {
            if (null != field.getAnnotation(UserFilterColumn.class)) {
                userFilterField = field;
            }
            if (null != field.getAnnotation(DeptFilterColumn.class)) {
                deptFilterField = field;
            }
            if (userFilterField != null && deptFilterField != null) {
                break;
            }
        }
        // Cache the permission-related info parsed from the model's annotations.
        ModelDataPermInfo info = new ModelDataPermInfo();
        info.setMainTableName(MyModelUtil.mapToTableName(modelClazz));
        info.setExcludeMethodNameSet(excludeMethodNameSet);
        if (userFilterField != null) {
            info.setUserFilterColumn(MyModelUtil.mapToColumnName(userFilterField, modelClazz));
        }
        if (deptFilterField != null) {
            info.setDeptFilterColumn(MyModelUtil.mapToColumnName(deptFilterField, modelClazz));
        }
        cachedDataPermMap.put(mapperClass.getName(), info);
    }

    /**
     * Intercepts StatementHandler.prepare and rewrites the bound SQL with tenant and
     * data-permission filter conditions where applicable.
     *
     * @param invocation the intercepted invocation.
     * @return the result of the original invocation.
     * @throws Throwable if SQL parsing or the underlying invocation fails.
     */
    @Override
    public Object intercept(Invocation invocation) throws Throwable {
        // Skip entirely when the current thread disabled data filtering for this operation.
        if (!GlobalThreadLocal.enabledDataFilter()) {
            return invocation.proceed();
        }
        // Filtering only applies inside an HttpServletRequest context; system-level
        // preloading queries are not subject to data permissions.
        if (!ContextUtil.hasRequestContext()) {
            return invocation.proceed();
        }
        // Requests without a logged-in user are not tenant-filtered; white-listed urls
        // that still need filtering must implement it manually (e.g. the menu-list query
        // of the login flow, which runs before TokenData exists, adds the condition by hand).
        if (TokenData.takeFromRequest() == null) {
            return invocation.proceed();
        }
        RoutingStatementHandler handler = (RoutingStatementHandler) invocation.getTarget();
        StatementHandler delegate =
                (StatementHandler) ReflectUtil.getFieldValue(handler, "delegate");
        // Reflectively read the mappedStatement attribute of BaseStatementHandler.
        MappedStatement mappedStatement =
                (MappedStatement) ReflectUtil.getFieldValue(delegate, "mappedStatement");
        SqlCommandType commandType = mappedStatement.getSqlCommandType();
        // INSERT statements are never filtered.
        if (commandType == SqlCommandType.INSERT) {
            return invocation.proceed();
        }
        String sqlId = mappedStatement.getId();
        int pos = StringUtils.lastIndexOf(sqlId, ".");
        String className = StringUtils.substring(sqlId, 0, pos);
        String methodName = StringUtils.substring(sqlId, pos + 1);
        // Apply the tenant filter first and hand the already-parsed statement to the
        // data-permission step, saving one SQL parse on this hot path.
        Statement statement = null;
        if (properties.getEnabledTenantFilter()) {
            statement = this.processTenantFilter(className, methodName, delegate.getBoundSql(), commandType);
        }
        // Then inject the data-permission filter.
        if (properties.getEnabledDataPermFilter()) {
            this.processDataPermFilter(className, methodName, delegate.getBoundSql(), commandType, statement, sqlId);
        }
        return invocation.proceed();
    }

    /**
     * Injects the tenant-id equality condition into the SQL bound to the current statement.
     *
     * @return the parsed (and rewritten) statement, or null when the mapper has no
     *         tenant-filter metadata or the method is excluded.
     */
    private Statement processTenantFilter(
            String className, String methodName, BoundSql boundSql, SqlCommandType commandType) throws JSQLParserException {
        ModelTenantInfo info = cachedTenantMap.get(className);
        if (info == null || CollUtil.contains(info.getExcludeMethodNameSet(), methodName)) {
            return null;
        }
        String sql = boundSql.getSql();
        Statement statement = CCJSqlParserUtil.parse(sql);
        StringBuilder filterBuilder = new StringBuilder(64);
        // Use the Lombok-generated accessors instead of direct field access.
        filterBuilder.append(info.getTableName()).append(".")
                .append(info.getColumnName())
                .append("=")
                .append(TokenData.takeFromRequest().getTenantId());
        String dataFilter = filterBuilder.toString();
        if (commandType == SqlCommandType.UPDATE) {
            Update update = (Update) statement;
            this.buildWhereClause(update, dataFilter);
        } else if (commandType == SqlCommandType.DELETE) {
            Delete delete = (Delete) statement;
            this.buildWhereClause(delete, dataFilter);
        } else {
            Select select = (Select) statement;
            PlainSelect selectBody = (PlainSelect) select.getSelectBody();
            FromItem fromItem = selectBody.getFromItem();
            if (fromItem != null) {
                PlainSelect subSelect = null;
                if (fromItem instanceof SubSelect) {
                    subSelect = (PlainSelect) ((SubSelect) fromItem).getSelectBody();
                }
                if (subSelect != null) {
                    buildWhereClause(subSelect, dataFilter);
                } else {
                    buildWhereClause(selectBody, dataFilter);
                }
            }
        }
        ReflectUtil.setFieldValue(boundSql, "sql", statement.toString());
        return statement;
    }

    /**
     * Injects the data-permission filter conditions for the current statement.
     * INSERTs never reach this point, and administrators see unfiltered data.
     *
     * @param statement the statement already parsed by the tenant-filter step, or null
     *                  (in which case the SQL is parsed here).
     */
    private void processDataPermFilter(
            String className, String methodName, BoundSql boundSql, SqlCommandType commandType, Statement statement, String sqlId)
            throws JSQLParserException {
        TokenData tokenData = TokenData.takeFromRequest();
        if (Boolean.TRUE.equals(tokenData.getIsAdmin())) {
            return;
        }
        ModelDataPermInfo info = cachedDataPermMap.get(className);
        // Skip methods that were explicitly excluded from data-permission filtering.
        if (info == null || CollUtil.contains(info.getExcludeMethodNameSet(), methodName)) {
            return;
        }
        String dataPermData;
        try (Jedis jedis = jedisPool.getResource()) {
            String dataPermSessionKey =
                    RedisKeyUtil.makeSessionDataPermIdKeyForRedis(tokenData.getSessionId());
            dataPermData = jedis.get(dataPermSessionKey);
            if (StringUtils.isBlank(dataPermData)) {
                throw new NoDataPermException(
                        "No Related DataPerm found for SQL_ID [ " + sqlId + " ].");
            }
        }
        Map<Integer, String> dataPermMap = new HashMap<>(8);
        for (Map.Entry<String, Object> entry : JSON.parseObject(dataPermData).entrySet()) {
            dataPermMap.put(Integer.valueOf(entry.getKey()), entry.getValue().toString());
        }
        if (MapUtils.isEmpty(dataPermMap)) {
            throw new NoDataPermException(
                    "No Related DataPerm found for SQL_ID [ " + sqlId + " ].");
        }
        // TYPE_ALL grants unrestricted access, so nothing has to be injected.
        if (dataPermMap.containsKey(DataPermRuleType.TYPE_ALL)) {
            return;
        }
        this.processDataPerm(info, dataPermMap, boundSql, commandType, statement);
    }

    /**
     * Builds the OR-combined permission criteria and appends them to the statement's
     * WHERE clause, then writes the rewritten SQL back into the BoundSql.
     */
    private void processDataPerm(
            ModelDataPermInfo info,
            Map<Integer, String> dataPermMap,
            BoundSql boundSql,
            SqlCommandType commandType,
            Statement statement) throws JSQLParserException {
        List<String> criteriaList = new LinkedList<>();
        for (Map.Entry<Integer, String> entry : dataPermMap.entrySet()) {
            String filterClause = processDataPermRule(info, entry.getKey(), entry.getValue());
            if (StringUtils.isNotBlank(filterClause)) {
                criteriaList.add(filterClause);
            }
        }
        if (CollectionUtils.isEmpty(criteriaList)) {
            return;
        }
        StringBuilder filterBuilder = new StringBuilder(128);
        filterBuilder.append("(");
        filterBuilder.append(StringUtils.join(criteriaList, " OR "));
        filterBuilder.append(")");
        String dataFilter = filterBuilder.toString();
        // Reuse the statement parsed by the tenant-filter step when available.
        if (statement == null) {
            String sql = boundSql.getSql();
            statement = CCJSqlParserUtil.parse(sql);
        }
        if (commandType == SqlCommandType.UPDATE) {
            Update update = (Update) statement;
            this.buildWhereClause(update, dataFilter);
        } else if (commandType == SqlCommandType.DELETE) {
            Delete delete = (Delete) statement;
            this.buildWhereClause(delete, dataFilter);
        } else {
            Select select = (Select) statement;
            PlainSelect selectBody = (PlainSelect) select.getSelectBody();
            FromItem fromItem = selectBody.getFromItem();
            PlainSelect subSelect = null;
            if (fromItem != null) {
                if (fromItem instanceof SubSelect) {
                    subSelect = (PlainSelect) ((SubSelect) fromItem).getSelectBody();
                }
                if (subSelect != null) {
                    buildWhereClause(subSelect, dataFilter);
                } else {
                    buildWhereClause(selectBody, dataFilter);
                }
            }
        }
        ReflectUtil.setFieldValue(boundSql, "sql", statement.toString());
    }

    /**
     * Renders the SQL fragment for one permission rule type.
     *
     * @param info     the cached permission metadata of the main table.
     * @param ruleType one of the DataPermRuleType constants.
     * @param deptIds  comma-separated dept id list used by the IN-based rule types.
     * @return the SQL condition fragment; empty when the model lacks the needed column.
     */
    private String processDataPermRule(ModelDataPermInfo info, Integer ruleType, String deptIds) {
        TokenData tokenData = TokenData.takeFromRequest();
        StringBuilder filter = new StringBuilder(128);
        if (ruleType == DataPermRuleType.TYPE_USER_ONLY) {
            if (StringUtils.isNotBlank(info.getUserFilterColumn())) {
                if (properties.getAddTableNamePrefix()) {
                    filter.append(info.getMainTableName()).append(".");
                }
                filter.append(info.getUserFilterColumn())
                        .append(" = ")
                        .append(tokenData.getUserId());
            }
        } else {
            if (StringUtils.isNotBlank(info.getDeptFilterColumn())) {
                if (ruleType == DataPermRuleType.TYPE_DEPT_ONLY) {
                    if (properties.getAddTableNamePrefix()) {
                        filter.append(info.getMainTableName()).append(".");
                    }
                    filter.append(info.getDeptFilterColumn())
                            .append(" = ")
                            .append(tokenData.getDeptId());
                } else if (ruleType == DataPermRuleType.TYPE_DEPT_AND_CHILD_DEPT) {
                    // Current dept plus all child depts, via the sys_dept_relation table.
                    filter.append(" EXISTS ")
                            .append("(SELECT 1 FROM ")
                            .append(properties.getDeptRelationTablePrefix())
                            .append("sys_dept_relation WHERE ")
                            .append(properties.getDeptRelationTablePrefix())
                            .append("sys_dept_relation.parent_dept_id = ")
                            .append(tokenData.getDeptId())
                            .append(" AND ");
                    if (properties.getAddTableNamePrefix()) {
                        filter.append(info.getMainTableName()).append(".");
                    }
                    filter.append(info.getDeptFilterColumn())
                            .append(" = ")
                            .append(properties.getDeptRelationTablePrefix())
                            .append("sys_dept_relation.dept_id) ");
                } else if (ruleType == DataPermRuleType.TYPE_MULTI_DEPT_AND_CHILD_DEPT) {
                    // A custom set of depts plus all of their child depts.
                    filter.append(" EXISTS ")
                            .append("(SELECT 1 FROM ")
                            .append(properties.getDeptRelationTablePrefix())
                            .append("sys_dept_relation WHERE ")
                            .append(properties.getDeptRelationTablePrefix())
                            .append("sys_dept_relation.parent_dept_id IN (")
                            .append(deptIds)
                            .append(") AND ");
                    if (properties.getAddTableNamePrefix()) {
                        filter.append(info.getMainTableName()).append(".");
                    }
                    filter.append(info.getDeptFilterColumn())
                            .append(" = ")
                            .append(properties.getDeptRelationTablePrefix())
                            .append("sys_dept_relation.dept_id) ");
                } else if (ruleType == DataPermRuleType.TYPE_CUSTOM_DETP_LIST) {
                    if (properties.getAddTableNamePrefix()) {
                        filter.append(info.getMainTableName()).append(".");
                    }
                    filter.append(info.getDeptFilterColumn())
                            .append(" IN (")
                            .append(deptIds)
                            .append(") ");
                }
            }
        }
        return filter.toString();
    }

    /**
     * ANDs the given condition onto an UPDATE statement's WHERE clause (creating one
     * if absent).
     */
    private void buildWhereClause(Update update, String dataFilter) throws JSQLParserException {
        if (update.getWhere() == null) {
            update.setWhere(CCJSqlParserUtil.parseCondExpression(dataFilter));
        } else {
            AndExpression and = new AndExpression(
                    CCJSqlParserUtil.parseCondExpression(dataFilter), update.getWhere());
            update.setWhere(and);
        }
    }

    /**
     * ANDs the given condition onto a DELETE statement's WHERE clause (creating one
     * if absent).
     */
    private void buildWhereClause(Delete delete, String dataFilter) throws JSQLParserException {
        if (delete.getWhere() == null) {
            delete.setWhere(CCJSqlParserUtil.parseCondExpression(dataFilter));
        } else {
            AndExpression and = new AndExpression(
                    CCJSqlParserUtil.parseCondExpression(dataFilter), delete.getWhere());
            delete.setWhere(and);
        }
    }

    /**
     * ANDs the given condition onto a SELECT body's WHERE clause (creating one
     * if absent).
     */
    private void buildWhereClause(PlainSelect select, String dataFilter) throws JSQLParserException {
        if (select.getWhere() == null) {
            select.setWhere(CCJSqlParserUtil.parseCondExpression(dataFilter));
        } else {
            AndExpression and = new AndExpression(
                    CCJSqlParserUtil.parseCondExpression(dataFilter), select.getWhere());
            select.setWhere(and);
        }
    }

    @Override
    public Object plugin(Object target) {
        return Plugin.wrap(target, this);
    }

    @Override
    public void setProperties(Properties properties) {
        // Intentionally empty; kept non-blank so sonar does not complain.
    }

    /**
     * Cached data-permission metadata of one mapper's model class.
     */
    @Data
    private static final class ModelDataPermInfo {
        private Set<String> excludeMethodNameSet;
        private String userFilterColumn;
        private String deptFilterColumn;
        private String mainTableName;
    }

    /**
     * Cached tenant-filter metadata of one mapper's model class.
     */
    @Data
    private static final class ModelTenantInfo {
        private Set<String> excludeMethodNameSet;
        private String modelName;
        private String tableName;
        private String fieldName;
        private String columnName;
    }
}

View File

@@ -0,0 +1,25 @@
package com.orange.demo.common.datafilter.listener;
import com.orange.demo.common.datafilter.interceptor.MybatisDataFilterInterceptor;
import org.springframework.boot.context.event.ApplicationReadyEvent;
import org.springframework.context.ApplicationListener;
import org.springframework.stereotype.Component;
/**
 * Application startup listener.
 * Its sole job is to invoke MybatisDataFilterInterceptor.loadInfoWithDataFilter()
 * once the context is ready, so the filter-annotation metadata is cached up front
 * and runtime interception stays fast.
 *
 * @author Jerry
 * @date 2020-08-08
 */
@Component
public class LoadDataFilterInfoListener implements ApplicationListener<ApplicationReadyEvent> {

    @Override
    public void onApplicationEvent(ApplicationReadyEvent applicationReadyEvent) {
        // Resolve the interceptor bean from the refreshed context and warm its caches.
        applicationReadyEvent.getApplicationContext()
                .getBean(MybatisDataFilterInterceptor.class)
                .loadInfoWithDataFilter();
    }
}

View File

@@ -0,0 +1,2 @@
org.springframework.boot.autoconfigure.EnableAutoConfiguration=\
com.orange.demo.common.datafilter.config.DataFilterAutoConfig

View File

@@ -0,0 +1,428 @@
package com.orange.demo.common.redis.cache;
import com.alibaba.fastjson.JSON;
import com.orange.demo.common.core.cache.DictionaryCache;
import com.orange.demo.common.core.constant.ApplicationConstant;
import com.orange.demo.common.core.exception.RedisCacheAccessException;
import com.orange.demo.common.core.object.TokenData;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.collections4.CollectionUtils;
import org.redisson.api.RMap;
import org.redisson.api.RedissonClient;
import java.util.*;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import java.util.function.Function;
import java.util.stream.Collectors;
/**
 * Redis-backed cache for tenant-scoped dictionary data.
 *
 * @param <K> type of the dictionary table's primary key.
 * @param <V> type of the dictionary objects.
 * @author Jerry
 * @date 2020-08-08
 */
@Slf4j
public class RedisTenantDictionaryCache<K, V> implements DictionaryCache<K, V> {

    /**
     * Redisson client.
     */
    protected final RedissonClient redissonClient;
    /**
     * Dictionary name; part of the Redis key.
     */
    protected final String dictionaryName;
    /**
     * Class of the dictionary value objects.
     */
    protected final Class<V> valueClazz;
    /**
     * Read-write lock: most operations are reads, so this improves concurrency
     * scalability. NOTE: this is a JVM-local lock, it does not coordinate across nodes.
     */
    protected final ReadWriteLock lock;
    /**
     * Function extracting the primary key from a dictionary object.
     */
    protected final Function<V, K> idGetter;
    /**
     * Lock-acquisition timeout, in milliseconds.
     */
    protected static final long TIMEOUT = 2000L;

    /**
     * Static factory for this cache type.
     *
     * @param redissonClient the Redisson client.
     * @param dictionaryName the dictionary name, used to build the Redis key.
     * @param valueClazz     Class object of the value type.
     * @param idGetter       function extracting the primary key from a value.
     * @param <K>            dictionary key type.
     * @param <V>            dictionary object type.
     * @return the instantiated cache object.
     */
    public static <K, V> RedisTenantDictionaryCache<K, V> create(
            RedissonClient redissonClient,
            String dictionaryName,
            Class<V> valueClazz,
            Function<V, K> idGetter) {
        if (idGetter == null) {
            throw new IllegalArgumentException("IdGetter can't be NULL.");
        }
        return new RedisTenantDictionaryCache<>(redissonClient, dictionaryName, valueClazz, idGetter);
    }

    /**
     * Constructor.
     *
     * @param redissonClient the Redisson client.
     * @param dictionaryName the dictionary name, used to build the Redis key.
     * @param valueClazz     Class object of the value type.
     * @param idGetter       function extracting the primary key from a value.
     */
    public RedisTenantDictionaryCache(
            RedissonClient redissonClient,
            String dictionaryName,
            Class<V> valueClazz,
            Function<V, K> idGetter) {
        this.redissonClient = redissonClient;
        this.lock = new ReentrantReadWriteLock();
        this.dictionaryName = dictionaryName;
        this.valueClazz = valueClazz;
        this.idGetter = idGetter;
    }

    /**
     * Returns the Redis map holding this dictionary's data for the current tenant.
     *
     * NOTE(review): the key uses TREE_DICT_CACHE_NAME_SUFFIX even though this is the
     * flat (non-tree) cache, which makes it collide with the subclass's tree multimap
     * key — confirm whether a non-tree suffix was intended here.
     *
     * @return the tenant's dictionary data map.
     */
    protected RMap<K, String> getTenantDataMap() {
        Long tenantId = TokenData.takeFromRequest().getTenantId();
        StringBuilder s = new StringBuilder(64);
        s.append(dictionaryName).append("-")
                .append(tenantId).append(ApplicationConstant.TREE_DICT_CACHE_NAME_SUFFIX);
        return redissonClient.getMap(s.toString());
    }

    /**
     * Returns all dictionary objects, in data-insertion order.
     *
     * @return list of all dictionary objects; empty list when none exist.
     */
    @Override
    public List<V> getAll() {
        Collection<String> dataList;
        String exceptionMessage;
        try {
            if (lock.readLock().tryLock(TIMEOUT, TimeUnit.MILLISECONDS)) {
                try {
                    dataList = this.getTenantDataMap().readAllValues();
                } finally {
                    // If the guarded operation exceeds redisson.lockWatchdogTimeout,
                    // Redis may delete the lock key and unlock() throws at runtime.
                    lock.readLock().unlock();
                }
            } else {
                throw new TimeoutException();
            }
        } catch (Exception e) {
            exceptionMessage = String.format(
                    "LOCK Operation of [RedisDictionaryCache::getAll] encountered EXCEPTION [%s] for DICT [%s].",
                    e.getClass().getSimpleName(), valueClazz.getSimpleName());
            log.warn(exceptionMessage);
            throw new RedisCacheAccessException(exceptionMessage, e);
        }
        if (CollectionUtils.isEmpty(dataList)) {
            return new LinkedList<>();
        }
        return dataList.stream()
                .map(data -> JSON.parseObject(data, valueClazz))
                .collect(Collectors.toCollection(LinkedList::new));
    }

    /**
     * Returns the cached objects whose keys are in the given set.
     *
     * @param keys set of primary keys.
     * @return matching objects; empty list when keys is empty or nothing matches.
     */
    @Override
    public List<V> getInList(Set<K> keys) {
        if (CollectionUtils.isEmpty(keys)) {
            return new LinkedList<>();
        }
        Collection<String> dataList = null;
        String exceptionMessage;
        try {
            if (lock.readLock().tryLock(TIMEOUT, TimeUnit.MILLISECONDS)) {
                try {
                    Map<K, String> m = this.getTenantDataMap().getAll(keys);
                    if (m != null) {
                        dataList = m.values();
                    }
                } finally {
                    lock.readLock().unlock();
                }
            } else {
                throw new TimeoutException();
            }
        } catch (Exception e) {
            exceptionMessage = String.format(
                    "LOCK Operation of [RedisDictionaryCache::getInList] encountered EXCEPTION [%s] for DICT [%s].",
                    e.getClass().getSimpleName(), valueClazz.getSimpleName());
            log.warn(exceptionMessage);
            throw new RedisCacheAccessException(exceptionMessage, e);
        }
        if (dataList == null) {
            return new LinkedList<>();
        }
        return dataList.stream()
                .map(data -> JSON.parseObject(data, valueClazz))
                .collect(Collectors.toCollection(LinkedList::new));
    }

    /**
     * Returns the cached object for the given key.
     *
     * @param id the key.
     * @return the cached object, or null when absent.
     */
    @Override
    public V get(K id) {
        if (id == null) {
            return null;
        }
        String data;
        String exceptionMessage;
        try {
            if (lock.readLock().tryLock(TIMEOUT, TimeUnit.MILLISECONDS)) {
                try {
                    data = this.getTenantDataMap().get(id);
                } finally {
                    lock.readLock().unlock();
                }
            } else {
                throw new TimeoutException();
            }
        } catch (Exception e) {
            exceptionMessage = String.format(
                    "LOCK Operation of [RedisDictionaryCache::get] encountered EXCEPTION [%s] for DICT [%s].",
                    e.getClass().getSimpleName(), valueClazz.getSimpleName());
            log.warn(exceptionMessage);
            throw new RedisCacheAccessException(exceptionMessage, e);
        }
        if (data == null) {
            return null;
        }
        return JSON.parseObject(data, valueClazz);
    }

    /**
     * Returns the number of cached entries. Reads without taking the lock.
     *
     * @return the entry count.
     */
    @Override
    public int getCount() {
        return this.getTenantDataMap().size();
    }

    /**
     * Stores all objects of the given list, keeping getAll's order consistent with it.
     *
     * @param dataList the objects to cache.
     */
    @Override
    public void putAll(List<V> dataList) {
        if (CollectionUtils.isEmpty(dataList)) {
            return;
        }
        Map<K, String> map = dataList.stream()
                .collect(Collectors.toMap(idGetter, JSON::toJSONString));
        String exceptionMessage;
        try {
            if (lock.writeLock().tryLock(TIMEOUT, TimeUnit.MILLISECONDS)) {
                try {
                    // 1000 is the Redisson batch size for the bulk write.
                    this.getTenantDataMap().putAll(map, 1000);
                } finally {
                    lock.writeLock().unlock();
                }
            } else {
                throw new TimeoutException();
            }
        } catch (Exception e) {
            exceptionMessage = String.format(
                    "LOCK Operation of [RedisDictionaryCache::putAll] encountered EXCEPTION [%s] for DICT [%s].",
                    e.getClass().getSimpleName(), valueClazz.getSimpleName());
            log.warn(exceptionMessage);
            throw new RedisCacheAccessException(exceptionMessage, e);
        }
    }

    /**
     * Stores one object under the given key.
     *
     * @param id   usually the dictionary object's primary key.
     * @param data the dictionary object.
     */
    @Override
    public void put(K id, V data) {
        if (id == null || data == null) {
            return;
        }
        String exceptionMessage;
        try {
            if (lock.writeLock().tryLock(TIMEOUT, TimeUnit.MILLISECONDS)) {
                try {
                    this.getTenantDataMap().fastPut(id, JSON.toJSONString(data));
                } finally {
                    lock.writeLock().unlock();
                }
            } else {
                throw new TimeoutException();
            }
        } catch (Exception e) {
            exceptionMessage = String.format(
                    "LOCK Operation of [RedisDictionaryCache::put] encountered EXCEPTION [%s] for DICT [%s].",
                    e.getClass().getSimpleName(), valueClazz.getSimpleName());
            log.warn(exceptionMessage);
            throw new RedisCacheAccessException(exceptionMessage, e);
        }
    }

    /**
     * Reloads the cache: clears existing data, then performs the putAll.
     *
     * @param dataList the objects to cache.
     * @param force    if true, always refresh; if false, skip when the cache is non-empty.
     */
    @Override
    public void reload(List<V> dataList, boolean force) {
        Map<K, String> map = null;
        if (CollectionUtils.isNotEmpty(dataList)) {
            map = dataList.stream().collect(Collectors.toMap(idGetter, JSON::toJSONString));
        }
        String exceptionMessage;
        try {
            if (lock.writeLock().tryLock(TIMEOUT, TimeUnit.MILLISECONDS)) {
                try {
                    // Without force, only refresh an empty cache.
                    if (!force && this.getCount() > 0) {
                        return;
                    }
                    RMap<K, String> tenantDataMap = this.getTenantDataMap();
                    tenantDataMap.clear();
                    if (map != null) {
                        tenantDataMap.putAll(map, 1000);
                    }
                } finally {
                    lock.writeLock().unlock();
                }
            } else {
                throw new TimeoutException();
            }
        } catch (Exception e) {
            exceptionMessage = String.format(
                    "LOCK Operation of [RedisDictionaryCache::reload] encountered EXCEPTION [%s] for DICT [%s].",
                    e.getClass().getSimpleName(), valueClazz.getSimpleName());
            log.warn(exceptionMessage);
            throw new RedisCacheAccessException(exceptionMessage, e);
        }
    }

    /**
     * Removes the entry for the given key.
     *
     * @param id the key to remove.
     * @return the removed object, or null when the key was absent.
     */
    @Override
    public V invalidate(K id) {
        if (id == null) {
            return null;
        }
        String data;
        String exceptionMessage;
        try {
            if (lock.writeLock().tryLock(TIMEOUT, TimeUnit.MILLISECONDS)) {
                try {
                    // FIX: capture the removed value; the original discarded it, so this
                    // method always returned null regardless of whether the key existed.
                    data = this.getTenantDataMap().remove(id);
                } finally {
                    lock.writeLock().unlock();
                }
            } else {
                throw new TimeoutException();
            }
        } catch (Exception e) {
            exceptionMessage = String.format(
                    "LOCK Operation of [RedisDictionaryCache::invalidate] encountered EXCEPTION [%s] for DICT [%s].",
                    e.getClass().getSimpleName(), valueClazz.getSimpleName());
            log.warn(exceptionMessage);
            throw new RedisCacheAccessException(exceptionMessage, e);
        }
        if (data == null) {
            return null;
        }
        return JSON.parseObject(data, valueClazz);
    }

    /**
     * Removes every entry whose key is in the given set.
     *
     * @param keys keys to remove.
     */
    @SuppressWarnings("unchecked")
    @Override
    public void invalidateSet(Set<K> keys) {
        if (CollectionUtils.isEmpty(keys)) {
            return;
        }
        Object[] keyArray = keys.toArray(new Object[]{});
        String exceptionMessage;
        try {
            if (lock.writeLock().tryLock(TIMEOUT, TimeUnit.MILLISECONDS)) {
                try {
                    this.getTenantDataMap().fastRemove((K[]) keyArray);
                } finally {
                    lock.writeLock().unlock();
                }
            } else {
                throw new TimeoutException();
            }
        } catch (Exception e) {
            exceptionMessage = String.format(
                    "LOCK Operation of [RedisDictionaryCache::invalidateSet] encountered EXCEPTION [%s] for DICT [%s].",
                    e.getClass().getSimpleName(), valueClazz.getSimpleName());
            log.warn(exceptionMessage);
            throw new RedisCacheAccessException(exceptionMessage, e);
        }
    }

    /**
     * Clears the whole cache for the current tenant.
     */
    @Override
    public void invalidateAll() {
        String exceptionMessage;
        try {
            if (lock.writeLock().tryLock(TIMEOUT, TimeUnit.MILLISECONDS)) {
                try {
                    this.getTenantDataMap().clear();
                } finally {
                    lock.writeLock().unlock();
                }
            } else {
                throw new TimeoutException();
            }
        } catch (Exception e) {
            exceptionMessage = String.format(
                    "LOCK Operation of [RedisDictionaryCache::invalidateAll] encountered EXCEPTION [%s] for DICT [%s].",
                    e.getClass().getSimpleName(), valueClazz.getSimpleName());
            log.warn(exceptionMessage);
            throw new RedisCacheAccessException(exceptionMessage, e);
        }
    }
}

View File

@@ -0,0 +1,369 @@
package com.orange.demo.common.redis.cache;
import com.alibaba.fastjson.JSON;
import com.orange.demo.common.core.constant.ApplicationConstant;
import com.orange.demo.common.core.exception.RedisCacheAccessException;
import com.orange.demo.common.core.object.TokenData;
import com.google.common.collect.LinkedListMultimap;
import com.google.common.collect.Multimap;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.collections4.CollectionUtils;
import org.redisson.api.RListMultimap;
import org.redisson.api.RMap;
import org.redisson.api.RedissonClient;
import java.util.*;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.function.Function;
import java.util.stream.Collectors;
/**
* 租户树形字典数据Redis缓存对象。
*
* @param <K> 字典表主键类型。
* @param <V> 字典表对象类型。
* @author Jerry
* @date 2020-08-08
*/
@Slf4j
public class RedisTenantTreeDictionaryCache<K, V> extends RedisTenantDictionaryCache<K, V> {
/**
* 获取字典父主键数据的函数对象。
*/
protected final Function<V, K> parentIdGetter;
/**
 * Static factory for the tree-dictionary cache. Validates the two key-extractor
 * functions before construction.
 *
 * @param redissonClient the Redisson client.
 * @param dictionaryName the dictionary name; used as part of the Redis key.
 * @param valueClazz     Class object of the value type.
 * @param idGetter       function extracting the primary key from a value.
 * @param parentIdGetter function extracting the parent key from a value.
 * @param <K>            dictionary key type.
 * @param <V>            dictionary object type.
 * @return the instantiated tree-dictionary cache object.
 */
public static <K, V> RedisTenantTreeDictionaryCache<K, V> create(
        RedissonClient redissonClient,
        String dictionaryName,
        Class<V> valueClazz,
        Function<V, K> idGetter,
        Function<V, K> parentIdGetter) {
    if (idGetter == null) {
        throw new IllegalArgumentException("IdGetter can't be NULL.");
    }
    if (parentIdGetter == null) {
        throw new IllegalArgumentException("ParentIdGetter can't be NULL.");
    }
    return new RedisTenantTreeDictionaryCache<>(
            redissonClient, dictionaryName, valueClazz, idGetter, parentIdGetter);
}
/**
 * Constructor. Delegates the flat-map state to the superclass and stores the
 * parent-key extractor used to maintain the tree multimap.
 *
 * @param redissonClient the Redisson client.
 * @param dictionaryName the dictionary name; used as part of the Redis key.
 * @param valueClazz     Class object of the value type.
 * @param idGetter       function extracting the primary key from a value.
 * @param parentIdGetter function extracting the parent key from a value.
 */
public RedisTenantTreeDictionaryCache(
        RedissonClient redissonClient,
        String dictionaryName,
        Class<V> valueClazz,
        Function<V, K> idGetter,
        Function<V, K> parentIdGetter) {
    super(redissonClient, dictionaryName, valueClazz, idGetter);
    this.parentIdGetter = parentIdGetter;
}
/**
 * Returns the Redisson list-multimap holding the tree-structured dictionary data
 * (child entries grouped by parent key) for the current tenant.
 *
 * NOTE(review): the key built here (name + tenant id + TREE_DICT_CACHE_NAME_SUFFIX)
 * is identical to the one the parent class's getTenantDataMap() builds, so the RMap
 * and this RListMultimap would share one Redis key — confirm whether the flat map
 * should use a different (non-tree) suffix.
 *
 * @return the tenant's tree dictionary multimap.
 */
protected RListMultimap<K, String> getTenantTreeDataMap() {
    Long tenantId = TokenData.takeFromRequest().getTenantId();
    StringBuilder s = new StringBuilder(64);
    s.append(dictionaryName).append("-")
            .append(tenantId).append(ApplicationConstant.TREE_DICT_CACHE_NAME_SUFFIX);
    return redissonClient.getListMultimap(s.toString());
}
/**
 * Returns the child data list of the given parent key.
 *
 * @param parentId the parent key; null selects all top-level entries.
 * @return the child objects in stored order; empty list when there are none.
 * @throws RedisCacheAccessException when the lock cannot be acquired or Redis access fails.
 */
public List<V> getListByParentId(K parentId) {
    List<String> dataList;
    String exceptionMessage;
    // (The unused tenantId local of the original was removed; the tenant id is
    // resolved inside getTenantTreeDataMap().)
    try {
        if (lock.readLock().tryLock(TIMEOUT, TimeUnit.MILLISECONDS)) {
            try {
                dataList = this.getTenantTreeDataMap().get(parentId);
            } finally {
                lock.readLock().unlock();
            }
        } else {
            throw new TimeoutException();
        }
    } catch (Exception e) {
        exceptionMessage = String.format(
                "LOCK Operation of [RedisTreeDictionaryCache::getListByParentId] encountered EXCEPTION [%s] for DICT [%s].",
                e.getClass().getSimpleName(), valueClazz.getSimpleName());
        log.warn(exceptionMessage);
        throw new RedisCacheAccessException(exceptionMessage, e);
    }
    if (CollectionUtils.isEmpty(dataList)) {
        return new LinkedList<>();
    }
    List<V> resultList = new LinkedList<>();
    dataList.forEach(data -> resultList.add(JSON.parseObject(data, valueClazz)));
    return resultList;
}
/**
 * Stores all objects of the given list, updating both the flat id map and the
 * parent-keyed tree multimap, and keeping getAll's order consistent with the list.
 *
 * @param dataList the objects to cache.
 */
@Override
public void putAll(List<V> dataList) {
    if (CollectionUtils.isEmpty(dataList)) {
        return;
    }
    // Assemble both structures outside the lock to keep the critical section small.
    Map<K, String> map = dataList.stream()
            .collect(Collectors.toMap(idGetter, JSON::toJSONString));
    Multimap<K, String> treeMap = LinkedListMultimap.create();
    for (V data : dataList) {
        treeMap.put(parentIdGetter.apply(data), JSON.toJSONString(data));
    }
    Set<Map.Entry<K, Collection<String>>> entries = treeMap.asMap().entrySet();
    String exceptionMessage;
    // (The unused tenantId local of the original was removed; the tenant id is
    // resolved inside the data-map accessors.)
    try {
        if (this.lock.writeLock().tryLock(TIMEOUT, TimeUnit.MILLISECONDS)) {
            try {
                this.getTenantDataMap().putAll(map, 1000);
                RListMultimap<K, String> allTenantTreeMap = this.getTenantTreeDataMap();
                for (Map.Entry<K, Collection<String>> entry : entries) {
                    // Replace each parent's child list wholesale.
                    allTenantTreeMap.removeAll(entry.getKey());
                    allTenantTreeMap.putAll(entry.getKey(), entry.getValue());
                }
            } finally {
                lock.writeLock().unlock();
            }
        } else {
            throw new TimeoutException();
        }
    } catch (Exception e) {
        exceptionMessage = String.format(
                "LOCK Operation of [RedisTreeDictionaryCache::putAll] encountered EXCEPTION [%s] for DICT [%s].",
                e.getClass().getSimpleName(), valueClazz.getSimpleName());
        log.warn(exceptionMessage);
        throw new RedisCacheAccessException(exceptionMessage, e);
    }
}
/**
 * Stores one object under the given key, maintaining both the flat id map and the
 * parent-keyed tree multimap.
 *
 * @param id   usually the dictionary object's primary key.
 * @param data the dictionary object.
 */
@Override
public void put(K id, V data) {
    if (id == null || data == null) {
        return;
    }
    String stringData = JSON.toJSONString(data);
    K parentId = parentIdGetter.apply(data);
    String exceptionMessage;
    try {
        if (this.lock.writeLock().tryLock(TIMEOUT, TimeUnit.MILLISECONDS)) {
            try {
                RMap<K, String> tenantDataMap = this.getTenantDataMap();
                String oldData = tenantDataMap.put(id, stringData);
                // FIX: the stale entry must be removed from (and the new entry added
                // to) the TREE multimap; the original mistakenly operated on the flat
                // RMap here, corrupting it with a parent-keyed entry and never
                // updating the tree structure.
                RListMultimap<K, String> tenantTreeDataMap = this.getTenantTreeDataMap();
                if (oldData != null) {
                    // NOTE(review): if the object's parent changed, the stale entry
                    // lives under the OLD parent and is not removed here — pre-existing
                    // limitation, confirm whether parent moves must be supported.
                    tenantTreeDataMap.remove(parentId, oldData);
                }
                tenantTreeDataMap.put(parentId, stringData);
            } finally {
                lock.writeLock().unlock();
            }
        } else {
            throw new TimeoutException();
        }
    } catch (Exception e) {
        exceptionMessage = String.format(
                "LOCK Operation of [RedisTreeDictionaryCache::put] encountered EXCEPTION [%s] for DICT [%s].",
                e.getClass().getSimpleName(), valueClazz.getSimpleName());
        log.warn(exceptionMessage);
        throw new RedisCacheAccessException(exceptionMessage, e);
    }
}
/**
 * Reloads the cache: clears both the flat map and the tree multimap, then rewrites
 * them from the given list. Overridden (rather than relying on the superclass)
 * because it is unclear whether the lock is reentrant across the two structures.
 *
 * @param dataList the objects to cache.
 * @param force    if true, always refresh; if false, skip when the cache is non-empty.
 */
@Override
public void reload(List<V> dataList, boolean force) {
    // Assemble both structures outside the lock to keep the critical section small.
    Map<K, String> map = null;
    Set<Map.Entry<K, Collection<String>>> entries = null;
    if (CollectionUtils.isNotEmpty(dataList)) {
        map = dataList.stream().collect(Collectors.toMap(idGetter, JSON::toJSONString));
        Multimap<K, String> treeMap = LinkedListMultimap.create();
        for (V data : dataList) {
            treeMap.put(parentIdGetter.apply(data), JSON.toJSONString(data));
        }
        entries = treeMap.asMap().entrySet();
    }
    String exceptionMessage;
    // (The unused tenantId local of the original was removed; the tenant id is
    // resolved inside the data-map accessors.)
    try {
        if (lock.writeLock().tryLock(TIMEOUT, TimeUnit.MILLISECONDS)) {
            try {
                // Without force, only refresh an empty cache.
                if (!force && this.getCount() > 0) {
                    return;
                }
                RMap<K, String> tenantDataMap = this.getTenantDataMap();
                tenantDataMap.clear();
                RListMultimap<K, String> allTenantTreeMap = this.getTenantTreeDataMap();
                allTenantTreeMap.clear();
                if (map != null) {
                    tenantDataMap.putAll(map, 1000);
                    for (Map.Entry<K, Collection<String>> entry : entries) {
                        allTenantTreeMap.removeAll(entry.getKey());
                        allTenantTreeMap.putAll(entry.getKey(), entry.getValue());
                    }
                }
            } finally {
                lock.writeLock().unlock();
            }
        } else {
            throw new TimeoutException();
        }
    } catch (Exception e) {
        exceptionMessage = String.format(
                "LOCK Operation of [RedisDictionaryCache::reload] encountered EXCEPTION [%s] for DICT [%s].",
                e.getClass().getSimpleName(), valueClazz.getSimpleName());
        log.warn(exceptionMessage);
        throw new RedisCacheAccessException(exceptionMessage, e);
    }
}
/**
* 删除缓存中指定的键。
*
* @param id 待删除数据的主键。
* @return 返回被删除的对象如果主键不存在返回null。
*/
@Override
public V invalidate(K id) {
if (id == null) {
return null;
}
V data = null;
String exceptionMessage;
Long tenantId = TokenData.takeFromRequest().getTenantId();
try {
if (this.lock.writeLock().tryLock(TIMEOUT, TimeUnit.MILLISECONDS)) {
try {
String stringData = this.getTenantDataMap().remove(id);
if (stringData != null) {
data = JSON.parseObject(stringData, valueClazz);
K parentId = parentIdGetter.apply(data);
this.getTenantTreeDataMap().remove(parentId, stringData);
}
} finally {
lock.writeLock().unlock();
}
} else {
throw new TimeoutException();
}
} catch (Exception e) {
exceptionMessage = String.format(
"LOCK Operation of [RedisTreeDictionaryCache::invalidate] encountered EXCEPTION [%s] for DICT [%s].",
e.getClass().getSimpleName(), valueClazz.getSimpleName());
log.warn(exceptionMessage);
throw new RedisCacheAccessException(exceptionMessage, e);
}
return data;
}
/**
* 删除缓存中,参数列表中包含的键。
*
* @param keys 待删除数据的主键集合。
*/
@Override
public void invalidateSet(Set<K> keys) {
if (CollectionUtils.isEmpty(keys)) {
return;
}
String exceptionMessage;
Long tenantId = TokenData.takeFromRequest().getTenantId();
try {
if (lock.writeLock().tryLock(TIMEOUT, TimeUnit.MILLISECONDS)) {
try {
keys.forEach(id -> {
if (id != null) {
String stringData = this.getTenantDataMap().remove(id);
if (stringData != null) {
K parentId = parentIdGetter.apply(JSON.parseObject(stringData, valueClazz));
this.getTenantTreeDataMap().remove(parentId, stringData);
}
}
});
} finally {
lock.writeLock().unlock();
}
} else {
throw new TimeoutException();
}
} catch (Exception e) {
exceptionMessage = String.format(
"LOCK Operation of [RedisTreeDictionaryCache::invalidateSet] encountered EXCEPTION [%s] for DICT [%s].",
e.getClass().getSimpleName(), valueClazz.getSimpleName());
log.warn(exceptionMessage);
throw new RedisCacheAccessException(exceptionMessage, e);
}
}
/**
* 清空缓存。
*/
@Override
public void invalidateAll() {
String exceptionMessage;
Long tenantId = TokenData.takeFromRequest().getTenantId();
try {
if (lock.writeLock().tryLock(TIMEOUT, TimeUnit.MILLISECONDS)) {
try {
this.getTenantDataMap().clear();
this.getTenantTreeDataMap().clear();
} finally {
lock.writeLock().unlock();
}
} else {
throw new TimeoutException();
}
} catch (Exception e) {
exceptionMessage = String.format(
"LOCK Operation of [RedisTreeDictionaryCache::invalidateAll] encountered EXCEPTION [%s] for DICT [%s].",
e.getClass().getSimpleName(), valueClazz.getSimpleName());
log.warn(exceptionMessage);
throw new RedisCacheAccessException(exceptionMessage, e);
}
}
}

View File

@@ -2,6 +2,7 @@ package com.orange.demo.common.redis.cache;
import com.alibaba.fastjson.JSON;
import lombok.extern.slf4j.Slf4j;
import com.orange.demo.common.core.constant.ApplicationConstant;
import com.orange.demo.common.core.exception.RedisCacheAccessException;
import com.google.common.collect.LinkedListMultimap;
import com.google.common.collect.Multimap;
@@ -79,7 +80,8 @@ public class RedisTreeDictionaryCache<K, V> extends RedisDictionaryCache<K, V> {
Function<V, K> idGetter,
Function<V, K> parentIdGetter) {
super(redissonClient, dictionaryName, valueClazz, idGetter);
this.allTreeMap = redissonClient.getListMultimap(dictionaryName + "-TREE-DICT");
this.allTreeMap = redissonClient.getListMultimap(
dictionaryName + ApplicationConstant.TREE_DICT_CACHE_NAME_SUFFIX);
this.parentIdGetter = parentIdGetter;
}

View File

@@ -13,6 +13,7 @@
<modules>
<module>common-core</module>
<module>common-datafilter</module>
<module>common-redis</module>
<module>common-sequence</module>
<module>common-swagger</module>