How to Support Dynamic Headers in a Custom Excel Export

Introduction

This article assumes you have read the prerequisite document How to Customize the Excel Export Feature (如何自定义Excel导出功能). The dynamic-header feature described here is a further extension built on top of it; anything not covered in this article follows the prerequisite document.

In day-to-day business development, export scenarios sometimes call for dynamic headers. A typical example is reporting each product's sales over the last month: the fixed header columns hold basic product information such as the name, the dynamic header columns are the dates of the last month, and the export fills in the sales quantity for each date. This article provides sample code for that scenario.
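
Since the dynamic columns in this scenario are simply date strings, a small helper can generate the column keys for the last month. The sketch below is illustrative only: the package, class and method names (DateKeyHelper, lastMonthDateKeys) and the yyyy-MM-dd format are assumptions, not part of the platform API. The same strings are used later both as the dynamic column names and as the keys written into the item's dynamic-field map.

package pro.shushi.pamirs.demo.core.excel.exportdemo.util;

import java.time.LocalDate;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.List;

public class DateKeyHelper {

    private static final DateTimeFormatter DAY = DateTimeFormatter.ofPattern("yyyy-MM-dd");

    // One key per day of the last 30 days, e.g. "2024-09-10".
    public static List<String> lastMonthDateKeys() {
        List<String> keys = new ArrayList<>();
        LocalDate today = LocalDate.now();
        for (LocalDate day = today.minusDays(30); day.isBefore(today); day = day.plusDays(1)) {
            keys.add(day.format(DAY));
        }
        return keys;
    }
}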

1. Define the custom export task model

package pro.shushi.pamirs.demo.api.model;

import pro.shushi.pamirs.file.api.model.ExcelExportTask;
import pro.shushi.pamirs.meta.annotation.Model;

@Model.model(DemoItemDynamicExcelExportTask.MODEL_MODEL)
@Model(displayName = "商品-Excel动态表头导出任务")
public class DemoItemDynamicExcelExportTask extends ExcelExportTask {
    public static final String MODEL_MODEL = "demo.DemoItemDynamicExcelExportTask";

}

2. Define the extension point that processes the export data

package pro.shushi.pamirs.demo.core.excel.exportdemo.extPoint;

import org.springframework.stereotype.Component;
import pro.shushi.pamirs.core.common.FetchUtil;
import pro.shushi.pamirs.core.common.cache.MemoryIterableSearchCache;
import pro.shushi.pamirs.demo.api.model.DemoItem;
import pro.shushi.pamirs.file.api.config.FileConstant;
import pro.shushi.pamirs.file.api.context.ExcelDefinitionContext;
import pro.shushi.pamirs.file.api.enmu.ExcelTemplateTypeEnum;
import pro.shushi.pamirs.file.api.entity.EasyExcelCellDefinition;
import pro.shushi.pamirs.file.api.extpoint.impl.ExcelExportSameQueryPageTemplate;
import pro.shushi.pamirs.file.api.model.ExcelExportTask;
import pro.shushi.pamirs.file.api.model.ExcelWorkbookDefinition;
import pro.shushi.pamirs.file.api.util.ExcelFixedHeadHelper;
import pro.shushi.pamirs.file.api.util.ExcelHelper;
import pro.shushi.pamirs.file.api.util.ExcelTemplateInit;
import pro.shushi.pamirs.framework.common.entry.TreeNode;
import pro.shushi.pamirs.meta.annotation.ExtPoint;
import pro.shushi.pamirs.meta.api.CommonApiFactory;
import pro.shushi.pamirs.meta.api.core.orm.ReadApi;
import pro.shushi.pamirs.meta.api.core.orm.systems.relation.RelationReadApi;
import pro.shushi.pamirs.meta.api.dto.config.ModelConfig;
import pro.shushi.pamirs.meta.api.dto.config.ModelFieldConfig;
import pro.shushi.pamirs.meta.api.session.PamirsSession;
import pro.shushi.pamirs.meta.enmu.TtypeEnum;
import pro.shushi.pamirs.meta.util.FieldUtils;

import java.util.*;

@Component
public class DemoItemDynamicExportExtPoint extends ExcelExportSameQueryPageTemplate<DemoItem> implements ExcelTemplateInit {
    public static final String TEMPLATE_NAME ="商品动态导出";

    @Override
    public List<ExcelWorkbookDefinition> generator() {
        ExcelFixedHeadHelper excelFixedHeadHelper = ExcelHelper.fixedHeader(DemoItem.MODEL_MODEL,TEMPLATE_NAME)
                .createBlock(TEMPLATE_NAME, DemoItem.MODEL_MODEL)
                .setType(ExcelTemplateTypeEnum.EXPORT);
        return Collections.singletonList(excelFixedHeadHelper.build());
    }

    public static void buildHeader(ExcelFixedHeadHelper excelFixedHeadHelper) {
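        // Fixed header columns: the first argument is the DemoItem field name, the second is the header label shown in Excel.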
        excelFixedHeadHelper.addColumn("name","名称")
                .addColumn("cateName","类目")
                .addColumn("searchFrom","搜索来源")
                .addColumn("description","描述")
                .addColumn("itemPrice","单价")
                .addColumn("inventoryQuantity","库存");
    }

    @Override
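    // Only takes effect when the export context matches the DemoItem model and this template name.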
    @ExtPoint.Implement(expression = "context.model == \"" + DemoItem.MODEL_MODEL+"\" && context.name == \"" +TEMPLATE_NAME+"\"" )
    public List<Object> fetchExportData(ExcelExportTask exportTask, ExcelDefinitionContext context) {
        List<Object> result = super.fetchExportData(exportTask,context);
        Object block = result.get(0);
        if (block instanceof ArrayList) {
            ((List<Object>) block).forEach(o -> {
                if (o instanceof DemoItem) {
                    DemoItem item = (DemoItem) o;
                    // TODO: set the values of the dynamic header fields here; the keys must match the dynamic column names
                    item.get_d().put("2024-09-10", "1111");
                    item.get_d().put("2024-09-11", "2222");
                }
            });
        }
        return result;
    }

    protected <DM extends ReadApi> boolean selectRelationField(DM dataManager, String model, List<TreeNode<EasyExcelCellDefinition>> fieldNodeList, Collection<?> list, Integer maxSupportLength, int currentMaxLength) {
        Set<String> hasQueryFields = new HashSet<>();
        RelationReadApi relationManagerProcessor = CommonApiFactory.getApi(RelationReadApi.class);
        ModelConfig modelConfig = PamirsSession.getContext().getModelConfig(model);
        if (modelConfig == null) {
            return true;
        }
        MemoryIterableSearchCache<String, ModelFieldConfig> modelFieldsCache = new MemoryIterableSearchCache<>(modelConfig.getModelFieldConfigList(), ModelFieldConfig::getLname);
        for (TreeNode<EasyExcelCellDefinition> fieldNode : fieldNodeList) {
            String fieldKey = fieldNode.getKey();
            TreeNode<EasyExcelCellDefinition> parentFieldNode = fieldNode.getParent();
            if (parentFieldNode != null) {
                fieldKey = fieldKey.substring(parentFieldNode.getKey().length() + 1);
                int pi = fieldKey.indexOf(FileConstant.POINT_CHARACTER);
                if (pi != -1) {
                    fieldKey = fieldKey.substring(0, pi);
                }
            }
            ModelFieldConfig modelFieldConfig = modelFieldsCache.get(fieldKey);
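            // Dynamic header columns (for example the date columns added in the action) have no matching
            // model field; skip them here instead of throwing, so the export can continue.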
            if (modelFieldConfig == null) {
                continue;
//                throw new IllegalArgumentException("模板中存在无效的模型字段 model=" + model + ", field=" + fieldKey);
            }
            if (!TtypeEnum.isRelationType(modelFieldConfig.getTtype())) {
                continue;
            }
            List<Object> needQueryList = new ArrayList<>();
            for (Object item : list) {
                if (relationManagerProcessor.isNeedQueryRelation(modelFieldConfig, FieldUtils.getFieldValue(item, modelFieldConfig.getLname()))) {
                    needQueryList.add(item);
                }
            }
            if (needQueryList.isEmpty()) {
                continue;
            }
            String field, indexString;
            int li = fieldKey.indexOf("["), ri = fieldKey.indexOf("]"), index;
            if (li != -1 && ri != -1) {
                indexString = fieldKey.substring(li + 1, ri);
                index = Integer.parseInt(indexString);
                field = fieldKey.substring(0, li);
                if (!hasQueryFields.contains(field)) {
                    currentMaxLength = computeCurrentMaxLength(maxSupportLength, currentMaxLength, needQueryList.size(), FetchUtil.listFieldQuery(dataManager, needQueryList, modelFieldsCache.get(field), maxSupportLength));
                    hasQueryFields.add(field);
                }
                if (index < needQueryList.size()) {
                    Object value = needQueryList.get(index);
                    if (!selectRelationField(dataManager, modelFieldConfig.getReferences(), fieldNode.getChildren(), Collections.singletonList(value), maxSupportLength, currentMaxLength)) {
                        return false;
                    }
                }
            } else if (li == -1 && ri == -1) {
                field = fieldKey;
                if (!hasQueryFields.contains(field)) {
                    currentMaxLength = computeCurrentMaxLength(maxSupportLength, currentMaxLength, needQueryList.size(), FetchUtil.listFieldQuery(dataManager, needQueryList, modelFieldsCache.get(field), maxSupportLength));
                    if (currentMaxLength == -1) {
                        return false;
                    }
                    hasQueryFields.add(field);
                }
                boolean isCollection = fieldNode.getValue().getIsCollection();
                List<Object> childList = new ArrayList<>();
                for (Object item : needQueryList) {
                    Object relationValue = FieldUtils.getFieldValue(item, field);
                    if (relationValue == null) {
                        continue;
                    }
                    if (isCollection) {
                        childList.addAll((Collection<?>) relationValue);
                    } else {
                        childList.add(relationValue);
                    }
                }
                if (!selectRelationField(dataManager, modelFieldConfig.getReferences(), fieldNode.getChildren(), childList, maxSupportLength, currentMaxLength)) {
                    return false;
                }
            }
        }
        return true;
    }

    private int computeCurrentMaxLength(int excelMaxSupportLength, int currentMaxLength, int needQuerySize, int listFieldQuerySize) {
        int incrementalSize = listFieldQuerySize - needQuerySize;
        if (incrementalSize <= 0) {
            return currentMaxLength;
        }
        currentMaxLength += incrementalSize;
        if (currentMaxLength > excelMaxSupportLength) {
            return -1;
        }
        return currentMaxLength;
    }
}
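
The fetchExportData override above hard-codes two dates and two sample values. In a real implementation you would look up each item's per-day sales and write them into the dynamic-field map under the same keys that the action registers as dynamic columns. The following is only a sketch: it assumes a per-item, per-day sales map that you would populate with your own query, and that DemoItem exposes getId(); neither is a platform API.

// Sketch only: call this from fetchExportData for each DemoItem instead of the hard-coded puts.
// dateKeys must be exactly the strings used as dynamic column names in the action.
private void fillDynamicSales(DemoItem item, List<String> dateKeys,
                              Map<Long, Map<String, Long>> salesByItemAndDay) {
    Map<String, Long> salesByDay =
            salesByItemAndDay.getOrDefault(item.getId(), Collections.emptyMap());
    for (String dateKey : dateKeys) {
        // The export writes whatever value is stored under the dynamic column key; default to 0.
        item.get_d().put(dateKey, String.valueOf(salesByDay.getOrDefault(dateKey, 0L)));
    }
}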

3. Define the action for the custom export task

package pro.shushi.pamirs.demo.core.action;

import org.apache.commons.lang3.StringUtils;
import org.springframework.stereotype.Component;
import pro.shushi.pamirs.demo.api.model.DemoItemDynamicExcelExportTask;
import pro.shushi.pamirs.demo.core.excel.exportdemo.extPoint.DemoItemDynamicExportExtPoint;
import pro.shushi.pamirs.file.api.action.AbstractExcelExportTaskAction;
import pro.shushi.pamirs.file.api.config.ExcelConstant;
import pro.shushi.pamirs.file.api.context.ExcelDefinitionContext;
import pro.shushi.pamirs.file.api.enmu.FileExpEnumerate;
import pro.shushi.pamirs.file.api.model.ExcelWorkbookDefinition;
import pro.shushi.pamirs.file.api.util.ExcelFixedHeadHelper;
import pro.shushi.pamirs.file.api.util.ExcelHelper;
import pro.shushi.pamirs.file.api.util.ExcelWorkbookDefinitionUtil;
import pro.shushi.pamirs.meta.annotation.Action;
import pro.shushi.pamirs.meta.annotation.Function;
import pro.shushi.pamirs.meta.annotation.Model;
import pro.shushi.pamirs.meta.annotation.fun.extern.Slf4j;
import pro.shushi.pamirs.meta.api.dto.config.ModelConfig;
import pro.shushi.pamirs.meta.api.session.PamirsSession;
import pro.shushi.pamirs.meta.common.exception.PamirsException;
import pro.shushi.pamirs.meta.enmu.ActionContextTypeEnum;
import pro.shushi.pamirs.meta.enmu.FunctionOpenEnum;
import pro.shushi.pamirs.meta.enmu.FunctionTypeEnum;
import pro.shushi.pamirs.meta.enmu.ViewTypeEnum;

@Slf4j
@Component
@Model.model(DemoItemDynamicExcelExportTask.MODEL_MODEL)
public class DemoItemDynamicExcelExportTaskAction extends AbstractExcelExportTaskAction<DemoItemDynamicExcelExportTask> {

    @Action(displayName = "导出", contextType = ActionContextTypeEnum.CONTEXT_FREE, bindingType = {ViewTypeEnum.TABLE})
    public DemoItemDynamicExcelExportTask createExportTask(DemoItemDynamicExcelExportTask data) {
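        // If the front end only passes the workbook definition id, wrap it into an entity before delegating to the parent implementation.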
        if (data.getWorkbookDefinitionId() != null) {
            ExcelWorkbookDefinition workbookDefinition = new ExcelWorkbookDefinition();
            workbookDefinition.setId(data.getWorkbookDefinitionId());
            data.setWorkbookDefinition(workbookDefinition);
        }
        super.createExportTask(data);
        return data;
    }

    protected ExcelDefinitionContext fetchExcelDefinitionContextByWorkbookDefinition(DemoItemDynamicExcelExportTask data) {
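        // Builds the workbook definition in code: the fixed columns from buildHeader plus the dynamic date columns below.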
        String model = data.getModel();
        if (StringUtils.isBlank(model)) {
            throw PamirsException.construct(FileExpEnumerate.EXPORT_MODEL_IS_NULL).errThrow();
        }
        ModelConfig modelConfig = PamirsSession.getContext().getSimpleModelConfig(model);
        if (modelConfig == null) {
            throw PamirsException.construct(FileExpEnumerate.EXPORT_MODEL_NOT_EXIST).errThrow();
        }
        ExcelFixedHeadHelper fixedHeadHelper = ExcelHelper.fixedHeader(model, DemoItemDynamicExportExtPoint.TEMPLATE_NAME)
                .setDisplayName(modelConfig.getDisplayName() + ExcelConstant.EXPORT_NAME)
                .createBlock(modelConfig.getDisplayName(), model);

        // Custom Excel header START
        DemoItemDynamicExportExtPoint.buildHeader(fixedHeadHelper);
        // TODO: build the dynamic header. The first argument of addColumn is the dynamic field name,
        // the second is the text shown in the header; the data-processing extension point must set a
        // value under the same dynamic field key.
        fixedHeadHelper.addColumn("2024-09-10", "2024-09-10");
        fixedHeadHelper.addColumn("2024-09-11", "2024-09-11");
        // Custom Excel header END

        ExcelWorkbookDefinition workbookDefinition = fixedHeadHelper.build();
        data.setWorkbookDefinition(workbookDefinition);
        processClearExportStyle(data);
        return ExcelWorkbookDefinitionUtil.getDefinitionContext(workbookDefinition);
    }

    /**
     * Standard construct function: initializes the default values of the export task record.
     */
    @Function(openLevel = FunctionOpenEnum.API)
    @Function.Advanced(type = FunctionTypeEnum.QUERY)
    public DemoItemDynamicExcelExportTask construct(DemoItemDynamicExcelExportTask data) {
        data.construct();
        return data;
    }

    @Override
    protected void doExport(DemoItemDynamicExcelExportTask exportTask, ExcelDefinitionContext context) {
        if (null != exportTask.getSync() && exportTask.getSync()) {
            excelFileService.doExportSync(exportTask, context);
        } else {
            excelFileService.doExportAsync(exportTask, context);
        }
    }
}
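
For the last-month sales scenario, the two hard-coded addColumn calls between the START/END markers would normally be replaced by a loop over generated date keys. A sketch, reusing the hypothetical lastMonthDateKeys() helper from the introduction:

// One dynamic column per day: the column key and the header text are both the formatted date.
// fetchExportData must put the corresponding value under exactly the same key in item.get_d().
for (String dateKey : DateKeyHelper.lastMonthDateKeys()) {
    fixedHeadHelper.addColumn(dateKey, dateKey);
}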

4. Export result preview

(Screenshot: preview of the exported spreadsheet.)

Going further

As the example above shows, any scenario that needs a customized export header can be handled in a similar way inside DemoItemDynamicExcelExportTaskAction.fetchExcelDefinitionContextByWorkbookDefinition.

Oinone Community, original article by nation. If you repost it, please credit the source: https://doc.oinone.top/backend/17174.html
