How to Support Dynamic Headers in Custom Excel Exports

Introduction

This article assumes you have read the prerequisite document on how to customize the Excel export feature; the dynamic-header capability described here is a further extension built on top of it, and anything not covered below can be found in that prerequisite document.

In day-to-day business development, export scenarios often require dynamic headers. A typical example is reporting an item's sales over the last month: the fixed header columns hold basic information such as the item name, the dynamic header columns are the dates of the last month, and the sales figure for each date is filled in at export time. The sample code in this article is built around this scenario.
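
To make the target layout concrete, here is an illustrative sketch of the exported sheet (the fixed columns are the ones defined in step 2; the two dates and the sales figures are the sample values used in the code below, and the item row is made up):

名称 | 类目 | 搜索来源 | 描述 | 单价 | 库存 | 2024-09-10 | 2024-09-11 | ...
商品A | ... | ... | ... | ... | ... | 1111 | 2222 | ...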

1. Custom export task model

package pro.shushi.pamirs.demo.api.model;

import pro.shushi.pamirs.file.api.model.ExcelExportTask;
import pro.shushi.pamirs.meta.annotation.Model;

@Model.model(DemoItemDynamicExcelExportTask.MODEL_MODEL)
@Model(displayName = "商品-Excel动态表头导出任务")
public class DemoItemDynamicExcelExportTask extends ExcelExportTask {
    public static final String MODEL_MODEL = "demo.DemoItemDynamicExcelExportTask";

}

2. Custom extension point for processing the export data

package pro.shushi.pamirs.demo.core.excel.exportdemo.extPoint;

import org.springframework.stereotype.Component;
import pro.shushi.pamirs.core.common.FetchUtil;
import pro.shushi.pamirs.core.common.cache.MemoryIterableSearchCache;
import pro.shushi.pamirs.demo.api.model.DemoItem;
import pro.shushi.pamirs.file.api.config.FileConstant;
import pro.shushi.pamirs.file.api.context.ExcelDefinitionContext;
import pro.shushi.pamirs.file.api.enmu.ExcelTemplateTypeEnum;
import pro.shushi.pamirs.file.api.entity.EasyExcelCellDefinition;
import pro.shushi.pamirs.file.api.extpoint.impl.ExcelExportSameQueryPageTemplate;
import pro.shushi.pamirs.file.api.model.ExcelExportTask;
import pro.shushi.pamirs.file.api.model.ExcelWorkbookDefinition;
import pro.shushi.pamirs.file.api.util.ExcelFixedHeadHelper;
import pro.shushi.pamirs.file.api.util.ExcelHelper;
import pro.shushi.pamirs.file.api.util.ExcelTemplateInit;
import pro.shushi.pamirs.framework.common.entry.TreeNode;
import pro.shushi.pamirs.meta.annotation.ExtPoint;
import pro.shushi.pamirs.meta.api.CommonApiFactory;
import pro.shushi.pamirs.meta.api.core.orm.ReadApi;
import pro.shushi.pamirs.meta.api.core.orm.systems.relation.RelationReadApi;
import pro.shushi.pamirs.meta.api.dto.config.ModelConfig;
import pro.shushi.pamirs.meta.api.dto.config.ModelFieldConfig;
import pro.shushi.pamirs.meta.api.session.PamirsSession;
import pro.shushi.pamirs.meta.enmu.TtypeEnum;
import pro.shushi.pamirs.meta.util.FieldUtils;

import java.util.*;

@Component
public class DemoItemDynamicExportExtPoint extends ExcelExportSameQueryPageTemplate<DemoItem> implements ExcelTemplateInit {
    public static final String TEMPLATE_NAME = "商品动态导出";

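    // Declares the fixed-header export template (TEMPLATE_NAME) for the DemoItem model;
    // how export templates are initialized is covered in the prerequisite document.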
    @Override
    public List<ExcelWorkbookDefinition> generator() {
        ExcelFixedHeadHelper excelFixedHeadHelper = ExcelHelper.fixedHeader(DemoItem.MODEL_MODEL,TEMPLATE_NAME)
                .createBlock(TEMPLATE_NAME, DemoItem.MODEL_MODEL)
                .setType(ExcelTemplateTypeEnum.EXPORT);
        return Collections.singletonList(excelFixedHeadHelper.build());
    }

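    // Fixed header columns. This is also reused by the export action in step 3, which
    // appends the dynamic date columns after these fixed ones.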
    public static void buildHeader(ExcelFixedHeadHelper excelFixedHeadHelper) {
        excelFixedHeadHelper.addColumn("name","名称")
                .addColumn("cateName","类目")
                .addColumn("searchFrom","搜索来源")
                .addColumn("description","描述")
                .addColumn("itemPrice","单价")
                .addColumn("inventoryQuantity","库存");
    }

    @Override
    @ExtPoint.Implement(expression = "context.model == \"" + DemoItem.MODEL_MODEL+"\" && context.name == \"" +TEMPLATE_NAME+"\"" )
    public List<Object> fetchExportData(ExcelExportTask exportTask, ExcelDefinitionContext context) {
        List<Object> result = super.fetchExportData(exportTask,context);
        Object block = result.get(0);
        if (block instanceof ArrayList) {
            ((List<Object>) block).forEach(o -> {
                if (o instanceof DemoItem) {
                    DemoItem item = (DemoItem) o;
                    // TODO: set the values of the dynamic header fields here
                    item.get_d().put("2024-09-10", "1111");
                    item.get_d().put("2024-09-11", "2222");
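                    // The two puts above use hard-coded sample values. A hedged sketch of a real
                    // implementation (querySalesOfLastMonth is a hypothetical helper, not part of
                    // the framework): query this item's sales per day and write each day's quantity
                    // under the matching dynamic column key, e.g.
                    // Map<String, Long> salesByDay = querySalesOfLastMonth(item.getId());
                    // salesByDay.forEach((day, qty) -> item.get_d().put(day, String.valueOf(qty)));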
                }
            });
        }
        return result;
    }

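    // Relation-field resolution for the export. Note that a header field that cannot be
    // resolved on the model (which is the case for the dynamic date columns) is simply
    // skipped (see the commented-out exception below) instead of failing, so dynamic
    // headers do not break relation loading.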
    protected <DM extends ReadApi> boolean selectRelationField(DM dataManager, String model, List<TreeNode<EasyExcelCellDefinition>> fieldNodeList, Collection<?> list, Integer maxSupportLength, int currentMaxLength) {
        Set<String> hasQueryFields = new HashSet<>();
        RelationReadApi relationManagerProcessor = CommonApiFactory.getApi(RelationReadApi.class);
        ModelConfig modelConfig = PamirsSession.getContext().getModelConfig(model);
        if (modelConfig == null) {
            return true;
        }
        MemoryIterableSearchCache<String, ModelFieldConfig> modelFieldsCache = new MemoryIterableSearchCache<>(modelConfig.getModelFieldConfigList(), ModelFieldConfig::getLname);
        for (TreeNode<EasyExcelCellDefinition> fieldNode : fieldNodeList) {
            String fieldKey = fieldNode.getKey();
            TreeNode<EasyExcelCellDefinition> parentFieldNode = fieldNode.getParent();
            if (parentFieldNode != null) {
                fieldKey = fieldKey.substring(parentFieldNode.getKey().length() + 1);
                int pi = fieldKey.indexOf(FileConstant.POINT_CHARACTER);
                if (pi != -1) {
                    fieldKey = fieldKey.substring(0, pi);
                }
            }
            ModelFieldConfig modelFieldConfig = modelFieldsCache.get(fieldKey);
            if (modelFieldConfig == null) {
                continue;
//                throw new IllegalArgumentException("模板中存在无效的模型字段 model=" + model + ", field=" + fieldKey);
            }
            if (!TtypeEnum.isRelationType(modelFieldConfig.getTtype())) {
                continue;
            }
            List<Object> needQueryList = new ArrayList<>();
            for (Object item : list) {
                if (relationManagerProcessor.isNeedQueryRelation(modelFieldConfig, FieldUtils.getFieldValue(item, modelFieldConfig.getLname()))) {
                    needQueryList.add(item);
                }
            }
            if (needQueryList.isEmpty()) {
                continue;
            }
            String field, indexString;
            int li = fieldKey.indexOf("["), ri = fieldKey.indexOf("]"), index;
            if (li != -1 && ri != -1) {
                indexString = fieldKey.substring(li + 1, ri);
                index = Integer.parseInt(indexString);
                field = fieldKey.substring(0, li);
                if (!hasQueryFields.contains(field)) {
                    currentMaxLength = computeCurrentMaxLength(maxSupportLength, currentMaxLength, needQueryList.size(), FetchUtil.listFieldQuery(dataManager, needQueryList, modelFieldsCache.get(field), maxSupportLength));
                    hasQueryFields.add(field);
                }
                if (index < needQueryList.size()) {
                    Object value = needQueryList.get(index);
                    if (!selectRelationField(dataManager, modelFieldConfig.getReferences(), fieldNode.getChildren(), Collections.singletonList(value), maxSupportLength, currentMaxLength)) {
                        return false;
                    }
                }
            } else if (li == -1 && ri == -1) {
                field = fieldKey;
                if (!hasQueryFields.contains(field)) {
                    currentMaxLength = computeCurrentMaxLength(maxSupportLength, currentMaxLength, needQueryList.size(), FetchUtil.listFieldQuery(dataManager, needQueryList, modelFieldsCache.get(field), maxSupportLength));
                    if (currentMaxLength == -1) {
                        return false;
                    }
                    hasQueryFields.add(field);
                }
                boolean isCollection = fieldNode.getValue().getIsCollection();
                List<Object> childList = new ArrayList<>();
                for (Object item : needQueryList) {
                    Object relationValue = FieldUtils.getFieldValue(item, field);
                    if (relationValue == null) {
                        continue;
                    }
                    if (isCollection) {
                        childList.addAll((Collection<?>) relationValue);
                    } else {
                        childList.add(relationValue);
                    }
                }
                if (!selectRelationField(dataManager, modelFieldConfig.getReferences(), fieldNode.getChildren(), childList, maxSupportLength, currentMaxLength)) {
                    return false;
                }
            }
        }
        return true;
    }

    private int computeCurrentMaxLength(int excelMaxSupportLength, int currentMaxLength, int needQuerySize, int listFieldQuerySize) {
        int incrementalSize = listFieldQuerySize - needQuerySize;
        if (incrementalSize <= 0) {
            return currentMaxLength;
        }
        currentMaxLength += incrementalSize;
        if (currentMaxLength > excelMaxSupportLength) {
            return -1;
        }
        return currentMaxLength;
    }
}

3. Custom action for the export task

package pro.shushi.pamirs.demo.core.action;

import org.apache.commons.lang3.StringUtils;
import org.springframework.stereotype.Component;
import pro.shushi.pamirs.demo.api.model.DemoItemDynamicExcelExportTask;
import pro.shushi.pamirs.demo.core.excel.exportdemo.extPoint.DemoItemDynamicExportExtPoint;
import pro.shushi.pamirs.file.api.action.AbstractExcelExportTaskAction;
import pro.shushi.pamirs.file.api.config.ExcelConstant;
import pro.shushi.pamirs.file.api.context.ExcelDefinitionContext;
import pro.shushi.pamirs.file.api.enmu.FileExpEnumerate;
import pro.shushi.pamirs.file.api.model.ExcelWorkbookDefinition;
import pro.shushi.pamirs.file.api.util.ExcelFixedHeadHelper;
import pro.shushi.pamirs.file.api.util.ExcelHelper;
import pro.shushi.pamirs.file.api.util.ExcelWorkbookDefinitionUtil;
import pro.shushi.pamirs.meta.annotation.Action;
import pro.shushi.pamirs.meta.annotation.Function;
import pro.shushi.pamirs.meta.annotation.Model;
import pro.shushi.pamirs.meta.annotation.fun.extern.Slf4j;
import pro.shushi.pamirs.meta.api.dto.config.ModelConfig;
import pro.shushi.pamirs.meta.api.session.PamirsSession;
import pro.shushi.pamirs.meta.common.exception.PamirsException;
import pro.shushi.pamirs.meta.enmu.ActionContextTypeEnum;
import pro.shushi.pamirs.meta.enmu.FunctionOpenEnum;
import pro.shushi.pamirs.meta.enmu.FunctionTypeEnum;
import pro.shushi.pamirs.meta.enmu.ViewTypeEnum;

@Slf4j
@Component
@Model.model(DemoItemDynamicExcelExportTask.MODEL_MODEL)
public class DemoItemDynamicExcelExportTaskAction extends AbstractExcelExportTaskAction<DemoItemDynamicExcelExportTask> {

    @Action(displayName = "导出", contextType = ActionContextTypeEnum.CONTEXT_FREE, bindingType = {ViewTypeEnum.TABLE})
    public DemoItemDynamicExcelExportTask createExportTask(DemoItemDynamicExcelExportTask data) {
        if (data.getWorkbookDefinitionId() != null) {
            ExcelWorkbookDefinition workbookDefinition = new ExcelWorkbookDefinition();
            workbookDefinition.setId(data.getWorkbookDefinitionId());
            data.setWorkbookDefinition(workbookDefinition);
        }
        super.createExportTask(data);
        return data;
    }

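    // Rebuilds the Excel definition at export time: the fixed columns come from
    // DemoItemDynamicExportExtPoint.buildHeader, after which the dynamic date columns
    // are appended.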
    protected ExcelDefinitionContext fetchExcelDefinitionContextByWorkbookDefinition(DemoItemDynamicExcelExportTask data) {
        String model = data.getModel();
        if (StringUtils.isBlank(model)) {
            throw PamirsException.construct(FileExpEnumerate.EXPORT_MODEL_IS_NULL).errThrow();
        }
        ModelConfig modelConfig = PamirsSession.getContext().getSimpleModelConfig(model);
        if (modelConfig == null) {
            throw PamirsException.construct(FileExpEnumerate.EXPORT_MODEL_NOT_EXIST).errThrow();
        }
        ExcelFixedHeadHelper fixedHeadHelper = ExcelHelper.fixedHeader(model, DemoItemDynamicExportExtPoint.TEMPLATE_NAME)
                .setDisplayName(modelConfig.getDisplayName() + ExcelConstant.EXPORT_NAME)
                .createBlock(modelConfig.getDisplayName(), model);

        // custom Excel header: START
        DemoItemDynamicExportExtPoint.buildHeader(fixedHeadHelper);
        // TODO: build the dynamic headers. The first argument of addColumn is the name of the dynamic field,
        // the second is the text shown in the header; the data-processing extension point must set a value
        // for each of these dynamic fields.
        fixedHeadHelper.addColumn("2024-09-10", "2024-09-10");
        fixedHeadHelper.addColumn("2024-09-11", "2024-09-11");
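        // The two columns above are hard-coded for the demo. A hedged sketch of generating the
        // last month's dates dynamically (assumes java.time; the column key and the display value
        // use the same ISO date string so fetchExportData can fill values by date):
        // java.time.LocalDate today = java.time.LocalDate.now();
        // for (int i = 30; i >= 1; i--) {
        //     String day = today.minusDays(i).toString();
        //     fixedHeadHelper.addColumn(day, day);
        // }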
        // custom Excel header: END

        ExcelWorkbookDefinition workbookDefinition = fixedHeadHelper.build();
        data.setWorkbookDefinition(workbookDefinition);
        processClearExportStyle(data);
        return ExcelWorkbookDefinitionUtil.getDefinitionContext(workbookDefinition);
    }

    /**
     * Construct function for the export task form.
     *
     * @param data the export task being constructed
     * @return the constructed export task
     */
    @Function(openLevel = FunctionOpenEnum.API)
    @Function.Advanced(type = FunctionTypeEnum.QUERY)
    public DemoItemDynamicExcelExportTask construct(DemoItemDynamicExcelExportTask data) {
        data.construct();
        return data;
    }

    @Override
    protected void doExport(DemoItemDynamicExcelExportTask exportTask, ExcelDefinitionContext context) {
        if (null != exportTask.getSync() && exportTask.getSync()) {
            excelFileService.doExportSync(exportTask, context);
        } else {
            excelFileService.doExportAsync(exportTask, context);
        }
    }
}

4. Export result preview

(Screenshot: preview of the exported Excel file.)

Going further

As the example above shows, any scenario that requires a customized header can be handled in a similar way inside DemoItemDynamicExcelExportTaskAction.fetchExcelDefinitionContextByWorkbookDefinition.

Oinone Community. Author: nation. Original article; please credit the source when reprinting: https://doc.oinone.top/backend/17174.html

