
Archive assembly efficiency optimization

laibulaizheli 2 months ago
parent
revision
af08216fb9

+ 3 - 0
blade-service-api/blade-business-api/src/main/java/org/springblade/business/feign/MetadataClassificationClient.java

@@ -29,4 +29,7 @@ public interface MetadataClassificationClient {
 
     @PostMapping(API_PREFIX + "/getMetadataClassification")
     List<MetadataClassification> getMetadataClassification();
+
+    @PostMapping(API_PREFIX + "/createMetadataFiles")
+    boolean createMetadataFiles(@RequestBody List<Long> fileIds);
 }

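Note: the new endpoint takes the ID list as a JSON request body (hence POST plus @RequestBody), so callers are not constrained by URL-length limits the way @RequestParam query strings would be. A minimal caller sketch, with placeholder IDs:

    // Hypothetical usage; 101L/102L/103L are placeholder IDs and the
    // Feign client is assumed to be injected.
    List<Long> ids = Arrays.asList(101L, 102L, 103L);
    boolean ok = metadataClassificationClient.createMetadataFiles(ids);
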
+ 1 - 0
blade-service-api/blade-manager-api/src/main/java/org/springblade/manager/feign/ArchiveTreeContractClient.java

@@ -78,4 +78,5 @@ public interface ArchiveTreeContractClient {
 
     @PostMapping(API_PREFIX+"/adsArchiveTreeContract")
     public void addArchiveTreeContract(@RequestBody List<ArchiveTreeContract> archiveTreeContracts,@RequestParam Long rootId);
+
 }

+ 53 - 12
blade-service/blade-archive/src/main/java/org/springblade/archive/service/impl/ArchivesAutoServiceImpl.java

@@ -76,6 +76,7 @@ import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.stereotype.Service;
 import com.baomidou.mybatisplus.core.metadata.IPage;
 import org.springframework.transaction.annotation.Transactional;
+import org.springframework.util.CollectionUtils;
 import org.springframework.web.multipart.MultipartFile;
 
 import javax.servlet.http.HttpServletResponse;
@@ -89,6 +90,7 @@ import java.time.LocalDateTime;
 import java.util.*;
 import java.util.List;
 import java.util.concurrent.ExecutorService;
+import java.util.concurrent.TimeUnit;
 import java.util.stream.Collectors;
 
 /**
@@ -1258,9 +1260,10 @@ public class ArchivesAutoServiceImpl extends BaseServiceImpl<ArchivesAutoMapper,
 		}
 		archiveFileClient.updateArchiveFileForCreateArchive(waitArchiveFiles);
 		try {
-			for (ArchiveFile saveVo : waitArchiveFiles) {
-				metadataClassificationClient.createMetadataFile(saveVo.getId(), 0);
-			}
+			batchCreateMetadataFiles(waitArchiveFiles);
 		} catch (Exception e) {
 			e.printStackTrace();
 		}
@@ -1332,9 +1335,7 @@ public class ArchivesAutoServiceImpl extends BaseServiceImpl<ArchivesAutoMapper,
 		}
 		archiveFileClient.updateArchiveFileForCreateArchive(waitArchiveFiles);
 		try {
-			for (ArchiveFile saveVo : waitArchiveFiles) {
-				metadataClassificationClient.createMetadataFile(saveVo.getId(), 0);
-			}
+			batchCreateMetadataFiles(waitArchiveFiles);
 		} catch (Exception e) {
 			e.printStackTrace();
 		}
@@ -1381,9 +1382,10 @@ public class ArchivesAutoServiceImpl extends BaseServiceImpl<ArchivesAutoMapper,
 		}
 		archiveFileClient.updateArchiveFileForCreateArchive(waitArchiveFiles);
 		try {
-			for (ArchiveFile saveVo : waitArchiveFiles) {
-				metadataClassificationClient.createMetadataFile(saveVo.getId(), 0);
-			}
+			batchCreateMetadataFiles(waitArchiveFiles);
 		} catch (Exception e) {
 			e.printStackTrace();
 		}
@@ -1436,9 +1438,10 @@ public class ArchivesAutoServiceImpl extends BaseServiceImpl<ArchivesAutoMapper,
 		}
 		archiveFileClient.updateArchiveFileForCreateArchive(waitArchiveFiles);
 		try {
-			for (ArchiveFile saveVo : waitArchiveFiles) {
-				metadataClassificationClient.createMetadataFile(saveVo.getId(), 0);
-			}
+			batchCreateMetadataFiles(waitArchiveFiles);
 		} catch (Exception e) {
 			e.printStackTrace();
 		}
@@ -4059,5 +4062,43 @@ public class ArchivesAutoServiceImpl extends BaseServiceImpl<ArchivesAutoMapper,
 		process.waitFor();
 	}
 
+	/**
+	 * Batch-create metadata records for the given archive files in a single Feign call.
+	 * @param waitArchiveFiles files waiting to be archived
+	 */
+	public void batchCreateMetadataFiles(List<ArchiveFile> waitArchiveFiles) {
+		if (CollectionUtils.isEmpty(waitArchiveFiles)) {
+			return;
+		}
+
+		// Extract all file IDs
+		List<Long> fileIds = waitArchiveFiles.stream()
+				.map(ArchiveFile::getId)
+				.collect(Collectors.toList());
+
+		long startTime = System.currentTimeMillis();
+		log.info("[auto archive assembly] updating metadata for {} files", fileIds.size());
+
+		// One batch call instead of one Feign call per file
+		boolean success = metadataClassificationClient.createMetadataFiles(fileIds);
+		long endTime = System.currentTimeMillis();
+
+		log.info("[auto archive assembly] metadata update finished, success: {}, elapsed: {}",
+				success, formatExecutionTime(endTime - startTime));
+	}
+
+
+	private String formatExecutionTime(long millis) {
+		if (millis < 1000) {
+			return millis + " ms";
+		} else if (millis < 60000) {
+			return String.format("%.2f s", millis / 1000.0);
+		} else {
+			long minutes = TimeUnit.MILLISECONDS.toMinutes(millis);
+			long seconds = TimeUnit.MILLISECONDS.toSeconds(millis) % 60;
+			return minutes + " min " + seconds + " s";
+		}
+	}
+
 
 }

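One caveat with the helper above: every ID in waitArchiveFiles ends up in a single Feign call and a single SQL IN clause. If batches can reach the thousands, a chunked variant keeps each call bounded. A sketch, assuming an arbitrary batch size of 500 (BATCH_SIZE and the method name are hypothetical):

    private static final int BATCH_SIZE = 500; // assumed limit, tune as needed

    public void batchCreateMetadataFilesChunked(List<ArchiveFile> waitArchiveFiles) {
        if (CollectionUtils.isEmpty(waitArchiveFiles)) {
            return;
        }
        List<Long> fileIds = waitArchiveFiles.stream()
                .map(ArchiveFile::getId)
                .collect(Collectors.toList());
        // Each sub-list becomes one Feign call and one bounded IN clause
        for (int from = 0; from < fileIds.size(); from += BATCH_SIZE) {
            int to = Math.min(from + BATCH_SIZE, fileIds.size());
            metadataClassificationClient.createMetadataFiles(fileIds.subList(from, to));
        }
    }
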
+ 6 - 0
blade-service/blade-business/src/main/java/org/springblade/business/feignClient/MetadataClassificationClientImpl.java

@@ -5,6 +5,7 @@ import org.springblade.business.entity.MetadataClassification;
 import org.springblade.business.feign.MetadataClassificationClient;
 import org.springblade.business.service.IMetadataClassificationService;
 import org.springblade.business.vo.MetadataClassificationVO;
+import org.springframework.web.bind.annotation.RequestBody;
 import org.springframework.web.bind.annotation.RestController;
 
 import java.util.List;
@@ -36,4 +37,9 @@ public class MetadataClassificationClientImpl implements MetadataClassificationC
     public List<MetadataClassification> getMetadataClassification() {
         return iMetadataClassificationService.getMetadataClassification();
     }
+
+    @Override
+    public boolean createMetadataFiles(@RequestBody List<Long> fileIds) {
+        return iMetadataClassificationService.createMetadataFiles(fileIds, 0);
+    }
 }

+ 1 - 0
blade-service/blade-business/src/main/java/org/springblade/business/mapper/MetadataClassificationMapper.java

@@ -29,4 +29,5 @@ public interface MetadataClassificationMapper extends BaseMapper<MetadataClassif
 
 
     List<HashMap<String, Object>> getMetadaFileByFileId(@Param("fileId") Long fileId);
+    List<HashMap<String, Object>> getMetadaFileByFileIds(@Param("fileIds") List<Long> fileIds);
 }

+ 9 - 0
blade-service/blade-business/src/main/java/org/springblade/business/mapper/MetadataClassificationMapper.xml

@@ -95,4 +95,13 @@
         where file_id = #{fileId}
         order by create_time desc limit 1
     </select>
+    <select id="getMetadaFileByFileIds" resultType="java.util.Map">
+        SELECT *
+        FROM u_metadata_file
+        WHERE is_deleted = 0 AND file_id IN
+        <foreach collection="fileIds" item="fileId" open="(" close=")" separator=",">
+            #{fileId}
+        </foreach>
+        ORDER BY file_id, create_time DESC
+    </select>
 </mapper>

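For reference, with fileIds = [1, 2, 3] the <foreach> above expands to roughly the following statement (illustrative only). The DESC ordering is what lets the service layer keep only the newest row per file:

    SELECT *
    FROM u_metadata_file
    WHERE is_deleted = 0 AND file_id IN (?, ?, ?)
    ORDER BY file_id, create_time DESC
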
+ 2 - 0
blade-service/blade-business/src/main/java/org/springblade/business/service/IMetadataClassificationService.java

@@ -26,4 +26,6 @@ public interface IMetadataClassificationService extends BaseService<MetadataClas
     boolean updateMetadata(MetadataClassificationVO vo);
 
     List<MetadataClassification> getMetadataClassification();
+
+    boolean createMetadataFiles(List<Long> fileIds, Integer type);
 }

+ 591 - 0
blade-service/blade-business/src/main/java/org/springblade/business/service/impl/MetadataClassificationServiceImpl.java

@@ -1,6 +1,8 @@
 package org.springblade.business.service.impl;
 
 
 import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
 import lombok.AllArgsConstructor;
 import org.apache.commons.lang.StringUtils;
@@ -37,7 +39,9 @@ import org.springframework.dao.DataAccessException;
 import org.springframework.jdbc.core.JdbcTemplate;
 import org.springframework.stereotype.Service;
 
+import java.sql.Statement;
 import java.util.*;
 import java.util.stream.Collectors;
 
 @Service
@@ -581,4 +585,591 @@ public class MetadataClassificationServiceImpl
         List<MetadataClassification> metadataClassifications = baseMapper.selectList(metadata);
         return metadataClassifications ;
     }
+
+    /**
+     * Batch variant of createMetadataFile: builds metadata rows for many files in one pass.
+     * @param fileIds IDs of the archive files to process
+     * @param type metadata type (only type 0 is handled)
+     * @return true on success, false if the batch failed
+     */
+    @Override
+    public boolean createMetadataFiles(List<Long> fileIds, Integer type) {
+        if (fileIds == null || fileIds.isEmpty()) return true;
+
+        try {
+            BladeUser user = AuthUtil.getUser();
+            // tenantId is a String
+            String tenantId = user != null ? String.valueOf(user.getTenantId()) : "-1";
+            Long userId = user != null ? user.getUserId() : -1L;
+            String now = DateUtil.formatDateTime(new Date());
+
+            // Batch-load the file entities
+            List<ArchiveFile> files = iArchiveFileService.listByIds(fileIds);
+            Map<Long, ArchiveFile> fileMap = new HashMap<>();
+            for (ArchiveFile file : files) {
+                fileMap.put(file.getId(), file);
+            }
+
+            // Batch-load existing metadata rows; the query orders by create_time DESC,
+            // so putIfAbsent keeps the newest row per file (preserving the old LIMIT 1 semantics)
+            List<HashMap<String, Object>> metadataMapList = baseMapper.getMetadaFileByFileIds(fileIds);
+            Map<Long, Map<String, Object>> metadataFileMap = new HashMap<>();
+            for (HashMap<String, Object> map : metadataMapList) {
+                Long fileId = (Long) map.get("file_id");
+                if (fileId != null) {
+                    metadataFileMap.putIfAbsent(fileId, map);
+                }
+            }
+
+            // Load all metadata field definitions
+            QueryWrapper<MetadataClassification> metadataWrapper = new QueryWrapper<>();
+            metadataWrapper.lambda().eq(MetadataClassification::getIsDeleted, 0);
+            List<MetadataClassification> metadataClassifications = baseMapper.selectList(metadataWrapper);
+            Map<String, MetadataClassification> mapAll = new HashMap<>();
+            for (MetadataClassification mc : metadataClassifications) {
+                mapAll.put(mc.getContainerName(), mc);
+            }
+
+            // === Step 1: load ArchivesAuto for each referenced archive (one Feign call each) ===
+            Map<Long, ArchivesAuto> archiveAutoMap = new HashMap<>();
+            Set<Long> archiveIds = files.stream()
+                    .map(ArchiveFile::getArchiveId)
+                    .filter(Objects::nonNull)
+                    .collect(Collectors.toSet());
+            if (!archiveIds.isEmpty()) {
+                for (Long archiveId : archiveIds) {
+                    try {
+                        ArchivesAuto archivesAuto = archiveAutoClient.saveArchiveAutoById(archiveId);
+                        if (archivesAuto != null) {
+                            archiveAutoMap.put(archiveId, archivesAuto);
+                        }
+                    } catch (Exception e) {
+                        log.warn("Failed to load ArchivesAuto {}", archiveId, e);
+                    }
+                }
+            }
+
+            // === Step 2: load ContractInfo for each referenced contract (one Feign call each) ===
+            Map<String, ContractInfo> contractInfoMap = new HashMap<>();
+            Set<String> contractIds = files.stream()
+                    .map(ArchiveFile::getContractId)
+                    .filter(contractId -> contractId != null && !contractId.isEmpty())
+                    .collect(Collectors.toSet());
+            if (!contractIds.isEmpty()) {
+                for (String contractId : contractIds) {
+                    try {
+                        Long contractIdLong = Long.parseLong(contractId);
+                        ContractInfo contractInfo = contractClient.getContractById(contractIdLong);
+                        if (contractInfo != null) {
+                            contractInfoMap.put(contractId, contractInfo);
+                        }
+                    } catch (Exception e) {
+                        log.warn("Failed to load ContractInfo {}", contractId, e);
+                    }
+                }
+            }
+
+            // === Step 3: batch-load tree nodes and build each node's ancestor list ===
+            Set<String> uniqueNodeIds = files.stream()
+                    .map(ArchiveFile::getNodeId)
+                    .filter(nodeId -> nodeId != null && !nodeId.isEmpty())
+                    .collect(Collectors.toSet());
+
+            Map<String, ArchiveTreeContract> nodeContractMap = new HashMap<>();
+            Map<String, List<ArchiveTreeContract>> ancestorListMap = new HashMap<>();
+
+            if (!uniqueNodeIds.isEmpty()) {
+                try {
+                    // 1. Fetch all referenced nodes in one call
+                    String nodeIdsStr = String.join(",", uniqueNodeIds);
+                    List<ArchiveTreeContract> nodeList = archiveTreeContractClient.getArchiveTreeContractListByIds(nodeIdsStr);
+
+                    // 2. Seed the maps with the fetched nodes (no need to re-fetch them as ancestors)
+                    Map<Long, ArchiveTreeContract> allAncestorsMap = new HashMap<>();
+                    for (ArchiveTreeContract node : nodeList) {
+                        // Every current node goes straight into the ancestor map
+                        allAncestorsMap.put(node.getId(), node);
+                        nodeContractMap.put(String.valueOf(node.getId()), node);
+                    }
+
+                    // 3. Collect ancestor IDs (excluding the current nodes)
+                    Set<Long> ancestorIdsToFetch = new HashSet<>();
+                    for (ArchiveTreeContract node : nodeList) {
+                        String ancestors = node.getAncestors();
+                        if (ancestors != null && !ancestors.isEmpty()) {
+                            String[] ids = ancestors.split(",");
+                            for (String id : ids) {
+                                if (!id.trim().isEmpty()) {
+                                    long ancestorId = Long.parseLong(id.trim());
+                                    // Only fetch ancestors that are not already loaded
+                                    if (!allAncestorsMap.containsKey(ancestorId)) {
+                                        ancestorIdsToFetch.add(ancestorId);
+                                    }
+                                }
+                            }
+                        }
+                    }
+
+                    // 4. Batch-fetch the missing ancestor nodes
+                    if (!ancestorIdsToFetch.isEmpty()) {
+                        String ancestorIdsStr = ancestorIdsToFetch.stream()
+                                .map(String::valueOf)
+                                .collect(Collectors.joining(","));
+
+                        List<ArchiveTreeContract> missingAncestors =
+                                archiveTreeContractClient.getArchiveTreeContractListByIds(ancestorIdsStr);
+
+                        for (ArchiveTreeContract ancestor : missingAncestors) {
+                            allAncestorsMap.put(ancestor.getId(), ancestor);
+                        }
+                    }
+
+                    // 5. Rebuild the ordered ancestor list for each node
+                    for (ArchiveTreeContract node : nodeList) {
+                        String nodeIdStr = String.valueOf(node.getId());
+                        List<ArchiveTreeContract> ancestorList = new ArrayList<>();
+
+                        // Full path = ancestor IDs + the node's own ID
+                        String ancestors = node.getAncestors();
+                        String fullPath = ancestors != null && !ancestors.isEmpty() ?
+                                ancestors + "," + node.getId() :
+                                String.valueOf(node.getId());
+
+                        // Walk the path in order
+                        String[] pathIds = fullPath.split(",");
+                        for (String idStr : pathIds) {
+                            if (!idStr.trim().isEmpty()) {
+                                Long id = Long.parseLong(idStr.trim());
+                                ArchiveTreeContract ancestor = allAncestorsMap.get(id);
+                                if (ancestor != null) {
+                                    ancestorList.add(ancestor);
+                                }
+                            }
+                        }
+
+                        ancestorListMap.put(nodeIdStr, ancestorList);
+                    }
+                } catch (Exception e) {
+                    log.error("Batch node processing failed", e);
+                }
+            }
+
+
+            // Prepare batched SQL templates and parameter lists
+            StringBuilder insertSql = new StringBuilder();
+            List<Object[]> insertParams = new ArrayList<>();
+            StringBuilder updateSql = new StringBuilder();
+            List<Object[]> updateParams = new ArrayList<>();
+
+            for (Long fileId : fileIds) {
+                ArchiveFile file = fileMap.get(fileId);
+                if (file == null || file.getNodeId() == null || file.getNodeId().isEmpty()) {
+                    continue;
+                }
+
+                Map<String, Object> metadataMap = metadataFileMap.get(fileId);
+                boolean isAdd = (metadataMap == null);
+                Map<String, Object> keyValue = new LinkedHashMap<>();
+
+                // Collect metadata values from the pre-loaded lookup maps
+                this.collectFileMetadata(
+                        file, type, keyValue, isAdd,
+                        archiveAutoMap, contractInfoMap,
+                        nodeContractMap, ancestorListMap
+                );
+
+                if (isAdd) {
+                    buildInsertSqlParams(file, keyValue, mapAll, metadataClassifications,
+                            tenantId, userId, now, insertSql, insertParams);
+                } else {
+                    buildUpdateSqlParams(fileId, keyValue, mapAll, metadataMap,
+                            userId, now, updateSql, updateParams);
+                }
+            }
+
+            // Execute the batched statements
+            if (insertSql.length() > 0 && !insertParams.isEmpty()) {
+                int[] insertResults = jdbcTemplate.batchUpdate(insertSql.toString(), insertParams);
+                logBatchResultsSimple(insertResults, "INSERT");
+            }
+
+            if (updateSql.length() > 0 && !updateParams.isEmpty()) {
+                int[] updateResults = jdbcTemplate.batchUpdate(updateSql.toString(), updateParams);
+                logBatchResultsSimple(updateResults, "UPDATE");
+            }
+
+            return true;
+        } catch (Exception e) {
+            log.error("createMetadataFiles failed", e);
+            return false;
+        }
+    }
+
+    private void buildInsertSqlParams(ArchiveFile file, Map<String, Object> keyValue,
+                                      Map<String, MetadataClassification> mapAll,
+                                      List<MetadataClassification> metadataClassifications,
+                                      String tenantId, Long userId, String now,
+                                      StringBuilder insertSql, List<Object[]> insertParams) {
+        // Build this row's parameter list
+        List<Object> params = new ArrayList<>();
+
+        // Fixed columns
+        params.add(SnowFlakeUtil.getId());   // id
+        params.add(tenantId);                // tenant_id (String)
+        params.add(userId);                  // create_user
+        params.add(now);                     // create_time
+        params.add(userId);                  // update_user
+        params.add(now);                     // update_time
+        params.add(0);                       // status
+        params.add(0);                       // is_deleted
+        params.add(file.getContractId());    // contract_id
+        params.add(file.getId());            // file_id
+
+        // Metadata columns, in the same order as the column list below
+        for (MetadataClassification mc : metadataClassifications) {
+            Object value = keyValue.getOrDefault(mc.getContainerName(), null);
+            params.add(value);
+        }
+
+        // Build the SQL template once; every row shares the same column list
+        if (insertSql.length() == 0) {
+            insertSql.append("INSERT INTO u_metadata_file (id, tenant_id, create_user, create_time, ");
+            insertSql.append("update_user, update_time, status, is_deleted, contract_id, file_id, ");
+
+            // Append all metadata columns
+            List<String> fieldKeys = new ArrayList<>();
+            for (MetadataClassification mc : metadataClassifications) {
+                fieldKeys.add(mc.getFieldKey());
+            }
+            insertSql.append(String.join(", ", fieldKeys));
+
+            insertSql.append(") VALUES (");
+
+            // One placeholder per fixed column plus one per metadata column
+            StringBuilder placeholders = new StringBuilder();
+            for (int i = 0; i < (10 + metadataClassifications.size()); i++) {
+                if (i > 0) placeholders.append(", ");
+                placeholders.append("?");
+            }
+            insertSql.append(placeholders);
+
+            insertSql.append(")");
+        }
+
+        // Add this row's parameters to the batch
+        insertParams.add(params.toArray());
+    }
+
+    private void buildUpdateSqlParams(Long fileId, Map<String, Object> keyValue,
+                                      Map<String, MetadataClassification> mapAll,
+                                      Map<String, Object> metadataMap,
+                                      Long userId, String now,
+                                      StringBuilder updateSql, List<Object[]> updateParams) {
+        // Build this row's parameter list and SET clause
+        List<Object> params = new ArrayList<>();
+        List<String> setFields = new ArrayList<>();
+
+        // Fixed update columns
+        setFields.add("update_user = ?");
+        params.add(userId);
+
+        setFields.add("update_time = ?");
+        params.add(now);
+
+        // Metadata columns present in this row's keyValue map
+        for (Map.Entry<String, Object> entry : keyValue.entrySet()) {
+            String containerName = entry.getKey();
+            Object value = entry.getValue();
+
+            if (mapAll.containsKey(containerName)) {
+                MetadataClassification mc = mapAll.get(containerName);
+                setFields.add(mc.getFieldKey() + " = ?");
+                params.add(value);
+            }
+        }
+
+        // WHERE condition parameter goes last
+        params.add(fileId);
+
+        // Build the SQL template once. Every row in a JDBC batch must share the
+        // same SET clause, so rows with a different column set are skipped
+        // rather than bound against the wrong columns.
+        String rowSql = "UPDATE u_metadata_file SET " + String.join(", ", setFields) + " WHERE file_id = ?";
+        if (updateSql.length() == 0) {
+            updateSql.append(rowSql);
+        } else if (!rowSql.contentEquals(updateSql)) {
+            log.warn("Skipping batched update for file {}: column set differs from the batch template", fileId);
+            return;
+        }
+
+        // Add this row's parameters to the batch
+        updateParams.add(params.toArray());
+    }
+
+
+    private void collectFileMetadata(ArchiveFile file, Integer type,
+                                     Map<String, Object> keyValue, boolean isAdd,Map<Long, ArchivesAuto> archiveAutoMap,
+                                     Map<String, ContractInfo> contractInfoMap,Map<String, ArchiveTreeContract> nodeContractMap,
+                                     Map<String, List<ArchiveTreeContract>> ancestorListMap) {
+        // Only type 0 is handled
+        if (type != 0) return;
+
+        // Current tree node
+        ArchiveTreeContract currentNode = nodeContractMap.get(file.getNodeId());
+        if (currentNode == null) return;
+
+        // Ancestor chain for the node
+        List<ArchiveTreeContract> ancestorList = ancestorListMap.get(file.getNodeId());
+        if (ancestorList == null || ancestorList.isEmpty()) return;
+
+        // Buffers for the various hierarchy strings
+        StringBuffer paperAddress = new StringBuffer();
+        StringBuffer nameStr = new StringBuffer();
+        StringBuffer unitProjectString = new StringBuffer();
+        StringBuffer unitProjectIdString = new StringBuffer();
+        StringBuffer divisionProjectString = new StringBuffer();
+        StringBuffer divisionProjectIdString = new StringBuffer();
+        StringBuffer itemizedProjectString = new StringBuffer();
+        StringBuffer itemizedProjectIdString = new StringBuffer();
+
+        // Walk each level of the hierarchy
+        for (int i = 0; i < ancestorList.size(); i++) {
+            ArchiveTreeContract node = ancestorList.get(i);
+            metadataSetting(node, unitProjectString, unitProjectIdString,
+                    divisionProjectString, divisionProjectIdString,
+                    itemizedProjectString, itemizedProjectIdString);
+
+            if (i == 0) continue;
+
+            // Paper-file location: parent node > current node
+            if (node.getId().equals(currentNode.getParentId())) {
+                paperAddress.append(node.getNodeName());
+                paperAddress.append(">");
+                paperAddress.append(currentNode.getNodeName());
+            }
+
+            nameStr.append(node.getNodeName());
+            nameStr.append("/");
+        }
+
+        // Basic metadata (keys are the container names stored in the DB)
+        keyValue.put("聚合层次", nameStr.toString());
+        keyValue.put("纸质文件位置", paperAddress.toString());
+
+        // Responsible unit name
+        String dutyUser = "";
+        if ((file.getDutyUser() == null || file.getDutyUser().isEmpty()) &&
+                (file.getContractId() != null && !file.getContractId().isEmpty())) {
+            ContractInfo contract = contractInfoMap.get(file.getContractId());
+            if (contract != null) {
+                if (contract.getContractType() == 1) {
+                    dutyUser = contract.getConstructionUnitName();
+                } else if (contract.getContractType() == 2) {
+                    dutyUser = contract.getSupervisionUnitName();
+                } else {
+                    dutyUser = contract.getContractorUnitName();
+                }
+            }
+        } else {
+            dutyUser = file.getDutyUser() != null ? file.getDutyUser() : "";
+        }
+        keyValue.put("立档单位名称", dutyUser);
+        keyValue.put("责任者名称", file.getDutyUser());
+
+        // Basic file information
+        keyValue.put("元数据目录文件", nameStr.toString());
+        keyValue.put("电子文件号", file.getId());
+        keyValue.put("文件页数", file.getFilePage());
+        keyValue.put("生成方式", file.getSourceType());
+        keyValue.put("题名", file.getFileName());
+
+        // Keywords: the file name, truncated to 8 characters
+        String keyWords = "";
+        if (file.getFileName() != null && file.getFileName().length() < 9) {
+            keyWords = file.getFileName();
+        } else if (file.getFileName() != null) {
+            keyWords = file.getFileName().substring(0, 8);
+        }
+        keyValue.put("关键词", keyWords);
+
+        // Summary text (stored in Chinese, matching the DB content)
+        String zhaiyao = "该文件为【" + (file.getSourceType() == 1 ? "原生" : "数字化") + "】于【" +
+                (file.getFileTime() != null ? file.getFileTime() : "") + "】形成,共【" +
+                (file.getFilePage() != null ? file.getFilePage() : 0) + "】页、【" +
+                (file.getFileName() != null ? file.getFileName() : "") + "】,责任者为【" +
+                dutyUser + "】";
+        keyValue.put("摘要", zhaiyao);
+        keyValue.put("文件日期", file.getFileTime());
+
+        // Archive info, if the file already belongs to an archive
+        if (file.getArchiveId() != null) {
+            ArchivesAuto archivesAuto = archiveAutoMap.get(file.getArchiveId());
+            if (archivesAuto != null) {
+                keyValue.put("密级", archivesAuto.getSecretLevel());
+                keyValue.put("档号", archivesAuto.getFileNumber());
+                keyValue.put("保管期限", archivesAuto.getStorageTime());
+                keyValue.put("文件件数", archivesAuto.getFileN());
+                keyValue.put("验证码", archivesAuto.getId());
+            }
+        }
+
+        // Document number and photo metadata
+        keyValue.put("文号", file.getFileNumber());
+        keyValue.put("拍摄时间", file.getFilmingTime());
+        keyValue.put("分组号", file.getFilmCode());
+        keyValue.put("组内照片编号", file.getReferCode());
+
+        // Fields derived from the file path
+        if (file.getFileUrl() != null && !file.getFileUrl().isEmpty()) {
+            // Format name and storage location
+            String fileUrl = file.getFileUrl();
+            if (fileUrl.contains(".")) {
+                keyValue.put("格式名称", fileUrl.substring(fileUrl.lastIndexOf(".") + 1));
+            } else {
+                keyValue.put("格式名称", "");
+            }
+
+            keyValue.put("存储位置", fileUrl);
+            keyValue.put("计算机文件名", fileUrl.substring(fileUrl.lastIndexOf("/") + 1));
+        }
+
+        // File size and scan info
+        keyValue.put("计算机文件大小", file.getFileSize());
+        keyValue.put("扫描分辨率", "300dpi");
+        keyValue.put("扫描色彩模式", "彩色");
+
+        // Unit / division / itemized project info
+        keyValue.put("单位工程", trimLastComma(unitProjectString));
+        keyValue.put("单位工程编码", trimLastComma(unitProjectIdString));
+        keyValue.put("分部工程", trimLastComma(divisionProjectString));
+        keyValue.put("分部工程编码", trimLastComma(divisionProjectIdString));
+        keyValue.put("分项工程", trimLastComma(itemizedProjectString));
+        keyValue.put("分项工程编码", trimLastComma(itemizedProjectIdString));
+
+        // Default relation value
+        keyValue.put("关系", "引用");
+
+        // As-built drawing fields
+        if ((currentNode.getIsBuiltDrawing() != null && currentNode.getIsBuiltDrawing() == 1) ||
+                (currentNode.getStorageType() != null && currentNode.getStorageType() == 2)) {
+            keyValue.put("图号", file.getDrawingNo());
+            keyValue.put("图幅", file.getSheetType());
+            keyValue.put("图表来源", file.getSheetSource());
+            keyValue.put("引用变更令 编号", file.getCiteChangeNumber());
+        }
+
+        // Storage type
+        if (currentNode.getStorageType() != null) {
+            keyValue.put("业务事项", StorageTypeEnum.getByCode(currentNode.getStorageType()));
+        }
+
+        // Signature info (only for new rows, avoiding unnecessary queries)
+        if (isAdd) {
+            Task task = iTaskService.getTaskByFormDataId(file.getId().toString());
+            if (task != null) {
+                List<TaskParallel> taskParallelList = iTaskParallelService.queryApprovalUser(task.getProcessInstanceId());
+                if (taskParallelList != null && !taskParallelList.isEmpty()) {
+                    taskParallelList = taskParallelList.stream()
+                            .filter(taskParallel -> taskParallel.getEVisaStatus() == 1)
+                            .collect(Collectors.toList());
+
+                    if (!taskParallelList.isEmpty()) {
+                        // Collect signer names and signing dates
+                        StringBuffer nameString = new StringBuffer();
+                        StringBuffer dateString = new StringBuffer();
+                        List<Long> userIdList = new ArrayList<>();
+
+                        for (TaskParallel taskParallel : taskParallelList) {
+                            nameString.append(taskParallel.getTaskUserName()).append("、");
+                            dateString.append(taskParallel.getUpdateTime()).append("、");
+                            userIdList.add(Long.valueOf(task.getTaskUser()));
+                        }
+
+                        // Look up users to resolve their post names
+                        if (!userIdList.isEmpty()) {
+                            List<User> userList = iUserClient.userInfoByIds(userIdList);
+                            if (userList != null && !userList.isEmpty()) {
+                                List<String> postIds = userList.stream()
+                                        .map(User::getPostId)
+                                        .filter(Objects::nonNull)
+                                        .collect(Collectors.toList());
+
+                                R<List<String>> postNames = iSysClient.getPostNames(Func.join(postIds));
+                                if (postNames != null && postNames.getCode() == 200) {
+                                    keyValue.put("签名项", Func.join(postNames.getData()));
+                                }
+                            }
+                        }
+
+                        keyValue.put("签名者", trimLastComma(nameString));
+                        keyValue.put("签名时间", trimLastComma(dateString));
+                    }
+                }
+            }
+        }
+    }
+
+    // Helper: strip the trailing separator (comma / enumeration mark)
+    private String trimLastComma(StringBuffer buffer) {
+        if (buffer == null || buffer.length() == 0) return null;
+        return buffer.substring(0, buffer.length() - 1);
+    }
+
+    // Helper: log batch execution results
+    private void logBatchResultsSimple(int[] results, String operation) {
+        int successCount = 0;
+        int failureCount = 0;
+        int affectedTotal = 0;
+
+        for (int rowCount : results) {
+            if (rowCount >= 0) {
+                successCount++;
+                affectedTotal += rowCount;
+            } else if (rowCount == Statement.SUCCESS_NO_INFO) {
+                // Statement executed, but the driver did not report a row count
+                successCount++;
+            } else if (rowCount == Statement.EXECUTE_FAILED) {
+                failureCount++;
+            }
+        }
+
+        String msg = operation + " batch result: succeeded = " + successCount +
+                ", failed = " + failureCount +
+                ", rows affected = " + affectedTotal +
+                ", total statements = " + results.length;
+
+        if (failureCount == 0) {
+            log.info(msg);
+        } else {
+            log.error(msg);
+        }
+    }
+
+
 }
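A deployment note, not part of this commit: on MySQL, jdbcTemplate.batchUpdate only yields a large speed-up when Connector/J is allowed to rewrite the batch into multi-row statements. That is enabled by a flag on the datasource URL (host and schema below are placeholders):

    # Hypothetical datasource entry; only rewriteBatchedStatements matters here.
    spring.datasource.url=jdbc:mysql://localhost:3306/blade?rewriteBatchedStatements=true&characterEncoding=utf8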