[prod] Remove some of the batch-processing logs

liu.zixi
2025-11-11 12:58:47 +08:00
parent 30efbd5610
commit b54d78d17b
4 changed files with 26 additions and 26 deletions
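All four files make the same kind of change: per-run INFO/ERROR progress messages in the batch jobs are commented out rather than deleted. For reference, the same noise reduction is often achieved by demoting such messages to DEBUG and controlling them per environment through logger configuration; the sketch below is a minimal illustration assuming plain SLF4J (the {} placeholder style used in these classes), with a class name and message that are not part of this commit:

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class BatchLoggingSketch {
    private static final Logger log = LoggerFactory.getLogger(BatchLoggingSketch.class);

    public void reportPending(int pendingCount) {
        // DEBUG instead of INFO: silent under a typical production root level of INFO,
        // but can be re-enabled for this one logger via configuration, without a code change.
        log.debug("Number of pending cases found: {}", pendingCount);
    }
}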

CaseAiDocumentAsyncHandler.java

@@ -59,7 +59,7 @@ public class CaseAiDocumentAsyncHandler {
caseKnowledgeService.uploadCaseDocument(cases);
break;
}
log.info("处理案例成功caseId: {}, 操作类型: {}", cases.getId(), optTypeEnum.getDesc());
// log.info("处理案例成功caseId: {}, 操作类型: {}", cases.getId(), optTypeEnum.getDesc());
} catch (Exception e) {
log.error("处理案例失败caseId: {}, 操作类型: {}", cases.getId(), optTypeEnum.getDesc(), e);
}

CaseKnowledgeServiceImpl.java

@@ -1024,7 +1024,7 @@ public class CaseKnowledgeServiceImpl implements ICaseKnowledgeService {
@Override
public void batchCheckFileStatus() {
log.info("开始批量检查文件状态");
// log.info("开始批量检查文件状态");
// 1. 查询CaseDocumentLog表中前10条run_status等于0的数据并按创建时间升序排序
PageList<CaseDocumentLog> runningLogPage = caseDocumentLogDao.getGenericDao()
@@ -1034,11 +1034,11 @@ public class CaseKnowledgeServiceImpl implements ICaseKnowledgeService {
// 2. If there is no matching data, we are done
if (runningLogs == null || runningLogs.isEmpty()) {
log.info("没有需要检查状态的文档,批量检查完成");
// log.info("没有需要检查状态的文档,批量检查完成");
return;
}
log.info("找到{}条需要检查状态的文档记录", runningLogs.size());
// log.info("找到{}条需要检查状态的文档记录", runningLogs.size());
// 3. 把这些数据的taskId聚合成一个List<String>
List<String> taskIds = runningLogs.stream()
@@ -1048,11 +1048,11 @@ public class CaseKnowledgeServiceImpl implements ICaseKnowledgeService {
.collect(java.util.stream.Collectors.toList());
if (taskIds.isEmpty()) {
log.error("所有运行中的记录都没有有效的taskId");
// log.error("所有运行中的记录都没有有效的taskId");
return;
}
log.info("需要检查状态的taskId数量: {}", taskIds.size());
// log.info("需要检查状态的taskId数量: {}", taskIds.size());
// 4. 获取access_token
String accessToken = aiAccessTokenService.getAccessToken();
@@ -1092,7 +1092,7 @@ public class CaseKnowledgeServiceImpl implements ICaseKnowledgeService {
log.error("批量检查文件状态异常", e);
}
log.info("批量检查文件状态完成");
// log.info("批量检查文件状态完成");
}
/**
@@ -1359,7 +1359,7 @@ public class CaseKnowledgeServiceImpl implements ICaseKnowledgeService {
caseLog.setOptStatus(CaseDocumentLogOptStatusEnum.SUCCESS.getCode());
caseLog.setCaseStatus(CaseDocumentLogCaseStatusEnum.SUCCESS.getCode());
needUpdate = true;
log.info("文档向量化成功更新状态taskId: {}, caseId: {}", caseLog.getTaskId(), caseLog.getCaseId());
// log.info("文档向量化成功更新状态taskId: {}, caseId: {}", caseLog.getTaskId(), caseLog.getCaseId());
} else if ("failed".equals(fileStatus)) {
// Status is failed: set run_status and opt_status to 1, set case_status to 2
caseLog.setRunStatus(CaseDocumentLogRunStatusEnum.COMPLETED.getCode());
@@ -1367,20 +1367,20 @@ public class CaseKnowledgeServiceImpl implements ICaseKnowledgeService {
caseLog.setCaseStatus(CaseDocumentLogCaseStatusEnum.FAILED.getCode());
needUpdate = true;
needToSendEmail = true;
log.error("文档处理失败需要发送邮件更新状态taskId: {}, caseId: {}", caseLog.getTaskId(), caseLog.getCaseId());
// log.error("文档处理失败需要发送邮件更新状态taskId: {}, caseId: {}", caseLog.getTaskId(), caseLog.getCaseId());
} else {
// Other statuses (uploaded, texted, vectoring): no data change
- log.info("Document status is {}, not updating the database for now, taskId: {}", fileStatus, caseLog.getTaskId());
+ // log.info("Document status is {}, not updating the database for now, taskId: {}", fileStatus, caseLog.getTaskId());
}
// Perform the update if needed
if (needUpdate) {
caseLog.setSysUpdateTime(LocalDateTime.now());
caseDocumentLogDao.save(caseLog);
log.info("更新CaseDocumentLog成功logId: {}, taskId: {}, fileStatus: {}",
caseLog.getId(), caseLog.getTaskId(), fileStatus);
} else {
log.info("无需更新CaseDocumentLogtaskId: {}, fileStatus: {}", caseLog.getTaskId(), fileStatus);
// log.info("更新CaseDocumentLog成功logId: {}, taskId: {}, fileStatus: {}",
// caseLog.getId(), caseLog.getTaskId(), fileStatus);
// } else {
// log.info("无需更新CaseDocumentLogtaskId: {}, fileStatus: {}", caseLog.getTaskId(), fileStatus);
}
} catch (Exception e) {
log.error("更新日志状态异常taskId: {}, fileStatus: {}", caseLog.getTaskId(), fileStatus, e);
@@ -1397,7 +1397,7 @@ public class CaseKnowledgeServiceImpl implements ICaseKnowledgeService {
// Use the configured recipient list
List<String> recipients = caseAiProperties.getAlertEmailRecipients();
log.info("使用配置的收件人列表:{}", recipients);
// log.info("使用配置的收件人列表:{}", recipients);
if (recipients != null && !recipients.isEmpty()) {
try {
String to = String.join(",", recipients);

CaseDocumentLogTask.java

@@ -20,8 +20,8 @@ public class CaseDocumentLogTask {
*/
@XxlJob("batchCheckFileStatusJob")
public void batchCheckFileStatusJob() {
log.info("开始批量查询文件状态");
// log.info("开始批量查询文件状态");
caseKnowledgeService.batchCheckFileStatus();
log.info("结束批量查询文件状态");
// log.info("结束批量查询文件状态");
}
}

CaseUploadTask.java

@@ -41,18 +41,18 @@ public class CaseUploadTask {
@XxlJob("oldDataUploadJob")
public void oldDataUploadJob() {
try {
log.info("开始执行旧案例上传任务");
// log.info("开始执行旧案例上传任务");
// 从Redis获取上次处理的最后一条记录ID
String lastProcessedId = stringRedisTemplate.opsForValue().get(CASE_UPLOAD_LAST_ID_KEY);
log.info("上次处理的最后一条记录ID: {}", lastProcessedId);
// log.info("上次处理的最后一条记录ID: {}", lastProcessedId);
// 查询符合条件的案例数据
List<Cases> casesToProcess = findCasesToProcess(lastProcessedId);
log.info("查询到待处理案例数量: {}", casesToProcess.size());
// log.info("查询到待处理案例数量: {}", casesToProcess.size());
if (casesToProcess.isEmpty()) {
log.info("没有需要处理的案例数据");
// log.info("没有需要处理的案例数据");
return;
}
@@ -82,7 +82,7 @@ public class CaseUploadTask {
}
}
log.info("过滤后需要处理的案例数量: {}", casesList.size());
// log.info("过滤后需要处理的案例数量: {}", casesList.size());
if (!casesList.isEmpty()) {
// Call the asynchronous handler
@@ -91,12 +91,12 @@ public class CaseUploadTask {
// Store the ID of the last record processed in this run into Redis
String currentLastId = casesList.get(casesList.size() - 1).getId();
stringRedisTemplate.opsForValue().set(CASE_UPLOAD_LAST_ID_KEY, currentLastId);
log.info("已处理案例最后一条记录ID已更新为: {}", currentLastId);
} else {
log.info("没有新的案例需要处理");
// log.info("已处理案例最后一条记录ID已更新为: {}", currentLastId);
// } else {
// log.info("没有新的案例需要处理");
}
log.info("旧案例上传任务执行完成");
// log.info("旧案例上传任务执行完成");
} catch (Exception e) {
log.error("执行旧案例上传任务时发生异常", e);
}