Commit 2dc98d29 authored by 窦馨雨

Merge branch 'dxy' into 'master'

Sync Feishu and Qince attendance data. See merge request !127
package com.sfa.job.domain.feishu.dao;
import com.sfa.job.pojo.feishu.response.FeishuLeaveInfoDTO;
import java.util.List;
/**
* Dao接口:定义所有核心方法,Service仅调用此处方法
*/
public interface FeishuLeaveInfoDao {
/**
* 解析飞书原始JSON为DTO列表(核心解析逻辑)
*/
List<FeishuLeaveInfoDTO> parseFeishuLeaveRawJson(String rawJson);
/**
* 同步飞书请假数据(核心新增/更新逻辑)
*/
String syncFeishuLeaveData(String syncDate);
/**
* 根据请假唯一ID查询DTO
*/
FeishuLeaveInfoDTO selectByLeaveRequestId(String leaveRequestId);
}
\ No newline at end of file
package com.sfa.job.domain.feishu.dao;
import com.alibaba.fastjson2.JSONArray;
import com.alibaba.fastjson2.JSONObject;
import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
import com.baomidou.mybatisplus.core.conditions.update.LambdaUpdateWrapper;
import com.sfa.common.core.utils.bean.BeanUtils;
import com.sfa.common.core.utils.sdk.FeiShuUtil;
import com.sfa.job.domain.feishu.entity.FeishuLeaveInfo;
import com.sfa.job.domain.feishu.mapper.FeishuLeaveInfoMapper;
import com.sfa.job.pojo.feishu.response.FeishuLeaveInfoDTO;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.util.CollectionUtils;
import javax.annotation.Resource;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.concurrent.ConcurrentHashMap;
import java.util.Map;
/**
* 飞书请假信息Dao实现类
*/
@Service
@Slf4j
public class FeishuLeaveInfoDaoImpl implements FeishuLeaveInfoDao {
@Resource
private FeishuLeaveInfoMapper feishuLeaveInfoMapper;
@Resource
private FeiShuUtil feiShuUtil;
// 本地缓存(本次同步有效,避免重复调用飞书接口)
private Map<String, String> employeeNoCache = new ConcurrentHashMap<>();
private String extractZhCnValue(JSONArray jsonArray) {
if (CollectionUtils.isEmpty(jsonArray)) {
return "";
}
for (int i = 0; i < jsonArray.size(); i++) {
JSONObject obj = jsonArray.getJSONObject(i);
// 补充空对象防护
if (obj != null && "zh-CN".equals(StringUtils.trimToEmpty(obj.getString("lang")))) {
return StringUtils.trimToEmpty(obj.getString("value"));
}
}
return "";
}
private String extractFirstArrayValue(JSONArray jsonArray) {
if (CollectionUtils.isEmpty(jsonArray)) {
return "";
}
Object first = jsonArray.get(0);
return first == null ? "" : StringUtils.trimToEmpty(first.toString());
}
private void parseCorrectProcessInfo(JSONArray jsonArray, FeishuLeaveInfoDTO dto) {
if (CollectionUtils.isEmpty(jsonArray)) {
dto.setProcessApplyTime("");
dto.setProcessId("");
dto.setProcessStatus("");
return;
}
JSONObject process = jsonArray.getJSONObject(0);
if (process != null) {
dto.setProcessApplyTime(StringUtils.trimToEmpty(process.getString("process_apply_time")));
dto.setProcessId(StringUtils.trimToEmpty(process.getString("process_id")));
dto.setProcessStatus(StringUtils.trimToEmpty(process.getString("process_status")));
}
}
/**
* 带缓存获取员工编号(避免重复调用飞书接口)
*/
private String getEmployeeNoWithCache(String employmentId) {
if (StringUtils.isBlank(employmentId)) {
return "";
}
// 先查缓存
if (employeeNoCache.containsKey(employmentId)) {
return employeeNoCache.get(employmentId);
}
// 缓存未命中,调用接口查询
String employeeNo = StringUtils.defaultString(feiShuUtil.getEmployeeNoByEmploymentId(employmentId));
// 存入缓存(null 统一转为空串,避免 ConcurrentHashMap 写入 null 抛 NPE)
employeeNoCache.put(employmentId, employeeNo);
return employeeNo;
}
// ************************ 接口实现方法 ************************
@Override
public List<FeishuLeaveInfoDTO> parseFeishuLeaveRawJson(String rawJson) {
List<FeishuLeaveInfoDTO> dtoList = new ArrayList<>();
if (StringUtils.isBlank(rawJson)) {
log.warn("飞书原始JSON为空,解析结果为空");
return dtoList;
}
try {
JSONObject root = JSONObject.parseObject(rawJson);
JSONArray leaveList = root.getJSONArray("leave_request_list");
if (CollectionUtils.isEmpty(leaveList)) {
log.info("飞书响应无有效请假数据");
return dtoList;
}
for (int i = 0; i < leaveList.size(); i++) {
JSONObject leaveObj = leaveList.getJSONObject(i);
FeishuLeaveInfoDTO dto = new FeishuLeaveInfoDTO();
// 业务字段赋值
dto.setEmploymentId(StringUtils.trimToEmpty(leaveObj.getString("employment_id")));
dto.setEndTime(StringUtils.trimToEmpty(leaveObj.getString("end_time")));
dto.setGrantSource(StringUtils.trimToEmpty(leaveObj.getString("grant_source")));
dto.setLeaveDuration(StringUtils.trimToEmpty(leaveObj.getString("leave_duration")));
dto.setLeaveDurationUnit(leaveObj.getInteger("leave_duration_unit"));
dto.setLeaveRequestId(StringUtils.trimToEmpty(leaveObj.getString("leave_request_id")));
dto.setLeaveRequestStatus(leaveObj.getInteger("leave_request_status"));
dto.setLeaveTypeId(StringUtils.trimToEmpty(leaveObj.getString("leave_type_id")));
dto.setNotes(StringUtils.trimToEmpty(leaveObj.getString("notes")));
dto.setReturnTime(StringUtils.trimToEmpty(leaveObj.getString("return_time")));
dto.setStartTime(StringUtils.trimToEmpty(leaveObj.getString("start_time")));
dto.setSubmittedAt(StringUtils.trimToEmpty(leaveObj.getString("submitted_at")));
dto.setSubmittedBy(StringUtils.trimToEmpty(leaveObj.getString("submitted_by")));
dto.setTimeZone(StringUtils.trimToEmpty(leaveObj.getString("time_zone")));
// 提取中文名称和数组第一条数据
dto.setEmploymentName(extractZhCnValue(leaveObj.getJSONArray("employment_name")));
dto.setLeaveTypeName(extractZhCnValue(leaveObj.getJSONArray("leave_type_name")));
dto.setLeaveProcessId(extractFirstArrayValue(leaveObj.getJSONArray("leave_process_id")));
dto.setLeaveCorrectProcessId(extractFirstArrayValue(leaveObj.getJSONArray("leave_correct_process_id")));
// 解析嵌套更正流程
parseCorrectProcessInfo(leaveObj.getJSONArray("leave_correct_process_info"), dto);
dtoList.add(dto);
}
log.info("飞书请假数据解析完成,共{}条有效DTO", dtoList.size());
} catch (Exception e) {
log.error("飞书请假数据解析异常", e);
throw new RuntimeException("飞书请假数据解析失败", e);
}
return dtoList;
}
@Override
@Transactional(rollbackFor = Exception.class)
public String syncFeishuLeaveData(String syncDate) {
// 日期格式校验
if (StringUtils.isBlank(syncDate) || !syncDate.matches("^\\d{4}-\\d{2}-\\d{2}$")) {
log.error("同步日期格式错误,要求为 yyyy-MM-dd,传入值:{}", syncDate);
return String.format("同步失败:日期格式错误,要求为 yyyy-MM-dd,传入值:%s", syncDate);
}
// 调用飞书工具类获取原始JSON
String rawJson = feiShuUtil.getAllLeaveRecords(syncDate);
// 解析为DTO列表
List<FeishuLeaveInfoDTO> dtoList = parseFeishuLeaveRawJson(rawJson);
if (CollectionUtils.isEmpty(dtoList)) {
return String.format("同步[%s]无有效请假数据,新增0条,更新0条", syncDate);
}
// 初始化统计参数
Date now = new Date();
int totalCount = dtoList.size();
int addCount = 0;
int updateCount = 0;
int skipCount = 0;
// 清空缓存(本次同步独立缓存)
employeeNoCache.clear();
// 遍历处理每条DTO
for (FeishuLeaveInfoDTO dto : dtoList) {
// 跳过无唯一ID的记录
if (StringUtils.isBlank(dto.getLeaveRequestId())) {
skipCount++;
continue;
}
// 设置系统字段(同步创建时间仅在新增分支赋值,更新时保留库中原值)
dto.setSyncUpdateTime(now);
dto.setIsDelete(FeishuLeaveInfoDTO.IS_DELETE_NO);
// 核心逻辑:查询并赋值 employee_no(带缓存)
String employeeNo = getEmployeeNoWithCache(dto.getEmploymentId());
dto.setEmploymentNo(employeeNo);
// 查询是否已存在
FeishuLeaveInfoDTO existDto = selectByLeaveRequestId(dto.getLeaveRequestId());
if (existDto == null) {
// 新增:设置同步创建时间,DTO → 实体
dto.setSyncCreateTime(now);
FeishuLeaveInfo entity = BeanUtils.transitionDto(dto, FeishuLeaveInfo.class);
int insertResult = feishuLeaveInfoMapper.insert(entity);
if (insertResult > 0) {
addCount++;
log.info("新增请假数据成功,唯一ID:{},员工编号:{}", dto.getLeaveRequestId(), employeeNo);
}
} else {
// 更新:构造条件,批量更新
FeishuLeaveInfo entity = BeanUtils.transitionDto(dto, FeishuLeaveInfo.class);
LambdaUpdateWrapper<FeishuLeaveInfo> updateWrapper = new LambdaUpdateWrapper<>();
updateWrapper.eq(FeishuLeaveInfo::getLeaveRequestId, dto.getLeaveRequestId())
.eq(FeishuLeaveInfo::getIsDelete, FeishuLeaveInfo.IS_DELETE_NO);
int updateResult = feishuLeaveInfoMapper.update(entity, updateWrapper);
if (updateResult > 0) {
updateCount++;
log.info("更新请假数据成功,唯一ID:{},员工编号:{}", dto.getLeaveRequestId(), employeeNo);
} else {
log.warn("更新请假数据无变更,唯一ID:{},员工编号:{}", dto.getLeaveRequestId(), employeeNo);
}
}
}
// 构造同步结果
return String.format("同步[%s]完成\n总计处理:%d条\n新增:%d条\n更新:%d条\n跳过无效记录:%d条",
syncDate, totalCount, addCount, updateCount, skipCount);
}
@Override
public FeishuLeaveInfoDTO selectByLeaveRequestId(String leaveRequestId) {
if (StringUtils.isBlank(leaveRequestId)) {
return null;
}
// 查询实体
LambdaQueryWrapper<FeishuLeaveInfo> queryWrapper = new LambdaQueryWrapper<>();
queryWrapper.eq(FeishuLeaveInfo::getLeaveRequestId, leaveRequestId)
.eq(FeishuLeaveInfo::getIsDelete, FeishuLeaveInfo.IS_DELETE_NO);
FeishuLeaveInfo entity = feishuLeaveInfoMapper.selectOne(queryWrapper);
// 转换为DTO返回(未查到时返回null)
return entity == null ? null : BeanUtils.transitionDto(entity, FeishuLeaveInfoDTO.class);
}
}
\ No newline at end of file
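Editor's note on the employee-number cache above: employeeNoCache is a singleton-level ConcurrentHashMap that is cleared at the start of every sync, so two overlapping syncs would share and wipe each other's entries. A minimal alternative sketch, assuming the same FeiShuUtil API (method and parameter names here are hypothetical), is a per-invocation map created inside syncFeishuLeaveData and passed down:

// Sketch only: per-invocation cache (hypothetical helper), no shared mutable state between
// syncs; a null employee number is normalized to "" so the map never stores null.
private String getEmployeeNo(Map<String, String> localCache, String employmentId) {
    if (StringUtils.isBlank(employmentId)) {
        return "";
    }
    return localCache.computeIfAbsent(employmentId,
            id -> StringUtils.defaultString(feiShuUtil.getEmployeeNoByEmploymentId(id)));
}

syncFeishuLeaveData would then create a local HashMap at the top of the method instead of clearing the shared field.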
package com.sfa.job.domain.feishu.entity;
import com.baomidou.mybatisplus.annotation.*;
import lombok.Data;
import java.io.Serializable;
import java.util.Date;
@Data
@TableName("feishu_leave_info")
public class FeishuLeaveInfo implements Serializable {
@TableField(exist = false)
private static final long serialVersionUID = 1L;
// 逻辑删除常量
// 未删除
@TableField(exist = false)
public static final Integer IS_DELETE_NO = 1;
// 已删除
@TableField(exist = false)
public static final Integer IS_DELETE_YES = 0;
@TableId(type = IdType.AUTO)
private Long id;
private String employmentId;
private String employmentName;
private String employmentNo;
private String endTime;
private String grantSource;
private String leaveDuration;
private Integer leaveDurationUnit;
private String leaveProcessId;
private String leaveRequestId;
private Integer leaveRequestStatus;
private String leaveTypeId;
private String leaveTypeName;
private String notes;
private String returnTime;
private String startTime;
private String submittedAt;
private String submittedBy;
private String timeZone;
private String leaveCorrectProcessId;
private String processApplyTime;
private String processId;
private String processStatus;
private Date syncCreateTime;
private Date syncUpdateTime;
@TableLogic(value = "1", delval = "0")
private Integer isDelete;
}
\ No newline at end of file
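For reference, @TableLogic(value = "1", delval = "0") on the entity above tells MyBatis-Plus that 1 means "live" and 0 means "deleted", which matches IS_DELETE_NO / IS_DELETE_YES. A short sketch of the behaviour under MyBatis-Plus defaults (illustrative, not MR code):

// Logical delete with @TableLogic(value = "1", delval = "0"):
feishuLeaveInfoMapper.selectById(1L);  // ... WHERE id = 1 AND is_delete = 1
feishuLeaveInfoMapper.deleteById(1L);  // UPDATE feishu_leave_info SET is_delete = 0 WHERE id = 1 AND is_delete = 1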
package com.sfa.job.domain.feishu.mapper;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import com.sfa.job.domain.feishu.entity.FeishuLeaveInfo;
import org.springframework.stereotype.Repository;
@Repository
public interface FeishuLeaveInfoMapper extends BaseMapper<FeishuLeaveInfo> {
}
\ No newline at end of file
package com.sfa.job.domain.qince.dao;
import com.sfa.job.pojo.qince.response.QinceUserStatisticDTO;
import java.time.LocalDate;
import java.util.List;
import java.util.Map;
/**
* 勤策考勤用户统计 DAO 接口(纯 DTO 操作,不暴露 Entity)
*
*/
public interface IQinceUserStatisticDao {
/**
* 构建已存在考勤记录的映射(qcUserId+attDate → DTO)
* @param userIdList 用户ID列表
* @param dateList 考勤日期列表(LocalDate 类型)
* @return 唯一键与 DTO 的映射(纯 DTO,无 Entity 暴露)
*/
Map<String, QinceUserStatisticDTO> buildExistAttendanceMap(List<Long> userIdList, List<LocalDate> dateList);
/**
* 批量新增考勤记录(入参为 DTO 列表)
* @param dtoList 考勤 DTO 列表
* @return 新增成功条数
*/
int batchInsert(List<QinceUserStatisticDTO> dtoList);
/**
* 批量更新考勤记录(入参为 DTO 列表,需包含主键 ID)
* @param dtoList 考勤 DTO 列表
* @return 更新成功条数
*/
int batchUpdate(List<QinceUserStatisticDTO> dtoList);
}
\ No newline at end of file
package com.sfa.job.domain.qince.dao;
import com.baomidou.dynamic.datasource.annotation.DS;
import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
import com.sfa.job.domain.qince.entity.QinceUserStatistic;
import com.sfa.job.domain.qince.mapper.QinceUserStatisticMapper;
import com.sfa.job.pojo.qince.response.QinceUserStatisticDTO;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.BeanUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.util.CollectionUtils;
import java.time.format.DateTimeFormatter;
import java.time.LocalDate;
import java.util.*;
import java.util.stream.Collectors;
/**
* 勤策考勤用户统计 DAO 实现类(纯 DTO 对外,内部转换 Entity 调用 Mapper)
*/
@Slf4j
@Service
@DS("bi")
public class QinceUserStatisticDaoImpl implements IQinceUserStatisticDao {
@Autowired
private QinceUserStatisticMapper qinceUserStatisticMapper;
// 配置项
private static final int BATCH_SIZE = 200;
// 日期格式化器(适配 LocalDate 类型,DateTimeFormatter 本身线程安全)
private static final DateTimeFormatter DATE_FORMATTER = DateTimeFormatter.ofPattern("yyyy-MM-dd");
// ===================== 内部工具:DTO ↔ Entity 转换(隔离对外接口与持久化层) =====================
/**
* DTO 转换为 Entity(用于调用 Mapper 进行持久化)
*/
private QinceUserStatistic dtoToEntity(QinceUserStatisticDTO dto) {
if (dto == null) {
return null;
}
QinceUserStatistic entity = new QinceUserStatistic();
// BeanUtils 拷贝同名同类型字段(Date/Long/String 均兼容)
BeanUtils.copyProperties(dto, entity);
return entity;
}
/**
* Entity 转换为 DTO(用于隐藏 Entity,对外返回纯净 DTO)
*/
private QinceUserStatisticDTO entityToDto(QinceUserStatistic entity) {
if (entity == null) {
return null;
}
QinceUserStatisticDTO dto = new QinceUserStatisticDTO();
// BeanUtils 拷贝同名同类型字段
BeanUtils.copyProperties(entity, dto);
return dto;
}
/**
* DTO 列表 转换为 Entity 列表(批量操作适配)
*/
private List<QinceUserStatistic> dtoListToEntityList(List<QinceUserStatisticDTO> dtoList) {
if (CollectionUtils.isEmpty(dtoList)) {
return new ArrayList<>(0);
}
return dtoList.stream()
.map(this::dtoToEntity)
.filter(Objects::nonNull)
.collect(Collectors.toList());
}
/**
* Entity 列表 转换为 DTO 列表(批量操作适配)
*/
private List<QinceUserStatisticDTO> entityListToDtoList(List<QinceUserStatistic> entityList) {
if (CollectionUtils.isEmpty(entityList)) {
return new ArrayList<>(0);
}
return entityList.stream()
.map(this::entityToDto)
.collect(Collectors.toList());
}
// ===================== DAO 接口实现:纯 DTO 对外,内部转换调用 Mapper =====================
@Override
public Map<String, QinceUserStatisticDTO> buildExistAttendanceMap(List<Long> userIdList, List<LocalDate> dateList) {
if (CollectionUtils.isEmpty(userIdList) || CollectionUtils.isEmpty(dateList)) {
return new HashMap<>(0);
}
// 1. 构建 Mapper 查询条件(仅操作 Entity,按需查询字段提升性能)
LambdaQueryWrapper<QinceUserStatistic> queryWrapper = new LambdaQueryWrapper<QinceUserStatistic>()
.in(QinceUserStatistic::getQcUserId, userIdList)
.in(QinceUserStatistic::getAttDate, dateList)
.select(QinceUserStatistic::getId, QinceUserStatistic::getQcUserId,
QinceUserStatistic::getAttDate);
// 2. Mapper 操作 Entity,查询数据库(唯一直接操作 Entity 的地方)
List<QinceUserStatistic> existEntityList = qinceUserStatisticMapper.selectList(queryWrapper);
// 3. 转换 Entity 为 DTO,构建唯一键映射(对外隐藏 Entity,返回纯 DTO)
Map<String, QinceUserStatisticDTO> existDtoMap = new HashMap<>(existEntityList.size());
for (QinceUserStatistic entity : existEntityList) {
QinceUserStatisticDTO dto = entityToDto(entity);
String dateStr = entity.getAttDate().format(DATE_FORMATTER);
String uniqueKey = entity.getQcUserId() + "_" + dateStr;
existDtoMap.put(uniqueKey, dto);
}
return existDtoMap;
}
@Override
public int batchInsert(List<QinceUserStatisticDTO> dtoList) {
int count = 0;
if (CollectionUtils.isEmpty(dtoList)) {
return count;
}
// 1. 分批处理,避免 SQL 语句过长(保护数据库,防止超出最大允许长度)
for (int i = 0; i < dtoList.size(); i += BATCH_SIZE) {
int end = Math.min(i + BATCH_SIZE, dtoList.size());
List<QinceUserStatisticDTO> batchDtoList = dtoList.subList(i, end);
// 2. 内部转换:DTO 列表 → Entity 列表(仅 Mapper 能处理 Entity)
List<QinceUserStatistic> batchEntityList = dtoListToEntityList(batchDtoList);
// 3. Mapper 执行批量插入(当前为循环单条,可优化为 MyBatis 批量 SQL)
for (QinceUserStatistic entity : batchEntityList) {
qinceUserStatisticMapper.insert(entity);
count++;
}
}
log.info("DAO层批量新增{}条考勤记录(DTO → Entity 转换后持久化)", count);
return count;
}
@Override
public int batchUpdate(List<QinceUserStatisticDTO> dtoList) {
int count = 0;
if (CollectionUtils.isEmpty(dtoList)) {
return count;
}
// 分批处理,提升执行效率
for (int i = 0; i < dtoList.size(); i += BATCH_SIZE) {
int end = Math.min(i + BATCH_SIZE, dtoList.size());
List<QinceUserStatisticDTO> batchDtoList = dtoList.subList(i, end);
// 内部转换:DTO 列表 → Entity 列表(需包含主键 ID,否则更新失败)
List<QinceUserStatistic> batchEntityList = dtoListToEntityList(batchDtoList);
// Mapper 执行批量更新(基于主键 ID,当前为循环单条,可优化为 MyBatis 批量 SQL)
for (QinceUserStatistic entity : batchEntityList) {
if (entity.getId() != null) {
qinceUserStatisticMapper.updateById(entity);
count++;
} else {
log.warn("考勤记录无主键 ID,跳过更新:qcUserId={}", entity.getQcUserId());
}
}
}
log.info("DAO层批量更新{}条考勤记录(DTO → Entity 转换后持久化)", count);
return count;
}
}
\ No newline at end of file
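The batchInsert/batchUpdate comments above note that rows are currently written one by one and could be switched to a real batch. A minimal sketch of one common option, assuming a SqlSessionFactory bean (hypothetical here) is injected next to the mapper:

// Sketch only: true batch insert via a BATCH-mode SqlSession; flushStatements() pushes the
// accumulated inserts to the database once per BATCH_SIZE chunk.
private int batchInsertWithBatchExecutor(List<QinceUserStatistic> entityList) {
    int count = 0;
    try (org.apache.ibatis.session.SqlSession session =
            sqlSessionFactory.openSession(org.apache.ibatis.session.ExecutorType.BATCH, false)) {
        QinceUserStatisticMapper batchMapper = session.getMapper(QinceUserStatisticMapper.class);
        for (QinceUserStatistic entity : entityList) {
            batchMapper.insert(entity);
            if (++count % BATCH_SIZE == 0) {
                session.flushStatements();
            }
        }
        session.flushStatements();
        session.commit();
    }
    return count;
}

MyBatis-Plus's ServiceImpl#saveBatch offers a similar effect if a service layer is ever wrapped around this mapper.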
package com.sfa.job.domain.qince.entity;
import com.baomidou.mybatisplus.annotation.*;
import lombok.Data;
import java.io.Serializable;
import java.time.LocalDate;
import java.time.LocalDateTime;
@Data
@TableName("qince_user_statistic")
public class QinceUserStatistic implements Serializable {
private static final long serialVersionUID = 1L;
/**
* 主键ID 唯一标识
*/
@TableId(type = IdType.AUTO)
private Long id;
/**
* 部门名称
*/
@TableField("dept_name")
private String deptName;
/**
* 勤策用户ID
*/
@TableField("qc_user_id")
private Long qcUserId;
/**
* 性别
*/
@TableField("sex")
private String sex;
/**
* 用户名称
*/
@TableField("user_name")
private String userName;
/**
* 所属考勤组
*/
@TableField(value = "`group`")
private String group;
/**
* 用户工号
*/
@TableField("employee_code")
private String employeeCode;
/**
* 定位是否虚假模拟位置。0:非虚假模拟位置,1:虚假模拟位置
*/
@TableField("check_in_attd_lie_locate")
private Integer checkInAttdLieLocate;
/**
* 上班打卡位置
*/
@TableField("check_in_attd_address")
private String checkInAttdAddress;
/**
* 上班是否脱岗。0:正常,1:脱岗
*/
@TableField("check_in_attd_lc_error")
private Integer checkInAttdLcError;
/**
* 上班考勤状态。0:正常,1:迟到,2:异常
*/
@TableField("check_in_attd_status")
private Integer checkInAttdStatus;
/**
* 上班打卡时间
*/
@TableField("check_in_attd_time")
private LocalDateTime checkInAttdTime;
/**
* 考勤日期
*/
@TableField("att_date")
private LocalDate attDate;
/**
* 工作时长
*/
@TableField("work_time")
private Double workTime;
/**
* 备注信息
*/
@TableField("remarks")
private String remarks;
/**
* 下班定位是否虚假模拟位置。0:非虚假模拟位置,1:虚假模拟位置
*/
@TableField("check_out_attd_lie_locate")
private Integer checkOutAttdLieLocate;
/**
* 下班打卡位置
*/
@TableField("check_out_attd_address")
private String checkOutAttdAddress;
/**
* 下班是否脱岗。0:正常,1:脱岗
*/
@TableField("check_out_attd_lc_error")
private Integer checkOutAttdLcError;
/**
* 下班考勤状态。0:正常,1:迟到,2:异常
*/
@TableField("check_out_attd_status")
private Integer checkOutAttdStatus;
/**
* 下班考勤时间
*/
@TableField("check_out_attd_time")
private LocalDateTime checkOutAttdTime;
/**
* 删除标记。0:删除 1:正常
*/
@TableField(value = "is_delete")
private Integer isDelete = 1;
/**
* 创建时间
*/
@TableField(value = "create_time")
private LocalDateTime createTime;
/**
* 修改时间
*/
@TableField(value = "modify_time")
private LocalDateTime modifyTime;
}
\ No newline at end of file
package com.sfa.job.domain.qince.mapper;
import com.sfa.job.domain.qince.entity.QinceUserStatistic;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
/**
* @author 45810
* @description 针对表【qince_user_statistic(勤策考勤明细表- 每天同步三次)】的数据库操作Mapper
* @createDate 2026-01-28 15:38:34
* @Entity com.sfa.job.domain.qince.entity.QinceUserStatistic
*/
public interface QinceUserStatisticMapper extends BaseMapper<QinceUserStatistic> {
}
package com.sfa.job.pojo.feishu.response;
import lombok.Data;
import java.util.Date;
@Data
public class FeishuLeaveInfoDTO {
private Long id;
// 逻辑删除常量(优化)
// 未删除
public static final Integer IS_DELETE_NO = 1;
// 已删除
public static final Integer IS_DELETE_YES = 0;
// 业务字段
private String employmentId;
private String employmentName;
private String employmentNo;
private String endTime;
private String grantSource;
private String leaveDuration;
private Integer leaveDurationUnit;
private String leaveProcessId;
private String leaveRequestId;
private Integer leaveRequestStatus;
private String leaveTypeId;
private String leaveTypeName;
private String notes;
private String returnTime;
private String startTime;
private String submittedAt;
private String submittedBy;
private String timeZone;
private String leaveCorrectProcessId;
// 嵌套更正流程字段
private String processApplyTime;
private String processId;
private String processStatus;
// 系统字段
private Date syncCreateTime;
private Date syncUpdateTime;
private Integer isDelete;
}
\ No newline at end of file
package com.sfa.job.pojo.qince.response;
import lombok.Data;
import java.io.Serializable;
import java.time.LocalDate;
import java.time.LocalDateTime;
/**
* 勤策考勤明细表 DTO 类
* 对应表:qince_user_statistic
* 表注释:勤策考勤明细表- 每天同步三次
*
* @author douxinyu
* @date 2026-01-28
*/
@Data
public class QinceUserStatisticDTO implements Serializable {
private static final long serialVersionUID = 1L;
/**
* 主键ID 唯一标识
*/
private Long id;
/**
* 部门名称
*/
private String deptName;
/**
* 勤策用户ID
*/
private Long qcUserId;
/**
* 性别
*/
private String sex;
/**
* 用户名称
*/
private String userName;
/**
* 所属考勤组
*/
private String group;
/**
* 用户工号
*/
private String employeeCode;
/**
* 定位是否虚假模拟位置。0:非虚假模拟位置,1:虚假模拟位置
*/
private Integer checkInAttdLieLocate;
/**
* 上班打卡位置
*/
private String checkInAttdAddress;
/**
* 上班是否脱岗。0:正常,1:脱岗
*/
private Integer checkInAttdLcError;
/**
* 上班考勤状态。0:正常,1:迟到,2:异常
*/
private Integer checkInAttdStatus;
/**
* 上班打卡时间
*/
private LocalDateTime checkInAttdTime;
/**
* 考勤日期
*/
private LocalDate attDate;
/**
* 工作时长
*/
private Double workTime;
/**
* 备注信息
*/
private String remarks;
/**
* 下班定位是否虚假模拟位置。0:非虚假模拟位置,1:虚假模拟位置
*/
private Integer checkOutAttdLieLocate;
/**
* 下班打卡位置
*/
private String checkOutAttdAddress;
/**
* 下班是否脱岗。0:正常,1:脱岗
*/
private Integer checkOutAttdLcError;
/**
* 下班考勤状态。0:正常,1:迟到,2:异常
*/
private Integer checkOutAttdStatus;
/**
* 下班考勤时间
*/
private LocalDateTime checkOutAttdTime;
/**
* 删除标记。0:删除 1:正常
*/
private Integer isDelete = 1;
/**
* 创建时间
*/
private LocalDateTime createTime;
/**
* 修改时间
*/
private LocalDateTime modifyTime;
}
\ No newline at end of file
package com.sfa.job.service.feishu;
import com.baomidou.dynamic.datasource.annotation.DS;
import com.sfa.job.domain.feishu.dao.FeishuLeaveInfoDao;
import com.sfa.job.pojo.feishu.response.FeishuLeaveInfoDTO;
import org.springframework.stereotype.Service;
import javax.annotation.Resource;
/**
* Service实现:仅简单调用Dao方法,无任何核心业务逻辑
*/
@Service
@DS("bi")
public class FeishuLeaveInfoServiceImpl implements IFeishuLeaveInfoService {
@Resource
private FeishuLeaveInfoDao feishuLeaveInfoDao;
@Override
public String syncFeishuLeaveData(String syncDate) {
return feishuLeaveInfoDao.syncFeishuLeaveData(syncDate);
}
@Override
public FeishuLeaveInfoDTO getByLeaveRequestId(String leaveRequestId) {
return feishuLeaveInfoDao.selectByLeaveRequestId(leaveRequestId);
}
}
\ No newline at end of file
package com.sfa.job.service.feishu;
import com.sfa.job.pojo.feishu.response.FeishuLeaveInfoDTO;
/**
* Service接口:仅定义调用方法,无核心逻辑
*/
public interface IFeishuLeaveInfoService {
/**
* 同步飞书请假数据(调用Dao,无核心逻辑)
*/
String syncFeishuLeaveData(String syncDate);
/**
* 根据请假唯一ID查询(调用Dao)
*/
FeishuLeaveInfoDTO getByLeaveRequestId(String leaveRequestId);
}
\ No newline at end of file
package com.sfa.job.service.qince;
import com.alibaba.fastjson.JSONObject;
/**
* @Author: DouXinYu
* @Date: 2026-01-28 10:54
* @Description: 勤策用户考勤明细服务类
*/
public interface IQinceUserStatisticService {
JSONObject getUserStatistic(String startDate, String endDate, Integer page, Integer size) throws Exception;
/**
* 查询并保存前一天的勤策考勤数据(新增/修改判断)
* @return 处理的记录数
* @throws Exception 异常
*/
int queryAndSaveYesterdayAttendance() throws Exception;
/**
* 查询并保存当天的勤策考勤数据(新增/修改判断)
* @return 处理的记录数
* @throws Exception 异常
*/
int queryAndSaveTodayAttendance() throws Exception;
}
package com.sfa.job.service.qince.impl;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.baomidou.dynamic.datasource.annotation.DS;
import com.sfa.job.domain.qince.dao.IQinceUserStatisticDao;
import com.sfa.job.pojo.qince.response.QinceUserStatisticDTO;
import com.sfa.job.service.qince.IQinceUserStatisticService;
import com.sfa.job.util.QinCeUtils;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.util.CollectionUtils;
import org.springframework.util.StringUtils;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.time.format.DateTimeParseException;
import java.time.temporal.TemporalAdjusters;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
/**
* 勤策考勤同步实现类(全新版:全程 Java8+ 日期 API,无 Date 类型,过滤employee_code>=11位的数据不入库)
*/
@Slf4j
@Service
@DS("bi")
public class QinceUserStatisticServiceImpl implements IQinceUserStatisticService {
@Autowired
private QinCeUtils qinCeUtils;
@Autowired
private IQinceUserStatisticDao qinceUserStatisticDao;
// 配置项
private static final int API_MAX_SIZE = 10000;
// 工号过滤阈值:>=该长度则过滤不入库
private static final int EMPLOYEE_CODE_FILTER_MIN_LENGTH = 11;
// Java8+ 全局日期格式化器(线程安全,无需 ThreadLocal 包装)
private static final DateTimeFormatter DATE_FORMATTER = DateTimeFormatter.ofPattern("yyyy-MM-dd");
private static final DateTimeFormatter DATETIME_FORMATTER = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss");
// 系统默认时区(全局复用,避免多次创建)
private static final ZoneId DEFAULT_ZONE = ZoneId.systemDefault();
@Override
public JSONObject getUserStatistic(String startDate, String endDate, Integer page, Integer size) throws Exception {
// 注:入参 page/size 仅保留签名兼容,方法内部始终从第1页起按 API_MAX_SIZE 分页拉取全量数据
int currentPage = 1;
JSONArray allUserArray = new JSONArray();
while (true) {
JSONObject pageResult = qinCeUtils.getUserStatistic(startDate, endDate, currentPage, API_MAX_SIZE);
JSONObject dataObj = null;
// 先以Object类型获取,避免强制转换报错
Object responseData = pageResult.get("response_data");
if (responseData != null) {
if (responseData instanceof JSONObject) {
dataObj = (JSONObject) responseData;
} else if (responseData instanceof String) {
String responseDataStr = (String) responseData;
if (StringUtils.hasText(responseDataStr)) {
try {
// 先尝试解析为 JSONObject
dataObj = JSONObject.parseObject(responseDataStr);
} catch (Exception e) {
log.warn("response_data 字符串无法解析为 JSONObject,尝试解析为 JSONArray,字符串内容:{}", responseDataStr.substring(0, Math.min(responseDataStr.length(), 200)));
// 如果解析 JSONObject 失败,尝试解析为 JSONArray(若接口直接返回数组)
JSONArray tempArray = JSONArray.parseArray(responseDataStr);
// 封装为 JSONObject(保持后续逻辑一致,data 字段对应该数组)
dataObj = new JSONObject();
dataObj.put("data", tempArray);
}
}
} else {
// 情况3:其他类型,直接封装为 JSONObject(避免后续空指针)
log.warn("response_data 为不支持的类型:{}", responseData.getClass().getName());
dataObj = new JSONObject();
}
}
// 从 dataObj 中获取 data 数组
JSONArray data = dataObj == null ? new JSONArray() : dataObj.getJSONArray("data");
if (CollectionUtils.isEmpty(data)) {
log.info("第{}页无数据,全量数据拉取完成,共{}个用户", currentPage, allUserArray.size());
break;
}
allUserArray.addAll(data);
log.debug("拉取第{}页,新增{}用户,累计{}用户", currentPage, data.size(), allUserArray.size());
if (data.size() < API_MAX_SIZE) {
log.info("第{}页数据不足{}条,判定为最后一页", currentPage, API_MAX_SIZE);
break;
}
currentPage++;
}
// 构造统一返回结果(兼容上层调用,自定义 response_data 字段)
JSONObject totalResult = new JSONObject();
totalResult.put("response_data", allUserArray);
totalResult.put("total", allUserArray.size());
return totalResult;
}
/**
* 同步【前一天】全量考勤数据 - 事务保证原子性(全程 LocalDate,无精度问题)
*/
@Override
@Transactional(rollbackFor = Exception.class)
public int queryAndSaveYesterdayAttendance() {
LocalDate yesterday = LocalDate.now().minusDays(1);
log.info("当前调用-目标同步前一天自然日:{}", yesterday.format(DATE_FORMATTER));
return syncAttendanceByTargetDate(yesterday);
}
/**
* 同步【当天】全量考勤数据 - 事务保证原子性(全程 LocalDate,无精度问题)
*/
@Override
@Transactional(rollbackFor = Exception.class)
public int queryAndSaveTodayAttendance() {
LocalDate today = LocalDate.now();
log.info("当前调用-目标同步当天自然日:{}", today.format(DATE_FORMATTER));
return syncAttendanceByTargetDate(today);
}
/**
* 核心同步方法:按指定日期同步,全程操作 DTO(全程 LocalDate/LocalDateTime,无 Date)
*/
private int syncAttendanceByTargetDate(LocalDate targetLocalDate) {
String targetDateStr = targetLocalDate.format(DATE_FORMATTER);
log.info("===== 开始同步{}勤策考勤数据 =====", targetDateStr);
try {
JSONObject fullData = this.getUserStatistic(targetDateStr, targetDateStr, 1, API_MAX_SIZE);
JSONArray allUserArray = fullData.getJSONArray("response_data");
if (CollectionUtils.isEmpty(allUserArray)) {
log.info("===== {}无任何用户考勤数据,同步结束 =====", targetDateStr);
return 0;
}
List<QinceUserStatisticDTO> attendanceDtoList = convertToAttendanceDTO(allUserArray, targetLocalDate);
if (CollectionUtils.isEmpty(attendanceDtoList)) {
log.info("===== {}无有效考勤记录生成,同步结束 =====", targetDateStr);
return 0;
}
log.info("{}考勤DTO转换完成,共生成{}条有效记录(已过滤工号>=11位数据)", targetDateStr, attendanceDtoList.size());
// 调用 DAO 层批量新增/更新(全程 DTO 交互,无 Entity 暴露)
return batchSaveOrUpdate(attendanceDtoList);
} catch (Exception e) {
log.error("===== {}考勤数据同步失败 =====", targetDateStr, e);
return 0;
}
}
/**
* 转换为 DTO:全程 Java8+ 日期类型,新增工号>=11位过滤逻辑,严格匹配 DTO 结构
*/
private List<QinceUserStatisticDTO> convertToAttendanceDTO(JSONArray allUserArray, LocalDate targetLocalDate) {
List<QinceUserStatisticDTO> resultList = new ArrayList<>();
String targetDateStr = targetLocalDate.format(DATE_FORMATTER);
LocalDateTime now = LocalDateTime.now();
for (int i = 0; i < allUserArray.size(); i++) {
try {
JSONObject userJson = allUserArray.getJSONObject(i);
// 核心校验1:userId 为空则跳过(唯一标识,无则无效)
String userIdStr = getStringValue(userJson, "userId");
if (!StringUtils.hasText(userIdStr)) {
log.warn("当前调用-第{}条数据被过滤:无userId", i + 1);
continue;
}
QinceUserStatisticDTO dto = new QinceUserStatisticDTO();
// 基础字段赋值(严格匹配 DTO 字段名和类型)
dto.setQcUserId(Long.valueOf(userIdStr));
dto.setUserName(getStringValue(userJson, "userName"));
dto.setDeptName(getStringValue(userJson, "deptName"));
dto.setSex(getStringValue(userJson, "sex"));
dto.setGroup(getStringValue(userJson, "group"));
// 工号赋值:直接赋值字符串,保留前置零
String employeeCode = getStringValue(userJson, "code");
dto.setEmployeeCode(employeeCode);
// 工号不为空 且 长度>=11位 → 直接过滤,不加入结果集
if (StringUtils.hasText(employeeCode) && employeeCode.length() >= EMPLOYEE_CODE_FILTER_MIN_LENGTH) {
log.warn("当前调用-第{}条数据被过滤:工号[{}]长度为{}位,>=11位不入库",
i + 1, employeeCode, employeeCode.length());
continue;
}
// 自动填充字段:isDelete 已在 DTO 中默认赋值为 1,创建/修改时间赋值 LocalDateTime
dto.setCreateTime(now);
dto.setModifyTime(now);
// 初始化考勤核心字段默认值
dto.setWorkTime(null);
dto.setRemarks(null);
// 解析 attendance 数组(接口保证单日单用户仅 1 条)
JSONArray attendanceArray = userJson.getJSONArray("attendance");
if (!CollectionUtils.isEmpty(attendanceArray)) {
JSONObject attJson = attendanceArray.getJSONObject(0);
// 解析 attDate 为 LocalDate(纯日期,用于对比和赋值)
LocalDate attLocalDate = parseStrToLocalDate(attJson, "attDate");
if (attLocalDate != null) {
// 核心校验3:纯日期对比(无时分秒干扰,精准匹配目标日期)
if (targetLocalDate.equals(attLocalDate)) {
// 填充考勤核心字段
dto.setWorkTime(parseStrToDouble(attJson, "workTime"));
dto.setRemarks(getStringValue(attJson, "remarks"));
// 给 DTO 的 attDate 赋值(解决数据库 att_date 为 null 的核心)
dto.setAttDate(attLocalDate);
// 填充上班打卡信息(全程 LocalDateTime,匹配 DTO 字段)
if (attJson.containsKey("checkin") && !attJson.getJSONArray("checkin").isEmpty()) {
JSONObject checkin = attJson.getJSONArray("checkin").getJSONObject(0);
fillCheckInInfo(dto, checkin);
}
// 填充下班打卡信息(全程 LocalDateTime,匹配 DTO 字段)
if (attJson.containsKey("checkout") && !attJson.getJSONArray("checkout").isEmpty()) {
JSONObject checkout = attJson.getJSONArray("checkout").getJSONObject(0);
fillCheckOutInfo(dto, checkout);
}
// 符合所有条件的记录添加到结果集
resultList.add(dto);
} else {
log.warn("当前调用-第{}条数据被过滤:attDate自然日不匹配(目标:{},实际:{})",
i + 1, targetDateStr, attLocalDate.format(DATE_FORMATTER));
}
} else {
log.warn("当前调用-第{}条数据被过滤:attDate解析失败或为空", i + 1);
}
} else {
log.warn("当前调用-第{}条数据被过滤:无attendance考勤数组", i + 1);
}
} catch (Exception e) {
log.error("当前调用-第{}条数据转换失败,跳过", i + 1, e);
continue;
}
}
log.info("当前调用-DTO转换完成,有效自然日考勤数据量:{}(已过滤工号>=11位数据)", resultList.size());
return resultList;
}
/**
* 填充上班打卡信息:全程 LocalDateTime,严格匹配 DTO 字段类型,无类型转换异常
*/
private void fillCheckInInfo(QinceUserStatisticDTO dto, JSONObject checkin) {
dto.setCheckInAttdLieLocate(getIntegerValue(checkin, "attdLieLocate"));
dto.setCheckInAttdAddress(getStringValue(checkin, "attdAddress"));
dto.setCheckInAttdLcError(getIntegerValue(checkin, "attdLcError"));
dto.setCheckInAttdStatus(getIntegerValue(checkin, "attdStatus"));
// 直接解析为 LocalDateTime,匹配 DTO 字段,无中间转换
dto.setCheckInAttdTime(parseStrToLocalDateTime(checkin, "attdTime"));
}
/**
* 填充下班打卡信息:全程 LocalDateTime,严格匹配 DTO 字段类型,无类型转换异常
*/
private void fillCheckOutInfo(QinceUserStatisticDTO dto, JSONObject checkout) {
dto.setCheckOutAttdLieLocate(getIntegerValue(checkout, "attdLieLocate"));
dto.setCheckOutAttdAddress(getStringValue(checkout, "attdAddress"));
dto.setCheckOutAttdLcError(getIntegerValue(checkout, "attdLcError"));
dto.setCheckOutAttdStatus(getIntegerValue(checkout, "attdStatus"));
// 直接解析为 LocalDateTime,匹配 DTO 字段,无中间转换
dto.setCheckOutAttdTime(parseStrToLocalDateTime(checkout, "attdTime"));
}
/**
* 批量新增/更新:全程 DTO 操作,Java8+ 日期类型,无 Date 依赖
*/
private int batchSaveOrUpdate(List<QinceUserStatisticDTO> attendanceDtoList) {
// 1. 调用 DAO 层,获取已存在的 DTO 映射(无 Entity 暴露)
Map<String, QinceUserStatisticDTO> existDtoMap = buildExistAttendanceMap(attendanceDtoList);
// 2. 拆分新增/更新列表
List<QinceUserStatisticDTO> insertList = new ArrayList<>();
List<QinceUserStatisticDTO> updateList = new ArrayList<>();
LocalDateTime now = LocalDateTime.now();
for (QinceUserStatisticDTO dto : attendanceDtoList) {
LocalDate attLocalDate = dto.getAttDate();
if (attLocalDate == null) {
log.warn("当前记录attDate为空,跳过批量操作:qcUserId={}", dto.getQcUserId());
continue;
}
String targetDateStr = attLocalDate.format(DATE_FORMATTER);
String uniqueKey = dto.getQcUserId() + "_" + targetDateStr;
if (existDtoMap.containsKey(uniqueKey)) {
QinceUserStatisticDTO existDto = existDtoMap.get(uniqueKey);
dto.setId(existDto.getId());
dto.setCreateTime(existDto.getCreateTime());
dto.setModifyTime(now);
updateList.add(dto);
} else {
// 不存在:直接新增(createTime/modifyTime 已赋值)
insertList.add(dto);
}
}
// 调用 DAO 层执行批量操作(纯 DTO 入参,无需关心持久化细节)
int insertCount = qinceUserStatisticDao.batchInsert(insertList);
int updateCount = qinceUserStatisticDao.batchUpdate(updateList);
int totalCount = insertCount + updateCount;
log.info("批量操作完成:新增{}条,更新{}条,总计{}条(已过滤工号>=11位数据)", insertCount, updateCount, totalCount);
return totalCount;
}
/**
* 构建已存在记录的映射:调用 DAO 层方法,返回纯 DTO 映射
*/
private Map<String, QinceUserStatisticDTO> buildExistAttendanceMap(List<QinceUserStatisticDTO> attendanceDtoList) {
List<Long> userIdList = attendanceDtoList.stream()
.map(QinceUserStatisticDTO::getQcUserId)
.distinct()
.collect(Collectors.toList());
List<LocalDate> dateList = attendanceDtoList.stream()
.map(QinceUserStatisticDTO::getAttDate)
.distinct()
.filter(java.util.Objects::nonNull)
.collect(Collectors.toList());
// 调用 DAO 层,获取纯 DTO 映射(DAO 层已同步修改为 LocalDate 入参)
return qinceUserStatisticDao.buildExistAttendanceMap(userIdList, dateList);
}
// ===================== 基础类型转换工具方法(全程 Java8+ 日期 API,无 Date,空值兼容+异常捕获) =====================
private String getStringValue(JSONObject json, String key) {
try {
String value = json.getString(key);
return StringUtils.hasText(value) ? value.trim() : null;
} catch (Exception e) {
return null;
}
}
private Integer getIntegerValue(JSONObject json, String key) {
try {
String value = json.getString(key);
return StringUtils.hasText(value) ? Integer.parseInt(value.trim()) : null;
} catch (Exception e) {
log.warn("数字字符串解析失败,key={}, 异常:", key, e);
return null;
}
}
/**
* 解析字符串为 LocalDate(纯日期,yyyy-MM-dd,适配 DTO 的 attDate 字段)
*/
private LocalDate parseStrToLocalDate(JSONObject json, String key) {
try {
String value = getStringValue(json, key);
if (value == null) {
return null;
}
return LocalDate.parse(value, DATE_FORMATTER);
} catch (DateTimeParseException e) {
log.warn("日期字符串解析为 LocalDate 失败,key={}, 异常:", key, e);
return null;
} catch (Exception e) {
log.warn("日期转换为 LocalDate 未知异常,key={}, 异常:", key, e);
return null;
}
}
/**
* 解析字符串为 LocalDateTime(带时分秒,yyyy-MM-dd HH:mm:ss,适配 DTO 的打卡时间字段)
*/
private LocalDateTime parseStrToLocalDateTime(JSONObject json, String key) {
try {
String value = getStringValue(json, key);
if (value == null) {
return null;
}
// 先尝试解析为完整日期时间
return LocalDateTime.parse(value, DATETIME_FORMATTER);
} catch (DateTimeParseException e) {
try {
// 若只有日期,补全时分秒为 00:00:00
LocalDate localDate = LocalDate.parse(getStringValue(json, key), DATE_FORMATTER);
return localDate.atStartOfDay();
} catch (DateTimeParseException ex) {
log.warn("日期字符串解析为 LocalDateTime 失败,key={}, 异常:", key, ex);
return null;
}
} catch (Exception e) {
log.warn("日期转换为 LocalDateTime 未知异常,key={}, 异常:", key, e);
return null;
}
}
/**
* 解析字符串为 Double(适配 workTime,避免转换异常)
*/
private Double parseStrToDouble(JSONObject json, String key) {
try {
String value = getStringValue(json, key);
if (value != null) {
return Double.valueOf(value);
}
} catch (Exception e) {
log.warn("数字字符串解析为 Double 失败,key={}, 异常:", key, e);
}
return null;
}
/**
* 测试专用:同步当前年份1月份全量数据(无参快捷调用)
* @return 总计同步记录数
*/
public int testSyncCurrentYearJanuaryAttendance() {
return testSyncJanuaryAttendance(LocalDate.now().getYear());
}
/**
* 测试专用:同步指定年份1月份全量数据
* @param year 目标年份,如2026
* @return 总计同步记录数
*/
public int testSyncJanuaryAttendance(int year) {
log.info("===== 【测试模式】开始同步{}年1月份全量勤策考勤数据 =====", year);
int totalCount = 0;
// 构造1月1日和1月最后一天
LocalDate janFirst = LocalDate.of(year, 1, 1);
LocalDate janLast = janFirst.with(TemporalAdjusters.lastDayOfMonth());
// 遍历1月份所有日期,循环同步(添加休眠避免API限流)
LocalDate currentDate = janFirst;
while (!currentDate.isAfter(janLast)) {
try {
log.info("【测试模式】开始同步{}", currentDate.format(DATE_FORMATTER));
// 复用原有核心同步方法,无需重复写逻辑
int dayCount = syncAttendanceByTargetDate(currentDate);
totalCount += dayCount;
log.info("【测试模式】完成同步{},当日同步{}条,累计{}条",
currentDate.format(DATE_FORMATTER), dayCount, totalCount);
// 休眠1秒,避免触发勤策API限流(可根据实际情况调整)
Thread.sleep(1000);
} catch (Exception e) {
log.error("【测试模式】同步{}失败,跳过该日期", currentDate.format(DATE_FORMATTER), e);
} finally {
// 日期+1,无论当日是否失败,继续下一天
currentDate = currentDate.plusDays(1);
}
}
log.info("===== 【测试模式】{}年1月份全量数据同步完成,总计同步{}条 =====", year, totalCount);
return totalCount;
}
}
\ No newline at end of file
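One detail worth keeping aligned: both QinceUserStatisticDaoImpl#buildExistAttendanceMap and batchSaveOrUpdate above derive the dedup key as qcUserId + "_" + yyyy-MM-dd; if the two formats ever drift apart, every row silently falls into the insert branch. A hypothetical shared helper would pin the format in one place:

// Sketch: hypothetical shared key builder used by both the DAO and the service.
public static String attendanceKey(Long qcUserId, LocalDate attDate) {
    return qcUserId + "_" + attDate.format(DateTimeFormatter.ofPattern("yyyy-MM-dd"));
}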
@@ -51,12 +51,14 @@ public class QinCeUtils {
// 修改人员
public static final String MODIFY_USER = "/api/employee/v3/modifyEmployee/";
// 商品信息查询
public static final String QUERY_PRODUCT_INFO = "/api/product/v1/queryProduct/";
// 商品价格更新
public static final String MODIFY_PRODUCT_PRICE = "/api/pd/v2/modifyProduct/";
// 获取考勤明细统计数据接口
public static final String GET_USER_STATISTIC = "/api/attStatistics/v1/getUserStatistic/";
public String builderUrl(String sidepath, Map<String, Object> params) {
String msgId = UUID.randomUUID().toString();
@@ -196,4 +198,43 @@ public class QinCeUtils {
return false;
}
}
/**
* 构建获取考勤明细的请求参数
* @param startDate 开始日期(格式:yyyy-MM-dd)
* @param endDate 结束日期(格式:yyyy-MM-dd)
* @param page 当前第几页(非必填,默认1)
* @param size 当前页记录条数(非必填)
* @return 符合接口要求的参数Map
*/
public Map<String, Object> getUserStatisticParams(String startDate, String endDate, Integer page, Integer size) {
Map<String, Object> params = new HashMap<>();
// 必传参数:开始日期、结束日期
params.put("startDate", startDate);
params.put("endDate", endDate);
// 非必传参数:页码、每页条数(为空则不设置)
if (page != null) {
params.put("page", page);
}
if (size != null) {
params.put("size", size);
}
return params;
}
/**
* 调用勤策获取考勤明细统计数据接口
* @param startDate 开始日期(yyyy-MM-dd)
* @param endDate 结束日期(yyyy-MM-dd)
* @param page 页码(可为null,默认1)
* @param size 每页条数(可为null)
* @return 考勤明细数据JSON数组
* @throws Exception 接口调用异常(如网络错误、返回码非0等)
*/
public JSONObject getUserStatistic(String startDate, String endDate, Integer page, Integer size) throws Exception {
Map<String, Object> params = getUserStatisticParams(startDate, endDate, page, size);
String url = builderUrl(GET_USER_STATISTIC, params);
log.info("调用勤策考勤明细接口,URL:{},参数:{}", url, params);
return postQC(url, params);
}
}
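For reference, a minimal usage sketch of the new QinCeUtils#getUserStatistic helper (dates in yyyy-MM-dd; page and size may be null, page defaulting to 1 on the Qince side):

// Sketch: fetch attendance detail for a single day. The shape of "response_data" varies
// (object / JSON string / array) and is normalized in
// QinceUserStatisticServiceImpl#getUserStatistic before use.
JSONObject raw = qinCeUtils.getUserStatistic("2026-01-28", "2026-01-28", 1, 10000);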
package com.sfa.job.xxljob.feishu;
import com.xxl.job.core.context.XxlJobHelper;
import com.xxl.job.core.handler.annotation.XxlJob;
import com.sfa.job.service.feishu.IFeishuLeaveInfoService;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import java.time.LocalDate;
import java.time.format.DateTimeFormatter;
/**
* 飞书请假数据XXL-JOB定时任务类(适配实际Service:syncFeishuLeaveData(String syncDate))
* 任务:每天00:02同步前一天的请假数据
*/
@Slf4j
@Component
public class FeiShuLeaveXxlJob {
@Autowired
private IFeishuLeaveInfoService feishuLeaveInfoService;
// 日期格式化器(与勤策保持一致,适配 syncDate 入参:yyyy-MM-dd)
private static final DateTimeFormatter DATE_FORMATTER = DateTimeFormatter.ofPattern("yyyy-MM-dd");
/**
* 飞书请假数据同步任务(XXL-JOB核心方法)
* 调度配置:每天00:02执行(Cron表达式:0 2 0 * * ?)
*/
@XxlJob("feiShuLeaveSyncJob")
public void feiShuLeaveSyncJob() {
// 任务日志记录(XXL-JOB控制台可见)
XxlJobHelper.log("===== 开始执行飞书请假数据同步任务(同步前一天数据) =====");
// 获取前一天日期(格式:yyyy-MM-dd),适配Service入参要求
LocalDate yesterday = LocalDate.now().minusDays(1);
String syncDate = yesterday.format(DATE_FORMATTER);
XxlJobHelper.log("===== 本次同步目标日期:{} =====", syncDate);
try {
// 调用飞书请假同步方法(入参:前一天日期字符串)
String syncResult = feishuLeaveInfoService.syncFeishuLeaveData(syncDate);
// 任务结果日志(适配Service返回的字符串结果)
XxlJobHelper.log("===== 飞书请假数据同步任务执行完成,同步结果:{} =====", syncResult);
// 标记任务执行成功(XXL-JOB状态回调)
XxlJobHelper.handleSuccess("飞书请假数据同步成功,同步结果:" + syncResult);
} catch (Exception e) {
// 异常处理与日志记录
log.error("===== 飞书请假数据同步任务执行失败 =====", e);
XxlJobHelper.log("===== 飞书请假数据同步任务执行失败,异常信息:{} =====", e.getMessage());
// 标记任务执行失败(XXL-JOB状态回调)
XxlJobHelper.handleFail("飞书请假数据同步失败,异常信息:" + e.getMessage());
}
}
}
\ No newline at end of file
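A hedged variation for back-filling: XXL-JOB handlers can read the optional job parameter via XxlJobHelper.getJobParam(), so the same handler could accept an explicit yyyy-MM-dd date and fall back to yesterday when the parameter is blank or malformed (sketch only, not part of this MR):

// Sketch: optionally accept an explicit sync date from the XXL-JOB parameter box.
String jobParam = XxlJobHelper.getJobParam();
String syncDate = (jobParam != null && jobParam.trim().matches("^\\d{4}-\\d{2}-\\d{2}$"))
        ? jobParam.trim()
        : LocalDate.now().minusDays(1).format(DATE_FORMATTER);
String syncResult = feishuLeaveInfoService.syncFeishuLeaveData(syncDate);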
package com.sfa.job.xxljob.qince;
import com.xxl.job.core.context.XxlJobHelper;
import com.xxl.job.core.handler.annotation.XxlJob;
import com.sfa.job.service.qince.IQinceUserStatisticService;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
/**
* 勤策考勤数据XXL-JOB定时任务类
*/
@Slf4j
@Component
public class QinCeAttendanceXxlJob {
@Autowired
private IQinceUserStatisticService qinceUserStatisticService;
/**
* 勤策考勤数据同步任务(XXL-JOB核心方法)
* 调度配置:每天23:00执行(Cron表达式:0 0 23 * * ?)
*/
@XxlJob("qinCeAttendanceSyncTodayJob")
public void qinCeAttendanceSyncTodayJob() {
XxlJobHelper.log("===== 开始执行勤策考勤数据同步任务(同步当天数据) =====");
try {
// 调用已实现的同步当天考勤数据方法
int syncCount = qinceUserStatisticService.queryAndSaveTodayAttendance();
// 任务结果日志
XxlJobHelper.log("===== 勤策考勤数据同步任务执行完成,总计同步/更新{}条记录 =====", syncCount);
// 标记任务执行成功(XXL-JOB状态回调)
XxlJobHelper.handleSuccess("勤策考勤数据同步成功,总计同步/更新" + syncCount + "条记录");
} catch (Exception e) {
log.error("===== 勤策考勤数据同步任务执行失败 =====", e);
XxlJobHelper.log("===== 勤策考勤数据同步任务执行失败,异常信息:{} =====", e.getMessage());
XxlJobHelper.handleFail("勤策考勤数据同步失败,异常信息:" + e.getMessage());
}
}
/**
* 勤策考勤数据同步任务(XXL-JOB核心方法)
* 调度配置:每天00:02执行(Cron表达式:0 2 0 * * ?)
*/
@XxlJob("qinCeAttendanceSyncYesterdayJob")
public void qinCeAttendanceSyncYesterdayJob() {
XxlJobHelper.log("===== 开始执行勤策考勤数据同步任务(同步前一天数据) =====");
try {
// 调用已实现的同步前一天考勤数据方法
int syncCount = qinceUserStatisticService.queryAndSaveYesterdayAttendance();
// 任务结果日志
XxlJobHelper.log("===== 勤策考勤数据同步任务执行完成,总计同步/更新{}条记录 =====", syncCount);
// 标记任务执行成功(XXL-JOB状态回调)
XxlJobHelper.handleSuccess("勤策考勤数据同步成功,总计同步/更新" + syncCount + "条记录");
} catch (Exception e) {
// 异常处理与日志记录
log.error("===== 勤策考勤数据同步任务执行失败 =====", e);
XxlJobHelper.log("===== 勤策考勤数据同步任务执行失败,异常信息:{} =====", e.getMessage());
// 标记任务执行失败(XXL-JOB状态回调)
XxlJobHelper.handleFail("勤策考勤数据同步失败,异常信息:" + e.getMessage());
}
}
}
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN" "http://mybatis.org/dtd/mybatis-3-mapper.dtd">
<mapper namespace="com.sfa.job.domain.feishu.mapper.FeishuLeaveInfoMapper">
<!-- 基础结果集映射:匹配实体类字段(驼峰)与数据库表字段(下划线) -->
<resultMap id="BaseResultMap" type="com.sfa.job.domain.feishu.entity.FeishuLeaveInfo">
<id column="id" property="id" jdbcType="BIGINT"/>
<result column="employment_id" property="employmentId" jdbcType="VARCHAR"/>
<result column="employment_name" property="employmentName" jdbcType="VARCHAR"/>
<result column="employment_no" property="employmentNo" jdbcType="VARCHAR"/>
<result column="end_time" property="endTime" jdbcType="VARCHAR" />
<result column="grant_source" property="grantSource" jdbcType="VARCHAR"/>
<result column="leave_duration" property="leaveDuration" jdbcType="VARCHAR"/>
<result column="leave_duration_unit" property="leaveDurationUnit" jdbcType="INTEGER"/>
<result column="leave_process_id" property="leaveProcessId" jdbcType="VARCHAR" />
<result column="leave_request_id" property="leaveRequestId" jdbcType="VARCHAR" />
<result column="leave_request_status" property="leaveRequestStatus" jdbcType="INTEGER"/>
<result column="leave_type_id" property="leaveTypeId" jdbcType="VARCHAR" />
<result column="leave_type_name" property="leaveTypeName" jdbcType="VARCHAR" />
<result column="notes" property="notes" jdbcType="VARCHAR" />
<result column="return_time" property="returnTime" jdbcType="VARCHAR" />
<result column="start_time" property="startTime" jdbcType="VARCHAR" />
<result column="submitted_at" property="submittedAt" jdbcType="VARCHAR" />
<result column="submitted_by" property="submittedBy" jdbcType="VARCHAR" />
<result column="time_zone" property="timeZone" jdbcType="VARCHAR" />
<result column="leave_correct_process_id" property="leaveCorrectProcessId" jdbcType="VARCHAR" />
<result column="process_apply_time" property="processApplyTime" jdbcType="VARCHAR" />
<result column="process_id" property="processId" jdbcType="VARCHAR" />
<result column="process_status" property="processStatus" jdbcType="VARCHAR" />
<result column="sync_create_time" property="syncCreateTime" jdbcType="TIMESTAMP" />
<result column="sync_update_time" property="syncUpdateTime" jdbcType="TIMESTAMP" />
<result column="is_delete" property="isDelete" jdbcType="TINYINT" />
</resultMap>
<sql id="Base_Column_List">
employment_id, employment_name, employment_no, end_time, grant_source,
leave_duration, leave_duration_unit, leave_process_id, leave_request_id,
leave_request_status, leave_type_id, leave_type_name, notes, return_time,
start_time, submitted_at, submitted_by, time_zone, leave_correct_process_id,
process_apply_time, process_id, process_status, sync_create_time, sync_update_time,
is_delete
</sql>
</mapper>
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE mapper
PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN"
"http://mybatis.org/dtd/mybatis-3-mapper.dtd">
<mapper namespace="com.sfa.job.domain.qince.mapper.QinceUserStatisticMapper">
<resultMap id="BaseResultMap" type="com.sfa.job.domain.qince.entity.QinceUserStatistic">
<id property="id" column="id" />
<result property="deptName" column="dept_name" />
<result property="qcUserId" column="qc_user_id" />
<result property="sex" column="sex" />
<result property="userName" column="user_name" />
<result property="group" column="group" />
<result property="employeeCode" column="employee_code" />
<result property="checkInAttdLieLocate" column="check_in_attd_lie_locate" />
<result property="checkInAttdAddress" column="check_in_attd_address" />
<result property="checkInAttdLcError" column="check_in_attd_lc_error" />
<result property="checkInAttdStatus" column="check_in_attd_status" />
<result property="checkInAttdTime" column="check_in_attd_time" />
<result property="attDate" column="att_date" />
<result property="workTime" column="work_time" />
<result property="remarks" column="remarks" />
<result property="checkOutAttdLieLocate" column="check_out_attd_lie_locate" />
<result property="checkOutAttdAddress" column="check_out_attd_address" />
<result property="checkOutAttdLcError" column="check_out_attd_lc_error" />
<result property="checkOutAttdStatus" column="check_out_attd_status" />
<result property="checkOutAttdTime" column="check_out_attd_time" />
<result property="isDelete" column="is_delete" />
<result property="createTime" column="create_time" />
<result property="modifyTime" column="modify_time" />
</resultMap>
<sql id="Base_Column_List">
id,dept_name,qc_user_id,sex,user_name,`group`,
employee_code,check_in_attd_lie_locate,check_in_attd_address,check_in_attd_lc_error,check_in_attd_status,
check_in_attd_time,att_date,work_time,remarks,check_out_attd_lie_locate,
check_out_attd_address,check_out_attd_lc_error,check_out_attd_status,check_out_attd_time,is_delete,
create_time,modify_time
</sql>
</mapper>