
Merge remote-tracking branch 'origin/develop' into develop

maqingyang, 11 months ago
Parent commit 873eb6b72e
33 changed files with 620 additions and 103 deletions
  1. +12 -18   src/main/java/com/kexun/controller/BusinessController.java
  2. +7 -1   src/main/java/com/kexun/controller/CooperatorController.java
  3. +6 -0   src/main/java/com/kexun/controller/CooperatorProductController.java
  4. +78 -14   src/main/java/com/kexun/controller/ReportController.java
  5. +6 -0   src/main/java/com/kexun/entity/BusinessInfo.java
  6. +6 -3   src/main/java/com/kexun/entity/CustomInfo.java
  7. +2 -0   src/main/java/com/kexun/mapper/CooperatorProductMapper.java
  8. +8 -0   src/main/java/com/kexun/mapper/ReportInfoMapper.java
  9. +2 -0   src/main/java/com/kexun/service/CooperatorProductService.java
  10. +0 -1   src/main/java/com/kexun/service/CustomInfoService.java
  11. +2 -0   src/main/java/com/kexun/service/ReportFileService.java
  12. +6 -0   src/main/java/com/kexun/service/ReportInfoService.java
  13. +41 -17   src/main/java/com/kexun/service/atomic/BaseFieldAtomicService.java
  14. +9 -2   src/main/java/com/kexun/service/field/BaseFieldPackageJsonService.java
  15. +5 -0   src/main/java/com/kexun/service/impl/CooperatorProductServiceImpl.java
  16. +1 -5   src/main/java/com/kexun/service/impl/CustomInfoServiceImpl.java
  17. +13 -9   src/main/java/com/kexun/service/impl/FileServiceImpl.java
  18. +7 -0   src/main/java/com/kexun/service/impl/ReportFileServiceImpl.java
  19. +30 -8   src/main/java/com/kexun/service/impl/ReportInfoServiceImpl.java
  20. +14 -0   src/main/java/com/kexun/service/impl/XmlFileHandlerServiceImpl.java
  21. +0 -1   src/main/java/com/kexun/service/xml/ParseHandleService.java
  22. +1 -1   src/main/java/com/kexun/task/ReadXmlFileJobHandler.java
  23. +4 -2   src/main/java/com/kexun/task/ReportParseJobHandler.java
  24. +104 -0   src/main/resources/application-dev.yml
  25. +104 -0   src/main/resources/application-prod.yml
  26. +104 -0   src/main/resources/application-test.yml
  27. +11 -4   src/main/resources/application.yml
  28. +3 -0   src/main/resources/mapper/BusinessMapper.xml
  29. +4 -0   src/main/resources/mapper/CooperatorProductMapper.xml
  30. +8 -14   src/main/resources/mapper/CustomInfoMapper.xml
  31. +1 -0   src/main/resources/mapper/ReportFileMapper.xml
  32. +6 -0   src/main/resources/mapper/ReportInfoMapper.xml
  33. +15 -3   src/test/java/Test3And6.java

+ 12 - 18
src/main/java/com/kexun/controller/BusinessController.java

@@ -289,27 +289,21 @@ public class BusinessController {
 
         //下面两个方法还不知道
         List<CustomInfo> list2 = CustomInfoService.QueryCustomTelephone(headInfId);
-        List<CustomInfo> list3 = CustomInfoService.QueryCustomAddress(headInfId);
 
-        System.out.println(list3);
-        String telephonelist = "";
-        for (int i = 0; i < list2.size(); i++) {
-            CustomInfo customInfo = list2.get(i);
-            telephonelist += (customInfo.telephone);
-            telephonelist += ";  ";
-        }
+        if(list2!=null) {
+            String telephonelist = "";
+            for (int i = 0; i < list2.size(); i++) {
+                CustomInfo customInfo = list2.get(i);
+                telephonelist += (customInfo.telephone);
+                telephonelist += ";  ";
+            }
+            if(list1!=null) {
+                list1.telephone = telephonelist;
 
-        String addresslist = "";
-        for (int i = 0; i < list3.size(); i++) {
-            CustomInfo customInfo = list3.get(i);
-            addresslist += (customInfo.address);
-            addresslist += ";  ";
-        }
-        if(list1!=null) {
-            list1.telephone = telephonelist;
-            list1.address = addresslist;
-            System.out.println(list1);
+            }
         }
+
+
         resJson.put("custom_info", list1);
 
 
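Review note: the new null guards stop the NullPointerException when no telephone rows are returned, but the numbers are still built up with `+=` and end with a dangling "; " separator. A minimal alternative sketch, assuming `CustomInfo.telephone` stays a public field as in the hunk above (needs `java.util.Objects` and `java.util.stream.Collectors`):

    // Sketch only: same result as the loop above, without the trailing separator.
    if (list1 != null && list2 != null) {
        list1.telephone = list2.stream()
                .map(customInfo -> customInfo.telephone)
                .filter(Objects::nonNull)
                .collect(Collectors.joining("; "));
    }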

+ 7 - 1
src/main/java/com/kexun/controller/CooperatorController.java

@@ -96,6 +96,12 @@ public class CooperatorController {
     public Result cooperatorAdd(@RequestBody String json)
     {
         JSONObject req = JSON.parseObject(json);
+        String cooperator_name=req.getString("cooperator_name");
+        CooperatorInfo dupCooperatorInfo= CooperatorService.findByCooperatorNum(cooperator_name);
+        if (dupCooperatorInfo!=null){
+            return Result.error("合作方已存在!");
+        }
+
         CooperatorInfo cooperatorInfo = new CooperatorInfo();
         cooperatorInfo.setCooperatorName(req.getString("cooperator_name"));
         long time1 = System.currentTimeMillis();
@@ -129,7 +135,7 @@ public class CooperatorController {
         String oldTemplateId = cooperatorInfo.getTemplateId();
         String oldCooperatorNum = cooperatorInfo.getCooperatorNum();
 
-        if(oldTemplateId!=req.getString("fieldTemplateId")) {
+        if(oldTemplateId!=req.getString("fieldTemplateId") && req.getString("fieldTemplateId")!="") {
             cooperatorInfo.setTemplateId(req.getString("fieldTemplateId"));
             List<CooperatorProduct> CooperatorProductList= CooperatorProductService.findCooperatorProductByNum(oldCooperatorNum);
             for (int i=0;i<CooperatorProductList.size();i++) {
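Review note: in the hunk above, `oldTemplateId != req.getString("fieldTemplateId")` and `req.getString("fieldTemplateId") != ""` compare String references rather than values, so the branch can fire for an unchanged template id and the empty-string guard is unreliable. A minimal sketch of the intended check, reusing the same `req` and `cooperatorInfo` as above:

    // Sketch only: value comparison, treating null/empty as "no new template id".
    String newTemplateId = req.getString("fieldTemplateId");
    if (newTemplateId != null && !newTemplateId.isEmpty()
            && !newTemplateId.equals(oldTemplateId)) {
        cooperatorInfo.setTemplateId(newTemplateId);
        // ... the CooperatorProductList update loop continues as in the diff above
    }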

+ 6 - 0
src/main/java/com/kexun/controller/CooperatorProductController.java

@@ -125,6 +125,12 @@ public class CooperatorProductController {
     public Result cooperatorProductApply(@RequestBody String json)
     {
         JSONObject req = JSON.parseObject(json);
+        List<CooperatorProduct> dupCooperatorProductInfo=CooperatorProductService.getProductByProductNum(req.getString("product_num"));
+        if(!dupCooperatorProductInfo.isEmpty()){
+            return Result.error("合作方产品编号重复!");
+        }
+
+
         CooperatorProduct cooperatorProduct = new CooperatorProduct();
 
         cooperatorProduct.setProductName(req.getString("product_name"));

+ 78 - 14
src/main/java/com/kexun/controller/ReportController.java

@@ -43,12 +43,7 @@ public class ReportController {
 
         return Result.success("ok",resJson);
     }
-    @GetMapping("del")
-    public Result del(@RequestParam Long id) {
 
-        reportFileService.removeById(id);
-        return Result.success();
-    }
 
 
     @GetMapping("/download/{filename}")
@@ -74,8 +69,6 @@ public class ReportController {
     public Result downloadMod(@PathVariable("modname") String modname) throws IOException {
         String downFilePath = modelPath+ modname;
 
-
-
         File file = new File(downFilePath);
         if (file.exists()) {
 
@@ -104,18 +97,89 @@ public class ReportController {
 //        reportService.downloadMod(fileName);
 //        return Result.success();
 //    }
-
+    @PostMapping("del")
+    public Result del(@RequestBody String json) {
+        JSONObject req = JSON.parseObject(json);
+        String id = req.getString("id");
+        String file_name = req.getString("file_name");
+        String type = req.getString("type");
+
+        ReportFileEntity reportFileEntity = reportFileService.getById(id);
+
+        //判断文件是哪种
+        System.out.println("file_name is "+ file_name);
+        if(type.equals("1")) {
+            if (file_name.endsWith("xml")) {
+                reportFileEntity.setFileNameXml("");
+            } else if (file_name.endsWith("txt")) {
+                reportFileEntity.setFileNameTxt("");
+            }
+        }
+        else {
+            reportFileEntity.setFileNameModel("");
+        }
+        File file = new File(filePath+file_name);
+        file.delete();
+        reportFileService.saveOrUpdate(reportFileEntity);
+        return Result.success();
+    }
     //原来的逻辑是新增,2024.1.2之后改为修改 如果没有文件,改为新增,方法需要重写
+    //目前仍需改:若已有文件,需将原有数据删除
     @PostMapping("fileReportAdd")
     public Result fileReportAdd(@RequestBody String json)
     {
         JSONObject req = JSON.parseObject(json);
-        ReportFileEntity reportFileEntity = new ReportFileEntity();
-        reportFileEntity.setFileName(req.getString("file_name"));
-        reportFileEntity.setFileNameXml(req.getString("fileNameXml"));
-        reportFileEntity.setFileNameTxt(req.getString("fileNameTxt"));
-        reportFileEntity.setFileNameModel(req.getString("fileModel"));
-        reportFileService.save(reportFileEntity);
+        long headInfId=Long.valueOf(req.getString("headInfId")) ;
+        long businessId=Long.valueOf(req.getString("businessId")) ;
+        String s = req.getString("fileNameTxt");
+        System.out.println("chaciren");
+        System.out.println(headInfId);
+        ReportFileEntity reportFileEntity = reportFileService.findByHeadInfId(headInfId);
+        if(reportFileEntity==null) {
+//            System.out.println("chawuciren");
+//            System.out.println(headInfId);
+
+            reportFileEntity = new ReportFileEntity();
+            reportFileEntity.setHeadInfId(headInfId);
+            reportFileEntity.setRptNo(req.getString("rptNo"));
+            reportFileEntity.setBusinessId(businessId);
+            reportFileEntity.setBusinessNum(req.getString("businessNum"));
+            if(req.getString("fileNameXml")=="1"){
+                reportFileEntity.setFileNameXml(req.getString("file_name"));
+            } else if (req.getString("fileNameTxt")=="1") {
+                reportFileEntity.setFileNameTxt(req.getString("file_name"));
+            } else  {
+                reportFileEntity.setFileNameModel(req.getString("file_name"));
+            }
+        }
+        else {
+//            System.out.println("chayouciren");
+//            System.out.println(headInfId);
+            if(req.getString("fileNameXml").equals("1")){
+                String fileXml =  reportFileEntity.getFileNameXml();
+                if(fileXml!=null){
+                    File file = new File(filePath+fileXml);
+                    file.delete();
+                }
+                reportFileEntity.setFileNameXml(req.getString("file_name"));
+            } else if (req.getString("fileNameTxt").equals("1")) {
+                String fileTxt =  reportFileEntity.getFileNameTxt();
+                if(fileTxt!=null){
+                    File file = new File(filePath+fileTxt);
+                    file.delete();
+                }
+                reportFileEntity.setFileNameTxt(req.getString("file_name"));
+            } else  {
+                String fileModel =  reportFileEntity.getFileNameModel();
+                if(fileModel!=null){
+                    File file = new File(filePath+fileModel);
+                    file.delete();
+                }
+                reportFileEntity.setFileNameModel(req.getString("file_name"));
+            }
+        }
+
+        reportFileService.saveOrUpdate(reportFileEntity);
         return Result.success();
     }
 }
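Review note: the `del` endpoint ignores the boolean returned by `file.delete()` and assumes `getById(id)` finds a row, and in the `reportFileEntity == null` branch of `fileReportAdd` the flags are compared with `==` (`req.getString("fileNameXml") == "1"`), which tests reference identity; the else branch already uses `equals`. Two hypothetical helpers (not in this commit) that both paths could share:

    // Sketch only: null-safe flag test plus a checked delete of an old file.
    private boolean isFlagSet(JSONObject req, String key) {
        return "1".equals(req.getString(key));          // literal first, so a missing key is just false
    }

    private void deleteIfPresent(String dir, String fileName) {
        if (fileName == null || fileName.isEmpty()) {
            return;
        }
        File old = new File(dir + fileName);
        if (old.exists() && !old.delete()) {
            System.out.println("could not delete " + old.getAbsolutePath());
        }
    }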

+ 6 - 0
src/main/java/com/kexun/entity/BusinessInfo.java

@@ -20,6 +20,12 @@ public class BusinessInfo implements Serializable {
     @TableField("head_inf_id")
     private Long headInfId;
 
+    @TableField("rpt_no")
+    private String rptNo;
+
+    @TableField("business_id")
+    private Long businessId;
+
     @TableField(value = "business_num")
     private String businessNum;
 

+ 6 - 3
src/main/java/com/kexun/entity/CustomInfo.java

@@ -28,13 +28,16 @@ public class CustomInfo implements Serializable {
     @TableField(value = "id_num")
     private String idNum;
 
-    @TableField(value = "sex_code")
-    private String sexCode;
+    @TableField(value = "sex_desc")
+    private String sexDesc;
 
-    @TableField(value = " create_time")
+    @TableField(value = "create_time")
     private String createTime;
 
     public String telephone;
+
+    @TableField(value = "address")
+
     public String address;
 
     @TableField(exist = false)

+ 2 - 0
src/main/java/com/kexun/mapper/CooperatorProductMapper.java

@@ -26,6 +26,8 @@ public interface CooperatorProductMapper extends BaseMapper<CooperatorProduct> {
 
     List<CooperatorProduct> getCooperatorByModelQuery(@Param("modelNo") Long modelNo);
 
+    List<CooperatorProduct> getProductByProductNumQuery(@Param("productNum") String productNum);
+
     CooperatorProduct findProductByName(String product_name);
 
     List<CooperatorProduct> findCooperatorProductByNum(String cooperatorNum);

+ 8 - 0
src/main/java/com/kexun/mapper/ReportInfoMapper.java

@@ -3,6 +3,7 @@ package com.kexun.mapper;
 import com.kexun.entity.ReportInfo;
 import com.baomidou.mybatisplus.core.mapper.BaseMapper;
 import com.kexun.model.dto.ReportInfoDTO;
+import org.apache.ibatis.annotations.Param;
 
 /**
  * <p>
@@ -20,4 +21,11 @@ public interface ReportInfoMapper extends BaseMapper<ReportInfo> {
      */
     void updateReportInfo(ReportInfoDTO reportInfoDTO);
 
+    /**
+     * 修改报文信息状态
+     * @param status 状态
+     * @param id 主键
+     */
+    void updateReportInfoStatus(@Param("status") String status,@Param("id") Long id);
+
 }

+ 2 - 0
src/main/java/com/kexun/service/CooperatorProductService.java

@@ -24,6 +24,8 @@ public interface CooperatorProductService extends IService<CooperatorProduct> {
 
     List<CooperatorProduct> getCooperatorByModel(Long modelNo);
 
+    List<CooperatorProduct> getProductByProductNum(String productNum);
+
     List<CooperatorProduct> findCooperatorProductByNum(String cooperatorNum);
     CooperatorProduct findProductByName(String product_name);
 }

+ 0 - 1
src/main/java/com/kexun/service/CustomInfoService.java

@@ -13,6 +13,5 @@ public interface CustomInfoService extends IService<CustomInfo> {
 
     CustomInfo QueryCustomInfo(String headInfId);
     List<CustomInfo> QueryCustomTelephone(String headInfId);
-    List<CustomInfo> QueryCustomAddress(String headInfId);
 
 }

+ 2 - 0
src/main/java/com/kexun/service/ReportFileService.java

@@ -12,6 +12,8 @@ public interface ReportFileService extends IService<ReportFileEntity> {
 
 
     List<ReportFileEntity> QueryFileByNum(String businessNum);
+
+    ReportFileEntity findByHeadInfId(long headInfId);
 }
 
 

+ 6 - 0
src/main/java/com/kexun/service/ReportInfoService.java

@@ -30,6 +30,12 @@ public interface ReportInfoService extends IService<ReportInfo> {
      */
     void updateReportInfo(ReportInfoDTO reportInfoDTO);
 
+    /**
+     * 修改报文信息
+     * @param status
+     * @param id
+     */
+    void updateReportInfoStatus(String status,Long id);
 
     /**
      * 根据条件获取报文信息

+ 41 - 17
src/main/java/com/kexun/service/atomic/BaseFieldAtomicService.java

@@ -234,10 +234,13 @@ public class BaseFieldAtomicService {
                         .filter(e -> e.getEndMonth() != null &&
                                 DateUtils.getSpaceByUnit(DateUtils.convertStrToDate(e.getEndMonth(), DateUtils.YYYY_MM), currtDate, DateUtils.UNIT_MOUTHS) <= 12)
                         .collect(Collectors.toList());
-                BigDecimal last1YearHousingFundAvg = last1YearHousingList.stream()
+                BigDecimal last1YearHousingFund = last1YearHousingList.stream()
                         .map(e -> e.getMonthMoney() == null ? BigDecimal.ZERO : e.getMonthMoney())
-                        .reduce(BigDecimal.ZERO, BigDecimal::add)
-                        .divide(BigDecimal.valueOf(last1YearHousingList.size()), 2, BigDecimal.ROUND_HALF_UP);
+                        .reduce(BigDecimal.ZERO, BigDecimal::add);
+                BigDecimal last1YearHousingFundAvg = new BigDecimal("0.00");
+                if (last1YearHousingFund.compareTo(BigDecimal.ZERO) > 0) {
+                    last1YearHousingFundAvg = last1YearHousingFund.divide(BigDecimal.valueOf(last1YearHousingList.size()), 2, BigDecimal.ROUND_HALF_UP);
+                }
                 jsonResultMap.put(last1YearHousingFundAvgKey, last1YearHousingFundAvg);
                 //住房公积金-最近3年公积金平均值
                 String last3YearHousingFundAvgKey = fieldDefineCount.stream()
@@ -249,10 +252,14 @@ public class BaseFieldAtomicService {
                         .filter(e -> e.getEndMonth() != null &&
                                 DateUtils.getSpaceByUnit(DateUtils.convertStrToDate(e.getEndMonth(), DateUtils.YYYY_MM), currtDate, DateUtils.UNIT_MOUTHS) <= 36)
                         .collect(Collectors.toList());
-                BigDecimal last3YearHousingFundAvg = last3YearHousingList.stream()
+                BigDecimal last3YearHousingFund = last3YearHousingList.stream()
                         .map(e -> e.getMonthMoney() == null ? BigDecimal.ZERO : e.getMonthMoney())
-                        .reduce(BigDecimal.ZERO, BigDecimal::add)
-                        .divide(BigDecimal.valueOf(last3YearHousingList.size()), 2, BigDecimal.ROUND_HALF_UP);
+                        .reduce(BigDecimal.ZERO, BigDecimal::add);
+                BigDecimal last3YearHousingFundAvg = new BigDecimal("0.00");
+
+                if (last3YearHousingFund.compareTo(BigDecimal.ZERO) > 0) {
+                    last3YearHousingFundAvg = last3YearHousingFund.divide(BigDecimal.valueOf(last3YearHousingList.size()), 2, BigDecimal.ROUND_HALF_UP);
+                }
                 jsonResultMap.put(last3YearHousingFundAvgKey, last3YearHousingFundAvg);
 
                 List<Map> trsInfoList = new ArrayList<>();
@@ -1176,19 +1183,30 @@ public class BaseFieldAtomicService {
     public Map<String, String> relRepayInfoCalculate(JSONObject jsonObject, List<FieldDefineEntity> defineEntityList) {
         Map<String, String> jsonResultMap = new HashMap<>();
         SmryRepay smryRepay = jsonObject.toJavaObject(SmryRepay.class);
-        List<SmryRepayInfo> smryRepayInfoList = smryRepay.getSmryRepayInfoList();
+        List<SmryRepayInfo> smryRepayInfoList = Optional.ofNullable(smryRepay.getSmryRepayInfoList()).orElse(new ArrayList<SmryRepayInfo>());
         //按照借款人身份类别进行分组
-        Map<String, List<SmryRepayInfo>> repayInfoMap = smryRepayInfoList.stream().collect(Collectors.groupingBy(e -> e.getBoTypeCode()));
-
-        Map<String, List<FieldDefineEntity>> fieldDefineMap = defineEntityList.stream().collect(Collectors.groupingBy(e -> e.getParam()));
+        Map<String, List<SmryRepayInfo>> repayInfoMap = smryRepayInfoList.
+                stream()
+                .filter((e) -> e.getBoTypeCode() != null)
+                .collect(Collectors.groupingBy(e -> e.getBoTypeCode()));
+
+        Map<String, List<FieldDefineEntity>> fieldDefineMap = defineEntityList.stream()
+                .filter((e) -> e.getParam() != null)
+                .collect(Collectors.groupingBy(e -> e.getParam()));
         //个人
         List<FieldDefineEntity> perFieldDefineEntities = fieldDefineMap.get("1");
         //个人
         List<SmryRepayInfo> perSmryRepayInfos = repayInfoMap.get("1");
-        //按类别进行分组
-        Map<String, List<SmryRepayInfo>> perRespCodeMap = perSmryRepayInfos.stream().collect(Collectors.groupingBy(e -> e.getRelPayRespDesc()));
-        List<SmryRepayInfo> perGuaranteeList = perRespCodeMap.get("担保责任");
-        List<SmryRepayInfo> perOtherList = perRespCodeMap.get("其他相关还款责任");
+        List<SmryRepayInfo> perGuaranteeList = null;
+        List<SmryRepayInfo> perOtherList = null;
+        if (perSmryRepayInfos != null) {
+            //按类别进行分组
+            Map<String, List<SmryRepayInfo>> perRespCodeMap = perSmryRepayInfos.stream()
+                    .filter((e) -> e.getRelPayRespDesc() != null)
+                    .collect(Collectors.groupingBy(e -> e.getRelPayRespDesc()));
+            perGuaranteeList = perRespCodeMap.get("担保责任");
+            perOtherList = perRespCodeMap.get("其他相关还款责任");
+        }
         for (FieldDefineEntity perFieldDefineEntity : perFieldDefineEntities) {
             String path = perFieldDefineEntity.getPath();
             String name = perFieldDefineEntity.getName();
@@ -1207,9 +1225,15 @@ public class BaseFieldAtomicService {
         }
         //企业
         List<SmryRepayInfo> entSmryRepayInfos = repayInfoMap.get("2");
-        Map<String, List<SmryRepayInfo>> entRespCodeMap = entSmryRepayInfos.stream().collect(Collectors.groupingBy(e -> e.getRelPayRespDesc()));
-        List<SmryRepayInfo> entGuaranteeList = entRespCodeMap.get("担保责任");
-        List<SmryRepayInfo> entOtherList = entRespCodeMap.get("其他相关还款责任");
+        List<SmryRepayInfo> entGuaranteeList = null;
+        List<SmryRepayInfo> entOtherList = null;
+        if (entSmryRepayInfos != null) {
+            Map<String, List<SmryRepayInfo>> entRespCodeMap = entSmryRepayInfos.stream()
+                    .filter((e) -> e.getRelPayRespDesc() != null)
+                    .collect(Collectors.groupingBy(e -> e.getRelPayRespDesc()));
+            entGuaranteeList = entRespCodeMap.get("担保责任");
+            entOtherList = entRespCodeMap.get("其他相关还款责任");
+        }
         List<FieldDefineEntity> entFieldDefineEntities = fieldDefineMap.get("2");
         for (FieldDefineEntity entFieldDefineEntity : entFieldDefineEntities) {
             String path = entFieldDefineEntity.getPath();
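Review note: checking the summed amount before dividing does avoid the divide-by-zero on an empty month list, but it also skips the division for a non-empty list whose amounts sum to zero or less. Guarding on the list itself is more direct; a sketch for the 1-year block (the 3-year block is analogous; `RoundingMode` is from `java.math`):

    // Sketch only: average of the filtered months, 0.00 when there are none.
    BigDecimal last1YearHousingFundAvg = new BigDecimal("0.00");
    if (!last1YearHousingList.isEmpty()) {
        BigDecimal sum = last1YearHousingList.stream()
                .map(e -> e.getMonthMoney() == null ? BigDecimal.ZERO : e.getMonthMoney())
                .reduce(BigDecimal.ZERO, BigDecimal::add);
        last1YearHousingFundAvg = sum.divide(
                BigDecimal.valueOf(last1YearHousingList.size()), 2, RoundingMode.HALF_UP);
    }
    jsonResultMap.put(last1YearHousingFundAvgKey, last1YearHousingFundAvg);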

+ 9 - 2
src/main/java/com/kexun/service/field/BaseFieldPackageJsonService.java

@@ -94,7 +94,7 @@ public class BaseFieldPackageJsonService implements IBaseFieldPackageJsonService
                 map.putAll(prdDefinMap);
                 fieldJsonMap.put(jsonKey, map);
             } else {
-                subPrdJsonMap.put(jsonKey, infoMap);
+                subPrdJsonMap.put(jsonKey, prdDefinMap);
                 fieldJsonMap.putAll(subPrdJsonMap);
             }
             //节点类型是列表,需要统计各个字段,那么计算用同一个函数,一块进行计算,获取一个计算结果
@@ -125,7 +125,14 @@ public class BaseFieldPackageJsonService implements IBaseFieldPackageJsonService
             Object object = SpringContextUtil.getBean(formulaClass);
             Class<?> aClass = object.getClass();
             Method method = aClass.getMethod(formulaMethod, JSONObject.class, List.class);
-            Object invoke = method.invoke(object, jsonObject, fieldDefineEntities);
+            Object invoke = null;
+            try {
+                invoke = method.invoke(object, jsonObject, fieldDefineEntities);
+            }catch (Exception e){
+                e.printStackTrace();
+                log.error("调用衍生字段处理类={},方法为={}",formulaClass,formulaMethod);
+                throw new Exception(e.getMessage());
+            }
             handleBaseField(invoke,infoMap,jsonMap,jsonKey,subJsonMap,infoList);
             //计算和产品关联的信息
             if(fieldDefineEntityList!=null && fieldDefineEntityList.size()>0){
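Review note: the new try/catch keeps the job from dying silently, but a failure inside the formula method surfaces as an `InvocationTargetException` whose own `getMessage()` is usually null, so `throw new Exception(e.getMessage())` can rethrow an empty message, and the log line drops the stack trace. A sketch that logs and rethrows the underlying cause (`java.lang.reflect.InvocationTargetException`):

    Object invoke;
    try {
        invoke = method.invoke(object, jsonObject, fieldDefineEntities);
    } catch (InvocationTargetException e) {
        Throwable cause = e.getTargetException();   // the real failure thrown by the formula method
        log.error("调用衍生字段处理类={},方法为={}", formulaClass, formulaMethod, cause);
        throw new Exception(cause.getMessage(), cause);
    } catch (ReflectiveOperationException e) {      // IllegalAccessException etc.
        log.error("调用衍生字段处理类={},方法为={}", formulaClass, formulaMethod, e);
        throw e;
    }
    // handleBaseField(invoke, infoMap, jsonMap, jsonKey, subJsonMap, infoList); continues as before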

+ 5 - 0
src/main/java/com/kexun/service/impl/CooperatorProductServiceImpl.java

@@ -68,6 +68,11 @@ public class CooperatorProductServiceImpl extends ServiceImpl<CooperatorProductM
         return cooperatorProductMapper.getCooperatorByModelQuery(modelNo);
     }
 
+    @Override
+    public List<CooperatorProduct> getProductByProductNum(String productNum){
+        return cooperatorProductMapper.getProductByProductNumQuery(productNum);
+    }
+
     public List<CooperatorProduct> findCooperatorProductByNum(String cooperatorNum) {
 //        System.out.println("aaa");
         List<CooperatorProduct> l = cooperatorProductMapper.findCooperatorProductByNum(cooperatorNum);

+ 1 - 5
src/main/java/com/kexun/service/impl/CustomInfoServiceImpl.java

@@ -34,11 +34,7 @@ public class CustomInfoServiceImpl extends ServiceImpl<CustomInfoMapper, CustomI
         return l;
     }
 
-    @Override
-    public List<CustomInfo> QueryCustomAddress(String headInfId){
-        List<CustomInfo> l = customInfoMapper.queryCustomAddress(headInfId);
-        return l;
-    }
+
 
 
 }

+ 13 - 9
src/main/java/com/kexun/service/impl/FileServiceImpl.java

@@ -32,15 +32,21 @@ public class FileServiceImpl {
      */
     @Value("${report.file.jsonTargetPath}")
     private String jsonTargetPath;
+    /**
+     * 解析出来的json文件
+     */
+    @Value("${report.file.xmlTargetPath}")
+    private String xmlTargetPath;
 
     /**
      * 上传文件
      * @param context
      * @param filename
+     * @param path
      */
-    private void writeFile(String context, String filename) {
+    private void writeFile(String context, String filename,String path) {
         try {
-            IOUtils.write(context, new FileOutputStream(jsonTargetPath + filename), "UTF-8");
+            IOUtils.write(context, new FileOutputStream(path + filename), "UTF-8");
         } catch (IOException e) {
             e.printStackTrace();
         }
@@ -55,8 +61,7 @@ public class FileServiceImpl {
      */
     public void saveAndWriteFile(ReportInfo reportInfo,String xmlContext,String jsonContext,String fileName){
         ReportFileEntity reportFileEntity = new ReportFileEntity();
-        reportFileEntity.setFileNameXml(fileName+".xml");
-        reportFileEntity.setHeadInfId(reportInfo.getId());
+        reportFileEntity.setHeadInfId(reportInfo.getHeadInfId());
         reportFileEntity.setRptNo(reportInfo.getRptNo());
         reportFileEntity.setBusinessId(reportInfo.getBusinessId());
         reportFileEntity.setBusinessNum(reportInfo.getBusinessNum());
@@ -65,11 +70,10 @@ public class FileServiceImpl {
         //保存文件信息表
         reportFileService.saveOrUpdate(reportFileEntity);
         //上传文件到服务器的目录
-        writeFile(xmlContext,fileName+".xml");
-        writeFile(jsonContext,fileName+".txt");
+        //xml文件
+        writeFile(xmlContext,fileName+".xml",xmlTargetPath);
+        //json文件
+        writeFile(jsonContext,fileName+".txt",jsonTargetPath);
     }
 
-
-
-
 }
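Review note: `writeFile` still wraps the target in a bare `new FileOutputStream(...)` that is never closed, so every parsed report leaks a file handle until garbage collection. A sketch of the same method with try-with-resources (no imports beyond what the class already has):

    private void writeFile(String context, String filename, String path) {
        try (FileOutputStream out = new FileOutputStream(path + filename)) {
            IOUtils.write(context, out, "UTF-8");   // stream is closed even if the write fails
        } catch (IOException e) {
            e.printStackTrace();
        }
    }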

+ 7 - 0
src/main/java/com/kexun/service/impl/ReportFileServiceImpl.java

@@ -1,7 +1,9 @@
 package com.kexun.service.impl;
 
+import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
 import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;
 import com.kexun.entity.BusinessInfo;
+import com.kexun.entity.CooperatorInfo;
 import com.kexun.entity.ReportFileEntity;
 import com.kexun.mapper.BusinessMapper;
 import com.kexun.mapper.ReportFileMapper;
@@ -26,6 +28,11 @@ public class ReportFileServiceImpl extends ServiceImpl<ReportFileMapper, ReportF
         return l;
     }
 
+    @Override
+    public ReportFileEntity findByHeadInfId(long headInfId) {
+        QueryWrapper<ReportFileEntity> wrapper=new QueryWrapper<ReportFileEntity>().eq("head_inf_id",headInfId);
+        return getOne(wrapper);
+    }
 }
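Review note: MyBatis-Plus `getOne(wrapper)` expects at most one row and throws once two `report_file` rows share the same `head_inf_id`, which data written by the old insert-only `fileReportAdd` can contain. A defensive sketch:

    @Override
    public ReportFileEntity findByHeadInfId(long headInfId) {
        QueryWrapper<ReportFileEntity> wrapper = new QueryWrapper<ReportFileEntity>()
                .eq("head_inf_id", headInfId)
                .orderByDesc("id");
        return getOne(wrapper, false);   // throwEx=false: take the newest match instead of failing
    }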
 
 

+ 30 - 8
src/main/java/com/kexun/service/impl/ReportInfoServiceImpl.java

@@ -9,6 +9,7 @@ import com.kexun.model.dto.ReportInfoDTO;
 import com.kexun.model.ro.ReportInfoRO;
 import com.kexun.service.ReportInfoService;
 import lombok.extern.slf4j.Slf4j;
+import org.omg.CORBA.IRObject;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.stereotype.Service;
 import org.springframework.transaction.annotation.Transactional;
@@ -29,34 +30,54 @@ public class ReportInfoServiceImpl extends ServiceImpl<ReportInfoMapper, ReportI
 
     @Autowired
     private ReportInfoMapper reportInfoMapper;
+
     /**
      * 保存报文信息
+     *
      * @param reportInfo
      */
     @Override
     @Transactional(rollbackFor = Exception.class)
-    public void saveReportInfo(ReportInfo reportInfo){
-        super.save(reportInfo);
+    public void saveReportInfo(ReportInfo reportInfo) {
+        //根据business_id查询是否存在,存在的话,执行update语句
+        QueryWrapper queryWrapper = new QueryWrapper();
+        queryWrapper.eq("business_id", reportInfo.getBusinessId());
+        ReportInfo reportInfoResult = super.getOne(queryWrapper);
+        if (reportInfoResult != null) {
+            reportInfo.setId(reportInfoResult.getId());
+        }
+        super.saveOrUpdate(reportInfo);
     }
 
 
     @Override
     @Transactional(rollbackFor = Exception.class)
-    public void updateReportInfo(ReportInfoDTO reportInfoDTO){
+    public void updateReportInfo(ReportInfoDTO reportInfoDTO) {
         reportInfoMapper.updateReportInfo(reportInfoDTO);
     }
 
+    /**
+     * 修改状态
+     * @param status
+     * @param id
+     */
+    @Override
+    public void updateReportInfoStatus(String status, Long id) {
+        reportInfoMapper.updateReportInfoStatus(status,id);
+    }
+
     /**
      * 按条件查询报文信息
+     *
      * @param reportInfoDTO
      * @return
      */
     @Override
     public List<ReportInfo> listReportInfo(ReportInfoDTO reportInfoDTO) {
         QueryWrapper queryWrapper = new QueryWrapper();
-        if (reportInfoDTO.getStatus()!=null){
-            queryWrapper.eq("status",reportInfoDTO.getStatus());
-        }else {
+        if (reportInfoDTO.getStatus() != null) {
+            queryWrapper.eq("status", reportInfoDTO.getStatus());
+        } else {
             queryWrapper.eq("status", ReportStatusEnum.NO_PRASE.getValue());
         }
         List<ReportInfo> list = reportInfoMapper.selectList(queryWrapper);
@@ -65,11 +86,12 @@ public class ReportInfoServiceImpl extends ServiceImpl<ReportInfoMapper, ReportI
 
     /**
      * 根据主键查询报文信息
+     *
      * @param id
      * @return
      */
-    public ReportInfo getReportInfo(Long id){
-       return reportInfoMapper.selectById(id);
+    public ReportInfo getReportInfo(Long id) {
+        return reportInfoMapper.selectById(id);
     }
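Review note: the upsert keyed on `business_id` makes sense, though the raw `QueryWrapper` drops type checking, `getOne` on it will throw if two report_info rows ever share a business_id, and the added `org.omg.CORBA.IRObject` import is unused. A typed sketch:

    @Override
    @Transactional(rollbackFor = Exception.class)
    public void saveReportInfo(ReportInfo reportInfo) {
        // Reuse the existing row for this business_id, if any.
        QueryWrapper<ReportInfo> queryWrapper = new QueryWrapper<ReportInfo>()
                .eq("business_id", reportInfo.getBusinessId());
        ReportInfo existing = super.getOne(queryWrapper, false);   // tolerate duplicates
        if (existing != null) {
            reportInfo.setId(existing.getId());
        }
        super.saveOrUpdate(reportInfo);
    }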
 
 

+ 14 - 0
src/main/java/com/kexun/service/impl/XmlFileHandlerServiceImpl.java

@@ -9,14 +9,17 @@ import com.kexun.model.Request;
 import com.kexun.service.ReportInfoService;
 import com.kexun.service.XmlFileHandlerService;
 import lombok.extern.log4j.Log4j;
+import org.apache.commons.io.FileUtils;
 import org.dom4j.*;
 import org.dom4j.io.SAXReader;
+import org.omg.CORBA.IRObject;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.beans.factory.annotation.Value;
 import org.springframework.stereotype.Service;
 import org.springframework.util.StringUtils;
 
 import java.io.File;
+import java.io.IOException;
 import java.util.*;
 
 /**
@@ -32,6 +35,9 @@ public class XmlFileHandlerServiceImpl implements XmlFileHandlerService {
     @Value("${report.file.xmlSourcePath}")
     private String xmlSourcePath;
 
+    @Value("${report.file.xmlBackPath}")
+    private String xmlBackPath;
+
     @Autowired
     ReportInfoService reportInfoService;
 
@@ -48,6 +54,7 @@ public class XmlFileHandlerServiceImpl implements XmlFileHandlerService {
                 // 如果是目录,获取该目录下的内容集合
                 File[] fileLists = files.listFiles();
                 if (fileLists != null) {
+                    //创建目录文件路径
                     for (File fileList : fileLists) {
                         //只处理xml文件
                         if (fileList.isFile() && fileList.getName().endsWith(".xml")) {
@@ -62,6 +69,13 @@ public class XmlFileHandlerServiceImpl implements XmlFileHandlerService {
                             log.info("json串:" + xmlJson);
                             reportStruct = JSON.parseObject(xmlJson, ReportStruct.class);
                             saveReportInfo(reportStruct);
+                            //将该目录下的文件进行移动,移动到指定的备份目录
+                            File targetFile = new File(xmlBackPath+name);
+                            try {
+                                FileUtils.moveFile(file,targetFile);
+                            } catch (IOException e) {
+                                e.printStackTrace();
+                            }
                         }
                     }
                 } else {
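Review note: the backup move uses `file` and `name` from context not shown in this hunk; whatever they resolve to, `FileUtils.moveFile` throws `FileExistsException` when a backup with the same name is already in `xmlBackPath`, leaving the source in place to be re-read on the next run. A sketch using the loop's `fileList` that tolerates a stale backup:

    // Sketch only: move the processed xml into the backup directory, replacing any stale copy.
    File backupDir = new File(xmlBackPath);
    File targetFile = new File(backupDir, fileList.getName());
    try {
        FileUtils.forceMkdir(backupDir);          // make sure the backup directory exists
        if (targetFile.exists()) {
            FileUtils.forceDelete(targetFile);    // moveFile refuses to overwrite an existing target
        }
        FileUtils.moveFile(fileList, targetFile);
    } catch (IOException e) {
        log.error("备份xml文件失败:" + fileList.getName(), e);
    }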

+ 0 - 1
src/main/java/com/kexun/service/xml/ParseHandleService.java

@@ -117,7 +117,6 @@ public class ParseHandleService {
                                 factoryBean.getSpecService().handleXmlSpec(commonParam, jsonBean, iBaseEntity);
                             }
                             //先查询,根据rpt_no查询是否存在
-
                             QueryWrapper queryWrapper = new QueryWrapper();
                             queryWrapper.eq("rpt_no", commonParam.getRptNo());
                             //根据rpt_no查询是否存在,如果存在则更新

+ 1 - 1
src/main/java/com/kexun/task/ReadXmlFileJobHandler.java

@@ -44,7 +44,7 @@ public class ReadXmlFileJobHandler {
     public void readXmlFile() throws Exception {
         log.info("定时任务1开始读取文件");
         //读取xml文件
-        ReportStruct reportStruct = xmlFileHandlerService.readXmlFile();
+        xmlFileHandlerService.readXmlFile();
 
     }
 }

+ 4 - 2
src/main/java/com/kexun/task/ReportParseJobHandler.java

@@ -57,6 +57,7 @@ public class ReportParseJobHandler {
         ReportInfoDTO reportInfoQuery = new ReportInfoDTO();
         reportInfoQuery.setStatus(ReportStatusEnum.NO_PRASE.getValue());
         List<ReportInfo> reportInfos = reportInfoService.listReportInfo(reportInfoQuery);
+
         for (ReportInfo reportInfo : reportInfos) {
             //body报文体是Base64位,先解码 再解压
             String body = reportInfo.getBody();
@@ -83,6 +84,8 @@ public class ReportParseJobHandler {
                     //解析xml文件
                     fileName = fileName.substring(0, fileName.indexOf("."));
                     ReportParseDTO reportParseDTO = parseHandleService.parseHandle(reportInfo,xmlContext);
+                    reportInfo.setHeadInfId(reportParseDTO.getHeadInfId());
+                    reportInfo.setRptNo(reportParseDTO.getRptNo());
                     //保存xml文件和解析文件
                     fileService.saveAndWriteFile(reportInfo,xmlContext,reportParseDTO.getPrdJson(),fileName);
                     //修改解析报文信息的状态
@@ -95,11 +98,10 @@ public class ReportParseJobHandler {
                 } catch (Exception e) {
                     e.printStackTrace();
                     //解析失败,状态修改成失败
-                    reportInfo.setStatus(ReportStatusEnum.QUERY_FAIL.getValue());
+                    reportInfoService.updateReportInfoStatus(ReportStatusEnum.QUERY_FAIL.getValue(),reportInfo.getId());
                     log.error("解析出现异常,异常信息如下:{}", e.getStackTrace());
                     new RRException("解析出现异常,异常信息:" + e.getMessage(), -1);
                 }
-
             } else {
                 log.info("获取到的解析内容为空,不做处理");
             }

+ 104 - 0
src/main/resources/application-dev.yml

@@ -0,0 +1,104 @@
+#生产环境
+server:
+  port: 8888
+spring:
+  ####redis
+  redis:
+    database: 0
+    host: 127.0.0.1
+    jedis:
+      pool:
+        max-active: 8
+        max-idle: 8
+        max-wait: -1ms
+        min-idle: 0
+#    password: 123456
+    port: 6379
+    timeout: 15000ms
+  datasource:
+    driver-class-name: com.mysql.jdbc.Driver
+    url: jdbc:mysql://39.101.189.130:33306/cr_loan?useUnicode=true&characterEncoding=utf8&serverTimezone=GMT%2B8&useSSL=false&allowPublicKeyRetrieval=true
+    type: com.alibaba.druid.pool.DruidDataSource
+    username: credit
+    password: 1q2w3e4r
+    # 初始化大小,最小,最大
+    initialSize: 5
+    minIdle: 5
+    maxActive: 20
+    # 配置获取连接等待超时的时间(毫秒)
+    maxWait: 60000
+    # 配置间隔多久才进行一次检测,检测需要关闭的空闲连接,单位是毫秒
+    timeBetweenEvictionRunsMillis: 60000
+    # 配置有一个连接在连接池中的最小生存时间,单位是毫秒
+    minEvictableIdleTimeMillis: 300000
+    validationQuery: SELECT 1 FROM DUAL
+    testWhileIdle: true
+    testOnBorrow: false
+    testOnReturn: false
+    # 打开PSCache,指定每个连接上PSCache的大小
+    poolPreparedStatements: true
+    maxPoolPreparedStatementPerConnectionSize: 20
+    # 配置监控统计拦截的filters,去掉后监控界面sql将无法统计,'wall'用于防火墙
+    filters: stat, wall, log4j
+    # 通过connectProperties属性来打开mergeSql功能,慢SQL记录
+    connectionProperties: druid.stat.mergeSql=true;druid.stat.slowSqlMillis=5000
+###MybatisPlus
+mybatis-plus:
+  mapper-locations: classpath:mapper/*.xml
+  configuration:
+    log-impl: org.apache.ibatis.logging.stdout.StdOutImpl
+    map-underscore-to-camel-case: true
+  type-aliases-package: com.kexun.entity
+  type-handlers-package: com.kexun.typehandler
+
+##日志配置
+logging:
+  config: classpath:log4j.properties
+  level:
+    com.baomidou.mybatisplus.core.mapper: info
+
+config:
+  fileupload:
+    path: "/home/credit/file/"
+    warpath: "/home/credit/file/"
+
+report:
+  file:
+    # xml文件来源路径
+    #xmlSourcePath: "/home/credit/file/zxjx/"
+    xmlSourcePath: "/Users/lgs/work/xml/"
+    # xml文件的备份路径
+    #xmlBackPath: "/home/credit/file/back/"
+    xmlBackPath: "/Users/lgs/work/back/"
+    # 解析出来的json文件
+    #jsonTargetPath: "/home/credit/file/json/"
+    jsonTargetPath: "/Users/lgs/work/json/"
+    # 解析出来的xml文件
+    xmlTargetPath: "/Users/lgs/work/xml/"
+    # 存放的model文件
+    #modelTargetPath: "/home/credit/file/model/"
+    modelTargetPath: "/Users/lgs/work/model/"
+
+xxl:
+  job:
+    admin:
+      addresses: http://39.101.189.130:8080/xxl-job-admin/
+      ### 执行器AppName [选填]:执行器心跳注册分组依据;为空则关闭自动注册
+    executor:
+      appname: admin-api
+      ### 执行器注册 [选填]:优先使用该配置作为注册地址,为空时使用内嵌服务 ”IP:PORT“ 作为注册地址。从而更灵活的支持容器类型执行器动态IP和动态映射端口问题。
+      address:
+      ### 执行器IP [选填]:默认为空表示自动获取IP,多网卡时可手动设置指定IP,该IP不会绑定Host仅作为通讯实用;地址信息用于 "执行器注册" 和 "调度中心请求并触发任务";
+      ip:
+        ### 执行器端口号 [选填]:小于等于0则自动获取;默认端口为9999,单机部署多个执行器时,注意要配置不同执行器端口;
+      port: 9999
+        ### 执行器运行日志文件存储磁盘路径 [选填] :需要对该路径拥有读写权限;为空则使用默认路径;
+      logpath: /home/credit/logs/xxl-job/
+        ### 执行器日志保存天数 [选填] :值大于3时生效,启用执行器Log文件定期清理功能,否则不生效;
+      logretentiondays: 7
+      ### 执行器通讯TOKEN [选填]:非空时启用;
+    accessToken: default_token
+
+
+
+

+ 104 - 0
src/main/resources/application-prod.yml

@@ -0,0 +1,104 @@
+#生产环境
+server:
+  port: 8888
+spring:
+  ####redis
+  redis:
+    database: 0
+    host: 127.0.0.1
+    jedis:
+      pool:
+        max-active: 8
+        max-idle: 8
+        max-wait: -1ms
+        min-idle: 0
+    #    password: 123456
+    port: 6379
+    timeout: 15000ms
+  datasource:
+    driver-class-name: com.mysql.jdbc.Driver
+    url: jdbc:mysql://39.101.189.130:33306/cr_loan?useUnicode=true&characterEncoding=utf8&serverTimezone=GMT%2B8&useSSL=false&allowPublicKeyRetrieval=true
+    type: com.alibaba.druid.pool.DruidDataSource
+    username: credit
+    password: 1q2w3e4r
+    # 初始化大小,最小,最大
+    initialSize: 5
+    minIdle: 5
+    maxActive: 20
+    # 配置获取连接等待超时的时间(毫秒)
+    maxWait: 60000
+    # 配置间隔多久才进行一次检测,检测需要关闭的空闲连接,单位是毫秒
+    timeBetweenEvictionRunsMillis: 60000
+    # 配置有一个连接在连接池中的最小生存时间,单位是毫秒
+    minEvictableIdleTimeMillis: 300000
+    validationQuery: SELECT 1 FROM DUAL
+    testWhileIdle: true
+    testOnBorrow: false
+    testOnReturn: false
+    # 打开PSCache,指定每个连接上PSCache的大小
+    poolPreparedStatements: true
+    maxPoolPreparedStatementPerConnectionSize: 20
+    # 配置监控统计拦截的filters,去掉后监控界面sql将无法统计,'wall'用于防火墙
+    filters: stat, wall, log4j
+    # 通过connectProperties属性来打开mergeSql功能,慢SQL记录
+    connectionProperties: druid.stat.mergeSql=true;druid.stat.slowSqlMillis=5000
+###MybatisPlus
+mybatis-plus:
+  mapper-locations: classpath:mapper/*.xml
+  configuration:
+    log-impl: org.apache.ibatis.logging.stdout.StdOutImpl
+    map-underscore-to-camel-case: true
+  type-aliases-package: com.kexun.entity
+  type-handlers-package: com.kexun.typehandler
+
+##日志配置
+logging:
+  config: classpath:log4j.properties
+  level:
+    com.baomidou.mybatisplus.core.mapper: info
+
+config:
+  fileupload:
+    path: "/home/credit/file/"
+    warpath: "/home/credit/file/"
+
+report:
+  file:
+    # xml文件来源路径
+    xmlSourcePath: "/home/credit/file/zxjx/"
+    #xmlSourcePath: "/Users/lgs/work/xml/"
+    # xml文件的备份路径
+    xmlBackPath: "/home/credit/file/back/"
+    #xmlBackPath: "/Users/lgs/work/back/"
+    # 解析出来的json文件
+    jsonTargetPath: "/home/credit/file/json/"
+    #jsonTargetPath: "/Users/lgs/work/json/"
+    # 解析出来的xml文件
+    xmlTargetPath: "/home/credit/file/xml/"
+    # 存放的model文件
+    modelTargetPath: "/home/credit/file/model/"
+    #modelTargetPath: "/Users/lgs/work/model/"
+
+xxl:
+  job:
+    admin:
+      addresses: http://39.101.189.130:8080/xxl-job-admin/
+      ### 执行器AppName [选填]:执行器心跳注册分组依据;为空则关闭自动注册
+    executor:
+      appname: admin-api
+      ### 执行器注册 [选填]:优先使用该配置作为注册地址,为空时使用内嵌服务 ”IP:PORT“ 作为注册地址。从而更灵活的支持容器类型执行器动态IP和动态映射端口问题。
+      address:
+      ### 执行器IP [选填]:默认为空表示自动获取IP,多网卡时可手动设置指定IP,该IP不会绑定Host仅作为通讯实用;地址信息用于 "执行器注册" 和 "调度中心请求并触发任务";
+      ip:
+      ### 执行器端口号 [选填]:小于等于0则自动获取;默认端口为9999,单机部署多个执行器时,注意要配置不同执行器端口;
+      port: 9999
+      ### 执行器运行日志文件存储磁盘路径 [选填] :需要对该路径拥有读写权限;为空则使用默认路径;
+      logpath: /home/credit/logs/xxl-job/
+      ### 执行器日志保存天数 [选填] :值大于3时生效,启用执行器Log文件定期清理功能,否则不生效;
+      logretentiondays: 7
+      ### 执行器通讯TOKEN [选填]:非空时启用;
+    accessToken: default_token
+
+
+
+

+ 104 - 0
src/main/resources/application-test.yml

@@ -0,0 +1,104 @@
+#生产环境
+server:
+  port: 8888
+spring:
+  ####redis
+  redis:
+    database: 0
+    host: 127.0.0.1
+    jedis:
+      pool:
+        max-active: 8
+        max-idle: 8
+        max-wait: -1ms
+        min-idle: 0
+    #    password: 123456
+    port: 6379
+    timeout: 15000ms
+  datasource:
+    driver-class-name: com.mysql.jdbc.Driver
+    url: jdbc:mysql://39.101.189.130:33306/cr_loan?useUnicode=true&characterEncoding=utf8&serverTimezone=GMT%2B8&useSSL=false&allowPublicKeyRetrieval=true
+    type: com.alibaba.druid.pool.DruidDataSource
+    username: credit
+    password: 1q2w3e4r
+    # 初始化大小,最小,最大
+    initialSize: 5
+    minIdle: 5
+    maxActive: 20
+    # 配置获取连接等待超时的时间(毫秒)
+    maxWait: 60000
+    # 配置间隔多久才进行一次检测,检测需要关闭的空闲连接,单位是毫秒
+    timeBetweenEvictionRunsMillis: 60000
+    # 配置有一个连接在连接池中的最小生存时间,单位是毫秒
+    minEvictableIdleTimeMillis: 300000
+    validationQuery: SELECT 1 FROM DUAL
+    testWhileIdle: true
+    testOnBorrow: false
+    testOnReturn: false
+    # 打开PSCache,指定每个连接上PSCache的大小
+    poolPreparedStatements: true
+    maxPoolPreparedStatementPerConnectionSize: 20
+    # 配置监控统计拦截的filters,去掉后监控界面sql将无法统计,'wall'用于防火墙
+    filters: stat, wall, log4j
+    # 通过connectProperties属性来打开mergeSql功能,慢SQL记录
+    connectionProperties: druid.stat.mergeSql=true;druid.stat.slowSqlMillis=5000
+###MybatisPlus
+mybatis-plus:
+  mapper-locations: classpath:mapper/*.xml
+  configuration:
+    #log-impl: org.apache.ibatis.logging.stdout.StdOutImpl
+    map-underscore-to-camel-case: true
+  type-aliases-package: com.kexun.entity
+  type-handlers-package: com.kexun.typehandler
+
+##日志配置
+logging:
+  config: classpath:log4j.properties
+  level:
+    com.baomidou.mybatisplus.core.mapper: info
+
+config:
+  fileupload:
+    path: "/home/credit/file/"
+    warpath: "/home/credit/file/"
+
+report:
+  file:
+    # xml文件来源路径
+    xmlSourcePath: "/home/credit/file/zxjx/"
+    #xmlSourcePath: "/Users/lgs/work/xml/"
+    # xml文件的备份路径
+    xmlBackPath: "/home/credit/file/back/"
+    #xmlBackPath: "/Users/lgs/work/back/"
+    # 解析出来的json文件
+    jsonTargetPath: "/home/credit/file/json/"
+    #jsonTargetPath: "/Users/lgs/work/json/"
+    # 解析出来的xml文件
+    xmlTargetPath: "/home/credit/file/xml/"
+    # 存放的model文件
+    modelTargetPath: "/home/credit/file/model/"
+    #modelTargetPath: "/Users/lgs/work/model/"
+
+xxl:
+  job:
+    admin:
+      addresses: http://39.101.189.130:8080/xxl-job-admin/
+      ### 执行器AppName [选填]:执行器心跳注册分组依据;为空则关闭自动注册
+    executor:
+      appname: admin-api
+      ### 执行器注册 [选填]:优先使用该配置作为注册地址,为空时使用内嵌服务 ”IP:PORT“ 作为注册地址。从而更灵活的支持容器类型执行器动态IP和动态映射端口问题。
+      address:
+      ### 执行器IP [选填]:默认为空表示自动获取IP,多网卡时可手动设置指定IP,该IP不会绑定Host仅作为通讯实用;地址信息用于 "执行器注册" 和 "调度中心请求并触发任务";
+      ip:
+      ### 执行器端口号 [选填]:小于等于0则自动获取;默认端口为9999,单机部署多个执行器时,注意要配置不同执行器端口;
+      port: 9999
+      ### 执行器运行日志文件存储磁盘路径 [选填] :需要对该路径拥有读写权限;为空则使用默认路径;
+      logpath: /home/credit/logs/xxl-job/
+      ### 执行器日志保存天数 [选填] :值大于3时生效,启用执行器Log文件定期清理功能,否则不生效;
+      logretentiondays: 7
+      ### 执行器通讯TOKEN [选填]:非空时启用;
+    accessToken: default_token
+
+
+
+

+ 11 - 4
src/main/resources/application.yml

@@ -1,9 +1,11 @@
-
 #生产环境
 server:
   port: 8888
 
 spring:
+  profiles:
+    #dev:开发环境、test:测试环境、prod:生产环境
+    active: dev
   servlet:
     multipart:
       max-file-size: 50MB
@@ -81,10 +83,15 @@ config:
 report:
   file:
     # xml文件来源路径
-    #xmlSourcePath: "/home/credit/file/zxjx/"
-    xmlSourcePath: "/Users/lgs/work/xml/"
+    xmlSourcePath: "/home/credit/file/zxjx/"
+    #xmlSourcePath: "/Users/lgs/work/xml/"
+    # xml文件的备份路径
+    xmlBackPath: "/home/credit/file/back/"
+    #xmlBackPath: "/Users/lgs/work/back/"
     # 解析出来的json文件
-    jsonTargetPath: "/home/credit/file/json/"
+    jsonTargetPath: "/home/credit/file/txt/"
+    # 解析出来的xml文件
+    xmlTargetPath: "/home/credit/file/xml/"
     #jsonTargetPath: "/Users/lgs/work/json/"
     # 存放的model文件
     modelTargetPath: "/home/credit/file/model/"
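Note: with `spring.profiles.active: dev` set here, the `report.file.*` paths above are overridden per environment by `application-dev.yml`, `application-test.yml` and `application-prod.yml` (this file points `jsonTargetPath` at `/home/credit/file/txt/` while the profile files use `/home/credit/file/json/`). These keys are read through individual `@Value` fields (see `FileServiceImpl` and `XmlFileHandlerServiceImpl` above); a hypothetical alternative, not part of this commit, would bind them once with `@ConfigurationProperties` (Lombok is already on the classpath):

    // Hypothetical binding class for the report.file.* keys; relaxed binding maps the yml keys onto these fields.
    @lombok.Data
    @org.springframework.stereotype.Component
    @org.springframework.boot.context.properties.ConfigurationProperties(prefix = "report.file")
    public class ReportFileProperties {
        private String xmlSourcePath;    // report.file.xmlSourcePath
        private String xmlBackPath;      // report.file.xmlBackPath
        private String jsonTargetPath;   // report.file.jsonTargetPath
        private String xmlTargetPath;    // report.file.xmlTargetPath
        private String modelTargetPath;  // report.file.modelTargetPath
    }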

+ 3 - 0
src/main/resources/mapper/BusinessMapper.xml

@@ -45,6 +45,9 @@
         SELECT DISTINCT
             r.id,
             r.head_inf_id,
+            r.rpt_no,
+            r.business_id,
+
             r.business_num,
             r.customer_name,
             r.create_time,

+ 4 - 0
src/main/resources/mapper/CooperatorProductMapper.xml

@@ -121,5 +121,9 @@
         WHERE model_id=#{modelNo}
     </select>
 
+    <select id="getProductByProductNumQuery" resultType="com.kexun.entity.CooperatorProduct">
+        SELECT * FROM prd_product
+        WHERE product_num=#{productNum}
+    </select>
 
 </mapper>

+ 8 - 14
src/main/resources/mapper/CustomInfoMapper.xml

@@ -26,32 +26,26 @@
             cr_base_inf.`name`,
             cr_base_inf.id_type_code,
             cr_base_inf.id_num,
-            cr_base_inf.sex_code
+            cr_base_inf.sex_desc,
+            cr_base_inf.address
+
         FROM
             cr_base_inf
         WHERE
-            cr_base_inf.head_inf_id = #{headInfId};
+            cr_base_inf.head_inf_id = #{headInfId}
+            ;
     </select>
 
     <select id="queryCustomTelephone" resultType="com.kexun.entity.CustomInfo">
         SELECT DISTINCT
             cr_base_phone.telephone
         FROM
-            cr_base_inf
-                LEFT JOIN  cr_base_phone on cr_base_inf.rpt_no = cr_base_phone.rpt_no
+            cr_base_phone
         WHERE
-            cr_base_inf.head_inf_id = #{headInfId};
+            cr_base_phone.head_inf_id = #{headInfId};
     </select>
 
-    <select id="queryCustomAddress" resultType="com.kexun.entity.CustomInfo">
-        SELECT DISTINCT
-            cr_base_residence.address
-        FROM
-            cr_base_inf
-                LEFT JOIN  cr_base_residence on cr_base_inf.rpt_no = cr_base_residence.rpt_no
-        WHERE
-            cr_base_inf.head_inf_id = #{headInfId};
-    </select>
+
 
 
 

+ 1 - 0
src/main/resources/mapper/ReportFileMapper.xml

@@ -31,6 +31,7 @@
 
         WHERE
             report_file.business_num = #{businessNum}
+        and report_file.is_deleted=0
     </select>
 
 

+ 6 - 0
src/main/resources/mapper/ReportInfoMapper.xml

@@ -38,4 +38,10 @@
         where id = #{id}
     </update>
 
+    <update id="updateReportInfoStatus">
+        update report_info
+        set status = #{status}
+        where id = #{id}
+    </update>
+
 </mapper>

+ 15 - 3
src/test/java/Test3And6.java

@@ -1,5 +1,9 @@
 import com.kexun.model.LastFiveModel;
+import org.apache.commons.io.FileUtils;
+import org.junit.jupiter.api.Test;
 
+import java.io.File;
+import java.io.IOException;
 import java.math.BigDecimal;
 import java.util.ArrayList;
 import java.util.List;
@@ -13,7 +17,17 @@ import java.util.List;
 public class Test3And6 {
 
 
-    public void test(){
+    @Test
+    public void test1() throws IOException {
+
+        File files = new File("/Users/lgs/work/xml/冯学娇_450923200204205360_593513420431565831-202305211850426247.xml");
+        File file = new File("/Users/lgs/work/back/冯学娇_450923200204205360_593513420431565831-202305211850426247.xml");
+        boolean b = files.renameTo(file);
+       FileUtils.moveFile(files,file);
+
+    }
+
+    public void test() {
 
         List<LastFiveModel> listD1 = new ArrayList<>();
 
@@ -46,8 +60,6 @@ public class Test3And6 {
         lastFiveModelD1_4.setTotAcctBal(new BigDecimal("0"));
         listD1.add(lastFiveModelD1_4);
 
-
-
     }
 
 }
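Review note: `test1` first calls `files.renameTo(file)` and then `FileUtils.moveFile(files, file)` on the same pair; once the rename succeeds the source no longer exists, so `moveFile` throws `FileNotFoundException` (and if the rename fails because the target already exists, `moveFile` throws `FileExistsException`). The boolean returned by `renameTo` is also never checked. One operation is enough; a sketch with a hypothetical file name in place of the personal data in the original path:

    @Test
    public void test1() throws IOException {
        File source = new File("/Users/lgs/work/xml/sample.xml");   // hypothetical file for illustration
        File target = new File("/Users/lgs/work/back/sample.xml");
        // moveFile renames when it can and falls back to copy + delete otherwise
        FileUtils.moveFile(source, target);
    }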