
Merge remote-tracking branch 'origin/dev' into dev

master
CMM committed 1 year ago · commit 5d830d03c4
10 changed files with 100 additions and 186 deletions
  1. +1 -1    ningda-generator/src/main/java/com/ningdatech/generator/config/GeneratorCodeKingbaseConfig.java
  2. +4 -4    pmapi/pom.xml
  3. +2 -2    pmapi/src/main/java/com/ningdatech/pmapi/common/helper/RegionCacheHelper.java
  4. +6 -5    pmapi/src/main/java/com/ningdatech/pmapi/common/helper/impl/RegionsCacheHelperImpl.java
  5. +22 -22  pmapi/src/main/java/com/ningdatech/pmapi/expert/assembler/ExpertUserInfoAssembler.java
  6. +1 -1    pmapi/src/main/java/com/ningdatech/pmapi/expert/manage/ExpertManage.java
  7. +21 -45  pmapi/src/main/resources/application-dev.yml
  8. +21 -45  pmapi/src/main/resources/application-pre.yml
  9. +22 -46  pmapi/src/main/resources/application-prod.yml
  10. +0 -15  pom.xml

+1 -1   ningda-generator/src/main/java/com/ningdatech/generator/config/GeneratorCodeKingbaseConfig.java

@@ -41,7 +41,7 @@ public class GeneratorCodeKingbaseConfig {
// set the parent package module name
.moduleName("pmapi." + packageName)
// set the mapper XML output path
- .pathInfo(Collections.singletonMap(OutputFile.mapperXml,
+ .pathInfo(Collections.singletonMap(OutputFile.xml,
// set the custom output path
path + "/com/ningdatech/pmapi/" + packageName + "/mapper"));
})
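The single change above tracks the MyBatis-Plus generator API: recent versions of mybatis-plus-generator expose the mapper-XML output location as OutputFile.xml rather than the older OutputFile.mapperXml. A minimal sketch of how that pathInfo call is typically wired up is shown below; the connection settings, path, and packageName values are placeholders for illustration only, not values taken from this repository.

import java.util.Collections;

import com.baomidou.mybatisplus.generator.FastAutoGenerator;
import com.baomidou.mybatisplus.generator.config.OutputFile;

public class GeneratorSketch {

    public static void main(String[] args) {
        // Placeholder connection settings and output path.
        String url = "jdbc:kingbase8://localhost:54321/example_db";
        String path = System.getProperty("user.dir") + "/pmapi/src/main/java";
        String packageName = "demo";

        FastAutoGenerator.create(url, "user", "password")
                .packageConfig(builder -> builder
                        .parent("com.ningdatech")
                        .moduleName("pmapi." + packageName)
                        // OutputFile.xml is the renamed equivalent of OutputFile.mapperXml
                        .pathInfo(Collections.singletonMap(OutputFile.xml,
                                path + "/com/ningdatech/pmapi/" + packageName + "/mapper")))
                .execute();
    }
}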


+4 -4   pmapi/pom.xml

@@ -88,10 +88,6 @@
<scope>true</scope>
</dependency>
- <dependency>
- <groupId>com.alibaba</groupId>
- <artifactId>druid-spring-boot-starter</artifactId>
- </dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-aop</artifactId>
</dependency>
@@ -217,6 +213,10 @@
<groupId>com.baomidou</groupId>
<artifactId>mybatis-plus-boot-starter</artifactId>
</exclusion>
+ <exclusion>
+ <groupId>com.alibaba</groupId>
+ <artifactId>druid-spring-boot-starter</artifactId>
+ </exclusion>
</exclusions>
</dependency>
<!-- ZheZhengDing (Zhejiang government DingTalk) -->


+2 -2   pmapi/src/main/java/com/ningdatech/pmapi/common/helper/RegionCacheHelper.java

@@ -105,8 +105,8 @@ public interface RegionCacheHelper {
*
* @param code region code
* @param level region level
- * @return
+ * @return \
*/
- String getUnionPathStr(String code, Integer level);
+ String getUnionPath(String code, Integer level);

}

+6 -5   pmapi/src/main/java/com/ningdatech/pmapi/common/helper/impl/RegionsCacheHelperImpl.java

@@ -1,5 +1,6 @@
package com.ningdatech.pmapi.common.helper.impl;

+ import cn.hutool.core.collection.CollUtil;
import cn.hutool.core.text.StrPool;
import com.ningdatech.basic.exception.BizException;
import com.ningdatech.basic.util.CollUtils;
@@ -197,15 +198,15 @@ public class RegionsCacheHelperImpl extends AbstractRegionCacheHelper implements


@Override
- public String getUnionPathStr(String code, Integer level) {
- if (StringUtils.isBlank(code) || Objects.isNull(level)) {
- return null;
+ public String getUnionPath(String code, Integer level) {
+ if (StrUtils.isBlank(code) || Objects.isNull(level)) {
+ return StrUtils.EMPTY;
}
List<String> unionPathStrList = new ArrayList<>();
buildUnionPathStrList(code, level, unionPathStrList);
Collections.reverse(unionPathStrList);
- if (CollectionUtils.isEmpty(unionPathStrList)) {
- return null;
+ if (CollUtil.isEmpty(unionPathStrList)) {
+ return StrUtils.EMPTY;
}
return String.join("@@", unionPathStrList);
}
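Beyond the rename from getUnionPathStr to getUnionPath, the hunk above changes the method's contract: blank input or an empty lookup now yields an empty string instead of null, and the region segments are joined with "@@". A minimal caller sketch under that assumption follows; the class below is hypothetical and only illustrates how callers can drop their null checks.

import org.springframework.stereotype.Component;

import com.ningdatech.pmapi.common.helper.RegionCacheHelper;

// Hypothetical caller, for illustration only.
@Component
public class RegionNameResolver {

    private final RegionCacheHelper regionCacheHelper;

    public RegionNameResolver(RegionCacheHelper regionCacheHelper) {
        this.regionCacheHelper = regionCacheHelper;
    }

    /**
     * Returns a display name such as "Zhejiang / Hangzhou / Xihu",
     * or "-" when the code/level cannot be resolved.
     */
    public String displayName(String regionCode, Integer regionLevel) {
        String unionPath = regionCacheHelper.getUnionPath(regionCode, regionLevel);
        // getUnionPath now returns "" rather than null for blank input or a missing region,
        // so no null guard is needed; segments are separated by "@@".
        return unionPath.isEmpty() ? "-" : unionPath.replace("@@", " / ");
    }
}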


+22 -22   pmapi/src/main/java/com/ningdatech/pmapi/expert/assembler/ExpertUserInfoAssembler.java

@@ -44,43 +44,43 @@ public class ExpertUserInfoAssembler {
private final DictionaryCache dictionaryCache;
private final RegionCacheHelper regionCacheHelper;

- public ExpertFullInfoVO buildExpertFullInfoVO(List<AttachFileVo> attachFiles, ExpertFullInfoAllDTO expertFullInfoAll) {
+ public ExpertFullInfoVO buildExpertFullInfo(List<AttachFileVo> attachFiles, ExpertFullInfoAllDTO expertFullInfoAll) {
ExpertUserFullInfoDTO expertUserInfo = expertFullInfoAll.getExpertUserInfoDTO();
// dictionary field map
- Map<String, List<DictionaryFieldInfo>> dictInfoMap = buildDictInfoMap(expertFullInfoAll.getExpertDictionaryList());
+ Map<String, List<DictionaryFieldInfo>> dictMap = buildDictInfoMap(expertFullInfoAll.getExpertDictionaryList());
// expert attachment file map
- Map<Long, FileBasicInfo> fileInfoMap = buildFileBasicInfoMap(attachFiles);
+ Map<Long, FileBasicInfo> fileMap = buildFileBasicInfoMap(attachFiles);
// expert tag field map
- Map<String, List<TagFieldInfo>> tagInfoMap = buildTagFieldInfoMap(expertFullInfoAll.getExpertTagList());
+ Map<String, List<TagFieldInfo>> tagMap = buildTagFieldInfoMap(expertFullInfoAll.getExpertTagList());
// expert intended work regions
- List<ExpertRegionDTO> intentionWorkRegion = expertFullInfoAll.getExpertIntentionWorkRegionInfo();
+ List<ExpertRegionDTO> intentionWorkRegions = expertFullInfoAll.getExpertIntentionWorkRegionInfo();
// expert avoided-company list
- List<ExpertAvoidCompanyDTO> expertAvoidCompanyList = expertFullInfoAll.getExpertAvoidCompanyList();
+ List<ExpertAvoidCompanyDTO> avoidCompanies = expertFullInfoAll.getExpertAvoidCompanyList();
// basic info
- ExpertBasicInfo basicInfo = buildExpertBasicInfo(expertUserInfo, dictInfoMap, tagInfoMap, intentionWorkRegion, fileInfoMap);
+ ExpertBasicInfo basicInfo = buildExpertBasicInfo(expertUserInfo, dictMap, tagMap, intentionWorkRegions, fileMap);
// education info
- ExpertEduInfo eduInfo = buildExpertEduInfo(expertUserInfo, dictInfoMap, fileInfoMap);
+ ExpertEduInfo eduInfo = buildExpertEduInfo(expertUserInfo, dictMap, fileMap);
// job info
- ExpertJobInfo jobInfo = buildExpertJobInfo(expertUserInfo, dictInfoMap);
+ ExpertJobInfo jobInfo = buildExpertJobInfo(expertUserInfo, dictMap);
jobInfo.setBusinessStrips(expertFullInfoAll.getBusinessStrips());
// professional info
- ExpertProfessionalInfo professionalInfo = buildExpertProfessionalInfo(expertUserInfo, dictInfoMap
- , tagInfoMap, fileInfoMap, expertAvoidCompanyList);
+ ExpertProfessionalInfo profession = buildExpertProfessionalInfo(expertUserInfo, dictMap, tagMap, fileMap, avoidCompanies);
// recommendation info
ExpertRecommendInfo recommendInfo = new ExpertRecommendInfo();
- recommendInfo.setRecommendedWay(dictInfoMap.get(DictExpertInfoTypeEnum.RECOMMENDED_WAY.getKey()));
- recommendInfo.setRecommendationProofFile(getFileBasicInfoList(fileInfoMap, expertUserInfo.getRecommendationProofFileIdList()));
+ recommendInfo.setRecommendedWay(dictMap.get(DictExpertInfoTypeEnum.RECOMMENDED_WAY.getKey()));
+ recommendInfo.setRecommendationProofFile(getFileBasicInfoList(fileMap, expertUserInfo.getRecommendationProofFileIdList()));
// other info
ExpertOtherInfo otherInfo = new ExpertOtherInfo();
- otherInfo.setOther(tagInfoMap.get(ExpertTagEnum.OTHER.getKey()));
+ otherInfo.setOther(tagMap.get(ExpertTagEnum.OTHER.getKey()));
otherInfo.setRemark(expertUserInfo.getRemark());

// display list of the expert's applied intended work regions
List<ExpertRegionInfo> applyIntentionWorkRegions = expertFullInfoAll.getExpertApplyIntentionWorkRegionInfo().stream().map(r -> {
- ExpertRegionInfo expertApplyIntentionWorkRegion = new ExpertRegionInfo();
- expertApplyIntentionWorkRegion.setRegionLevel(r.getRegionLevel());
- expertApplyIntentionWorkRegion.setRegionCode(r.getRegionCode());
- expertApplyIntentionWorkRegion.setRegionName(regionCacheHelper.getUnionPathStr(r.getRegionCode(), r.getRegionLevel()));
- return expertApplyIntentionWorkRegion;
+ ExpertRegionInfo intentionWorkRegion = new ExpertRegionInfo();
+ intentionWorkRegion.setRegionLevel(r.getRegionLevel());
+ intentionWorkRegion.setRegionCode(r.getRegionCode());
+ intentionWorkRegion.setRegionName(regionCacheHelper.getUnionPath(r.getRegionCode(), r.getRegionLevel()));
+ return intentionWorkRegion;
}).collect(Collectors.toList());

if (CollUtil.isEmpty(basicInfo.getExpertIntentionWorkRegions())) {
@@ -91,7 +91,7 @@ public class ExpertUserInfoAssembler {
resExpertFullInfo.setBasicInfo(basicInfo);
resExpertFullInfo.setEduInfo(eduInfo);
resExpertFullInfo.setJobInfo(jobInfo);
- resExpertFullInfo.setProfessionalInfo(professionalInfo);
+ resExpertFullInfo.setProfessionalInfo(profession);
resExpertFullInfo.setRecommendInfo(recommendInfo);
resExpertFullInfo.setExpertOtherInfo(otherInfo);
resExpertFullInfo.setExpertApplyIntentionWorkRegions(applyIntentionWorkRegions);
@@ -236,14 +236,14 @@ public class ExpertUserInfoAssembler {
basicInfo.setHometown(expertUserInfoDTO.getHometown());
basicInfo.setNationality(expertUserInfoDTO.getNationality());
basicInfo.setExpertSource(tagFieldInfoMap.get(ExpertTagEnum.EXPERT_SOURCE.getKey()));
- expertRegionInfo.setRegionName(regionCacheHelper.getUnionPathStr(expertRegionInfo.getRegionCode(), expertRegionInfo.getRegionLevel()));
+ expertRegionInfo.setRegionName(regionCacheHelper.getUnionPath(expertRegionInfo.getRegionCode(), expertRegionInfo.getRegionLevel()));
basicInfo.setExpertRegionInfo(expertRegionInfo);

basicInfo.setExpertIntentionWorkRegions(expertIntentionWorkRegionInfo.stream().map(r -> {
ExpertRegionInfo expertIntentionWorkRegion = new ExpertRegionInfo();
expertIntentionWorkRegion.setRegionCode(r.getRegionCode());
expertIntentionWorkRegion.setRegionLevel(r.getRegionLevel());
- expertIntentionWorkRegion.setRegionName(regionCacheHelper.getUnionPathStr(r.getRegionCode(), r.getRegionLevel()));
+ expertIntentionWorkRegion.setRegionName(regionCacheHelper.getUnionPath(r.getRegionCode(), r.getRegionLevel()));
return expertIntentionWorkRegion;
}).collect(Collectors.toList()));
basicInfo.setExpertType(dictionaryFieldInfoMap.get(DictExpertInfoTypeEnum.EXPERT_TYPE.getKey()));


+1 -1   pmapi/src/main/java/com/ningdatech/pmapi/expert/manage/ExpertManage.java

@@ -135,7 +135,7 @@ public class ExpertManage {
}
List<Long> fileIdList = expertInfoCommonHelper.getExpertFileIdList(expertUserFullInfoAll);
List<AttachFileVo> attachFiles = fileService.getByIds(fileIdList);
- return expertUserInfoAssembler.buildExpertFullInfoVO(attachFiles, expertUserFullInfoAll);
+ return expertUserInfoAssembler.buildExpertFullInfo(attachFiles, expertUserFullInfoAll);
}

@Transactional(rollbackFor = Exception.class)


+21 -45   pmapi/src/main/resources/application-dev.yml

@@ -41,53 +41,29 @@ spring:
hibernate:
ddl-auto: update
datasource:
- type: com.alibaba.druid.pool.DruidDataSource
+ type: com.zaxxer.hikari.HikariDataSource
driverClassName: com.kingbase8.Driver
+ url: jdbc:kingbase8://120.26.44.207:54321/nd_project_management?zeroDateTimeBehavior=convertToNull&useUnicode=true&characterEncoding=utf-8
+ username: SYSTEM
+ password: Ndkj1234
- # data source
- druid:
- url: jdbc:kingbase8://120.26.44.207:54321/nd_project_management?zeroDateTimeBehavior=convertToNull&useUnicode=true&characterEncoding=utf-8
- username: SYSTEM
- password: Ndkj1234
- # initial number of connections
- initialSize: 5
- # minimum pool size
- minIdle: 10
- # maximum pool size
- maxActive: 20
- # maximum wait time when acquiring a connection
- maxWait: 60000
- # interval between checks for idle connections that should be closed, in milliseconds
- timeBetweenEvictionRunsMillis: 60000
- # minimum time a connection may stay in the pool, in milliseconds
- minEvictableIdleTimeMillis: 300000
- # maximum time a connection may stay in the pool, in milliseconds
- maxEvictableIdleTimeMillis: 900000
- # query used to check whether a connection is still valid
- # for MySQL use: SELECT 1 FROM DUAL
- validationQuery: SELECT 1
- testWhileIdle: true
- testOnBorrow: false
- testOnReturn: false
- webStatFilter:
- enabled: true
- statViewServlet:
- enabled: true
- # whitelist; leave empty to allow all access
- allow:
- url-pattern: /druid/*
- # console admin username and password
- login-username: admin
- login-password: admin
- filter:
- stat:
- enabled: true
- # slow SQL logging
- log-slow-sql: true
- slow-sql-millis: 1000
- merge-sql: true
- wall:
- config:
- multi-statement-allow: true
+ hikari:
+ # maximum milliseconds a client waits for a connection from the pool
+ connection-timeout: 30000
+ # minimum number of idle connections kept in the pool
+ minimum-idle: 5
+ # maximum pool size
+ maximum-pool-size: 20
+ # maximum time a connection may sit idle in the pool (milliseconds)
+ idle-timeout: 60000
+ # maximum lifetime of a connection in the pool (milliseconds)
+ max-lifetime: 600000
+ # default auto-commit behavior of connections returned from the pool; defaults to true
+ auto-commit: true
+ # enable connection leak detection
+ leak-detection-threshold: 5000
+ # connection test query
+ connection-test-query: SELECT 1
# maximum size of a single uploaded file
servlet:
multipart:
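This profile switches the connection pool from Druid to HikariCP: spring.datasource.type now points at com.zaxxer.hikari.HikariDataSource, the JDBC url/username/password move up to the datasource level, and the druid block (pool sizing, stat/wall filters, and the /druid/* console) is replaced by a much smaller hikari block. Roughly, maxActive maps to maximum-pool-size, minIdle to minimum-idle, maxWait to connection-timeout, and validationQuery to connection-test-query; Druid's monitoring console has no Hikari equivalent and is simply dropped. The same change is repeated for the pre and prod profiles below. As a quick way to confirm the switch at runtime, a hypothetical startup check like the following (not part of this commit) can log the pool Spring Boot actually wired up:

import javax.sql.DataSource;

import com.zaxxer.hikari.HikariDataSource;
import org.springframework.boot.CommandLineRunner;
import org.springframework.stereotype.Component;

// Hypothetical verification bean, for illustration only.
@Component
public class DataSourcePoolCheck implements CommandLineRunner {

    private final DataSource dataSource;

    public DataSourcePoolCheck(DataSource dataSource) {
        this.dataSource = dataSource;
    }

    @Override
    public void run(String... args) {
        System.out.println("DataSource implementation: " + dataSource.getClass().getName());
        if (dataSource instanceof HikariDataSource) {
            HikariDataSource hikari = (HikariDataSource) dataSource;
            // These values should mirror the spring.datasource.hikari.* settings of the active profile.
            System.out.println("maximumPoolSize=" + hikari.getMaximumPoolSize()
                    + ", minimumIdle=" + hikari.getMinimumIdle()
                    + ", connectionTimeout=" + hikari.getConnectionTimeout()
                    + ", maxLifetime=" + hikari.getMaxLifetime());
        }
    }
}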


+21 -45   pmapi/src/main/resources/application-pre.yml

@@ -40,53 +40,29 @@ spring:
hibernate:
ddl-auto: update
datasource:
- type: com.alibaba.druid.pool.DruidDataSource
+ type: com.zaxxer.hikari.HikariDataSource
driverClassName: com.kingbase8.Driver
+ url: jdbc:kingbase8://10.53.168.41:54321/nd_project_management?zeroDateTimeBehavior=convertToNull&useUnicode=true&characterEncoding=utf-8&nullCatalogMeansCurrent=true
+ username: SYSTEM
+ password: Ndkj1234
- # data source
- druid:
- url: jdbc:kingbase8://10.53.168.41:54321/nd_project_management?zeroDateTimeBehavior=convertToNull&useUnicode=true&characterEncoding=utf-8&nullCatalogMeansCurrent=true
- username: SYSTEM
- password: Ndkj1234
- # initial number of connections
- initialSize: 5
- # minimum pool size
- minIdle: 10
- # maximum pool size
- maxActive: 20
- # maximum wait time when acquiring a connection
- maxWait: 60000
- # interval between checks for idle connections that should be closed, in milliseconds
- timeBetweenEvictionRunsMillis: 60000
- # minimum time a connection may stay in the pool, in milliseconds
- minEvictableIdleTimeMillis: 300000
- # maximum time a connection may stay in the pool, in milliseconds
- maxEvictableIdleTimeMillis: 900000
- # query used to check whether a connection is still valid
- # for MySQL use: SELECT 1 FROM DUAL
- validationQuery: SELECT 1
- testWhileIdle: true
- testOnBorrow: false
- testOnReturn: false
- webStatFilter:
- enabled: true
- statViewServlet:
- enabled: true
- # whitelist; leave empty to allow all access
- allow:
- url-pattern: /druid/*
- # console admin username and password
- login-username: admin
- login-password: admin
- filter:
- stat:
- enabled: true
- # slow SQL logging
- log-slow-sql: true
- slow-sql-millis: 1000
- merge-sql: true
- wall:
- config:
- multi-statement-allow: true
+ hikari:
+ # maximum milliseconds a client waits for a connection from the pool
+ connection-timeout: 30000
+ # minimum number of idle connections kept in the pool
+ minimum-idle: 10
+ # maximum pool size
+ maximum-pool-size: 20
+ # maximum time a connection may sit idle in the pool (milliseconds)
+ idle-timeout: 60000
+ # maximum lifetime of a connection in the pool (milliseconds)
+ max-lifetime: 600000
+ # default auto-commit behavior of connections returned from the pool; defaults to true
+ auto-commit: true
+ # enable connection leak detection
+ leak-detection-threshold: 5000
+ # connection test query
+ connection-test-query: SELECT 1
# maximum size of a single uploaded file
servlet:
multipart:


+22 -46   pmapi/src/main/resources/application-prod.yml

@@ -40,53 +40,29 @@ spring:
hibernate:
ddl-auto: update
datasource:
- type: com.alibaba.druid.pool.DruidDataSource
+ type: com.zaxxer.hikari.HikariDataSource
driverClassName: com.kingbase8.Driver
+ url: jdbc:kingbase8://10.53.172.221:54321/nd_project_management?zeroDateTimeBehavior=convertToNull&useUnicode=true&characterEncoding=utf-8&nullCatalogMeansCurrent=true
+ username: SYSTEM
+ password: Ndkj1234
- # data source
- druid:
- url: jdbc:kingbase8://10.53.172.221:54321/nd_project_management?zeroDateTimeBehavior=convertToNull&useUnicode=true&characterEncoding=utf-8&nullCatalogMeansCurrent=true
- username: SYSTEM
- password: Ndkj1234
- # initial number of connections
- initialSize: 5
- # minimum pool size
- minIdle: 10
- # maximum pool size
- maxActive: 20
- # maximum wait time when acquiring a connection
- maxWait: 60000
- # interval between checks for idle connections that should be closed, in milliseconds
- timeBetweenEvictionRunsMillis: 60000
- # minimum time a connection may stay in the pool, in milliseconds
- minEvictableIdleTimeMillis: 300000
- # maximum time a connection may stay in the pool, in milliseconds
- maxEvictableIdleTimeMillis: 900000
- # query used to check whether a connection is still valid
- # for MySQL use: SELECT 1 FROM DUAL
- validationQuery: SELECT 1
- testWhileIdle: true
- testOnBorrow: false
- testOnReturn: false
- webStatFilter:
- enabled: true
- statViewServlet:
- enabled: true
- # whitelist; leave empty to allow all access
- allow:
- url-pattern: /druid/*
- # console admin username and password
- login-username: admin
- login-password: admin
- filter:
- stat:
- enabled: true
- # slow SQL logging
- log-slow-sql: true
- slow-sql-millis: 1000
- merge-sql: true
- wall:
- config:
- multi-statement-allow: true
+ hikari:
+ # maximum milliseconds a client waits for a connection from the pool
+ connection-timeout: 30000
+ # minimum number of idle connections kept in the pool
+ minimum-idle: 10
+ # maximum pool size
+ maximum-pool-size: 20
+ # maximum time a connection may sit idle in the pool (milliseconds)
+ idle-timeout: 60000
+ # maximum lifetime of a connection in the pool (milliseconds)
+ max-lifetime: 600000
+ # default auto-commit behavior of connections returned from the pool; defaults to true
+ auto-commit: true
+ # enable connection leak detection
+ leak-detection-threshold: 5000
+ # connection test query
+ connection-test-query: SELECT 1
# maximum size of a single uploaded file
servlet:
multipart:
@@ -145,7 +121,7 @@ flowable:
async-executor-activate: true
# disable some unneeded features
rest-api-enabled: false
- # database-schema-update: false
+ # database-schema-update: false
idm:
enabled: false
common:


+0 -15   pom.xml

@@ -17,16 +17,12 @@
<maven.compiler.source>1.8</maven.compiler.source>
<maven.compiler.target>1.8</maven.compiler.target>
<jwt.version>3.7.0</jwt.version>
- <com.alibaba.druid.version>1.2.8</com.alibaba.druid.version>
<mybatis.plus.version>3.5.3.2</mybatis.plus.version>
<io.springfox-swagger2.version>3.0.0</io.springfox-swagger2.version>
<swagger-knife.version>3.0.3</swagger-knife.version>
<fastjson.version>1.2.83</fastjson.version>
<guava.version>31.1-jre</guava.version>
<commons-pool2.version>2.11.1</commons-pool2.version>
<useragent.yauaa>6.11</useragent.yauaa>
- <druid.version>1.2.8</druid.version>
<mica.ip2region>2.6.3</mica.ip2region>
</properties>

<dependencyManagement>
@@ -37,11 +33,6 @@
<version>5.2.2</version>
</dependency>
- <dependency>
- <groupId>com.alibaba</groupId>
- <artifactId>druid-spring-boot-starter</artifactId>
- <version>${druid.version}</version>
- </dependency>
<dependency>
<groupId>org.apache.poi</groupId>
<artifactId>poi-ooxml</artifactId>
<version>5.2.2</version>
@@ -58,12 +49,6 @@
<type>pom</type>
<scope>import</scope>
</dependency>
- <!-- Druid database connection pool -->
- <dependency>
- <groupId>com.alibaba</groupId>
- <artifactId>druid</artifactId>
- <version>${com.alibaba.druid.version}</version>
- </dependency>
<!-- mybatis plus -->
<dependency>
<groupId>com.baomidou</groupId>

