main
gaoshuguang 1 year ago
parent bbde3834c3
commit 10be376130

@ -95,9 +95,10 @@ public class Constant {
*
*/
public final static List<String> INTO_DB_LIST = Arrays.asList("237", "240", "241", "242", "243", "244", "245", "246",
"302", "304",
"501",
"803",
"302", "304","306",
"501","502","503",
"604","605",
"803","804",
"901", "902", "903", "904", "905", "917", "918", "922", "923", "924", "925");
/**
@ -172,7 +173,7 @@ public class Constant {
*/
//生产环境使用 begin
//public static final String SELECT_TF_DOWNLOAD_NOTIFY_BLACK = "SELECT ID,SYSTEMID,TABLENAME,BATCHNO,VERSION,PROTOCOL_TYPE,CREATETIME,RECORDNUM,UPDATEFLAG,UPDATETIME,DOMAIN1,DOMAIN2,DOMAIN3,DOMAIN4 FROM TF_DOWNLOAD_NOTIFY where createtime>=to_date('20230101','yyyyMMdd') and protocol_type in(101,102,103,104,105,106,107,108,111,237,240,241,242,243,244,245,246,302,304,501,801,802,803,808,809,901,902,903,904,905,917,918,919,920,922,923,924,925) and updateflag=0 and length(domain1)>18 and substr(domain1,0,3)='ftp'";
//public static final String SELECT_TF_DOWNLOAD_NOTIFY_BLACK = "SELECT ID,SYSTEMID,TABLENAME,BATCHNO,VERSION,PROTOCOL_TYPE,CREATETIME,RECORDNUM,UPDATEFLAG,UPDATETIME,DOMAIN1,DOMAIN2,DOMAIN3,DOMAIN4 FROM TF_DOWNLOAD_NOTIFY where createtime>=to_date('20230101','yyyyMMdd') and protocol_type in(101,102,103,104,105,106,107,108,111,237,240,241,242,243,244,245,246,302,304,306,501,502,503,604,605,804,801,802,803,808,809,901,902,903,904,905,917,918,919,920,922,923,924,925) and updateflag=0 and length(domain1)>18 and substr(domain1,0,3)='ftp'";
//public static final String SELECT_TF_DOWNLOAD_NOTIFY_MIN_FEE = "SELECT ID,SYSTEMID,TABLENAME,BATCHNO,VERSION,PROTOCOL_TYPE,CREATETIME,RECORDNUM,UPDATEFLAG,UPDATETIME,DOMAIN1,DOMAIN2,DOMAIN3,DOMAIN4 FROM TF_DOWNLOAD_NOTIFY_CS where createtime>=to_date('20230101','yyyyMMdd') and protocol_type in(250) and updateflag=0 and length(domain1)>18 and substr(domain1,0,3)='ftp'";
//public static final String SELECT_T_PARAM_NOTIFY = "SELECT PARAM_ID,PARAM_NAME,VERSION,CREATE_TIME,RECORD_COUNT,FLAG,PARAM_PATH,SPARE3 FROM T_PARAM_NOTIFY WHERE FLAG=0 and PARAM_ID IN(35,36) AND CREATE_TIME>=TO_DATE('20230101','yyyyMMdd')";
//public static final String UPDATE_TF_DOWNLOAD_NOTIFY_HR = "UPDATE TF_DOWNLOAD_NOTIFY SET UPDATEFLAG = 1 WHERE ID =";
@ -180,10 +181,14 @@ public class Constant {
//生产环境使用 end
//测试环境使用 begin
public static final String SELECT_TF_DOWNLOAD_NOTIFY_BLACK = "SELECT ID,SYSTEMID,TABLENAME,BATCHNO,VERSION,PROTOCOL_TYPE,CREATETIME,RECORDNUM,UPDATEFLAG,UPDATETIME,DOMAIN1,DOMAIN2,DOMAIN3,DOMAIN4 FROM TF_DOWNLOAD_NOTIFY_CS where createtime>=to_date('20230101','yyyyMMdd') and protocol_type in(101,102,103,104,105,106,107,108,111,237,240,241,242,243,244,245,246,302,304,501,801,802,803,808,809,901,902,903,904,905,917,918,919,920,922,923,924,925) and updateflag=0 and length(domain1)>18 and substr(domain1,0,3)='ftp'";
public static final String SELECT_TF_DOWNLOAD_NOTIFY_BLACK = "SELECT ID,SYSTEMID,TABLENAME,BATCHNO,VERSION,PROTOCOL_TYPE,CREATETIME,RECORDNUM,UPDATEFLAG,UPDATETIME,DOMAIN1,DOMAIN2,DOMAIN3,DOMAIN4 FROM TF_DOWNLOAD_NOTIFY_CS where createtime>=to_date('20230101','yyyyMMdd') and protocol_type in(101,102,103,104,105,106,107,108,111,237,240,241,242,243,244,245,246,302,304,306,501,502,503,604,605,804,801,802,803,808,809,901,902,903,904,905,917,918,919,920,922,923,924,925) and updateflag=0 and length(domain1)>18 and substr(domain1,0,3)='ftp'";
public static final String UPDATE_TF_DOWNLOAD_NOTIFY_HR = "UPDATE TF_DOWNLOAD_NOTIFY_CS SET UPDATEFLAG = 1 WHERE ID =";
//public static final String SELECT_TF_DOWNLOAD_NOTIFY_BLACK = "SELECT ID,SYSTEMID,TABLENAME,BATCHNO,VERSION,PROTOCOL_TYPE,CREATETIME,RECORDNUM,UPDATEFLAG,UPDATETIME,DOMAIN1,DOMAIN2,DOMAIN3,DOMAIN4 FROM TF_DOWNLOAD_NOTIFY_TEST where createtime>=to_date('20230101','yyyyMMdd') and protocol_type in(101,102,103,104,105,106,107,108,111,237,240,241,242,243,244,245,246,302,304,306,501,502,503,604,605,804,801,802,803,808,809,901,902,903,904,905,917,918,919,920,922,923,924,925) and updateflag=0 and length(domain1)>18 and substr(domain1,0,3)='ftp'";
//public static final String UPDATE_TF_DOWNLOAD_NOTIFY_HR = "UPDATE TF_DOWNLOAD_NOTIFY_TEST SET UPDATEFLAG = 1 WHERE ID =";
public static final String SELECT_TF_DOWNLOAD_NOTIFY_MIN_FEE = "SELECT ID,SYSTEMID,TABLENAME,BATCHNO,VERSION,PROTOCOL_TYPE,CREATETIME,RECORDNUM,UPDATEFLAG,UPDATETIME,DOMAIN1,DOMAIN2,DOMAIN3,DOMAIN4 FROM TF_DOWNLOAD_NOTIFY_CS where createtime>=to_date('20230101','yyyyMMdd') and protocol_type in(250) and updateflag=0 and length(domain1)>18 and substr(domain1,0,3)='ftp'";
public static final String SELECT_T_PARAM_NOTIFY = "SELECT PARAM_ID,PARAM_NAME,VERSION,CREATE_TIME,RECORD_COUNT,FLAG,PARAM_PATH,SPARE3 FROM T_PARAM_NOTIFY_CS WHERE FLAG=0 and PARAM_ID IN(35,36) AND CREATE_TIME>=TO_DATE('20230101','yyyyMMdd')";
public static final String UPDATE_TF_DOWNLOAD_NOTIFY_HR = "UPDATE TF_DOWNLOAD_NOTIFY_CS SET UPDATEFLAG = 1 WHERE ID =";
public static final String UPDATE_T_PARAM_NOTIFY_MD = "UPDATE T_PARAM_NOTIFY_CS SET FLAG = 1 WHERE PARAM_ID =";
//测试环境使用 end
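For context, a hedged sketch of how these notify constants are typically combined: the SELECT pulls unprocessed rows, and UPDATE_TF_DOWNLOAD_NOTIFY_HR (which ends in "WHERE ID =") has the row id appended. The connection values are placeholders, not taken from this commit.
// Illustrative only; assumes java.sql.* imports.
String url = "jdbc:oracle:thin:@127.0.0.1:1521:orcl";   // placeholder
String username = "user", password = "pass";            // placeholders
try (Connection conn = DriverManager.getConnection(url, username, password);
     Statement stmt = conn.createStatement();
     ResultSet rs = stmt.executeQuery(Constant.SELECT_TF_DOWNLOAD_NOTIFY_BLACK)) {
    while (rs.next()) {
        long id = rs.getLong("ID");
        try (Statement upd = conn.createStatement()) {
            // mark the row as processed once its file has been handled
            upd.executeUpdate(Constant.UPDATE_TF_DOWNLOAD_NOTIFY_HR + id);
        }
    }
}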

@ -17,7 +17,7 @@ import javax.servlet.http.HttpServletResponse;
import java.io.PrintWriter;
@Component
@Slf4j
//@Slf4j
public class FilterConfig implements HandlerInterceptor {
@Override

@ -147,7 +147,7 @@ public static String driverClassName="";
System.out.println(" 联网中心dbf文件备份存在时间 : " + Newproperties.getProperty("dbfTime"));
System.out.println(" 下级分中心zlib文件存放时间(天) : " + Newproperties.getProperty("zlibTime"));
System.out.println(" 加载几天的增量(天) : " + Newproperties.getProperty("incDays"));
System.out.println(" 当前程序IDmachineId为 : " + Newproperties.getProperty("machineId"));
System.out.println(" 当前程序IDmachineId为 : " + properties.getProperty("machineId"));
System.out.println(" sqlite数据连接驱动 : " + Newproperties.getProperty("sqliteDriverName"));
System.out.println(" sqlite数据连接url : " + Newproperties.getProperty("sqliteUrl"));
System.out.println(" 下载文件保留天数(天) : " + Newproperties.getProperty("downloadTime"));

@ -4,6 +4,7 @@ package com.nm.gsgl.common.exception;
import com.alibaba.fastjson2.JSON;
import com.nm.gsgl.common.Result;
import com.nm.gsgl.common.enumeration.MessageEnum;
import com.nm.gsgl.common.utils.LogUtil;
import lombok.extern.slf4j.Slf4j;
import org.springframework.web.bind.annotation.ExceptionHandler;
import org.springframework.web.bind.annotation.RestControllerAdvice;
@ -14,7 +15,7 @@ import org.springframework.web.bind.annotation.RestControllerAdvice;
* @description:
*/
@RestControllerAdvice
@Slf4j
//@Slf4j
public class PPExceptionHandler {
/**
@ -23,7 +24,9 @@ public class PPExceptionHandler {
@ExceptionHandler(PPException.class)
public <T> T handlePPException(PPException e) {
Result res = Result.error(e.getErrorCode(), e.getErrorMessage());
log.info("异常处理模块出参:{}", JSON.toJSONString(res));
//log.info("异常处理模块出参:{}", JSON.toJSONString(res));
LogUtil.WriteLog_Error("异常处理模块出参" + JSON.toJSONString(res), "PPExceptionHandler");
return (T) JSON.toJSONString(res);
}
@ -33,9 +36,11 @@ public class PPExceptionHandler {
*/
@ExceptionHandler(Exception.class)
public <T> T handleException(Exception e) {
log.info(e.getMessage(),e);
//log.info(e.getMessage(),e);
LogUtil.WriteLog_Error(e.getMessage()+e, "PPExceptionHandler");
Result res = Result.error(MessageEnum..getCode(), MessageEnum..getMessage());
log.info("异常处理模块出参:{}", JSON.toJSONString(res));
LogUtil.WriteLog_Error("异常处理模块出参" + JSON.toJSONString(res), "PPExceptionHandler");
//log.info("异常处理模块出参:{}", JSON.toJSONString(res));
return (T) JSON.toJSONString(res);
}
}
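For context, a hedged sketch of how this advice is exercised: any bean that throws PPException is routed to handlePPException, which serializes a Result to JSON. The controller below is hypothetical; it only reuses a MessageEnum constant seen elsewhere in this commit.
// Hypothetical controller; PPExceptionHandler above turns the exception into a JSON Result body.
@RestController
public class DemoController {
    @GetMapping("/demo")
    public String demo() {
        throw new PPException(MessageEnum.DBF.getCode(), MessageEnum.DBF.getMessage());
    }
}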

@ -1,5 +1,6 @@
package com.nm.gsgl.common.timetask;
import com.nm.gsgl.common.utils.LogUtil;
import com.nm.gsgl.common.utils.UuidUtil;
import com.nm.gsgl.service.CallSendOutInterfaceService;
import com.nm.gsgl.service.DelOverdueService;
@ -27,7 +28,6 @@ import java.time.LocalDateTime;
@PropertySource(value = "classpath:static/timeTask.properties")
@Component
@EnableAsync
@Slf4j
public class TimeTaskInfo {
@Resource
@ -42,10 +42,6 @@ public class TimeTaskInfo {
private CallSendOutInterfaceService callSendOutInterfaceService;
@Resource
private IntoDatabaseService intoDatabaseService;
public static Logger minFeeLog = LoggerFactory.getLogger("MinFee");
public static Logger blackCardLog = LoggerFactory.getLogger("BlackCard");
public static Logger callDisInterfaceLog = LoggerFactory.getLogger("CallDisInterface");
public static Logger insertDBLog = LoggerFactory.getLogger("InsertDB");
/**
* ETC
@ -57,12 +53,17 @@ public class TimeTaskInfo {
@Scheduled(cron = "${time.corn2}")
public void timeTaskHRBlackCard() {
String uuid = UuidUtil.getUuid();
blackCardLog.info("[uuid:{}]定时执行华软中介库ETC参数下载及处理服务当前时间={}", uuid, LocalDateTime.now());
//blackCardLog.info("[uuid:{}]定时执行华软中介库ETC参数下载及处理服务当前时间={}", uuid, LocalDateTime.now());
LogUtil.WriteLog_HRBlackCard("[uuid:" + uuid + "]-定时执行华软中介库ETC参数下载及处理服务,当前时间=" + LocalDateTime.now(), "TimeTaskInfo-timeTaskHRBlackCard");
long start = System.currentTimeMillis();
ectService.paramDownload(uuid);
long end = System.currentTimeMillis();
blackCardLog.info("[uuid:{}]定时华软中介库ETC参数下载及处理服务执行完成耗时{}毫秒", uuid, end - start);
//blackCardLog.info("[uuid:{}]定时华软中介库ETC参数下载及处理服务执行完成耗时{}毫秒", uuid, end - start);
LogUtil.WriteLog_HRBlackCard("[uuid:" + uuid + "]-定时华软中介库ETC参数下载及处理服务执行完成,耗时毫秒=" + (end - start), "TimeTaskInfo-timeTaskHRBlackCard");
}
/**
* ETC
*
@ -73,11 +74,11 @@ public class TimeTaskInfo {
@Scheduled(cron = "${time.corn6}")
public void timeTaskMDBlackCard() {
String uuid = UuidUtil.getUuid();
blackCardLog.info("[uuid:{}]定时执行迈道中介库ETC参数下载及处理服务当前时间={}", uuid, LocalDateTime.now());
LogUtil.WriteLog_MDBlackCard("[uuid:" + uuid + "]-定时执行迈道中介库ETC参数下载及处理服务,当前时间=" + LocalDateTime.now(), "TimeTaskInfo-timeTaskMDBlackCard");
long start = System.currentTimeMillis();
ectService.paramMDDownload(uuid);
long end = System.currentTimeMillis();
blackCardLog.info("[uuid:{}]定时迈道中介库ETC参数下载及处理服务执行完成耗时{}毫秒", uuid, end - start);
LogUtil.WriteLog_MDBlackCard("[uuid:" + uuid + "]-定时迈道中介库ETC参数下载及处理服务执行完成,耗时毫秒=" + (end - start), "TimeTaskInfo-timeTaskMDBlackCard");
}
@ -91,11 +92,11 @@ public class TimeTaskInfo {
@Scheduled(cron = "${time.corn4}")
public void timeTaskMinFee() {
String uuid = UuidUtil.getUuid();
minFeeLog.info("[uuid:{}]定时执行ETC参数(最小费额)下载及处理服务,当前时间={}", uuid, LocalDateTime.now());
LogUtil.WriteLog_MinFee("[uuid:" + uuid + "]-定时执行ETC参数(最小费额)下载及处理服务,当前时间=" + LocalDateTime.now(), "TimeTaskInfo-timeTaskMinFee");
long start = System.currentTimeMillis();
minFeeService.minFeeParamDownload(uuid);
long end = System.currentTimeMillis();
minFeeLog.info("[uuid:{}]定时ETC参数(最小费额)下载及处理服务执行完成,耗时:{}毫秒", uuid, end - start);
LogUtil.WriteLog_MinFee("[uuid:" + uuid + "]-定时ETC参数(最小费额)下载及处理服务执行完成,耗时毫秒=" + (end - start), "TimeTaskInfo-timeTaskMinFee");
}
/**
@ -108,16 +109,23 @@ public class TimeTaskInfo {
@Scheduled(cron = "${time.corn5}")
public void timeTaskCallSendOut() {
String uuid = UuidUtil.getUuid();
callDisInterfaceLog.info("[uuid:{}]定时调用自动下发接口,当前时间={}", uuid, LocalDateTime.now());
//callDisInterfaceLog.info("[uuid:{}]定时调用自动下发接口,当前时间={}", uuid, LocalDateTime.now());
LogUtil.WriteLog_CallDisInterface("[uuid:" + uuid + "]-定时调用自动下发接口,当前时间=" + LocalDateTime.now(), "TimeTaskInfo-timeTaskCallSendOut");
long start = System.currentTimeMillis();
boolean b = callSendOutInterfaceService.callSendOutInterface(uuid);
long end = System.currentTimeMillis();
if (b) {
callDisInterfaceLog.info("[uuid:{}]定时调用自动下发接口执行完成,耗时:{}毫秒", uuid, end - start);
//callDisInterfaceLog.info("[uuid:{}]定时调用自动下发接口执行完成,耗时:{}毫秒", uuid, end - start);
LogUtil.WriteLog_CallDisInterface("[uuid:" + uuid + "]-定时调用自动下发接口执行完成,耗时毫秒=" + (end - start), "TimeTaskInfo-timeTaskCallSendOut");
} else {
callDisInterfaceLog.info("[uuid:{}]当前程序不调用自动下发程序接口,耗时:{}毫秒", uuid, end - start);
//callDisInterfaceLog.info("[uuid:{}]当前程序不调用自动下发程序接口,耗时:{}毫秒", uuid, end - start);
LogUtil.WriteLog_CallDisInterface("[uuid:" + uuid + "]-当前程序不调用自动下发程序接口,耗时毫秒=" + (end - start), "TimeTaskInfo-timeTaskCallSendOut");
}
}
/**
*
*
@ -128,33 +136,24 @@ public class TimeTaskInfo {
@Scheduled(cron = "${time.corn1}")
public void timeTaskIntoDatabase() {
String uuid = UuidUtil.getUuid();
insertDBLog.info("[uuid:{}]-根据配置时间定时下载入库文件,当前时间{}", uuid, LocalDateTime.now());
//insertDBLog.info("[uuid:{}]-根据配置时间定时下载入库文件,当前时间{}", uuid, LocalDateTime.now());
LogUtil.WriteLog_InsertDB("[uuid:" + uuid + "]-根据配置时间定时将入库文件入库操作,当前时间=" + LocalDateTime.now(), "TimeTaskInfo-timeTaskIntoDatabase");
long start = System.currentTimeMillis();
boolean b = intoDatabaseService.downAndInsert(uuid);
long end = System.currentTimeMillis();
if (b) {
insertDBLog.info("[uuid:{}]-根据配置时间定时下载入库文件执行完成,耗时毫秒:{}", uuid, (end - start));
//insertDBLog.info("[uuid:{}]-根据配置时间定时下载入库文件执行完成,耗时毫秒:{}", uuid, (end - start));
LogUtil.WriteLog_InsertDB("[uuid:" + uuid + "]-根据配置时间定时将入库文件入库执行完成,耗时毫秒=" + (end - start), "TimeTaskInfo-timeTaskIntoDatabase");
} else {
insertDBLog.info("[uuid:{}]-当前服务不执行定时下载入库文件,耗时毫秒:{}", uuid, (end - start));
//insertDBLog.info("[uuid:{}]-当前服务不执行定时下载入库文件,耗时毫秒:{}", uuid, (end - start));
LogUtil.WriteLog_InsertDB("[uuid:" + uuid + "]-当前服务不执行定时将入库文件入库操作,耗时毫秒=" + (end - start), "TimeTaskInfo-timeTaskIntoDatabase");
}
}
///**
// * 根据配置时间定时执行sqlite备份
// *
// * @author shuguang
// * @date 2022-11-24 15:37
// */
//@Async("threadPool1")
//@Scheduled(cron = "${time.corn1}")
//public void timeTask1() {
// String uuid = UuidUtil.getUuid();
// log.info("[uuid:{}]定时备份sqlite任务当前时间={}", uuid, LocalDateTime.now());
// long start = System.currentTimeMillis();
// sqliteBackupsService.buildBackups(uuid);
// long end = System.currentTimeMillis();
// log.info("[uuid:{}]定时备份sqlite任务执行完成耗时{}毫秒", uuid, end - start);
//}
/**
*
*
@ -166,10 +165,14 @@ public class TimeTaskInfo {
@Scheduled(cron = "${time.corn3}")
public void timeTask3() {
String uuid = UuidUtil.getUuid();
log.info("[uuid:{}]定时删除超期文件任务,当前时间={}", uuid, LocalDateTime.now());
//log.info("[uuid:{}]定时删除超期文件任务,当前时间={}", uuid, LocalDateTime.now());
LogUtil.WriteLog_DeleteOverFile("[uuid:" + uuid + "]-定时删除超期文件任务,当前时间=" + LocalDateTime.now(), "TimeTaskInfo-timeTask3");
long start = System.currentTimeMillis();
delOverdueService.deleteOverFile(uuid);
long end = System.currentTimeMillis();
log.info("[uuid:{}]定时删除超期文件任务执行完成,耗时:{}毫秒", uuid, end - start);
//log.info("[uuid:{}]定时删除超期文件任务执行完成,耗时:{}毫秒", uuid, end - start);
LogUtil.WriteLog_DeleteOverFile("[uuid:" + uuid + "]-定时删除超期文件任务执行完成,耗时毫秒=" + (end - start), "TimeTaskInfo-timeTask3");
}
}

@ -1,7 +1,6 @@
package com.nm.gsgl.common.utils;
import com.alibaba.fastjson.JSONObject;
import lombok.extern.slf4j.Slf4j;
import org.apache.http.HttpEntity;
import org.apache.http.HttpStatus;
import org.apache.http.client.config.RequestConfig;
@ -28,7 +27,6 @@ import java.util.Set;
* @date: 20230718 9:25
* @description: CloseableHttpClient
*/
@Slf4j
public class CloseableHttpClientUtils {
/**
* getRestfulURL
@ -79,7 +77,8 @@ public class CloseableHttpClientUtils {
//执行请求
response = client.execute(httpGet);
} catch (IOException e) {
log.error("发送get请求错误请求URL{},错误信息:{}", httpUrl, e.getMessage());
//log.error("发送get请求错误请求URL{},错误信息:{}", httpUrl, e.getMessage());
LogUtil.WriteLog_Error("发送get请求错误请求URL:" + httpUrl + "-错误信息" + e.getMessage(), "CloseableHttpClientUtils");
} finally {
//释放链接
releaseConnection(client, response);
@ -133,7 +132,7 @@ public class CloseableHttpClientUtils {
//执行请求
response = client.execute(httpPost);
} catch (IOException e) {
log.error("发送post请求错误,请求URL{},请求参数:{},错误信息:{}", httpUrl, data, e.getMessage());
LogUtil.WriteLog_Error("发送post请求错误请求URL:" + httpUrl +"请求参数:"+data+ "-错误信息" + e.getMessage(), "CloseableHttpClientUtils");
} finally {
//释放链接
releaseConnection(client, response);
@ -210,7 +209,8 @@ public class CloseableHttpClientUtils {
} catch (IOException e) {
log.error("发送post请求错误,请求URL{},请求参数:{},错误信息:{}", httpUrl, data, e.getMessage());
LogUtil.WriteLog_Error("发送post请求错误请求URL:" + httpUrl +"请求参数:"+data+ "-错误信息" + e.getMessage(), "CloseableHttpClientUtils");
LogUtil.WriteLog_Error("发送post请求错误请求URL:" + httpUrl +"请求参数:"+data+ "-错误信息" + e.getMessage(), "CloseableHttpClientUtils");
} finally {
try {
if (outputFile != null) {
@ -247,7 +247,8 @@ public class CloseableHttpClientUtils {
try {
response.close();
} catch (IOException e) {
log.error("关闭CloseableHttpResponse异常");
//log.error("关闭CloseableHttpResponse异常");
LogUtil.WriteLog_Error("关闭CloseableHttpResponse异常", "CloseableHttpClientUtils");
}
}
@ -255,7 +256,8 @@ public class CloseableHttpClientUtils {
try {
client.close();
} catch (IOException e) {
log.error("关闭CloseableHttpClient异常");
//log.error("关闭CloseableHttpClient异常");
LogUtil.WriteLog_Error("关闭CloseableHttpResponse异常", "CloseableHttpClientUtils");
}
}
}
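In summary, every request method in this class follows the same execute/release shape; a condensed sketch of that shape is below. HttpClients.createDefault() and the literal URL are placeholders, and releaseConnection is the helper shown above.
// Condensed request/release pattern used throughout CloseableHttpClientUtils (org.apache.http imports as at the top of the class).
CloseableHttpClient client = HttpClients.createDefault();
CloseableHttpResponse response = null;
String httpUrl = "http://example.com/api";   // placeholder
try {
    response = client.execute(new HttpGet(httpUrl));
} catch (IOException e) {
    LogUtil.WriteLog_Error("发送get请求错误请求URL:" + httpUrl + "-错误信息" + e.getMessage(), "CloseableHttpClientUtils");
} finally {
    releaseConnection(client, response);  // closes the response, then the client
}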

@ -38,7 +38,7 @@ import java.util.Map;
* @date: 20230109 12:56
* @description: DBF
*/
@Slf4j
//@Slf4j
public class DBFUtil {
@ -136,7 +136,7 @@ public class DBFUtil {
inputStream.close();
System.out.println("读取数据完成...");
} catch (IOException e) {
log.error(e.getMessage(), e);
LogUtil.WriteLog_Error("报错:"+e.getMessage(), "FileUtil");
throw new PPException(MessageEnum.DBF.getCode(), MessageEnum.DBF.getMessage());
}
return data;
@ -278,7 +278,7 @@ public class DBFUtil {
writer.write(fos);
} catch (Exception e) {
log.info( "writeToDBF({}写入dbf失败{})", keyValue , e.getMessage());
//log.info( "writeToDBF({}写入dbf失败{})", keyValue , e.getMessage());
e.printStackTrace();
}
@ -359,4 +359,4 @@ public class DBFUtil {
}
}
}

@ -7,7 +7,6 @@ import com.nm.gsgl.entity.intermediary.DownloadNotify;
import com.nm.gsgl.entity.intermediary.ParamNotify;
import com.nm.gsgl.entity.intodb.mysql.CardBlackStatus;
import com.nm.gsgl.entity.sqlite.BusinessTasks;
import lombok.extern.slf4j.Slf4j;
import java.sql.Connection;
import java.sql.DriverManager;
@ -24,7 +23,6 @@ import java.util.Locale;
* @date: 20221014 8:20
* @description:
*/
@Slf4j
public class DatabaseUtil {
/**
@ -49,10 +47,10 @@ public class DatabaseUtil {
//数据库连接(连接、账号、密码)
connection = DriverManager.getConnection(url);
if (!connection.isClosed()) {
log.info("{}数据库连接成功", url);
//log.info("{}数据库连接成功", url);
//2.创建statement类对象用来执行SQL语句
statement = connection.createStatement();
log.info("开始执行sql语句{}", sqlParam);
//log.info("开始执行sql语句{}", sqlParam);
//3.ResultSet类用来存放获取的结果集
//要执行的SQL语句
rs = statement.executeQuery(sqlParam);
@ -79,10 +77,11 @@ public class DatabaseUtil {
}
return list;
} catch (ClassNotFoundException e) {
log.error("数据库连接异常{}", e.getMessage(), e);
LogUtil.WriteLog_Error("数据库连接异常" + e.getMessage()+e, "DatabaseUtil");
throw new PPException(MessageEnum..getCode(), MessageEnum..getMessage());
} catch (SQLException e) {
log.error("sql语句执行失败{}", e.getMessage(), e);
LogUtil.WriteLog_Error("sql语句执行失败" + e.getMessage()+e, "DatabaseUtil");
LogUtil.WriteLog_Error("sql语句执行失败" + e.getMessage()+e, "DatabaseUtil");
throw new PPException(MessageEnum.sql.getCode(), MessageEnum.sql.getMessage());
} finally {
try {
@ -124,20 +123,20 @@ public class DatabaseUtil {
//数据库连接(连接、账号、密码)
connection = DriverManager.getConnection(url);
if (!connection.isClosed()) {
log.info("{}数据库连接成功", url);
//log.info("{}数据库连接成功", url);
//2.创建statement类对象用来执行SQL语句
statement = connection.createStatement();
log.info("开始执行sql语句{}", sqlParam);
//log.info("开始执行sql语句{}", sqlParam);
//3.ResultSet类用来存放获取的结果集
//要执行的SQL语句
count = statement.executeUpdate(sqlParam);
}
return count;
} catch (ClassNotFoundException e) {
log.error("数据库连接异常{}", e.getMessage(), e);
LogUtil.WriteLog_Error("数据库连接异常" + e.getMessage()+e, "DatabaseUtil");
throw new PPException(MessageEnum..getCode(), MessageEnum..getMessage());
} catch (SQLException e) {
log.error("sql语句执行失败{}", e.getMessage(), e);
LogUtil.WriteLog_Error("sql语句执行失败" + e.getMessage()+e, "DatabaseUtil");
throw new PPException(MessageEnum.sql.getCode(), MessageEnum.sql.getMessage());
} finally {
@ -178,10 +177,10 @@ public class DatabaseUtil {
//数据库连接(连接、账号、密码)
connection = DriverManager.getConnection(url);
if (!connection.isClosed()) {
log.info("{}数据库连接成功", url);
//log.info("{}数据库连接成功", url);
//2.创建statement类对象用来执行SQL语句
statement = connection.createStatement();
log.info("开始执行sql语句{}", sqlParam);
//log.info("开始执行sql语句{}", sqlParam);
//3.ResultSet类用来存放获取的结果集
//要执行的SQL语句
rs = statement.executeQuery(sqlParam);
@ -192,10 +191,10 @@ public class DatabaseUtil {
//log.info("执行sql语句完成");
return count;
} catch (ClassNotFoundException e) {
log.error("数据库连接异常{}", e.getMessage(), e);
LogUtil.WriteLog_Error("数据库连接异常" + e.getMessage()+e, "DatabaseUtil");
throw new PPException(MessageEnum..getCode(), MessageEnum..getMessage());
} catch (SQLException e) {
log.error("sql语句执行失败{}", e.getMessage(), e);
LogUtil.WriteLog_Error("sql语句执行失败" + e.getMessage()+e, "DatabaseUtil");
throw new PPException(MessageEnum.sql.getCode(), MessageEnum.sql.getMessage());
} finally {
@ -241,10 +240,10 @@ public class DatabaseUtil {
//数据库连接(连接、账号、密码)
connection = DriverManager.getConnection(url, username, password);
if (!connection.isClosed()) {
log.info("{}数据库连接成功", driverName);
//log.info("{}数据库连接成功", driverName);
//2.创建statement类对象用来执行SQL语句
statement = connection.createStatement();
log.info("开始执行sql语句{}", sqlParam);
//log.info("开始执行sql语句{}", sqlParam);
//3.ResultSet类用来存放获取的结果集
//要执行的SQL语句
rs = statement.executeQuery(sqlParam);
@ -261,10 +260,10 @@ public class DatabaseUtil {
}
return appAliveStatus;
} catch (ClassNotFoundException e) {
log.error("数据库连接异常{}", e.getMessage(), e);
LogUtil.WriteLog_Error("数据库连接异常" + e.getMessage()+e, "DatabaseUtil");
throw new PPException(MessageEnum..getCode(), MessageEnum..getMessage());
} catch (SQLException e) {
log.error("sql语句执行失败{}", e.getMessage(), e);
LogUtil.WriteLog_Error("sql语句执行失败" + e.getMessage()+e, "DatabaseUtil");
throw new PPException(MessageEnum.sql.getCode(), MessageEnum.sql.getMessage());
} finally {
try {
@ -306,21 +305,21 @@ public class DatabaseUtil {
//数据库连接(连接、账号、密码)
connection = DriverManager.getConnection(url, username, password);
if (!connection.isClosed()) {
log.info("[uuid:{}]{}数据库连接成功", uuid, url);
//log.info("[uuid:{}]{}数据库连接成功", uuid, url);
//2.创建statement类对象用来执行SQL语句
statement = connection.createStatement();
log.info("[uuid:{}]开始执行sql语句{}", uuid, sqlParam);
//log.info("[uuid:{}]开始执行sql语句{}", uuid, sqlParam);
//3.ResultSet类用来存放获取的结果集
//要执行的SQL语句
count = statement.executeUpdate(sqlParam);
log.info("[uuid:{}]执行sql语句完成", uuid);
//log.info("[uuid:{}]执行sql语句完成", uuid);
}
return count;
} catch (ClassNotFoundException e) {
log.error("数据库连接异常{}", e.getMessage(), e);
LogUtil.WriteLog_Error("数据库连接异常" + e.getMessage()+e, "DatabaseUtil");
throw new PPException(MessageEnum..getCode(), MessageEnum..getMessage());
} catch (SQLException e) {
log.error("sql语句执行失败{}", e.getMessage(), e);
LogUtil.WriteLog_Error("sql语句执行失败" + e.getMessage()+e, "DatabaseUtil");
throw new PPException(MessageEnum.sql.getCode(), MessageEnum.sql.getMessage());
} finally {
try {
@ -358,20 +357,20 @@ public class DatabaseUtil {
//数据库连接(连接、账号、密码)
connection = DriverManager.getConnection(url, username, password);
if (!connection.isClosed()) {
log.info("{}数据库连接成功", url);
//log.info("{}数据库连接成功", url);
//2.创建statement类对象用来执行SQL语句
statement = connection.createStatement();
log.info("开始执行sql语句{}", sqlParam);
//log.info("开始执行sql语句{}", sqlParam);
//3.ResultSet类用来存放获取的结果集
//要执行的SQL语句
statement.executeUpdate(sqlParam);
}
//log.info("执行sql语句完成");
} catch (ClassNotFoundException e) {
log.error("数据库连接异常{}", e.getMessage(), e);
LogUtil.WriteLog_Error("数据库连接异常" + e.getMessage()+e, "DatabaseUtil");
throw new PPException(MessageEnum..getCode(), MessageEnum..getMessage());
} catch (SQLException e) {
log.error("sql语句执行失败{}", e.getMessage(), e);
LogUtil.WriteLog_Error("sql语句执行失败" + e.getMessage()+e, "DatabaseUtil");
throw new PPException(MessageEnum.sql.getCode(), MessageEnum.sql.getMessage());
} finally {
@ -414,10 +413,10 @@ public class DatabaseUtil {
//数据库连接(连接、账号、密码)
connection = DriverManager.getConnection(url, username, password);
if (!connection.isClosed()) {
log.info("{}数据库连接成功", url);
//log.info("{}数据库连接成功", url);
//2.创建statement类对象用来执行SQL语句
statement = connection.createStatement();
log.info("开始执行sql语句{}", sqlParam);
//log.info("开始执行sql语句{}", sqlParam);
//3.ResultSet类用来存放获取的结果集
//要执行的SQL语句
rs = statement.executeQuery(sqlParam);
@ -427,10 +426,10 @@ public class DatabaseUtil {
}
return count;
} catch (ClassNotFoundException e) {
log.error("数据库连接异常{}", e.getMessage(), e);
LogUtil.WriteLog_Error("数据库连接异常" + e.getMessage()+e, "DatabaseUtil");
throw new PPException(MessageEnum..getCode(), MessageEnum..getMessage());
} catch (SQLException e) {
log.error("sql语句执行失败{}", e.getMessage(), e);
LogUtil.WriteLog_Error("sql语句执行失败" + e.getMessage()+e, "DatabaseUtil");
throw new PPException(MessageEnum.sql.getCode(), MessageEnum.sql.getMessage());
} finally {
try {
@ -478,10 +477,10 @@ public class DatabaseUtil {
connection = DriverManager.getConnection(url, username, password);
//log.info("connection:{}", connection);
if (!connection.isClosed()) {
log.info("{}数据库连接成功", driverName);
//log.info("{}数据库连接成功", driverName);
//2.创建statement类对象用来执行SQL语句
statement = connection.createStatement();
log.info("开始执行sql语句{}", sqlParam);
//log.info("开始执行sql语句{}", sqlParam);
//3.ResultSet类用来存放获取的结果集
//要执行的SQL语句
rs = statement.executeQuery(sqlParam);
@ -506,10 +505,10 @@ public class DatabaseUtil {
}
return list;
} catch (ClassNotFoundException e) {
log.error("数据库连接异常{}", e.getMessage(), e);
LogUtil.WriteLog_Error("数据库连接异常" + e.getMessage()+e, "DatabaseUtil");
throw new PPException(MessageEnum..getCode(), MessageEnum..getMessage());
} catch (SQLException e) {
log.error("sql语句执行失败{}", e.getMessage(), e);
LogUtil.WriteLog_Error("sql语句执行失败" + e.getMessage()+e, "DatabaseUtil");
throw new PPException(MessageEnum.sql.getCode(), MessageEnum.sql.getMessage());
} finally {
try {
@ -553,10 +552,10 @@ public class DatabaseUtil {
//数据库连接(连接、账号、密码)
connection = DriverManager.getConnection(url, username, password);
if (!connection.isClosed()) {
log.info("{}数据库连接成功", driverName);
//log.info("{}数据库连接成功", driverName);
//2.创建statement类对象用来执行SQL语句
statement = connection.createStatement();
log.info("开始执行sql语句{}", sqlParam);
//log.info("开始执行sql语句{}", sqlParam);
//3.ResultSet类用来存放获取的结果集
//要执行的SQL语句
rs = statement.executeQuery(sqlParam);
@ -576,10 +575,10 @@ public class DatabaseUtil {
}
return list;
} catch (ClassNotFoundException e) {
log.error("数据库连接异常{}", e.getMessage(), e);
LogUtil.WriteLog_Error("数据库连接异常" + e.getMessage()+e, "DatabaseUtil");
throw new PPException(MessageEnum..getCode(), MessageEnum..getMessage());
} catch (SQLException e) {
log.error("sql语句执行失败{}", e.getMessage(), e);
LogUtil.WriteLog_Error("sql语句执行失败" + e.getMessage()+e, "DatabaseUtil");
throw new PPException(MessageEnum.sql.getCode(), MessageEnum.sql.getMessage());
} finally {
try {
@ -649,10 +648,10 @@ public class DatabaseUtil {
}
} catch (ClassNotFoundException e) {
log.error("数据库连接异常{}", e.getMessage(), e);
LogUtil.WriteLog_Error("数据库连接异常" + e.getMessage()+e, "DatabaseUtil");
throw new PPException(MessageEnum..getCode(), MessageEnum..getMessage());
} catch (SQLException e) {
log.error("sql语句执行失败{}", e.getMessage(), e);
LogUtil.WriteLog_Error("sql语句执行失败" + e.getMessage()+e, "DatabaseUtil");
throw new PPException(MessageEnum.sql.getCode(), MessageEnum.sql.getMessage());
} finally {
try {

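All DatabaseUtil methods repeat one JDBC shape; a condensed sketch of that shape with the logging style introduced by this commit (driverName, url and sqlParam are placeholders):
// Condensed form of the query pattern repeated above; values are placeholders and java.sql.* imports are assumed.
String driverName = "org.sqlite.JDBC", url = "jdbc:sqlite:data.db", sqlParam = "SELECT 1";  // placeholders
Connection connection = null;
Statement statement = null;
ResultSet rs = null;
try {
    Class.forName(driverName);                      // load the JDBC driver
    connection = DriverManager.getConnection(url);  // open the connection
    statement = connection.createStatement();
    rs = statement.executeQuery(sqlParam);          // run the query and read the ResultSet
} catch (ClassNotFoundException e) {
    LogUtil.WriteLog_Error("数据库连接异常" + e.getMessage() + e, "DatabaseUtil");
} catch (SQLException e) {
    LogUtil.WriteLog_Error("sql语句执行失败" + e.getMessage() + e, "DatabaseUtil");
} finally {
    // rs, statement and connection are each closed in their own try/catch, as in the class above
}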
@ -3,29 +3,23 @@ package com.nm.gsgl.common.utils;
import com.nm.gsgl.common.enumeration.ErrorCode;
import com.nm.gsgl.common.enumeration.MessageEnum;
import com.nm.gsgl.common.exception.PPException;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.net.ftp.FTP;
import org.apache.commons.net.ftp.FTPClient;
import org.apache.commons.net.ftp.FTPClientConfig;
import org.apache.commons.net.ftp.FTPFile;
import org.apache.commons.net.ftp.FTPReply;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.SocketException;
import java.nio.charset.StandardCharsets;
/**
* @author: shuguang
* @date: 20230110 13:38
* @description:
*/
@Slf4j
public class FTPUtil {
/**
@ -58,7 +52,8 @@ public class FTPUtil {
boolean b = ftp.changeWorkingDirectory(ftpPath);
//校验文件是否已存在
if (!checkFileExists(ftp, fileName)) {
log.error(ErrorCode.FtpCheckErrorCode.FILE_NO_EXISTS);
//log.error(ErrorCode.FtpCheckErrorCode.FILE_NO_EXISTS);
LogUtil.WriteLog_Error(ErrorCode.FtpCheckErrorCode.FILE_NO_EXISTS, "FTPUtil");
return false;
}
FTPFile[] fs = ftp.listFiles();
@ -74,7 +69,8 @@ public class FTPUtil {
}
ftp.logout();
} catch (IOException e) {
log.error(ErrorCode.FtpDownloadErrorCode.FILE_DOWNLOAD_ERROR_OCCURRED, e);
//log.error(ErrorCode.FtpDownloadErrorCode.FILE_DOWNLOAD_ERROR_OCCURRED, e);
LogUtil.WriteLog_Error(ErrorCode.FtpDownloadErrorCode.FILE_DOWNLOAD_ERROR_OCCURRED+e, "FTPUtil");
throw new PPException(MessageEnum.FTP.getCode(), MessageEnum.FTP.getMessage());
} finally {
//关闭ftpClint链接
@ -82,7 +78,8 @@ public class FTPUtil {
try {
ftp.disconnect();
} catch (IOException ioe) {
log.error(ErrorCode.FtpConnectCode.FTP_DISCONNECT_ERROR_OCCURRED, ioe);
//log.error(ErrorCode.FtpConnectCode.FTP_DISCONNECT_ERROR_OCCURRED, ioe);
LogUtil.WriteLog_Error(ErrorCode.FtpConnectCode.FTP_DISCONNECT_ERROR_OCCURRED+ioe, "FTPUtil");
}
}
//一定要判断fos是否为Null,只有不为null时才可以关闭资源
@ -110,131 +107,30 @@ public class FTPUtil {
return listNames.length > 0;
}
/**
* @param ftpIp FTP IP
* @param ftpUserName FTP
* @param ftpPassword FTP
* @param ftpPort FTP
* @param ftpPath FTP
* @param localPath
* @param fileName
*/
public static boolean downloadFtpFile(String ftpIp, String ftpUserName, String ftpPassword, int ftpPort, String ftpPath, String localPath, String fileName) {
FTPClient ftpClient;
try {
ftpClient = getFTPClient(ftpIp, ftpUserName, ftpPassword, ftpPort);
ftpClient.setControlEncoding("UTF-8");
//ftpClient.setControlEncoding("GBK");
ftpClient.setFileType(FTPClient.BINARY_FILE_TYPE);
//ftpClient.enterLocalActiveMode();
//ftpClient.changeWorkingDirectory(ftpPath);
//判断本地路径是否存在,不存在进行创建
File file1 = new File(localPath);
if (!file1.exists()) {
file1.mkdirs();
}
//通知服务器开通给一个端口,防止挂死
ftpClient.enterLocalPassiveMode();
//把文件写入到本地路径
File localFile = new File(localPath + File.separatorChar + fileName);
OutputStream os = new FileOutputStream(localFile);
// 检验文件是否存在
boolean ftpFileExist = isFTPFileExist(ftpClient, ftpPath, fileName);
if (!ftpFileExist) {
log.info("ftp不存在{}该文件", fileName);
os.close();
localFile.delete();
return false;
} else {
log.info("ftp存在{}该文件,开始下载", fileName);
//切换路径
ftpClient.changeWorkingDirectory(ftpPath);
// 通知服务器开通给一个端口,防止挂死
ftpClient.enterLocalPassiveMode();
//存在文件时下载文件
//ftpClient.retrieveFile(fileName, os);
ftpClient.retrieveFile(new String(fileName.getBytes(StandardCharsets.UTF_8), StandardCharsets.ISO_8859_1), os);
}
os.flush();
os.close();
ftpClient.logout();
log.info("{}下载完成", localPath + fileName);
return true;
} catch (IOException e) {
log.info("ftp读写文件异常");
}
return false;
}
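A hedged usage sketch of downloadFtpFile above; host, credentials and paths are placeholders, and the file name reuses an example from the FTPUtils javadoc later in this commit.
// Illustrative call; every argument value is a placeholder.
boolean ok = FTPUtil.downloadFtpFile("10.15.110.19", "ftpUser", "ftpPass", 21,
        "/comm_data/btvdex/black/source/download/2022/03/01/",
        "D:/downFile", "FP_MINFD_RES_15_20220301161508991.zip");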
public static boolean ftpDownloadFile(String ftpIp, String ftpUserName, String ftpPassword, int ftpPort, String ftpPath, String localPath, String fileName) {
FTPClient ftpClient = new FTPClient();
try {
ftpClient.connect(ftpIp, ftpPort);//服务器ip 端口
ftpClient.login(ftpUserName, ftpPassword);//用户名密码
ftpClient.setFileType(FTPClient.BINARY_FILE_TYPE);
//ftpClient.setFileType(FTPClient.ASCII_FILE_TYPE);
// 限制缓冲区大小
ftpClient.setBufferSize(100000);
ftpClient.enterLocalPassiveMode();
ftpClient.setControlEncoding("GBK");//编码格式
} catch (IOException e) {
log.info("连接FTP失败{}", e.getMessage(), e);
}
log.info("{}连接成功", ftpIp);
File file = new File(localPath, fileName);
try {
OutputStream outputStream = new FileOutputStream(file);
ftpClient.changeWorkingDirectory(ftpPath);
//FTPFile[] allFile = ftpClient.listFiles();
//for (FTPFile a : allFile) {
// if (fileName.equals(a.getName())) {
//下载文件有时候要指定编码格式要不会下载为0kb。
ftpClient.retrieveFile(new String(fileName.getBytes("GBK"), StandardCharsets.ISO_8859_1), outputStream);
outputStream.flush();
//}
//}
outputStream.close();
// 退出FTP
ftpClient.logout();
ftpClient.disconnect();
log.info("{}下载完成", localPath + fileName);
return true;
} catch (IOException e) {
log.info("ftp读写文件异常{}", e.getMessage(), e);
}
return false;
}
/**
* @param ftpIp FTPIP
* @param ftpUserName ftp
* @param ftpPassword ftp
* @param ftpPort ftp
* @return
*/
public static FTPClient getFTPClient(String ftpIp, String ftpUserName, String ftpPassword, int ftpPort) {
FTPClient ftpClient = new FTPClient();
try {
ftpClient = new FTPClient();
ftpClient.connect(ftpIp, ftpPort);
ftpClient.login(ftpUserName, ftpPassword);
if (!FTPReply.isPositiveCompletion(ftpClient.getReplyCode())) {
log.info("未连接到FTP");
} else {
log.info("{}连接成功", ftpIp);
}
} catch (SocketException e) {
log.error("连接FTP失败{}", e.getMessage(), e);
} catch (IOException e) {
e.printStackTrace();
}
return ftpClient;
}
//public static FTPClient getFTPClient(String ftpIp, String ftpUserName, String ftpPassword, int ftpPort) {
// FTPClient ftpClient = new FTPClient();
// try {
// ftpClient = new FTPClient();
// ftpClient.connect(ftpIp, ftpPort);
// ftpClient.login(ftpUserName, ftpPassword);
// if (!FTPReply.isPositiveCompletion(ftpClient.getReplyCode())) {
// log.info("未连接到FTP");
// LogUtil.WriteLog_Error(ErrorCode.FtpDownloadErrorCode.FILE_DOWNLOAD_ERROR_OCCURRED+ioe, "FTPUtil");
// } else {
// log.info("{}连接成功", ftpIp);
// }
// } catch (SocketException e) {
// //log.error("连接FTP失败{}", e.getMessage(), e);
// LogUtil.WriteLog_Error("连接FTP失败"+e.getMessage(), "FTPUtil");
// } catch (IOException e) {
// e.printStackTrace();
// }
// return ftpClient;
//}
public static boolean isFTPFileExist(FTPClient ftp, String filePath, String fileName) {
@ -264,36 +160,36 @@ public class FTPUtil {
* @param uploadFilePath ()
* @param destPath ftp
*/
public static void ftpUploadFile(String ftpIp, int ftpPort, String ftpUserName, String ftpPassword, String uploadFilePath, String destPath){
FTPClient ftp;
try {
ftp = getFTPClient(ftpIp, ftpUserName, ftpPassword, ftpPort);
ftp.setControlEncoding("UTF-8");
// 获取本地文件并上传
String file = uploadFilePath;
FileInputStream input = new FileInputStream(file);
//跳转目录
boolean b = ftp.changeWorkingDirectory(destPath);
if(!b){
//boolean b1 = ftp.makeDirectory(destPath);
boolean makeDictionnary=makeDir(ftp,destPath);
}
ftp.setFileType(FTPClient.BINARY_FILE_TYPE);//必须要设置以二进制的方式传输文件
ftp.enterLocalPassiveMode();//被动模式
file = new String(file.getBytes("GBK"), StandardCharsets.ISO_8859_1);
if (!ftp.storeFile(new File(file).getName(), input)) {
log.error("失败,服务器返回:" + ftp.getReplyString());//获取上传失败的原因
} else {
log.info("文件:{} 上传成功", new File(file).getName());
}
input.close();
ftp.logout();
} catch (IOException e) {
log.error("ftp连接失败{}",e.getMessage(),e);
}
}
//public static void ftpUploadFile(String ftpIp, int ftpPort, String ftpUserName, String ftpPassword, String uploadFilePath, String destPath){
// FTPClient ftp;
// try {
// ftp = getFTPClient(ftpIp, ftpUserName, ftpPassword, ftpPort);
// ftp.setControlEncoding("UTF-8");
//
// // 获取本地文件并上传
// String file = uploadFilePath;
// FileInputStream input = new FileInputStream(file);
// //跳转目录
// boolean b = ftp.changeWorkingDirectory(destPath);
// if(!b){
// //boolean b1 = ftp.makeDirectory(destPath);
// boolean makeDictionnary=makeDir(ftp,destPath);
// }
// ftp.setFileType(FTPClient.BINARY_FILE_TYPE);//必须要设置以二进制的方式传输文件
// ftp.enterLocalPassiveMode();//被动模式
// file = new String(file.getBytes("GBK"), StandardCharsets.ISO_8859_1);
//
// if (!ftp.storeFile(new File(file).getName(), input)) {
// log.error("失败,服务器返回:" + ftp.getReplyString());//获取上传失败的原因
// } else {
// log.info("文件:{} 上传成功", new File(file).getName());
// }
// input.close();
// ftp.logout();
// } catch (IOException e) {
// log.error("ftp连接失败{}",e.getMessage(),e);
// }
//}
/**
* ftpftpClient
* @param ftp
@ -323,8 +219,8 @@ public class FTPUtil {
}
public static void main(String[] args) {
ftpUploadFile("172.20.225.132",9021,"ftpusername","shuguang123","D:\\桌面\\参数下载前端页面.txt","all/test001/002");
}
//public static void main(String[] args) {
// ftpUploadFile("172.20.225.132",9021,"ftpusername","shuguang123","D:\\桌面\\参数下载前端页面.txt","all/test001/002");
//}
}

@ -1,220 +1,220 @@
package com.nm.gsgl.common.utils;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.io.FileUtils;
import org.apache.commons.net.ftp.FTP;
import org.apache.commons.net.ftp.FTPClient;
import org.apache.commons.net.ftp.FTPFile;
import org.apache.commons.net.ftp.FTPReply;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.net.SocketException;
import java.nio.charset.StandardCharsets;
/**
* @author: shuguang
* @date: 20230210 8:24
* @description:
*/
@Slf4j
public class FTPUtils {
/**
* ftp
*/
FTPClient ftp;
/**
* ftp IP
*/
public String ftpServerIP;
/**
* ftp
*/
public int ftpPort;
/**
* ftp
*/
public String ftpUserID;
/**
* ftp
*/
public String ftpPassword;
/**
* ftp
*/
public String mdType;
/**
* create by: wjt
* description: ftp
* create time: 2022/3/1 11:10
*
* @return a
* @Param: nul
*/
public FTPUtils(String _ftpIp, String _ftpport, String _ftpuid, String _ftppwd, String _mdType){
this.ftpServerIP = _ftpIp;
this.ftpPort =Integer.parseInt(_ftpport);
this.ftpUserID = _ftpuid;
this.ftpPassword = _ftppwd;
mdType = _mdType;
}
/**
* create by: wjt
* description: ftp
* create time: 2022/3/1 11:24
*
* return a
* @Param: null
*/
public FTPClient getFTPClient() throws Exception {
try {
ftp = new FTPClient();
// 连接FPT服务器,设置IP及端口
ftp.connect(ftpServerIP, ftpPort);
// 设置用户名和密码
ftp.login(ftpUserID, ftpPassword);
// 设置连接超时时间,5000毫秒
ftp.setConnectTimeout(50000);
// 设置中文编码集,防止中文乱码
ftp.setControlEncoding("UTF-8");
if (!FTPReply.isPositiveCompletion(ftp.getReplyCode())) {
log.info("未连接到FTP用户名或密码错误");
ftp.disconnect();
} else {
log.info("FTP连接成功");
}
} catch (SocketException e) {
log.error("FTP的IP地址可能错误请正确配置");
} catch (IOException e) {
log.error("FTP的端口错误,请正确配置");
}
return ftp;
}
/**
* create by: wjt
* description: ftp
* create time: 2022/3/1 11:11
*
* @return a
* @Param: nul
*/
public boolean CheckFtpFileIsExist(String folderPath,String fileName, int u) throws Exception {
boolean istrue = false;
try {
ftp = getFTPClient();
ftp.changeWorkingDirectory(new String(folderPath.getBytes("UTF-8"),"ISO-8859-1"));
//设置FTP连接模式
ftp.enterLocalPassiveMode();
FTPFile files[] = ftp.listFiles();
for (FTPFile file : files) {
if (file.isFile() && file.getName().equals(fileName)) {
istrue=true;
break;
}
}
} catch (Exception ex) {
if (mdType == "1") {
log.info("FTP(" + fileName + ")不存在");
} else {
log.info("FTP(" + fileName + ")不存在");
}
istrue = false;
} finally {
ftp.logout();
ftp.disconnect();
}
return istrue;
}
/**
* create by: wjt
* description:
* create time: 2022/3/1 11:28
*
* return a
* @Param: null
* filePath:ftp
* fileName:
* downPath
* eghr
* String UrlStr = "ftp://10.15.110.19:21//comm_data/btvdex/black/source/download/2022/03/01/FP_MINFD_RES_15_20220301161508991.zip";
* String filePath= UrlStr.substring(UrlStr.indexOf("comm_data")-1,UrlStr.lastIndexOf('/')+1);
* String fileName= UrlStr.substring(UrlStr.lastIndexOf('/')+1);
* egmd
* String UrlStr = "/logs/audit-jihe/responseVehicleBlackListDown/20200723/AUDIT_VEHICLEBLACKLISTDOWN_RES_150201_20200723141621.zip";
* String filePath= UrlStr.substring(0,UrlStr.lastIndexOf('/')+1);
* String fileName= UrlStr.substring(UrlStr.lastIndexOf('/')+1);
*/
public boolean downLoadFTP(String filePath, String fileName, String downPath)
throws FileNotFoundException,SocketException,IOException,Exception {
// 默认失败
boolean flag = false;
FileOutputStream out = null;
try {
ftp = getFTPClient();
//以二进制流输出
ftp.setFileType(FTP.BINARY_FILE_TYPE);
//UTF-8 GBK
ftp.setControlEncoding("UTF-8");
// 跳转到ftp文件目录
ftp.changeWorkingDirectory(filePath);
// 连接模式
ftp.enterLocalPassiveMode();
File downFile = new File(downPath + File.separator + fileName);
boolean b = FileUtil.delFile(downFile);
boolean b1 = FileUtils.deleteQuietly(downFile);
out = new FileOutputStream(downFile);
// 绑定输出流下载文件,需要设置编码集,不然可能出现文件为空的情况
flag = ftp.retrieveFile(new String(fileName.getBytes(StandardCharsets.UTF_8), StandardCharsets.ISO_8859_1), out);
int replyCode = ftp.getReplyCode();
System.out.println(replyCode);
// 下载成功删除文件,看项目需求
// ftp.deleteFile(new String(fileName.getBytes("UTF-8"),"ISO-8859-1"));
out.flush();
out.close();
ftp.logout();
if (flag) {
log.info("下载成功");
} else {
log.info("下载失败");
}
} catch (FileNotFoundException e) {
e.printStackTrace();
log.error("没有找到(" + filePath + ")(" + fileName + ")文件");
} catch (SocketException e) {
e.printStackTrace();
log.error("连接FTP(" + ftpServerIP + ":" + ftpPort + ")失败");
} catch (IOException e) {
e.printStackTrace();
log.error("文件读取错误:" + e.getMessage());
}
finally {
//关闭ftpClint链接
if (ftp.isConnected()) {
try {
ftp.disconnect();
} catch (IOException e) {
e.printStackTrace();
}
}
if (out!=null){
//关闭io
out.close();
}
//System.gc();
//try {
// Thread.sleep(100);
//}catch (Exception e){
// e.printStackTrace();
//}
}
return flag;
}
}
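Before this commit commented the class out (see the block that follows), FTPUtils was presumably driven roughly as sketched below; the URL split follows the javadoc example above, while the credentials and local path are placeholders.
// Illustrative only; both methods declare throws Exception in the class above.
String urlStr = "ftp://10.15.110.19:21//comm_data/btvdex/black/source/download/2022/03/01/FP_MINFD_RES_15_20220301161508991.zip";
String filePath = urlStr.substring(urlStr.indexOf("comm_data") - 1, urlStr.lastIndexOf('/') + 1);
String fileName = urlStr.substring(urlStr.lastIndexOf('/') + 1);
FTPUtils ftpUtils = new FTPUtils("10.15.110.19", "21", "ftpUser", "ftpPass", "1");  // credentials are placeholders
if (ftpUtils.CheckFtpFileIsExist(filePath, fileName, 0)) {
    ftpUtils.downLoadFTP(filePath, fileName, "D:/downFile");
}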
//package com.nm.gsgl.common.utils;
//
//import lombok.extern.slf4j.Slf4j;
//import org.apache.commons.io.FileUtils;
//import org.apache.commons.net.ftp.FTP;
//import org.apache.commons.net.ftp.FTPClient;
//import org.apache.commons.net.ftp.FTPFile;
//import org.apache.commons.net.ftp.FTPReply;
//
//import java.io.File;
//import java.io.FileNotFoundException;
//import java.io.FileOutputStream;
//import java.io.IOException;
//import java.io.OutputStream;
//import java.net.SocketException;
//import java.nio.charset.StandardCharsets;
//
///**
// * @author: shuguang
// * @date: 2023年02月10日 8:24
// * @description:
// */
//@Slf4j
//public class FTPUtils {
// /**
// * ftp实例
// */
// FTPClient ftp;
// /**
// * ftp IP地址
// */
// public String ftpServerIP;
// /**
// * ftp端口
// */
// public int ftpPort;
// /**
// * ftp用户名
// */
// public String ftpUserID;
// /**
// * ftp密码
// */
// public String ftpPassword;
// /**
// * ftp中介库类型
// */
// public String mdType;
//
// /**
// * create by: wjt
// * description: ftp帮助类
// * create time: 2022/3/1 11:10
// *
// * @return a
// * @Param: nul
// */
// public FTPUtils(String _ftpIp, String _ftpport, String _ftpuid, String _ftppwd, String _mdType){
// this.ftpServerIP = _ftpIp;
// this.ftpPort =Integer.parseInt(_ftpport);
// this.ftpUserID = _ftpuid;
// this.ftpPassword = _ftppwd;
// mdType = _mdType;
// }
//
// /**
// * create by: wjt
// * description: 实例化ftp
// * create time: 2022/3/1 11:24
// *
// * return a
// * @Param: null
// */
// public FTPClient getFTPClient() throws Exception {
// try {
// ftp = new FTPClient();
// // 连接FPT服务器,设置IP及端口
// ftp.connect(ftpServerIP, ftpPort);
// // 设置用户名和密码
// ftp.login(ftpUserID, ftpPassword);
// // 设置连接超时时间,5000毫秒
// ftp.setConnectTimeout(50000);
// // 设置中文编码集,防止中文乱码
// ftp.setControlEncoding("UTF-8");
// if (!FTPReply.isPositiveCompletion(ftp.getReplyCode())) {
// log.info("未连接到FTP用户名或密码错误");
// ftp.disconnect();
// } else {
// log.info("FTP连接成功");
// }
// } catch (SocketException e) {
// log.error("FTP的IP地址可能错误请正确配置");
// } catch (IOException e) {
// log.error("FTP的端口错误,请正确配置");
// }
// return ftp;
// }
//
// /**
// * create by: wjt
// * description: 查验ftp上文件是否存在
// * create time: 2022/3/1 11:11
// *
// * @return a
// * @Param: nul
// */
// public boolean CheckFtpFileIsExist(String folderPath,String fileName, int u) throws Exception {
// boolean istrue = false;
// try {
// ftp = getFTPClient();
// ftp.changeWorkingDirectory(new String(folderPath.getBytes("UTF-8"),"ISO-8859-1"));
// //设置FTP连接模式
// ftp.enterLocalPassiveMode();
// FTPFile files[] = ftp.listFiles();
// for (FTPFile file : files) {
// if (file.isFile() && file.getName().equals(fileName)) {
// istrue=true;
// break;
// }
// }
// } catch (Exception ex) {
// if (mdType == "1") {
// log.info("FTP(" + fileName + ")不存在");
// } else {
// log.info("FTP(" + fileName + ")不存在");
// }
// istrue = false;
// } finally {
// ftp.logout();
// ftp.disconnect();
// }
// return istrue;
// }
//
// /**
// * create by: wjt
// * description: 下载文件
// * create time: 2022/3/1 11:28
// *
// * return a
// * @Param: null
// * filePath:ftp上文件地址
// * fileName:下载文件名称
// * downPath本地存储路径
// * eghr
// * String UrlStr = "ftp://10.15.110.19:21//comm_data/btvdex/black/source/download/2022/03/01/FP_MINFD_RES_15_20220301161508991.zip";
// * String filePath= UrlStr.substring(UrlStr.indexOf("comm_data")-1,UrlStr.lastIndexOf('/')+1);
// * String fileName= UrlStr.substring(UrlStr.lastIndexOf('/')+1);
// * egmd
// * String UrlStr = "/logs/audit-jihe/responseVehicleBlackListDown/20200723/AUDIT_VEHICLEBLACKLISTDOWN_RES_150201_20200723141621.zip";
// * String filePath= UrlStr.substring(0,UrlStr.lastIndexOf('/')+1);
// * String fileName= UrlStr.substring(UrlStr.lastIndexOf('/')+1);
// */
// public boolean downLoadFTP(String filePath, String fileName, String downPath)
// throws FileNotFoundException,SocketException,IOException,Exception {
// // 默认失败
// boolean flag = false;
// FileOutputStream out = null;
// try {
// ftp = getFTPClient();
// //以二进制流输出
// ftp.setFileType(FTP.BINARY_FILE_TYPE);
// //UTF-8 GBK
// ftp.setControlEncoding("UTF-8");
// // 跳转到ftp文件目录
// ftp.changeWorkingDirectory(filePath);
// // 连接模式
// ftp.enterLocalPassiveMode();
// File downFile = new File(downPath + File.separator + fileName);
// boolean b = FileUtil.delFile(downFile);
// boolean b1 = FileUtils.deleteQuietly(downFile);
//
// out = new FileOutputStream(downFile);
// // 绑定输出流下载文件,需要设置编码集,不然可能出现文件为空的情况
// flag = ftp.retrieveFile(new String(fileName.getBytes(StandardCharsets.UTF_8), StandardCharsets.ISO_8859_1), out);
// int replyCode = ftp.getReplyCode();
// System.out.println(replyCode);
// // 下载成功删除文件,看项目需求
// // ftp.deleteFile(new String(fileName.getBytes("UTF-8"),"ISO-8859-1"));
// out.flush();
// out.close();
// ftp.logout();
// if (flag) {
// log.info("下载成功");
// } else {
// log.info("下载失败");
// }
// } catch (FileNotFoundException e) {
// e.printStackTrace();
// log.error("没有找到(" + filePath + ")(" + fileName + ")文件");
// } catch (SocketException e) {
// e.printStackTrace();
// log.error("连接FTP(" + ftpServerIP + ":" + ftpPort + ")失败");
// } catch (IOException e) {
// e.printStackTrace();
// log.error("文件读取错误:" + e.getMessage());
// }
// finally {
// //关闭ftpClint链接
// if (ftp.isConnected()) {
// try {
// ftp.disconnect();
// } catch (IOException e) {
// e.printStackTrace();
// }
// }
// if (out!=null){
// //关闭io
// out.close();
// }
// //System.gc();
// //try {
// // Thread.sleep(100);
// //}catch (Exception e){
// // e.printStackTrace();
// //}
// }
// return flag;
// }
//}

@ -48,7 +48,7 @@ import java.util.zip.ZipOutputStream;
* @date: 20221013 10:52
* @description:
*/
@Slf4j
//@Slf4j
public class FileUtil {
/**
@ -214,11 +214,11 @@ public class FileUtil {
*/
public static void delete(File file) {
boolean result = file.delete();
if (result) {
log.info("删除: {} 成功", file.getAbsolutePath());
} else {
log.warn("删除: {} 失败", file.getAbsolutePath());
}
//if (result) {
// log.info("删除: {} 成功", file.getAbsolutePath());
//} else {
// log.warn("删除: {} 失败", file.getAbsolutePath());
//}
}
@ -360,7 +360,8 @@ public class FileUtil {
try {
FileUtils.copyFile(new File(oldFilePath), new File(newFilePath));
} catch (IOException e) {
log.error("拷贝备份文件{},到{}失败:{}", oldFilePath, newFilePath, e.getMessage(), e);
//log.error("拷贝备份文件{},到{}失败:{}", oldFilePath, newFilePath, e.getMessage(), e);
LogUtil.WriteLog_Error("拷贝备份文件" + oldFilePath+"到"+newFilePath+"失败,报错:"+e.getMessage(), "FileUtil");
throw new PPException(MessageEnum..getCode(), MessageEnum..getMessage());
}
}
@ -377,7 +378,7 @@ public class FileUtil {
public static List<String> upZipFile(String zipName, String targetDirName) {
//记录解压后的文件路径
List<String> srcList = new ArrayList<>();
log.info("开始解压缩文件{}", zipName);
//log.info("开始解压缩文件{}", zipName);
try {
byte[] buffer = new byte[1024];
ZipInputStream zis = new ZipInputStream(new FileInputStream(zipName));
@ -397,11 +398,12 @@ public class FileUtil {
//解压完成一个entry再解压下一个
zipEntry = zis.getNextEntry();
}
log.info("解压缩文件完成,文件数为{}", i);
//log.info("解压缩文件完成,文件数为{}", i);
zis.closeEntry();
zis.close();
} catch (IOException e) {
log.error("解压缩{}失败:{}", zipName, e.getMessage(), e);
//log.error("解压缩{}失败:{}", zipName, e.getMessage(), e);
LogUtil.WriteLog_Error("解压缩" + zipName+"失败,报错:"+e.getMessage(), "FileUtil");
throw new PPException(MessageEnum..getCode(), MessageEnum..getMessage());
}
return srcList;
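A hedged usage sketch of upZipFile above; both paths are placeholders.
// Illustrative call; paths are placeholders.
List<String> unzipped = FileUtil.upZipFile("D:/downFile/param.zip", "D:/unZip/");
for (String src : unzipped) {
    LogUtil.WriteLog_Info("解压得到文件:" + src, "FileUtil");
}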
@ -497,7 +499,7 @@ public class FileUtil {
try {
file.createNewFile();
} catch (IOException e) {
log.error(e.getMessage(), e);
//log.error(e.getMessage(), e);
}
}
@ -568,7 +570,7 @@ public class FileUtil {
fos.close();
//log.info("路径下:{},字符串:{}写入txt完成", filePath, str);
} catch (Exception e) {
log.error(e.getMessage(), e);
//log.error(e.getMessage(), e);
throw new PPException(MessageEnum..getCode(), MessageEnum..getMessage());
}
}
@ -625,7 +627,8 @@ public class FileUtil {
fileOutputStream.close();
} catch (IOException e) {
log.error(e.getMessage(), e);
//log.error(e.getMessage(), e);
LogUtil.WriteLog_Error("报错:"+e.getMessage(), "FileUtil");
throw new PPException(MessageEnum..getCode(), MessageEnum..getMessage());
} finally {
try {
@ -636,7 +639,7 @@ public class FileUtil {
inputStream.close();
}
} catch (IOException e) {
log.error(e.getMessage(), e);
//log.error(e.getMessage(), e);
}
}
}
@ -652,7 +655,8 @@ public class FileUtil {
try {
realPath = URLDecoder.decode(realPath, "utf-8");
} catch (Exception e) {
log.error(e.getMessage(), e);
//log.error(e.getMessage(), e);
LogUtil.WriteLog_Error("报错:"+e.getMessage(), "FileUtil");
throw new PPException(MessageEnum..getCode(), MessageEnum..getMessage());
}
return realPath;
@ -677,7 +681,8 @@ public class FileUtil {
i = bis.read(buffer);
}
} catch (Exception e) {
log.error(e.getMessage(), e);
//log.error(e.getMessage(), e);
LogUtil.WriteLog_Error("报错:"+e.getMessage(), "FileUtil");
throw new PPException(MessageEnum..getCode(), MessageEnum..getMessage());
} finally {
if (bis != null) {
@ -686,14 +691,16 @@ public class FileUtil {
// 删除临时文件
filePath.delete();
} catch (IOException e) {
log.info(e.getMessage(), e);
//log.info(e.getMessage(), e);
LogUtil.WriteLog_Error("报错:"+e.getMessage(), "FileUtil");
}
}
if (fis != null) {
try {
fis.close();
} catch (IOException e) {
log.error(e.getMessage(), e);
//log.error(e.getMessage(), e);
LogUtil.WriteLog_Error("报错:"+e.getMessage(), "FileUtil");
}
}
}
@ -750,9 +757,10 @@ public class FileUtil {
out.flush();
out.close();
in.close();
log.info("{}文件下载成功", multipartFile.getOriginalFilename());
//log.info("{}文件下载成功", multipartFile.getOriginalFilename());
} catch (IOException e) {
log.error("文件下载失败{}", e.getMessage(), e);
//log.error("文件下载失败{}", e.getMessage(), e);
LogUtil.WriteLog_Error("报错:"+e.getMessage(), "FileUtil");
throw new PPException(MessageEnum..getCode(), MessageEnum..getMessage());
}
}

@ -0,0 +1,147 @@
package com.nm.gsgl.common.utils;
import java.io.File;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.nio.channels.FileLock;
import java.text.SimpleDateFormat;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
import java.time.temporal.ChronoUnit;
import java.util.Date;
public class LogUtil {
public static String TomcatPath = System.getProperty("catalina.home");
private static String getFilePath() {
return TomcatPath ;
}
public static void WriteLog_Info(String msg, String disStr) {
SimpleDateFormat f = new SimpleDateFormat("yyyy-MM-dd-HH");
String fileName = f.format(new Date()) + "_Info";
WriteLog(fileName, msg, disStr);
}
public static void WriteLog_Error(String msg, String disStr) {
SimpleDateFormat f = new SimpleDateFormat("yyyy-MM-dd-HH");
String fileName = f.format(new Date()) + "_Error";
WriteLog(fileName, msg, disStr);
}
public static void WriteLog_InsertDB(String msg, String disStr) {
SimpleDateFormat f = new SimpleDateFormat("yyyy-MM-dd-HH");
String fileName = f.format(new Date()) + "_InsertDB";
WriteLog(fileName, msg, disStr);
}
public static void WriteLog_CallDisInterface(String msg, String disStr) {
SimpleDateFormat f = new SimpleDateFormat("yyyy-MM-dd-HH");
String fileName = f.format(new Date()) + "_CallDisInterface";
WriteLog(fileName, msg, disStr);
}
public static void WriteLog_MinFee(String msg, String disStr) {
SimpleDateFormat f = new SimpleDateFormat("yyyy-MM-dd-HH");
String fileName = f.format(new Date()) + "_MinFee";
WriteLog(fileName, msg, disStr);
}
public static void WriteLog_HRBlackCard(String msg, String disStr) {
SimpleDateFormat f = new SimpleDateFormat("yyyy-MM-dd-HH");
String fileName = f.format(new Date()) + "_HRBlackCard";
WriteLog(fileName, msg, disStr);
}
public static void WriteLog_MDBlackCard(String msg, String disStr) {
SimpleDateFormat f = new SimpleDateFormat("yyyy-MM-dd-HH");
String fileName = f.format(new Date()) + "_MDBlackCard";
WriteLog(fileName, msg, disStr);
}
public static void WriteLog_ValidateDual(String msg, String disStr) {
SimpleDateFormat f = new SimpleDateFormat("yyyy-MM-dd-HH");
String fileName = f.format(new Date()) + "_ValidateDual";
WriteLog(fileName, msg, disStr);
}
public static void WriteLog_BackupSqlite(String msg, String disStr) {
SimpleDateFormat f = new SimpleDateFormat("yyyy-MM-dd-HH");
String fileName = f.format(new Date()) + "_BackupSqlite";
WriteLog(fileName, msg, disStr);
}
public static void WriteLog_DeleteOverFile(String msg, String disStr) {
SimpleDateFormat f = new SimpleDateFormat("yyyy-MM-dd-HH");
String fileName = f.format(new Date()) + "_DeleteOverFile";
WriteLog(fileName, msg, disStr);
}
public static void WriteLog_HRZipDownloadFile(String msg, String disStr) {
SimpleDateFormat f = new SimpleDateFormat("yyyy-MM-dd-HH");
String fileName = f.format(new Date()) + "_HRZipDownloadFile";
WriteLog(fileName, msg, disStr);
}
private static void WriteLog(String suffix, String content, String disStr) {
try {
SimpleDateFormat format = new SimpleDateFormat("yyyyMMdd");
String baseDir = getFilePath() + "/logs/ParamDownload/";
FileUtil.fileCreat(baseDir);
StringBuffer sb = new StringBuffer();
String str = LocalDateTime.now().format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss SSS")) + "-->类【" + disStr + "】:";
sb.append(str);
sb.append(content);
sb.append("\r\n");
String logPath = baseDir;
File logPathFile = new File(logPath);
if (!logPathFile.isDirectory()) {
Boolean _f = logPathFile.mkdir();
}
String fileDic = format.format(new Date());
File filedir = new File(logPath + fileDic);
if (!filedir.isDirectory()) {
filedir.mkdir();
File[] fs = logPathFile.listFiles();
LocalDateTime now = LocalDateTime.now();
int delFlag = Integer.parseInt(now.minus(30, ChronoUnit.DAYS).format(DateTimeFormatter.ofPattern("yyyyMMdd")));
for (File f : fs) {
int deldir = Integer.parseInt(f.getName());
if (deldir < delFlag) {
if (f.isDirectory()) {
deleteDirectory(f);
}
}
}
}
String fullFilePath = logPath + fileDic + "/" + suffix + ".txt";
try (RandomAccessFile reader = new RandomAccessFile(new File(fullFilePath), "rw");
FileLock lock1 = reader.getChannel().lock()) {
reader.seek(reader.length());
reader.write(sb.toString().getBytes());
} catch (IOException e) {
System.out.println(e.getMessage());
}
} catch (Exception exception) {
System.out.println(exception.getMessage());
}
}
private static void deleteDirectory(File file) {
if (file.isFile()) {// 表示该文件不是文件夹
file.delete();
} else {
// 首先得到当前的路径
String[] childFilePaths = file.list();
for (String childFilePath : childFilePaths) {
File childFile = new File(file.getAbsolutePath() + "/" + childFilePath);
deleteDirectory(childFile);
}
file.delete();
}
}
}
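LogUtil writes plain-text logs under {catalina.home}/logs/ParamDownload/yyyyMMdd/, one file per hour and category, and prunes date directories older than 30 days when a new day's directory is created. A usage sketch; the message text and tag are placeholders.
// Appends a timestamped line to e.g. <catalina.home>/logs/ParamDownload/20240101/2024-01-01-10_Info.txt
LogUtil.WriteLog_Info("入库开始", "IntoDatabaseService");
// Errors go to the matching ..._Error.txt file for the same hour
LogUtil.WriteLog_Error("入库失败:" + "connection timeout", "IntoDatabaseService");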

@ -18,7 +18,7 @@ import java.security.NoSuchAlgorithmException;
* @date: 20221016 16:46
* @description: MD5
*/
@Slf4j
//@Slf4j
public class MD5Util {
/**
* MD5
@ -38,7 +38,8 @@ public class MD5Util {
//BigInteger(参数1,参数2) 参数1 是 1为正数 0为零 -1为负数
return new BigInteger(1, md.digest()).toString(16);
} catch (NoSuchAlgorithmException e) {
log.error(e.getMessage(), e);
//log.error(e.getMessage(), e);
LogUtil.WriteLog_Error("报错:"+e.getMessage(), "MD5Util");
}
return null;
}
@ -62,14 +63,14 @@ public class MD5Util {
BigInteger bi = new BigInteger(1, md5.digest());
value = bi.toString(16);
} catch (Exception e) {
log.error(e.getMessage(), e);
LogUtil.WriteLog_Error("报错:"+e.getMessage(), "MD5Util");
} finally {
try {
if(in != null){
in.close();
}
} catch (IOException e) {
log.error(e.getMessage(), e);
LogUtil.WriteLog_Error("报错:"+e.getMessage(), "MD5Util");
}
}
return value;
@ -89,7 +90,7 @@ public class MD5Util {
try {
value = DigestUtils.md5DigestAsHex(new FileInputStream(filePath));
} catch (Exception e) {
log.error(e.getMessage(), e);
LogUtil.WriteLog_Error("报错:"+e.getMessage(), "MD5Util");
}
return value;
}
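The last method above delegates to Spring's DigestUtils; a hedged stand-alone equivalent (the file path is a placeholder):
// Computes the hex MD5 of a file the same way as the DigestUtils branch above.
try (java.io.FileInputStream in = new java.io.FileInputStream("D:/downFile/param.zip")) {
    String md5 = org.springframework.util.DigestUtils.md5DigestAsHex(in);
    LogUtil.WriteLog_Info("文件MD5=" + md5, "MD5Util");
} catch (IOException e) {
    LogUtil.WriteLog_Error("报错:" + e.getMessage(), "MD5Util");
}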

@ -38,6 +38,7 @@ public class PathUtil {
public static String dbfBackPth;
public static String downFile;
public static String downFileBackup;
public static String downZipInsertBackup;
public static String backUpPath;
public static String sqbPath;
public static String sqbBackPath;
@ -64,6 +65,7 @@ public class PathUtil {
downFile = java.net.URLDecoder.decode(uploadPath + "\\downFile\\", "utf-8");
unZip = java.net.URLDecoder.decode(uploadPath + "\\unZip\\", "utf-8");
downFileBackup = java.net.URLDecoder.decode(uploadPath + "\\Backup\\", "utf-8");
downZipInsertBackup = java.net.URLDecoder.decode(uploadPath + "\\downZipInsertBackup\\", "utf-8");
backUpPath = java.net.URLDecoder.decode(TomcatPath + "\\backUpFile\\" + webName, "utf-8");
sqbPath = java.net.URLDecoder.decode(uploadPath + "\\sqb\\", "utf-8");
sqbBackPath = java.net.URLDecoder.decode(uploadPath + "\\sqbBack\\", "utf-8");
@ -89,6 +91,7 @@ public class PathUtil {
downFile = java.net.URLDecoder.decode(uploadPath + "/downFile/", "utf-8");
unZip = java.net.URLDecoder.decode(uploadPath + "/unZip/", "utf-8");
downFileBackup = java.net.URLDecoder.decode(uploadPath + "/Backup/", "utf-8");
downZipInsertBackup = java.net.URLDecoder.decode(uploadPath + "/downZipInsertBackup/", "utf-8");
backUpPath = java.net.URLDecoder.decode(TomcatPath + "/backUpFile/" + webName, "utf-8");
sqbPath = java.net.URLDecoder.decode(uploadPath + "/sqb/", "utf-8");
sqbBackPath = java.net.URLDecoder.decode(uploadPath + "/sqbBack/", "utf-8");
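The URLDecoder.decode calls above are there because paths derived from classpath URLs can contain percent-escapes (for example %20 for a space); a tiny stand-alone illustration, with a made-up sample path:

import java.io.UnsupportedEncodingException;
import java.net.URLDecoder;

public class PathDecodeDemo {
    public static void main(String[] args) throws UnsupportedEncodingException {
        String raw = "/usr/local/apache%20tomcat/webapps/app/downZipInsertBackup/";
        // Decoding restores the real filesystem path before directories are created under it.
        System.out.println(URLDecoder.decode(raw, "utf-8"));
        // -> /usr/local/apache tomcat/webapps/app/downZipInsertBackup/
    }
}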

@ -12,7 +12,7 @@ import java.util.Date;
import java.util.HashMap;
import java.util.Map;
@Slf4j
//@Slf4j
public class TokenUtil {
/**
* token
@ -43,7 +43,8 @@ public class TokenUtil {
.withExpiresAt(date)
.sign(algorithm);
} catch (Exception e) {
log.error("获取token异常", e);
//log.error("获取token异常", e);
LogUtil.WriteLog_Error("获取token异常"+e.getMessage(), "TokenUtil");
return null;
}
return token;
@ -72,7 +73,8 @@ public class TokenUtil {
session.setAttribute("Manno", Manno);
return true;
} catch (Exception e) {
log.error("验证token异常", e);
//log.error("验证token异常", e);
LogUtil.WriteLog_Error("验证token异常"+e.getMessage(), "TokenUtil");
return false;
}
}
@ -88,8 +90,9 @@ public class TokenUtil {
DecodedJWT jwt = verifier.verify(token);
return true;
} catch (Exception e) {
log.error("验证token异常", e);
//log.error("验证token异常", e);
LogUtil.WriteLog_Error("验证token异常"+e.getMessage(), "TokenUtil");
return false;
}
}
}
}
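For reference, a self-contained sketch of the sign/verify round trip with the auth0 java-jwt API used above. The secret, claim name and expiry below are assumptions for illustration, not the project's actual values:

import com.auth0.jwt.JWT;
import com.auth0.jwt.JWTVerifier;
import com.auth0.jwt.algorithms.Algorithm;
import com.auth0.jwt.interfaces.DecodedJWT;
import java.util.Date;

public class JwtSketch {
    private static final String SECRET = "change-me"; // assumed HMAC secret, not the real key

    public static String createToken(String manno, long ttlMillis) {
        Algorithm algorithm = Algorithm.HMAC256(SECRET);
        return JWT.create()
                .withClaim("Manno", manno) // claim name mirrors the session attribute used above
                .withExpiresAt(new Date(System.currentTimeMillis() + ttlMillis))
                .sign(algorithm);
    }

    public static boolean verify(String token) {
        try {
            JWTVerifier verifier = JWT.require(Algorithm.HMAC256(SECRET)).build();
            DecodedJWT jwt = verifier.verify(token); // throws on bad signature or expiry
            return jwt != null;
        } catch (Exception e) {
            return false;
        }
    }

    public static void main(String[] args) {
        System.out.println(verify(createToken("0001", 60_000))); // true
    }
}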

@ -38,7 +38,7 @@ import java.util.zip.ZipInputStream;
* @date: 20221109 10:03
* @description:
*/
@Slf4j
//@Slf4j
public class UnzipUtil {
private static final int BUFFER = 1024;
private static final String CODING_GBK = "GBK";
@ -56,7 +56,8 @@ public class UnzipUtil {
            //run the actual decompression
decompressFile(new File(zipPath), new File(unzipPath), srcList);
} catch (Exception e) {
log.error("解压缩失败:{}", e.getMessage(), e);
//log.error("解压缩失败:{}", e.getMessage(), e);
LogUtil.WriteLog_Error("解压缩失败:"+e.getMessage(), "UnzipUtil");
throw new PPException(MessageEnum..getCode(), MessageEnum..getMessage());
}
return srcList;
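decompressFile itself is not shown in this hunk; a minimal stand-alone extraction loop in the same spirit, using the GBK charset that matches CODING_GBK above (structure and names are illustrative only):

import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.List;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;

public class UnzipSketch {
    private static final int BUFFER = 1024;

    // Extracts every entry of zipPath into destDir and returns the created file paths.
    public static List<String> unzip(String zipPath, String destDir) throws IOException {
        List<String> created = new ArrayList<>();
        try (ZipInputStream zis = new ZipInputStream(new FileInputStream(zipPath), Charset.forName("GBK"))) {
            ZipEntry entry;
            byte[] buf = new byte[BUFFER];
            while ((entry = zis.getNextEntry()) != null) {
                File out = new File(destDir, entry.getName());
                if (entry.isDirectory()) {
                    out.mkdirs();
                    continue;
                }
                out.getParentFile().mkdirs();
                try (BufferedOutputStream bos = new BufferedOutputStream(new FileOutputStream(out))) {
                    int len;
                    while ((len = zis.read(buf)) > 0) {
                        bos.write(buf, 0, len);
                    }
                }
                created.add(out.getPath());
            }
        }
        return created;
    }
}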

@ -24,7 +24,7 @@ import java.util.zip.InflaterInputStream;
* @date: 20230131 10:37
* @description: ZLib
*/
@Slf4j
//@Slf4j
public class ZLibUtils {
@ -39,11 +39,12 @@ public class ZLibUtils {
fos = new FileOutputStream(outFlie);
byte[] bytes = FileUtils.readFileToByteArray(new File(inFile));
compress = compress(bytes, fos);
log.info("[uuid:{}]压缩Zlib成功", uuid);
//log.info("[uuid:{}]压缩Zlib成功", uuid);
return true;
} catch (IOException e) {
e.printStackTrace();
log.error("[uuid:{}]压缩Zlib失败{}", uuid,e.getMessage(),e);
//log.error("[uuid:{}]压缩Zlib失败{}", uuid,e.getMessage(),e);
LogUtil.WriteLog_Error("压缩Zlib失败"+e.getMessage(), "ZLibUtils");
}finally {
        //make sure fos is not null; only a non-null stream can be closed
if (compress != null) {
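The compress(byte[], FileOutputStream) helper called above is not shown in this hunk; a plausible stand-alone zlib round trip with java.util.zip, offered only as a sketch of the same idea:

import java.io.ByteArrayOutputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.zip.DataFormatException;
import java.util.zip.Deflater;
import java.util.zip.Inflater;

public class ZlibSketch {
    // Deflates data, optionally writing the compressed bytes to fos, and returns them.
    public static byte[] compress(byte[] data, FileOutputStream fos) throws IOException {
        Deflater deflater = new Deflater();
        deflater.setInput(data);
        deflater.finish();
        ByteArrayOutputStream bos = new ByteArrayOutputStream(data.length);
        byte[] buf = new byte[1024];
        while (!deflater.finished()) {
            bos.write(buf, 0, deflater.deflate(buf));
        }
        deflater.end();
        byte[] out = bos.toByteArray();
        if (fos != null) {
            fos.write(out);
            fos.flush();
        }
        return out;
    }

    // Inverse operation, handy for a round-trip check of the sketch above.
    public static byte[] decompress(byte[] data) throws DataFormatException {
        Inflater inflater = new Inflater();
        inflater.setInput(data);
        ByteArrayOutputStream bos = new ByteArrayOutputStream(data.length);
        byte[] buf = new byte[1024];
        while (!inflater.finished()) {
            bos.write(buf, 0, inflater.inflate(buf));
        }
        inflater.end();
        return bos.toByteArray();
    }
}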

@ -3,7 +3,6 @@ package com.nm.gsgl.controller;
import com.nm.gsgl.common.utils.UuidUtil;
import com.nm.gsgl.entity.ZipFileInfo;
import com.nm.gsgl.service.DownloadZipService;
import lombok.extern.slf4j.Slf4j;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RestController;
@ -15,7 +14,6 @@ import javax.servlet.http.HttpServletResponse;
* @date: 20230225 3:06
* @description:
*/
@Slf4j
@RestController
public class DownloadZipController {
@Resource

@ -1,132 +1,132 @@
package com.nm.gsgl.controller;
import com.nm.gsgl.common.Constant;
import com.nm.gsgl.common.dbfmodule.CMinFeeCreator;
import com.nm.gsgl.common.enumeration.MessageEnum;
import com.nm.gsgl.common.exception.PPException;
import com.nm.gsgl.common.utils.DateTimeUtil;
import com.nm.gsgl.common.utils.PathUtil;
import com.nm.gsgl.common.utils.UuidUtil;
import com.nm.gsgl.entity.FileInfo;
import com.nm.gsgl.entity.Res;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.time.DateUtils;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RestController;
import java.io.File;
import java.time.LocalDateTime;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.Date;
import java.util.List;
/**
* @author: shuguang
* @date: 20230220 15:57
* @description:
*/
@Slf4j
@RestController
public class TestController {
    //how many days of incremental data to load
@Value("${incDays}")
public String incDays;
@Value("${NFSFilePath}")
public String nfsFilePath;
@GetMapping("/webServiceInfo/creatDBFTest")
public Res sendOutTest() {
String uuid = UuidUtil.getUuid();
long start;
int count = 0;
try {
log.info("[uuid:{}]根据测试生成黑名单DBF文件当前时间={}", uuid, LocalDateTime.now());
start = System.currentTimeMillis();
String dateStr="2023-07-30 14:00:00";
Date date = DateTimeUtil.getFormatDate(dateStr, Constant.YYYY_MM_DD_HH_MM_SS);
String NFSFilePath = "/usr/sg/cardBlackTest";
String dbfDir = "CARDBLACK";
String qlDbName = "TB_CARDBLACKALL";
String dbfBackPth = "/dbf/DbfBack/";
String downFileBackup = "/cardTest/";
String dbfWrtPth = "/usr/sg/cardBlackTest/dbf/WriteDbf/";
String bfName = "TB_CARDBLACKINC";
String dbName = "CARDBLACKLISTINC.DBF";
String version = "20230730104";
String fileModulName = PathUtil.fileModulName;
byte[] bytMd5 = new byte[256];
int etcType = 1;
String formateString = DateTimeUtil.getFormateString(date, Constant.YYYYMMDDHHMMSS);
String format = DateTimeUtil.getFormateString(date, Constant.YYYY_MM_DD_HH_MM_SS);
String zlibName;
String zlibFileName;
String dbfFileName = dbfWrtPth + dbfDir + File.separator + formateString + Constant.STR_ + dbName;
//增量目录名称
StringBuilder incDirLst = new StringBuilder();
boolean fDir = false;
for (int i = 0; i < Integer.parseInt(incDays); i++) {
if (fDir) {
incDirLst.append("|");
}
incDirLst.append(NFSFilePath).append(downFileBackup).append(dbfDir).append(File.separator).append(DateTimeUtil.getFormateString(DateTimeUtil.addDateDays(date, -i), Constant.YYYYMMDD));
fDir = true;
}
//全量dbf文件
String dbfQlFileDir = nfsFilePath + PathUtil.dbfBackPth + dbfDir;
//获取备份dbf列表中最新的一个全量黑名单
String backQLName = "";
List<FileInfo> fileInfos = new ArrayList<>();
File[] files = new File(dbfQlFileDir).listFiles();
if (null != files && files.length > 0) {
for (File file : files) {
if (file.isFile() && file.getName().endsWith(Constant.STR_DBF) && file.getName().contains(qlDbName)) {
FileInfo fileInfo = new FileInfo();
fileInfo.setFileName(file.getName());
fileInfo.setLastModified(new Date(file.lastModified()));
fileInfo.setFilePath(file.getPath());
fileInfos.add(fileInfo);
}
}
//获取当前时间加载增量时间之前的全量文件名
Date tenDaysAgo = DateTimeUtil.addDateDays(date, -(Integer.parseInt(incDays)));
log.info("[uuid:{}]获取{}天前全量DBF", uuid, incDays);
if (fileInfos.size() > 0) {
//对文件的最后修改时间进行排序
fileInfos.sort(Comparator.comparing(FileInfo::getLastModified).reversed());
for (FileInfo fileInfo : fileInfos) {
if (DateUtils.isSameDay(tenDaysAgo, fileInfo.getLastModified())) {
backQLName = fileInfo.getFileName();
}
}
} else {
log.error("[uuid:{}]获取获取{}天前全量DBF文件失败", uuid, incDays);
}
log.info("[uuid:{}]获取获取{}天前全量DBF名称{}", uuid, incDays, backQLName);
}
//需要查询出最新全量
String dbfQlFileName = nfsFilePath + PathUtil.dbfBackPth + dbfDir + File.separator + backQLName;
zlibName = bfName + Constant.STR_ + formateString + Constant.STR_ + version + Constant.STR_ZLIB;
zlibFileName = dbfWrtPth + dbfDir + File.separator + zlibName;
//如果没有最新的全量DBF文件时不生成增量文件
if (StringUtils.isNotBlank(backQLName)) {
log.info("[uuid:{}]调用动态库生成增量入参,全量黑名单DBF文件全路径{}", uuid, dbfQlFileName);
log.info("[uuid:{}]调用动态库生成增量入参, 模板dbf文件全路径{}", uuid, fileModulName + dbName);
log.info("[uuid:{}]调用动态库生成增量入参, 生成DBF文件全路径{}", uuid, dbfFileName);
log.info("[uuid:{}]调用动态库生成增量入参, 生成DBFzlib文件全路径{}", uuid, zlibFileName);
count = CMinFeeCreator.INSTANCE.MakeBlackInc(etcType, version, format, incDirLst.toString(), dbfQlFileName, fileModulName + dbName, dbfFileName, zlibFileName, bytMd5);
log.info("[uuid:{}]调用动态库生成增量DBF文件完成,返回值为:{}", uuid, count);
}
} catch (NumberFormatException e) {
log.error("[uuid:{}]调用动态库生成DBF及SQB失败{}", uuid, e.getMessage(), e);
throw new PPException(MessageEnum.DBFSQB.getCode(), MessageEnum.DBFSQB.getMessage()
+ ",DBF动态库返回值为" + count );
}
long end = System.currentTimeMillis();
log.info("[uuid:{}]根据测试生成DBF文件耗时{}毫秒", uuid, end - start);
return Res.success(end - start);
}
}
//package com.nm.gsgl.controller;
//
//import com.nm.gsgl.common.Constant;
//import com.nm.gsgl.common.dbfmodule.CMinFeeCreator;
//import com.nm.gsgl.common.enumeration.MessageEnum;
//import com.nm.gsgl.common.exception.PPException;
//import com.nm.gsgl.common.utils.DateTimeUtil;
//import com.nm.gsgl.common.utils.PathUtil;
//import com.nm.gsgl.common.utils.UuidUtil;
//import com.nm.gsgl.entity.FileInfo;
//import com.nm.gsgl.entity.Res;
//import lombok.extern.slf4j.Slf4j;
//import org.apache.commons.lang3.StringUtils;
//import org.apache.commons.lang3.time.DateUtils;
//import org.springframework.beans.factory.annotation.Value;
//import org.springframework.web.bind.annotation.GetMapping;
//import org.springframework.web.bind.annotation.RestController;
//
//import java.io.File;
//import java.time.LocalDateTime;
//import java.util.ArrayList;
//import java.util.Comparator;
//import java.util.Date;
//import java.util.List;
//
///**
// * @author: shuguang
// * @date: 2023年02月20日 15:57
// * @description:
// */
//@Slf4j
//@RestController
//public class TestController {
// //加载几天的增量
// @Value("${incDays}")
// public String incDays;
// @Value("${NFSFilePath}")
// public String nfsFilePath;
//
// @GetMapping("/webServiceInfo/creatDBFTest")
// public Res sendOutTest() {
// String uuid = UuidUtil.getUuid();
// long start;
// int count = 0;
// try {
// log.info("[uuid:{}]根据测试生成黑名单DBF文件当前时间={}", uuid, LocalDateTime.now());
// start = System.currentTimeMillis();
// String dateStr="2023-07-30 14:00:00";
// Date date = DateTimeUtil.getFormatDate(dateStr, Constant.YYYY_MM_DD_HH_MM_SS);
// String NFSFilePath = "/usr/sg/cardBlackTest";
// String dbfDir = "CARDBLACK";
// String qlDbName = "TB_CARDBLACKALL";
// String dbfBackPth = "/dbf/DbfBack/";
// String downFileBackup = "/cardTest/";
// String dbfWrtPth = "/usr/sg/cardBlackTest/dbf/WriteDbf/";
// String bfName = "TB_CARDBLACKINC";
// String dbName = "CARDBLACKLISTINC.DBF";
// String version = "20230730104";
// String fileModulName = PathUtil.fileModulName;
// byte[] bytMd5 = new byte[256];
// int etcType = 1;
// String formateString = DateTimeUtil.getFormateString(date, Constant.YYYYMMDDHHMMSS);
// String format = DateTimeUtil.getFormateString(date, Constant.YYYY_MM_DD_HH_MM_SS);
// String zlibName;
// String zlibFileName;
// String dbfFileName = dbfWrtPth + dbfDir + File.separator + formateString + Constant.STR_ + dbName;
// //增量目录名称
// StringBuilder incDirLst = new StringBuilder();
// boolean fDir = false;
// for (int i = 0; i < Integer.parseInt(incDays); i++) {
// if (fDir) {
// incDirLst.append("|");
// }
// incDirLst.append(NFSFilePath).append(downFileBackup).append(dbfDir).append(File.separator).append(DateTimeUtil.getFormateString(DateTimeUtil.addDateDays(date, -i), Constant.YYYYMMDD));
// fDir = true;
// }
//
// //全量dbf文件
// String dbfQlFileDir = nfsFilePath + PathUtil.dbfBackPth + dbfDir;
// //获取备份dbf列表中最新的一个全量黑名单
// String backQLName = "";
// List<FileInfo> fileInfos = new ArrayList<>();
// File[] files = new File(dbfQlFileDir).listFiles();
// if (null != files && files.length > 0) {
// for (File file : files) {
// if (file.isFile() && file.getName().endsWith(Constant.STR_DBF) && file.getName().contains(qlDbName)) {
// FileInfo fileInfo = new FileInfo();
// fileInfo.setFileName(file.getName());
// fileInfo.setLastModified(new Date(file.lastModified()));
// fileInfo.setFilePath(file.getPath());
// fileInfos.add(fileInfo);
// }
// }
// //获取当前时间加载增量时间之前的全量文件名
// Date tenDaysAgo = DateTimeUtil.addDateDays(date, -(Integer.parseInt(incDays)));
// log.info("[uuid:{}]获取{}天前全量DBF", uuid, incDays);
// if (fileInfos.size() > 0) {
// //对文件的最后修改时间进行排序
// fileInfos.sort(Comparator.comparing(FileInfo::getLastModified).reversed());
// for (FileInfo fileInfo : fileInfos) {
// if (DateUtils.isSameDay(tenDaysAgo, fileInfo.getLastModified())) {
// backQLName = fileInfo.getFileName();
// }
// }
// } else {
// log.error("[uuid:{}]获取获取{}天前全量DBF文件失败", uuid, incDays);
// }
// log.info("[uuid:{}]获取获取{}天前全量DBF名称{}", uuid, incDays, backQLName);
// }
// //需要查询出最新全量
// String dbfQlFileName = nfsFilePath + PathUtil.dbfBackPth + dbfDir + File.separator + backQLName;
// zlibName = bfName + Constant.STR_ + formateString + Constant.STR_ + version + Constant.STR_ZLIB;
// zlibFileName = dbfWrtPth + dbfDir + File.separator + zlibName;
// //如果没有最新的全量DBF文件时不生成增量文件
// if (StringUtils.isNotBlank(backQLName)) {
// log.info("[uuid:{}]调用动态库生成增量入参,全量黑名单DBF文件全路径{}", uuid, dbfQlFileName);
// log.info("[uuid:{}]调用动态库生成增量入参, 模板dbf文件全路径{}", uuid, fileModulName + dbName);
// log.info("[uuid:{}]调用动态库生成增量入参, 生成DBF文件全路径{}", uuid, dbfFileName);
// log.info("[uuid:{}]调用动态库生成增量入参, 生成DBFzlib文件全路径{}", uuid, zlibFileName);
// count = CMinFeeCreator.INSTANCE.MakeBlackInc(etcType, version, format, incDirLst.toString(), dbfQlFileName, fileModulName + dbName, dbfFileName, zlibFileName, bytMd5);
// log.info("[uuid:{}]调用动态库生成增量DBF文件完成,返回值为:{}", uuid, count);
// }
// } catch (NumberFormatException e) {
// log.error("[uuid:{}]调用动态库生成DBF及SQB失败{}", uuid, e.getMessage(), e);
// throw new PPException(MessageEnum.调用动态库生成DBF和SQB操作失败.getCode(), MessageEnum.调用动态库生成DBF和SQB操作失败.getMessage()
// + ",DBF动态库返回值为" + count );
// }
// long end = System.currentTimeMillis();
// log.info("[uuid:{}]根据测试生成DBF文件耗时{}毫秒", uuid, end - start);
// return Res.success(end - start);
// }
//}

@ -36,7 +36,6 @@ import java.util.List;
* @date: 20230220 15:57
* @description:
*/
@Slf4j
@RestController
public class VueTestController {
@Value("${sqliteDriverName}")
@ -55,7 +54,7 @@ public class VueTestController {
private String HROracleName;
@Value("${HROraclePass}")
private String HROraclePass;
public static Logger callDisInterfaceLog = LoggerFactory.getLogger("CallDisInterface");
//public static Logger callDisInterfaceLog = LoggerFactory.getLogger("CallDisInterface");
@Resource
private AppAliveStatusMapper appAliveStatusMapper;
@Resource
@ -64,7 +63,7 @@ public class VueTestController {
@PostMapping("/VueTest")
public Page VueTest(@RequestBody PageInfo pageInfo) {
String uuid = UuidUtil.getUuid();
log.info("[uuid:{}]-开始查询Sqlite业务状态表", uuid);
//log.info("[uuid:{}]-开始查询Sqlite业务状态表", uuid);
List<BusinessTasks> businessTasks;
if (StringUtils.isNotBlank(pageInfo.getDownloadTime())) {
@ -74,7 +73,7 @@ public class VueTestController {
businessTasks = DatabaseUtil.selectBusines(sqliteDriverName, sqliteUrl, "SELECT id,fileName,size,recordCount,downStatus,protocolType,downloadTime,consumTime,version,newVersion,proStatus,middleDatabase,handleStatus,handleTime,publishStatus,publishTime FROM business_tasks_info");
}
businessTasks.sort(Comparator.comparing(BusinessTasks::getDownloadTime).reversed());
log.info("[uuid:{}]-查询Sqlite业务状态表完成条数为{}", uuid, businessTasks.size());
//log.info("[uuid:{}]-查询Sqlite业务状态表完成条数为{}", uuid, businessTasks.size());
Page pages = PagesUtils.getPages(pageInfo.getPageNum(), pageInfo.getPageSize(), businessTasks);
return pages;
@ -83,7 +82,7 @@ public class VueTestController {
@GetMapping("/currentActive")
public Res currentActive() {
String uuid = UuidUtil.getUuid();
log.info("[uuid:{}]-开始查询本机级别及当前活跃机器", uuid);
//log.info("[uuid:{}]-开始查询本机级别及当前活跃机器", uuid);
MachineInfo machineInfo = new MachineInfo();
if (Constant.STR_ONE.equals(machineId)) {
machineInfo.setMachineName("主机");
@ -97,21 +96,21 @@ public class VueTestController {
machineInfo.setActiveName("备机");
}
log.info("[uuid:{}]-查询完成,本机级别:{},当前活跃机器:{}", uuid, machineInfo.getMachineName(), machineInfo.getActiveName());
//log.info("[uuid:{}]-查询完成,本机级别:{},当前活跃机器:{}", uuid, machineInfo.getMachineName(), machineInfo.getActiveName());
return Res.success(machineInfo);
}
@GetMapping("/webServiceInfo/sendOutTest")
public Res sendOutTest() {
String uuid = UuidUtil.getUuid();
callDisInterfaceLog.info("[uuid:{}]定时调用自动下发接口,当前时间={}", uuid, LocalDateTime.now());
//callDisInterfaceLog.info("[uuid:{}]定时调用自动下发接口,当前时间={}", uuid, LocalDateTime.now());
long start = System.currentTimeMillis();
boolean b = callSendOutInterfaceService.callSendOutInterface(uuid);
long end = System.currentTimeMillis();
if (b) {
callDisInterfaceLog.info("[uuid:{}]定时调用自动下发接口执行完成,耗时:{}毫秒", uuid, end - start);
//callDisInterfaceLog.info("[uuid:{}]定时调用自动下发接口执行完成,耗时:{}毫秒", uuid, end - start);
} else {
callDisInterfaceLog.info("[uuid:{}]当前程序不调用自动下发程序接口,耗时:{}毫秒", uuid, end - start);
//callDisInterfaceLog.info("[uuid:{}]当前程序不调用自动下发程序接口,耗时:{}毫秒", uuid, end - start);
}
return Res.success(end - start);
}

@ -0,0 +1,82 @@
package com.nm.gsgl.entity.business.db;
import com.baomidou.mybatisplus.annotation.TableField;
import com.baomidou.mybatisplus.annotation.TableName;
import lombok.Data;
import org.apache.ibatis.type.JdbcType;
/**
* @author: shuguang
* @date: 20240517 16:51
* @description: 804
*/
@Data
@TableName("CHECKRESULT_INFO_TABLE")
public class CheckResultInfo {
@TableField("CHECKID")
private String checkId;
@TableField("VEHICLESIGN")
private String vehicleSign;
@TableField("VEHICLECLASS")
private Integer vehicleClass;
@TableField("APPOINTMENTID")
private String appointmentId;
@TableField("DRIVERTELEPHONE")
private String driverTelephone;
@TableField("VEHICLEID")
private String vehicleId;
@TableField("FREIGHTTYPES")
private String freightTypes;
@TableField("VEHICLETYPE")
private Integer vehicleType;
@TableField("CRATETYPE")
private Integer crateType;
@TableField("ENWEIGHT")
private Integer enWeight;
@TableField("EXWEIGHT")
private Integer exWeight;
@TableField(value = "CHECKTIME", jdbcType = JdbcType.DATE, update = "to_date(#{CHECKTIME},'yyyy-mm-dd hh24:mi:ss')")
private String checkTime;
@TableField("ENSTATIONID")
private String enStationId;
@TableField("EXSTATIONID")
private String exStationId;
@TableField("GROUPID")
private String groupId;
@TableField("INSPECTOR")
private String inspector;
@TableField("REVIEWER")
private String reviewer;
@TableField("CHECKRESULT")
private Integer checkResult;
@TableField("REASON")
private String reason;
@TableField("MEDIATYPE")
private Integer mediaType;
@TableField("TRANSACTIONID")
private String transactionId;
@TableField("PASSID")
private String passId;
@TableField(value = "EXTIME", jdbcType = JdbcType.DATE, update = "to_date(#{EXTIME},'yyyy-mm-dd hh24:mi:ss')")
private String exTime;
@TableField("transpaytype")
private Integer transPayType;
@TableField("FEE")
private Long fee;
@TableField("PAYFEE")
private Long payFee;
@TableField("MEMO")
private String memo;
@TableField("OPERATION")
private Integer operation;
@TableField("EXEMPTION")
private Integer exemption;
@TableField("BASICFILENAME")
private String basicFilename;
}

@ -0,0 +1,50 @@
package com.nm.gsgl.entity.business.db;
import com.baomidou.mybatisplus.annotation.TableField;
import com.baomidou.mybatisplus.annotation.TableName;
import lombok.Data;
import org.apache.ibatis.type.JdbcType;
import java.util.Date;
/**
* @author: shuguang
* @date: 20240517 16:27
* @description: 604
*/
@Data
@TableName("CT_GATHERDETAIL_TABLE")
public class CtGatherDetail {
@TableField("PASSID")
private String passId;
@TableField("VEHICLEID")
private String vehicleId;
@TableField("VEHICLETYPE")
private Integer vehicleType;
@TableField("VEHICLECLASS")
private Integer vehicleClass;
@TableField("AXLECOUNT")
    private Integer axleCount;
@TableField("PAYERTYPE")
private Integer payerType;
@TableField("PAYERID")
private String payerId;
@TableField("RECEIVERID")
private String receiverId;
@TableField(value = "RECEIVETIME",jdbcType = JdbcType.DATE,update = "to_date(#{RECEIVETIME},'yyyy-mm-dd hh24:mi:ss')")
private String receiveTime;
@TableField("FEE")
private Long fee;
@TableField("BASICFILENAME")
private String basicFilename;
@TableField("DATEMARK")
private Date dateMark;
@TableField("BAK1")
private Integer bak1;
@TableField("BAK2")
private Integer bak2;
@TableField("BAK3")
private String bak3;
@TableField("BAK4")
private String bak4;
}

@ -0,0 +1,76 @@
package com.nm.gsgl.entity.business.db;
import com.baomidou.mybatisplus.annotation.TableField;
import com.baomidou.mybatisplus.annotation.TableName;
import lombok.Data;
import org.apache.ibatis.type.JdbcType;
/**
* @author: shuguang
* @date: 20240517 16:32
* @description: 502
*/
@Data
@TableName("CT_OTHER_CLEAR")
public class CtOtherClear {
@TableField("MESSAGEID")
private Integer messageId;
@TableField("ID")
private String id;
@TableField("PAYFEE")
private Long payFee;
@TableField("FEE")
private Long fee;
@TableField("DISCOUNTFEE")
private Long discountFee;
@TableField("MEDIATYPE")
private Integer mediaType;
@TableField("OBUSIGN")
private Integer obuSign;
@TableField("MEDIANO")
private String mediaNo;
@TableField("ENTOLLLANEID")
private String enTollLaneId;
@TableField(value = "ENTIME",jdbcType = JdbcType.DATE,update = "to_date(#{ENTIME},'yyyy-mm-dd hh24:mi:ss')")
private String enTime;
@TableField("ENWEIGHT")
private Integer enWeight;
@TableField("ENAXLECOUNT")
    private Integer enAxleCount;
@TableField("EXTOLLLANEID")
private String exTollLaneId;
@TableField("ENVEHICLEID")
private String enVehicleId;
@TableField("EXVEHICLEID")
private String exVehicleId;
@TableField("IDENTIFYVEHICLEID")
private String identifyVehicleId;
@TableField("ENVEHICLETYPE")
private Integer enVehicleType;
@TableField("EXVEHICLETYPE")
private Integer exVehicleType;
@TableField("VEHICLECLASS")
private Integer vehicleClass;
@TableField("ENTOLLSTATIONNAME")
private String enTollStationName;
@TableField("EXTOLLSTATIONNAME")
private String exTollStationName;
@TableField("PAYTYPE")
private Integer payType;
@TableField("IDENTIFICATION")
private Integer identification;
@TableField("DESCRIPTION")
private String description;
@TableField("SPECIALTYPE")
private String specialType;
@TableField("VEHICLESIGNID")
private String vehicleSignId;
@TableField("PASSID")
private String passId;
@TableField("SPLITPROVINCE")
private String splitProvince;
@TableField("AMOUNT")
private Long amount;
}

@ -0,0 +1,41 @@
package com.nm.gsgl.entity.business.db;
import com.baomidou.mybatisplus.annotation.TableField;
import com.baomidou.mybatisplus.annotation.TableName;
import lombok.Data;
import org.apache.ibatis.type.JdbcType;
/**
* @author: shuguang
* @date: 20240517 16:43
* @description: 503
*/
@Data
@TableName("CT_OTHER_CLEAR_SUM")
public class CtOtherClearSum {
@TableField("SERPROVINCEID")
private String serProvinceId;
@TableField(value = "CLEARDATE",jdbcType = JdbcType.DATE,update = "to_date(#{CLEARDATE},'yyyy-mm-dd')")
private String clearDate;
@TableField(value = "PROCESSTIME",jdbcType = JdbcType.DATE,update = "to_date(#{PROCESSTIME},'yyyy-mm-dd hh24:mi:ss')")
private String processTime;
@TableField("PAYERAMOUNT")
private Long payerAmount;
@TableField("PAYERMESSAGECOUNT")
private Integer payerMessageCount;
@TableField("RECEIVERAMOUNT")
private String receiverAmount;
@TableField("RECEIVERMESSAGECOUNT")
private Integer receiverMessageCount;
@TableField("AMOUNTDUE")
private Long amountDue;
@TableField("RESULTOFPAYER")
private String resultOfPayer;
@TableField("RESULTOFRECEIVER")
private String resultOfReceiver;
@TableField("ISSPLIT")
private Integer isSplit;
@TableField("BASICFILENAME")
private String basicFilename;
}

@ -0,0 +1,46 @@
package com.nm.gsgl.entity.business.db;
import com.baomidou.mybatisplus.annotation.TableField;
import com.baomidou.mybatisplus.annotation.TableName;
import lombok.Data;
import org.apache.ibatis.type.JdbcType;
import java.util.Date;
/**
* @author: shuguang
* @date: 20240517 16:15
* @description: 605
*/
@Data
@TableName("CT_PAYERDETAIL_TABLE")
public class CtPayerDetail {
@TableField("PASSID")
private String passId;
@TableField("VEHICLEID")
private String vehicleId;
@TableField("ORIGIN")
private Long origin;
@TableField("PAYERTYPE")
private Integer payerType;
@TableField("PAYERID")
private String payerId;
@TableField(value = "PAYTIME",jdbcType = JdbcType.DATE,update = "to_date(#{PAYTIME},'yyyy-mm-dd hh24:mi:ss')")
private String payTime;
@TableField("PAYFEE")
private Long payFee;
@TableField("BASICFILENAME")
private String basicFilename;
@TableField("DATEMARK")
private Date dateMark;
@TableField("BAK1")
private Integer bak1;
@TableField("BAK2")
private Integer bak2;
@TableField("BAK3")
private String bak3;
@TableField("BAK4")
private String bak4;
}

@ -0,0 +1,51 @@
package com.nm.gsgl.entity.business.db;
import com.baomidou.mybatisplus.annotation.TableField;
import com.baomidou.mybatisplus.annotation.TableName;
import lombok.Data;
import org.apache.ibatis.type.JdbcType;
import java.util.Date;
/**
* @author: shuguang
* @date: 20240527 11:41
* @description: 306
*/
@Data
@TableName("CT_REFUND_LATERPAY_SUM")
public class CtRefundLaterpaySum {
@TableField(value = "CLEARDATE",jdbcType = JdbcType.DATE,update = "to_date(#{CLEARDATE},'yyyy-mm-dd')")
private String clearDate;
@TableField(value = "PROCESSTIME",jdbcType = JdbcType.DATE,update = "to_date(#{PROCESSTIME},'yyyy-mm-dd hh24:mi:ss')")
private String processTime;
@TableField("RECEIVEAMOUNT")
private Integer receiveAmount;
@TableField("PAYAMOUNT")
private Integer payAmount;
@TableField("AMOUNT")
private Integer amount;
@TableField("REFUNDCOUNT")
private Integer refundCount;
@TableField("RESTITUTIONCOUNT")
private Integer restitutionCount;
@TableField("RECEIVEDETAIL")
private String receiveDetail;
@TableField("PAYDETAIL")
private String payDetail;
@TableField("BASICFILENAME")
private String basicFilename;
@TableField("DATEMARK")
private Date dateMark;
@TableField("ISSPLIT")
private Integer isSplit;
@TableField("PROVRESTITUTIONCOUNT")
private Integer provrestitutionCount;
@TableField("PROVRECEIVEAMOUT")
private Integer provreceiveAmout;
@TableField("PROVREFUNDCOUNT")
private Integer provrefundCount;
@TableField("PROVPAYAMOUT")
private Integer provpayAmout;
}

@ -36,11 +36,11 @@ public class OutOtherGantry {
*/
@TableField(value = "SPLITTIME",jdbcType = JdbcType.DATE,update = "to_date(#{SPLITTIME},'yyyy-mm-dd')")
private String splitTime;
/**
     *Multiple in-province transaction split records
*/
@TableField("SPLITPROVINCE")
private String splitProvince;
///**
// *多条省内交易信息
// */
//@TableField("SPLITPROVINCE")
//private String splitProvince;
/**
*zip
*/

@ -95,4 +95,9 @@ public class ServerLaterPayTableInfo {
*/
@TableField("BASICFILENAME")
private String basicFilename;
/**
     * Split flag: 0 - not split, 1 - split
*/
@TableField("ISSPLIT")
private Integer isSplit;
}

@ -6,6 +6,8 @@ import com.baomidou.mybatisplus.annotation.TableName;
import lombok.Data;
import org.apache.ibatis.type.JdbcType;
import java.util.Date;
/**
* @author: shuguang
* @date: 20230223 14:43
@ -85,4 +87,6 @@ public class ServerRefundTableInfo {
*/
@TableField("BASICFILENAME")
private String basicFilename;
@TableField("DT")
private Date dT;
}

@ -58,4 +58,9 @@ public class ClearSum {
*/
@TableField("BASICFILENAME")
private String basicFilename;
/**
     * Split flag: 0 - not split, 1 - split
*/
@TableField("ISSPLIT")
private Integer isSplit;
}

@ -0,0 +1,14 @@
package com.nm.gsgl.mapper;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import com.nm.gsgl.entity.business.db.CheckResultInfo;
import org.apache.ibatis.annotations.Mapper;
/**
* @author: shuguang
* @date: 20240521 10:32
* @description: 804
*/
@Mapper
public interface CheckResultInfoMapper extends BaseMapper<CheckResultInfo> {
}

@ -0,0 +1,14 @@
package com.nm.gsgl.mapper;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import com.nm.gsgl.entity.business.db.CtGatherDetail;
import org.apache.ibatis.annotations.Mapper;
/**
* @author: shuguang
* @date: 20240521 10:30
* @description: 604
*/
@Mapper
public interface CtGatherDetailMapper extends BaseMapper<CtGatherDetail> {
}

@ -0,0 +1,14 @@
package com.nm.gsgl.mapper;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import com.nm.gsgl.entity.business.db.CtOtherClear;
import org.apache.ibatis.annotations.Mapper;
/**
* @author: shuguang
* @date: 20240521 10:27
* @description: 502
*/
@Mapper
public interface CtOtherClearMapper extends BaseMapper<CtOtherClear> {
}

@ -0,0 +1,14 @@
package com.nm.gsgl.mapper;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import com.nm.gsgl.entity.business.db.CtOtherClearSum;
import org.apache.ibatis.annotations.Mapper;
/**
* @author: shuguang
* @date: 20240521 10:29
* @description: 503
*/
@Mapper
public interface CtOtherClearSumMapper extends BaseMapper<CtOtherClearSum> {
}

@ -0,0 +1,14 @@
package com.nm.gsgl.mapper;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import com.nm.gsgl.entity.business.db.CtPayerDetail;
import org.apache.ibatis.annotations.Mapper;
/**
* @author: shuguang
* @date: 20240521 10:31
* @description: 605
*/
@Mapper
public interface CtPayerDetailMapper extends BaseMapper<CtPayerDetail> {
}

@ -0,0 +1,14 @@
package com.nm.gsgl.mapper;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import com.nm.gsgl.entity.business.db.CtRefundLaterpaySum;
import org.apache.ibatis.annotations.Mapper;
/**
* @author: shuguang
* @date: 20240527 14:27
* @description: 306
*/
@Mapper
public interface CtRefundLaterpaySumMapper extends BaseMapper<CtRefundLaterpaySum> {
}
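Illustrative usage of the new BaseMapper interfaces above (the service class below is hypothetical, not part of this change): inject the mapper and call insert, and MyBatis-Plus builds the SQL from the @TableName/@TableField annotations on the entity.

import com.nm.gsgl.entity.business.db.CtGatherDetail;
import com.nm.gsgl.mapper.CtGatherDetailMapper;
import org.springframework.stereotype.Service;
import javax.annotation.Resource;

@Service
public class CtGatherDetailWriterSketch {
    @Resource
    private CtGatherDetailMapper ctGatherDetailMapper;

    // Persists one 604 gather-detail record parsed from a downloaded file.
    public int save(String passId, String vehicleId, long fee, String receiveTime, String basicFilename) {
        CtGatherDetail row = new CtGatherDetail();
        row.setPassId(passId);
        row.setVehicleId(vehicleId);
        row.setFee(fee);
        row.setReceiveTime(receiveTime); // kept as a String in the entity; see its @TableField mapping
        row.setBasicFilename(basicFilename);
        return ctGatherDetailMapper.insert(row); // returns the number of affected rows
    }
}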

@ -1,19 +1,19 @@
package com.nm.gsgl.service;
import com.nm.gsgl.entity.sqlite.BusinessTasks;
/**
* @author: shuguang
* @date: 20230206 14:04
 * @description: DBF write service
*/
public interface CreateDbfService {
/**
     * Convert the downloaded zip file to json and insert it into the corresponding DBF file
*
* @param uuid uuid
* @author shuguang
* @date 2023-02-13 8:38
*/
void zipToDbf(String uuid, BusinessTasks businessTask);
}
//package com.nm.gsgl.service;
//
//import com.nm.gsgl.entity.sqlite.BusinessTasks;
//
///**
// * @author: shuguang
// * @date: 2023年02月06日 14:04
// * @description: DBF写入service
// */
//public interface CreateDbfService {
// /**
// * zip文件转json插入对应的DBF文件
// *
// * @param uuid uuid
// * @author shuguang
// * @date 2023-02-13 8:38
// */
// void zipToDbf(String uuid, BusinessTasks businessTask);
//}

@ -26,40 +26,40 @@ public interface DualMachineService {
boolean validateDualSpare(String uuid, int machineId);
/**
     * Verify the machine is alive to keep the dual-machine scheme running (called by the primary program) -- 华软
     *
     * @param uuid      uuid
     * @param machineId machine ID
* @author shuguang
* @date 2023-02-18 19:58
*/
boolean validateDualHR(String uuid, int machineId);
/**
     * Verify the machine is alive to keep the dual-machine scheme running (called by the standby program) -- 华软
     *
     * @param uuid      uuid
     * @param machineId machine ID
* @author shuguang
* @date 2023-02-18 19:58
*/
boolean validateDualSpareHR(String uuid, int machineId);
/**
     * Verify the machine is alive to keep the dual-machine scheme running (called by the primary program) -- 迈道
     *
     * @param uuid      uuid
     * @param machineId machine ID
* @author shuguang
* @date 2023-02-18 19:58
*/
boolean validateDualMD(String uuid, int machineId);
/**
     * Verify the machine is alive to keep the dual-machine scheme running (called by the standby program) -- 迈道
     *
     * @param uuid      uuid
     * @param machineId machine ID
* @author shuguang
* @date 2023-02-18 19:58
*/
boolean validateDualSpareMD(String uuid, int machineId);
///**
// * 验证机器是否启动,保证双机方案的正常运转(主程序调用)--华软
// *
// * @param uuid uuid
// * @param machineId 机器ID
// * @author shuguang
// * @date 2023-02-18 19:58
// */
//boolean validateDualHR(String uuid, int machineId);
///**
// * 验证机器是否启动,保证双机方案的正常运转(备用程序调用)--华软
// *
// * @param uuid uuid
// * @param machineId 机器ID
// * @author shuguang
// * @date 2023-02-18 19:58
// */
//boolean validateDualSpareHR(String uuid, int machineId);
///**
// * 验证机器是否启动,保证双机方案的正常运转(主程序调用)--迈道
// *
// * @param uuid uuid
// * @param machineId 机器ID
// * @author shuguang
// * @date 2023-02-18 19:58
// */
//boolean validateDualMD(String uuid, int machineId);
///**
// * 验证机器是否启动,保证双机方案的正常运转(备用程序调用)--迈道
// *
// * @param uuid uuid
// * @param machineId 机器ID
// * @author shuguang
// * @date 2023-02-18 19:58
// */
//boolean validateDualSpareMD(String uuid, int machineId);
}

@ -11,7 +11,6 @@ public interface SqliteBackupsService {
* @author shuguang
* @date 2023-02-21 9:01
* @param uuid uuid
* @return boolean
*/
boolean buildBackups(String uuid);
void buildBackups(String uuid);
}

@ -2,8 +2,8 @@ package com.nm.gsgl.service.impl;
import com.nm.gsgl.common.dbfmodule.CMinFeeCreator;
import com.nm.gsgl.common.utils.FileUtil;
import com.nm.gsgl.common.utils.LogUtil;
import com.nm.gsgl.common.utils.PathUtil;
import lombok.extern.slf4j.Slf4j;
import org.springframework.boot.ApplicationArguments;
import org.springframework.boot.ApplicationRunner;
import org.springframework.stereotype.Component;
@ -16,7 +16,6 @@ import java.time.LocalDateTime;
* createTime 2018-11-07 22:37
**/
@Component
@Slf4j
public class ApplicationRunnerImpl implements ApplicationRunner {
@Override
@ -25,17 +24,21 @@ public class ApplicationRunnerImpl implements ApplicationRunner {
String sqliteBackupsPath = PathUtil.backUpPath + "/sqliteBackups/identifier.sqlite";
File backUp = new File(sqliteBackupsPath);
if (backUp.exists()) {
log.info("identifier.sqlite存在备份恢复备份当前时间={}", LocalDateTime.now());
//log.info("identifier.sqlite存在备份恢复备份当前时间={}", LocalDateTime.now());
LogUtil.WriteLog_Info("identifier.sqlite存在备份恢复备份当前时间="+LocalDateTime.now(), "ApplicationRunnerImpl");
File my = new File(sqlitePath);
my.delete();
FileUtil.copyFile(sqliteBackupsPath, sqlitePath);
long start = System.currentTimeMillis();
long end = System.currentTimeMillis();
log.info("identifier.sqlite恢复执行完成耗时{}毫秒", end - start);
//log.info("identifier.sqlite恢复执行完成耗时{}毫秒", end - start);
LogUtil.WriteLog_Info("identifier.sqlite恢复执行完成耗时毫秒"+(end - start), "ApplicationRunnerImpl");
}else{
log.info("identifier.sqlite不存在备份使用当前文件");
//log.info("identifier.sqlite不存在备份使用当前文件");
LogUtil.WriteLog_Info("identifier.sqlite不存在备份使用当前文件", "ApplicationRunnerImpl");
}
log.info("初始化动态库目录为{}", PathUtil.classPath);
//log.info("初始化动态库目录为{}", PathUtil.classPath);
LogUtil.WriteLog_Info("初始化动态库目录为"+PathUtil.classPath, "ApplicationRunnerImpl");
CMinFeeCreator.INSTANCE.InitWorkDir(PathUtil.classPath);
}
}

@ -1,13 +1,10 @@
package com.nm.gsgl.service.impl;
import com.nm.gsgl.common.utils.CloseableHttpClientUtils;
import com.nm.gsgl.common.utils.LogUtil;
import com.nm.gsgl.entity.intermediary.AppAliveStatus;
import com.nm.gsgl.mapper.AppAliveStatusMapper;
import com.nm.gsgl.service.CallSendOutInterfaceService;
import com.nm.gsgl.service.DualMachineService;
import lombok.extern.slf4j.Slf4j;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
@ -20,15 +17,13 @@ import javax.annotation.Resource;
*/
@Service
public class CallSendOutInterfaceServiceImpl implements CallSendOutInterfaceService {
@Resource
private DualMachineService dualMachineService;
@Value("${machineId}")
private String machineId;
@Value("${sendOutInterfaceUrl}")
private String sendOutInterfaceUrl;
@Resource
private AppAliveStatusMapper appAliveStatusMapper;
public static Logger log = LoggerFactory.getLogger("CallDisInterface");
//public static Logger log = LoggerFactory.getLogger("CallDisInterface");
/**
*
@ -42,13 +37,16 @@ public class CallSendOutInterfaceServiceImpl implements CallSendOutInterfaceServ
        //dual-machine scheme
        //get this machine's ID: 1 - primary, 2 - standby
int mId = Integer.parseInt(machineId);
log.info("[uuid:{}]获取当前机器的id为{}", uuid, mId);
//log.info("[uuid:{}]获取当前机器的id为{}", uuid, mId);
LogUtil.WriteLog_CallDisInterface("[uuid:" + uuid + "]-获取当前机器的id为" + mId, "CallSendOutInterfaceServiceImpl");
AppAliveStatus appAliveStatus = appAliveStatusMapper.selectById(1);
int aLiveId = appAliveStatus.getALiveId();
if (mId == aLiveId) {
log.info("[uuid:{}]开始调用自动下发程序接口-{}进行下发", uuid, sendOutInterfaceUrl);
//log.info("[uuid:{}]开始调用自动下发程序接口-{}进行下发", uuid, sendOutInterfaceUrl);
LogUtil.WriteLog_CallDisInterface("[uuid:" + uuid + "]-开始调用自动下发程序接口进行下发" + sendOutInterfaceUrl, "CallSendOutInterfaceServiceImpl");
CloseableHttpClientUtils.doGet(sendOutInterfaceUrl, null, null, 600000);
log.info("[uuid:{}]调用自动下发程序接口-{}进行下发完成", uuid, sendOutInterfaceUrl);
//log.info("[uuid:{}]调用自动下发程序接口-{}进行下发完成", uuid, sendOutInterfaceUrl);
LogUtil.WriteLog_CallDisInterface("[uuid:" + uuid + "]-调用自动下发程序接口下发完成" + sendOutInterfaceUrl, "CallSendOutInterfaceServiceImpl");
return true;
} else {
return false;

@ -8,6 +8,7 @@ import com.nm.gsgl.common.exception.PPException;
import com.nm.gsgl.common.utils.DatabaseUtil;
import com.nm.gsgl.common.utils.DateTimeUtil;
import com.nm.gsgl.common.utils.FileUtil;
import com.nm.gsgl.common.utils.LogUtil;
import com.nm.gsgl.common.utils.MD5Util;
import com.nm.gsgl.common.utils.PathUtil;
import com.nm.gsgl.entity.FileInfo;
@ -19,8 +20,6 @@ import com.nm.gsgl.mapper.dis.NewFileInfoMapper;
import com.nm.gsgl.service.CreateDbfByDllService;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.time.DateUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
@ -64,7 +63,7 @@ public class CreateDbfByDllServiceImpl implements CreateDbfByDllService {
private NameTableAutoMapper nameTableAutoMapper;
@Resource
private NewFileInfoMapper newFileInfoMapper;
public static Logger log = LoggerFactory.getLogger("BlackCard");
//public static Logger log = LoggerFactory.getLogger("BlackCard");
/**
* dllosDBF
@ -100,7 +99,8 @@ public class CreateDbfByDllServiceImpl implements CreateDbfByDllService {
            //location where the downloaded zip is stored - it will be deleted afterwards
if (!FileUtil.fileExists(localPath + fileName)) {
log.info("[uuid:{}]下载的zip文件{}不存在", uuid, localPath + fileName);
//log.info("[uuid:{}]下载的zip文件{}不存在", uuid, localPath + fileName);
LogUtil.WriteLog_Error("[uuid:" + uuid + "]-下载的zip文件不存在" + localPath + fileName, "CreateDbfByDllServiceImpl");
throw new PPException(MessageEnum.zip.getCode(), MessageEnum.zip.getMessage());
}
//调用动态库生成DBF文件
@ -208,24 +208,29 @@ public class CreateDbfByDllServiceImpl implements CreateDbfByDllService {
if (type == 1) {
zlibName = bfName + Constant.STR_ + formateString + Constant.STR_ + version + Constant.STR_ZLIB;
zlibSqbName = bfSqbName + Constant.STR_ + formateString + Constant.STR_ + version + Constant.STR_ZLIB;
log.info("[uuid:{}]调用动态库生成全量DBF文件中,协议类型为:{}zip文件名为{}", uuid, protocolType, fileName);
//log.info("[uuid:{}]调用动态库生成全量DBF文件中,协议类型为:{}zip文件名为{}", uuid, protocolType, fileName);
LogUtil.WriteLog_HRBlackCard("[uuid:" + uuid + "]-调用动态库生成全量DBF文件中,协议类型为" + protocolType+"zip文件名为" + fileName, "CreateDbfByDllServiceImpl");
String dayBackPath = NFSFilePath + PathUtil.downFileBackup + dbfDir + File.separator;
FileUtil.fileCreat(dayBackPath);
FileUtil.copyFile(localPath + fileName, dayBackPath + fileName);
//count=100;
count = CMinFeeCreator.INSTANCE.MakeBlackAll(etcType, version, format, localPath + fileName, fileModulName + dbName, dbfFileName, zlibFileName, bytMd5);
log.info("[uuid:{}]调用动态库生成全量DBF文件完成,返回值为:{}", uuid, count);
//log.info("[uuid:{}]调用动态库生成全量DBF文件完成,返回值为:{}", uuid, count);
LogUtil.WriteLog_HRBlackCard("[uuid:" + uuid + "]-调用动态库生成全量DBF文件完成,返回值为" + count, "CreateDbfByDllServiceImpl");
log.info("[uuid:{}]调用动态库生成全量SQB文件中,协议类型为:{}zip文件名为{}", uuid, protocolType, fileName);
//log.info("[uuid:{}]调用动态库生成全量SQB文件中,协议类型为:{}zip文件名为{}", uuid, protocolType, fileName);
LogUtil.WriteLog_HRBlackCard("[uuid:" + uuid + "]-调用动态库生成全量SQB文件中,协议类型为" + protocolType+"zip文件名为" + fileName, "CreateDbfByDllServiceImpl");
//CMinFeeCreator.INSTANCE.InitWorkDir(PathUtil.dllPath);
countSqb = CMinFeeCreator.INSTANCE.MakeBlackAllSQB(etcType, version, format,
(localPath + fileName).replace("\\", "/"), sqbPathName.replace("\\", "/"),
sqbZlibPathName.replace("\\", "/"), bytMd5);
log.info("[uuid:{}]调用动态库生成增量SQB文件完成,返回值为:{}", uuid, countSqb);
//log.info("[uuid:{}]调用动态库生成增量SQB文件完成,返回值为:{}", uuid, countSqb);
LogUtil.WriteLog_HRBlackCard("[uuid:" + uuid + "]-调用动态库生成增量SQB文件完成,返回值为" + countSqb, "CreateDbfByDllServiceImpl");
} else {
log.info("[uuid:{}]调用动态库生成增量DBF文件中,协议类型为:{}zip文件名为{}", uuid, protocolType, fileName);
//log.info("[uuid:{}]调用动态库生成增量DBF文件中,协议类型为:{}zip文件名为{}", uuid, protocolType, fileName);
LogUtil.WriteLog_HRBlackCard("[uuid:" + uuid + "]-调用动态库生成增量DBF文件中,协议类型为" + protocolType+"zip文件名为" + fileName, "CreateDbfByDllServiceImpl");
                //create one backup folder per day to hold the incrementally downloaded zip files
String dayBackPath = NFSFilePath + PathUtil.downFileBackup + dbfDir + File.separator + dateTime + File.separator;
FileUtil.fileCreat(dayBackPath);
@ -242,7 +247,8 @@ public class CreateDbfByDllServiceImpl implements CreateDbfByDllService {
NewFileInfo newFileInfo = newFileInfoMapper.selectById(protocolType);
if (newFileInfo != null) {
Date updateTime = newFileInfo.getUpdateTime();
log.info("[uuid:{}]根据主键protocolType={}查询DIS_NEWFILEINFO_JAVA表中上次的生成时间为{}", uuid, protocolType, updateTime);
//log.info("[uuid:{}]根据主键protocolType={}查询DIS_NEWFILEINFO_JAVA表中上次的生成时间为{}", uuid, protocolType, updateTime);
LogUtil.WriteLog_HRBlackCard("[uuid:" + uuid + "]-根据主键protocolType=" + protocolType+"查询DIS_NEWFILEINFO_JAVA表中上次的生成时间为" + updateTime, "CreateDbfByDllServiceImpl");
int hours = DateTimeUtil.getHour(date);
if (updateTime != null) {
if (hours != DateTimeUtil.getHour(updateTime)) {
@ -260,7 +266,8 @@ public class CreateDbfByDllServiceImpl implements CreateDbfByDllService {
            //for incremental files, first look up the previous version number
if (Constant.ONE_HOUR_ISSUED.contains(protocolType)) {
version = getVersion(protocolType, date, uuid);
log.info("[uuid:{}]最终增量本次版本号为{}", uuid, version);
//log.info("[uuid:{}]最终增量本次版本号为{}", uuid, version);
LogUtil.WriteLog_HRBlackCard("[uuid:" + uuid + "]-最终增量本次版本号为" + version, "CreateDbfByDllServiceImpl");
}
//增量目录名称
StringBuilder incDirLst = new StringBuilder();
@ -293,7 +300,8 @@ public class CreateDbfByDllServiceImpl implements CreateDbfByDllService {
//获取当前时间加载增量时间之前的全量文件名
Date tenDaysAgo = DateTimeUtil.addDateDays(new Date(), -(Integer.parseInt(incDays)));
String tenDaysVersion = DateTimeUtil.getFormateString(tenDaysAgo, Constant.YYMMDD) + "103";
log.info("[uuid:{}]获取{}天前全量DBF", uuid, incDays);
//log.info("[uuid:{}]获取{}天前全量DBF", uuid, incDays);
LogUtil.WriteLog_HRBlackCard("[uuid:" + uuid + "]-获取" + incDays+"天前全量DBF" , "CreateDbfByDllServiceImpl");
if (fileInfos.size() > 0) {
//对文件的最后修改时间进行排序
fileInfos.sort(Comparator.comparing(FileInfo::getLastModified).reversed());
@ -306,12 +314,14 @@ public class CreateDbfByDllServiceImpl implements CreateDbfByDllService {
backQLName = fileInfos.get(0).getFileName();
}
} else {
log.error("[uuid:{}]获取获取{}天前全量DBF文件失败", uuid, incDays);
//log.error("[uuid:{}]获取获取{}天前全量DBF文件失败", uuid, incDays);
LogUtil.WriteLog_Error("[uuid:" + uuid + "]-获取" + incDays+"天前全量DBF文件失败" , "CreateDbfByDllServiceImpl");
//更新sqlite任务表的任务状态为2-报错
DatabaseUtil.insertSqlite(sqliteDriverName, sqliteUrl, Constant.ERROR_UPDATE_BUSINESS_HANDLER + businessTask.getId());
throw new PPException(MessageEnum..getCode(), MessageEnum..getMessage());
}
log.info("[uuid:{}]获取获取{}天前全量DBF名称{}", uuid, incDays, backQLName);
//log.info("[uuid:{}]获取获取{}天前全量DBF名称{}", uuid, incDays, backQLName);
LogUtil.WriteLog_HRBlackCard("[uuid:" + uuid + "]-获取" + incDays+"天前全量DBF名称为"+backQLName , "CreateDbfByDllServiceImpl");
}
//需要查询出最新全量
String dbfQlFileName = NFSFilePath + PathUtil.dbfBackPth + dbfDir + File.separator + backQLName;
@ -322,15 +332,23 @@ public class CreateDbfByDllServiceImpl implements CreateDbfByDllService {
sqbZlibPathName = sqbPath + dbfDir + File.separator + zlibSqbName;
//如果没有最新的全量DBF文件时不生成增量文件
if (StringUtils.isNotBlank(backQLName)) {
log.info("[uuid:{}]调用动态库生成增量入参,全量黑名单DBF文件全路径{}", uuid, dbfQlFileName);
log.info("[uuid:{}]调用动态库生成增量入参, 模板dbf文件全路径{}", uuid, fileModulName + dbName);
log.info("[uuid:{}]调用动态库生成增量入参, 生成DBF文件全路径{}", uuid, dbfFileName);
log.info("[uuid:{}]调用动态库生成增量入参, 生成DBFzlib文件全路径{}", uuid, zlibFileName);
//log.info("[uuid:{}]调用动态库生成增量入参,全量黑名单DBF文件全路径{}", uuid, dbfQlFileName);
//log.info("[uuid:{}]调用动态库生成增量入参, 模板dbf文件全路径{}", uuid, fileModulName + dbName);
//log.info("[uuid:{}]调用动态库生成增量入参, 生成DBF文件全路径{}", uuid, dbfFileName);
//log.info("[uuid:{}]调用动态库生成增量入参, 生成DBFzlib文件全路径{}", uuid, zlibFileName);
LogUtil.WriteLog_HRBlackCard("[uuid:" + uuid + "]-调用动态库生成增量入参,全量黑名单DBF文件全路径" + dbfQlFileName , "CreateDbfByDllServiceImpl");
LogUtil.WriteLog_HRBlackCard("[uuid:" + uuid + "]-调用动态库生成增量入参,模板dbf文件全路径" + fileModulName + dbName , "CreateDbfByDllServiceImpl");
LogUtil.WriteLog_HRBlackCard("[uuid:" + uuid + "]-调用动态库生成增量入参,生成DBF文件全路径" + dbfFileName , "CreateDbfByDllServiceImpl");
LogUtil.WriteLog_HRBlackCard("[uuid:" + uuid + "]-调用动态库生成增量入参,生成DBFzlib文件全路径" + zlibFileName , "CreateDbfByDllServiceImpl");
count = CMinFeeCreator.INSTANCE.MakeBlackInc(etcType, version, format, incDirLst.toString(), dbfQlFileName, fileModulName + dbName, dbfFileName, zlibFileName, bytMd5);
log.info("[uuid:{}]调用动态库生成增量DBF文件完成,返回值为:{}", uuid, count);
//log.info("[uuid:{}]调用动态库生成增量DBF文件完成,返回值为:{}", uuid, count);
LogUtil.WriteLog_HRBlackCard("[uuid:" + uuid + "]-调用动态库生成增量DBF文件完成,返回值为" + count , "CreateDbfByDllServiceImpl");
}
//
log.info("[uuid:{}]调用动态库生成增量SQB文件中,协议类型为:{}zip文件名为{}", uuid, protocolType, fileName);
//log.info("[uuid:{}]调用动态库生成增量SQB文件中,协议类型为:{}zip文件名为{}", uuid, protocolType, fileName);
LogUtil.WriteLog_HRBlackCard("[uuid:" + uuid + "]-调用动态库生成增量SQB文件中,协议类型为" + protocolType+"zip文件名为"+fileName , "CreateDbfByDllServiceImpl");
//获取最新的全量sqb文件全路径
//全量文件
@ -351,7 +369,8 @@ public class CreateDbfByDllServiceImpl implements CreateDbfByDllService {
//获取当前时间加载增量时间之前的全量文件名
Date tenDaysAgo = DateTimeUtil.addDateDays(new Date(), -(Integer.parseInt(incDays)));
String tenDaysVersion = DateTimeUtil.getFormateString(tenDaysAgo, Constant.YYMMDD) + "103";
log.info("[uuid:{}]获取{}天前全量SQB", uuid, incDays);
//log.info("[uuid:{}]获取{}天前全量SQB", uuid, incDays);
LogUtil.WriteLog_HRBlackCard("[uuid:" + uuid + "]-获取" + incDays+"天前全量SQB" , "CreateDbfByDllServiceImpl");
if (fileInfos2.size() > 0) {
//对文件的最后修改时间进行排序
fileInfos2.sort(Comparator.comparing(FileInfo::getLastModified).reversed());
@ -364,37 +383,50 @@ public class CreateDbfByDllServiceImpl implements CreateDbfByDllService {
pblackAllFileName = fileInfos2.get(0).getFileName();
}
} else {
log.error("[uuid:{}]获取获取{}天前全量SQB文件失败", uuid, incDays);
//log.error("[uuid:{}]获取获取{}天前全量SQB文件失败", uuid, incDays);
LogUtil.WriteLog_Error("[uuid:" + uuid + "]-获取" + incDays+"天前全量SQB文件失败" , "CreateDbfByDllServiceImpl");
//更新sqlite任务表的任务状态为2-报错
DatabaseUtil.insertSqlite(sqliteDriverName, sqliteUrl, Constant.ERROR_UPDATE_BUSINESS_HANDLER + businessTask.getId());
throw new PPException(MessageEnum..getCode(), MessageEnum..getMessage());
}
log.info("[uuid:{}]获取获取{}天前全量SQB名称{}", uuid, incDays, pblackAllFileName);
//log.info("[uuid:{}]获取获取{}天前全量SQB名称{}", uuid, incDays, pblackAllFileName);
LogUtil.WriteLog_HRBlackCard("[uuid:" + uuid + "]-获取" + incDays+"天前全量SQB名称"+backQLName , "CreateDbfByDllServiceImpl");
}
//如果没有最新的全量SQB文件时不生成增量文件
if (StringUtils.isNotBlank(pblackAllFileName)) {
log.info("[uuid:{}]调用动态库生成增量入参,全量SQB文件全路径{}", uuid, (sqbQlFileDir + File.separator + pblackAllFileName).replace("\\", "/"));
log.info("[uuid:{}]调用动态库生成增量入参, 生成SQB文件全路径{}", uuid, sqbPathName.replace("\\", "/"));
log.info("[uuid:{}]调用动态库生成增量入参, 生成SQBzlib文件全路径{}", uuid, sqbZlibPathName.replace("\\", "/"));
//log.info("[uuid:{}]调用动态库生成增量入参,全量SQB文件全路径{}", uuid, (sqbQlFileDir + File.separator + pblackAllFileName).replace("\\", "/"));
//log.info("[uuid:{}]调用动态库生成增量入参, 生成SQB文件全路径{}", uuid, sqbPathName.replace("\\", "/"));
//log.info("[uuid:{}]调用动态库生成增量入参, 生成SQBzlib文件全路径{}", uuid, sqbZlibPathName.replace("\\", "/"));
LogUtil.WriteLog_HRBlackCard("[uuid:" + uuid + "]-调用动态库生成增量入参,全量SQB文件全路径" + (sqbQlFileDir + File.separator + pblackAllFileName).replace("\\", "/") , "CreateDbfByDllServiceImpl");
LogUtil.WriteLog_HRBlackCard("[uuid:" + uuid + "]-调用动态库生成增量入参,生成SQB文件全路径" + sqbPathName.replace("\\", "/") , "CreateDbfByDllServiceImpl");
LogUtil.WriteLog_HRBlackCard("[uuid:" + uuid + "]-调用动态库生成增量入参,生成SQBzlib文件全路径" + sqbZlibPathName.replace("\\", "/") , "CreateDbfByDllServiceImpl");
countSqb = CMinFeeCreator.INSTANCE.MakeBlackIncSQB(etcType, version, format, incDirLst.toString(),
(sqbQlFileDir + File.separator + pblackAllFileName).replace("\\", "/"),
sqbPathName.replace("\\", "/"), sqbZlibPathName.replace("\\", "/"), bytMd5);
log.info("[uuid:{}]调用动态库生成增量SQB文件完成,返回值为:{}", uuid, countSqb);
//log.info("[uuid:{}]调用动态库生成增量SQB文件完成,返回值为:{}", uuid, countSqb);
LogUtil.WriteLog_HRBlackCard("[uuid:" + uuid + "]-]调用动态库生成增量SQB文件完成,返回值为" + countSqb , "CreateDbfByDllServiceImpl");
}
} else {
log.info("[uuid:{}]该时间点{}已下发/或者该点不下发", uuid, protocolType);
//log.info("[uuid:{}]该时间点{}已下发/或者该点不下发", uuid, protocolType);
LogUtil.WriteLog_HRBlackCard("[uuid:" + uuid + "]-该时间点" + protocolType+"已下发/或者该点不下发" , "CreateDbfByDllServiceImpl");
//更新sqlite任务表的任务状态为3-不下发
DatabaseUtil.insertSqlite(sqliteDriverName, sqliteUrl,
Constant.NOT_DIS_UPDATE_BUSINESS_HANDLER + DateTimeUtil.getFormateString(new Date(), Constant.YYYY_MM_DD_HH_MM_SS) + Constant.WHERE_ID + businessTask.getId());
log.info("[uuid:{}]更新business_tasks_info任务表的下发状态为3-不下发id为:{}", uuid, businessTask.getId());
//log.info("[uuid:{}]更新business_tasks_info任务表的下发状态为3-不下发id为:{}", uuid, businessTask.getId());
LogUtil.WriteLog_HRBlackCard("[uuid:" + uuid + "]-更新business_tasks_info任务表的下发状态为3-不下发id为" + businessTask.getId() , "CreateDbfByDllServiceImpl");
}
}
log.info("[uuid:{}]调用动态库生成DBF返回值count为{}", uuid, count);
log.info("[uuid:{}]调用动态库生成SQB返回值count为{}", uuid, countSqb);
//log.info("[uuid:{}]调用动态库生成DBF返回值count为{}", uuid, count);
//log.info("[uuid:{}]调用动态库生成SQB返回值count为{}", uuid, countSqb);
LogUtil.WriteLog_HRBlackCard("[uuid:" + uuid + "]-调用动态库生成DBF返回值count为" + count , "CreateDbfByDllServiceImpl");
LogUtil.WriteLog_HRBlackCard("[uuid:" + uuid + "]-调用动态库生成SQB返回值count为" + countSqb , "CreateDbfByDllServiceImpl");
if (count != 0 && countSqb != 0) {
if (count > 0 && countSqb > 0) {
log.info("[uuid:{}]开始生成DBF备份文件", uuid);
//log.info("[uuid:{}]开始生成DBF备份文件", uuid);
LogUtil.WriteLog_HRBlackCard("[uuid:" + uuid + "]-]开始生成DBF备份文件" , "CreateDbfByDllServiceImpl");
String formateStr = DateTimeUtil.getFormateString(date, Constant.YYYYMMDDHHMMSS);
String newDbfBackUpName = formateStr + Constant.STR_ + bfName + Constant.STR_ + formateString + Constant.STR_ + version + Constant.STR_DBF;
//dbf备份目录
@ -406,7 +438,8 @@ public class CreateDbfByDllServiceImpl implements CreateDbfByDllService {
FileUtil.copyFile(dbfFileName, dbfBlackFileName);
//FileUtil.moveFileReName(NFSFilePath + PathUtil.dbfBackPth + dbName, dbfBlackFileName);
log.info("[uuid:{}]开始生成SQB备份文件", uuid);
//log.info("[uuid:{}]开始生成SQB备份文件", uuid);
LogUtil.WriteLog_HRBlackCard("[uuid:" + uuid + "]-]开始生成SQB备份文件" , "CreateDbfByDllServiceImpl");
//sqb备份目录
FileUtil.fileCreat(NFSFilePath + PathUtil.sqbBackPath + dbfDir);
String newSqbBackUpName = formateStr + Constant.STR_ + bfSqbName + Constant.STR_ + formateString + Constant.STR_ + version + Constant.STR_SQB;
@ -416,7 +449,8 @@ public class CreateDbfByDllServiceImpl implements CreateDbfByDllService {
//将zlib文件上传到指定的FTP上方便分发时使用及下载
//获取生成DBF的zlib的文件名
String zlibFileNameMd5 = FileUtil.zlibFileName(NFSFilePath + PathUtil.dbfWrtPth + dbfDir, zlibName.replace(Constant.STR_ZLIB, Constant.NULL_STRING));
log.info("[uuid:{}]获取zlib的文件名称为:{}", uuid, zlibFileNameMd5);
//log.info("[uuid:{}]获取zlib的文件名称为:{}", uuid, zlibFileNameMd5);
LogUtil.WriteLog_HRBlackCard("[uuid:" + uuid + "]-获取zlib的文件名称为" + zlibFileNameMd5, "CreateDbfByDllServiceImpl");
//获取生成SQB的zlib的文件名
String zlibFileNameMd5Sqb = FileUtil.zlibFileName(NFSFilePath + PathUtil.sqbPath + dbfDir, zlibSqbName.replace(Constant.STR_ZLIB, Constant.NULL_STRING));
@ -514,25 +548,31 @@ public class CreateDbfByDllServiceImpl implements CreateDbfByDllService {
String nowString = DateTimeUtil.getFormateString(new Date(), Constant.YYYY_MM_DD_HH_MM_SS);
String sqlParam = Constant.UPDATE_BUSINESS_HANDLER + nowString + Constant.PUBLISH_TIME + nowString + Constant.WHERE_ID + businessTask.getId();
if (DatabaseUtil.insertSqlite(sqliteDriverName, sqliteUrl, sqlParam) > 0) {
log.info("[uuid:{}]更新business_tasks_info调用动态库任务列表成功id为:{}", uuid, businessTask.getId());
//log.info("[uuid:{}]更新business_tasks_info调用动态库任务列表成功id为:{}", uuid, businessTask.getId());
LogUtil.WriteLog_HRBlackCard("[uuid:" + uuid + "]-更新business_tasks_info调用动态库任务列表成功,id为" + businessTask.getId(), "CreateDbfByDllServiceImpl");
}
log.info("[uuid:{}]调用动态库生成DBF任务,下载协议类型:{},zip文件名为:{}执行成功", uuid, protocolType, fileName);
//log.info("[uuid:{}]调用动态库生成DBF任务,下载协议类型:{},zip文件名为:{}执行成功", uuid, protocolType, fileName);
LogUtil.WriteLog_HRBlackCard("[uuid:" + uuid + "]-调用动态库生成DBF及SQB任务,下载协议类型为" + protocolType+"zip文件名为"+fileName+"执行成功", "CreateDbfByDllServiceImpl");
} else {
//LogUtil.WriteLog_HRBlackCard("[uuid:" + uuid + "]-调用动态库生成DBF及SQB任务,下载协议类型为" + protocolType+"zip文件名为"+fileName+"执行失败等待下次轮询", "CreateDbfByDllServiceImpl");
//更新sqlite任务表的任务状态为2-报错(等待下次轮询)
DatabaseUtil.insertSqlite(sqliteDriverName, sqliteUrl, Constant.ERROR_UPDATE_BUSINESS_HANDLER + businessTask.getId());
throw new PPException(MessageEnum.DBFSQB.getCode(), MessageEnum.DBFSQB.getMessage()
+ ",DBF动态库返回值为" + count + ",SQB动态库返回值为" + countSqb);
}
} else {
log.info("[uuid:{}]该时间点{}已下发/或者该点不下发", uuid, protocolType);
//log.info("[uuid:{}]该时间点{}已下发/或者该点不下发", uuid, protocolType);
LogUtil.WriteLog_HRBlackCard("[uuid:" + uuid + "]-该时间点" + protocolType+"已下发/或者该点不下发", "CreateDbfByDllServiceImpl");
//更新sqlite任务表的任务状态为3-不下发
DatabaseUtil.insertSqlite(sqliteDriverName, sqliteUrl,
Constant.NOT_DIS_UPDATE_BUSINESS_HANDLER + DateTimeUtil.getFormateString(new Date(), Constant.YYYY_MM_DD_HH_MM_SS) + Constant.WHERE_ID + businessTask.getId());
log.info("[uuid:{}]更新business_tasks_info任务表的下发状态为3-不下发id为:{}", uuid, businessTask.getId());
//log.info("[uuid:{}]更新business_tasks_info任务表的下发状态为3-不下发id为:{}", uuid, businessTask.getId());
LogUtil.WriteLog_HRBlackCard("[uuid:" + uuid + "]-更新business_tasks_info任务表的下发状态为3-不下发id为" + businessTask.getId(), "CreateDbfByDllServiceImpl");
}
} catch (Exception e) {
log.error("[uuid:{}]调用动态库生成DBF及SQB失败{}", uuid, e.getMessage(), e);
//log.error("[uuid:{}]调用动态库生成DBF及SQB失败{}", uuid, e.getMessage(), e);
LogUtil.WriteLog_Error("[uuid:" + uuid + "]调用动态库生成DBF及SQB失败" + e.getMessage(), "CreateDbfByDllServiceImpl");
//更新sqlite任务表的任务状态为2-报错
DatabaseUtil.insertSqlite(sqliteDriverName, sqliteUrl, Constant.ERROR_UPDATE_BUSINESS_HANDLER + businessTask.getId());
throw new PPException(MessageEnum.DBFSQB.getCode(), MessageEnum.DBFSQB.getMessage()
@ -608,7 +648,8 @@ public class CreateDbfByDllServiceImpl implements CreateDbfByDllService {
if (newFileInfo == null) {
                newVersion = versionPre + 104;
} else {
log.info("[uuid:{}]获取上一版本号为{}", uuid, newFileInfo.getVersion());
//log.info("[uuid:{}]获取上一版本号为{}", uuid, newFileInfo.getVersion());
LogUtil.WriteLog_HRBlackCard("[uuid:" + uuid + "]-获取上一版本号为" + newFileInfo.getVersion(), "CreateDbfByDllServiceImpl");
if (newFileInfo.getVersion().substring(0, 6).equals(versionPre)) {
newVersion = String.valueOf(Integer.parseInt(newFileInfo.getVersion()) + 1);
} else {
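The incremental version number above appears to be the yyMMdd prefix plus a per-day sequence that starts at 104; a small sketch of that rule, where the branch cut off by this hunk (what happens on a new day) is an assumption:

public class IncVersionSketch {
    // versionPre is the yyMMdd prefix for "now"; previous is the last issued version, or null if none.
    public static String nextVersion(String versionPre, String previous) {
        if (previous == null) {
            return versionPre + 104;                               // first incremental version of the day
        }
        if (previous.substring(0, 6).equals(versionPre)) {
            return String.valueOf(Integer.parseInt(previous) + 1); // same day: bump the sequence
        }
        return versionPre + 104;                                   // assumption: a new day restarts at 104
    }

    public static void main(String[] args) {
        System.out.println(nextVersion("240527", null));           // 240527104
        System.out.println(nextVersion("240527", "240527104"));    // 240527105
        System.out.println(nextVersion("240528", "240527110"));    // 240528104 (assumed)
    }
}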

@ -9,6 +9,7 @@ import com.nm.gsgl.common.exception.PPException;
import com.nm.gsgl.common.utils.DatabaseUtil;
import com.nm.gsgl.common.utils.DateTimeUtil;
import com.nm.gsgl.common.utils.FileUtil;
import com.nm.gsgl.common.utils.LogUtil;
import com.nm.gsgl.common.utils.MD5Util;
import com.nm.gsgl.common.utils.PathUtil;
import com.nm.gsgl.common.utils.PropertiesUtil;
@ -20,8 +21,6 @@ import com.nm.gsgl.mapper.NameTableMapper;
import com.nm.gsgl.mapper.dis.DisMinFeeFileMapper;
import com.nm.gsgl.service.CreateSqbByDllService;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
@ -50,7 +49,7 @@ public class CreateSqbByDllServiceImpl implements CreateSqbByDllService {
public String NFSFilePath;
@Resource
private DisMinFeeFileMapper disMinFeeFileMapper;
public static Logger log = LoggerFactory.getLogger("MinFee");
//public static Logger log = LoggerFactory.getLogger("MinFee");
/**
* dllosSQB
@ -72,7 +71,8 @@ public class CreateSqbByDllServiceImpl implements CreateSqbByDllService {
String version = businessTask.getNewVersion();
//压缩文件存放-会被删除
if (!FileUtil.fileExists(localPath + fileName)) {
log.info("[uuid:{}]下载的zip文件{}不存在", uuid, localPath + fileName);
//log.info("[uuid:{}]下载的zip文件{}不存在", uuid, localPath + fileName);
LogUtil.WriteLog_MinFee("[uuid:" + uuid + "]-下载的zip文件不存在" +localPath + fileName, "CreateSqbByDllServiceImpl");
//更新sqlite任务表的任务状态为2-报错
DatabaseUtil.insertSqlite(sqliteDriverName, sqliteUrl, Constant.ERROR_UPDATE_BUSINESS_HANDLER + businessTask.getId());
throw new PPException(MessageEnum.zip.getCode(), MessageEnum.zip.getMessage());
@ -89,9 +89,11 @@ public class CreateSqbByDllServiceImpl implements CreateSqbByDllService {
String fileDt = split[2].replace(Constant.STR_ZIP, Constant.NULL_STRING);
QueryWrapper<NameTable> queryWrapper = new QueryWrapper<>();
queryWrapper.eq("GBSTATIONID", exId);
log.info("[uuid:{}]查询的NAME_TABLE中GBSTATIONID={}的数据", uuid, exId);
//log.info("[uuid:{}]查询的NAME_TABLE中GBSTATIONID={}的数据", uuid, exId);
LogUtil.WriteLog_MinFee("[uuid:" + uuid + "]-查询的NAME_TABLE中GBSTATIONID=" +exId, "CreateSqbByDllServiceImpl");
NameTable nameTable = nameTableMapper.selectOne(queryWrapper);
log.info("[uuid:{}]查询的NAME_TABLE中GBSTATIONID={}的数据为{}", uuid, exId, nameTable);
//log.info("[uuid:{}]查询的NAME_TABLE中GBSTATIONID={}的数据为{}", uuid, exId, nameTable);
LogUtil.WriteLog_MinFee("[uuid:" + uuid + "]-查询的NAME_TABLE中GBSTATIONID=" +exId+"的数据为"+nameTable, "CreateSqbByDllServiceImpl");
if (nameTable != null) {
String subComNO;
//if (Constant.WU_XIN_STAID.contains(exId)) {
@ -121,7 +123,8 @@ public class CreateSqbByDllServiceImpl implements CreateSqbByDllService {
//}
}
Integer staNo = nameTable.getStaNo();
log.info("[uuid:{}]获取分公司编号为{}", uuid, subComNO);
//log.info("[uuid:{}]获取分公司编号为{}", uuid, subComNO);
LogUtil.WriteLog_MinFee("[uuid:" + uuid + "]-获取分公司编号为=" +subComNO, "CreateSqbByDllServiceImpl");
//String zlibName = "ALLROADMINFEE_" + fileDt + Constant.STR_ + exId + Constant.STR_ + subComNO + Constant.STR_ + staNo;
String zlibName = "ALLROADMINFEE_" + fileDt + Constant.STR_ + staNo;
@ -140,7 +143,8 @@ public class CreateSqbByDllServiceImpl implements CreateSqbByDllService {
//获取上一版本最新的文件夹
//上一版本路径
List<Long> longs = FileUtil.folderNames(sqlLiteDir + File.separator + "SQLLITE" + File.separator);
log.info("[uuid:{}]获取SQB下的文件夹个数为{}", uuid, longs.size());
//log.info("[uuid:{}]获取SQB下的文件夹个数为{}", uuid, longs.size());
LogUtil.WriteLog_MinFee("[uuid:" + uuid + "]-获取SQB下的文件夹个数为=" +longs.size(), "CreateSqbByDllServiceImpl");
//排序
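//将版本文件夹升序排序;排序后末尾的文件夹即被当作最近的上一版本SQB,用于后续拷贝 (descriptive note: the last entry after natural-order sorting is treated as the previous version)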
longs.sort(Comparator.naturalOrder());
String sqlLiteName = "ALLROADMINFEE_" + staInfo + Constant.STR_SQB;
@ -152,26 +156,31 @@ public class CreateSqbByDllServiceImpl implements CreateSqbByDllService {
String pre_sqlLiteName = FileUtil.zlibFileName(pre_sqlLitePath, exId);
String preSqlLitePath = pre_sqlLitePath + File.separator + pre_sqlLiteName;
log.info("[uuid:{}]获取SQB上一个版本号是{}sqb文件全路径是{}", uuid, pre_Version, preSqlLitePath);
//log.info("[uuid:{}]获取SQB上一个版本号是{}sqb文件全路径是{}", uuid, pre_Version, preSqlLitePath);
LogUtil.WriteLog_MinFee("[uuid:" + uuid + "]-获取SQB上一个版本号是" +pre_Version+"sqb文件全路径是"+preSqlLitePath, "CreateSqbByDllServiceImpl");
//System.out.println(pre_sqlLitePath + File.separator + pre_sqlLiteName);
//前一版本拷贝新版本-并重新命名
if (StringUtils.isNotBlank(pre_sqlLiteName) && FileUtil.fileExists(pre_sqlLitePath + File.separator + pre_sqlLiteName)) {
FileUtil.fileCopyReName(pre_sqlLitePath + File.separator + pre_sqlLiteName, sqbFileDll);
} else {
log.info("[uuid:{}]获取上一版本SQB文件不存在新创建文件{}", uuid, sqbFileDll);
//log.info("[uuid:{}]获取上一版本SQB文件不存在新创建文件{}", uuid, sqbFileDll);
LogUtil.WriteLog_MinFee("[uuid:" + uuid + "]-获取上一版本SQB文件不存在新创建文件" +sqbFileDll, "CreateSqbByDllServiceImpl");
FileUtil.newFileCreat(sqbFileDll);
}
} else {
log.info("[uuid:{}]获取上一版本SQB文件不存在新创建文件{}", uuid, sqbFileDll);
//log.info("[uuid:{}]获取上一版本SQB文件不存在新创建文件{}", uuid, sqbFileDll);
LogUtil.WriteLog_MinFee("[uuid:" + uuid + "]-获取上一版本SQB文件不存在新创建文件" +sqbFileDll, "CreateSqbByDllServiceImpl");
FileUtil.newFileCreat(sqbFileDll);
}
String zlibFileDll = sqlLitePathDll + zlibName + Constant.STR_ZLIB;
log.info("[uuid:{}]调用动态库生成最小费额SQB文件中,协议类型为:{}zip文件名为{}", uuid, protocolType, fileName);
//log.info("[uuid:{}]调用动态库生成最小费额SQB文件中,协议类型为:{}zip文件名为{}", uuid, protocolType, fileName);
LogUtil.WriteLog_MinFee("[uuid:" + uuid + "]-调用动态库生成最小费额SQB文件中,协议类型为" +protocolType+"zip文件名为"+fileName, "CreateSqbByDllServiceImpl");
String zipFilePath = (localPath + fileName).replace("\\", "/");
String sqbFileDllPath = sqbFileDll.replace("\\", "/");
String zlibFileDllPath = zlibFileDll.replace("\\", "/");
int count = CMinFeeCreator.INSTANCE.MakeMinFee(zipFilePath, sqbFileDllPath, zlibFileDllPath, bytMd5);
log.info("[uuid:{}]调用动态库生成最小费额SQB文件返回count为{}", uuid, count);
//log.info("[uuid:{}]调用动态库生成最小费额SQB文件返回count为{}", uuid, count);
LogUtil.WriteLog_MinFee("[uuid:" + uuid + "]-调用动态库生成最小费额SQB文件返回count为" +count, "CreateSqbByDllServiceImpl");
if (count >= 0) {
String sqbInsertPath = NFSFilePath + File.separator + "ParamInsertDb" + File.separator + "MinFeeInToDataBase" + File.separator;
FileUtil.fileCreat(sqbInsertPath);
@ -182,8 +191,8 @@ public class CreateSqbByDllServiceImpl implements CreateSqbByDllService {
//获取生成的zlib的文件名
String zlibFileNameMd5 = FileUtil.zlibFileName(sqlLitePathDll, zlibName.replace(Constant.STR_ZLIB, Constant.NULL_STRING) + Constant.STR_);
log.info("[uuid:{}]获取zlib的文件名称为:{}", uuid, zlibFileNameMd5);
//log.info("[uuid:{}]获取zlib的文件名称为:{}", uuid, zlibFileNameMd5);
LogUtil.WriteLog_MinFee("[uuid:" + uuid + "]-获取zlib的文件名称为" +zlibFileNameMd5, "CreateSqbByDllServiceImpl");
//获取高路分中心需要下发的收费站编码,如果是多个站,站编码用,间隔
String glMinFeeFZXs = PropertiesUtil.getValue("GLMinFeeFZXs");
String glMinFeeStaNos = PropertiesUtil.getValue("GLMinFeeStaNos");
@ -247,21 +256,25 @@ public class CreateSqbByDllServiceImpl implements CreateSqbByDllService {
String nowString = DateTimeUtil.getFormateString(new Date(), Constant.YYYY_MM_DD_HH_MM_SS);
String sqlParam = Constant.UPDATE_BUSINESS_HANDLER + nowString + Constant.PUBLISH_TIME + nowString + Constant.WHERE_ID + businessTask.getId();
if (DatabaseUtil.insertSqlite(sqliteDriverName, sqliteUrl, sqlParam) > 0) {
log.info("[uuid:{}]更新business_tasks_info调用动态库任务列表成功id为:{}", uuid, businessTask.getId());
//log.info("[uuid:{}]更新business_tasks_info调用动态库任务列表成功id为:{}", uuid, businessTask.getId());
LogUtil.WriteLog_MinFee("[uuid:" + uuid + "]-更新business_tasks_info调用动态库任务列表成功id为" +businessTask.getId(), "CreateSqbByDllServiceImpl");
}
} else {
log.error("[uuid:{}]调用动态库生成SQB操作失败返回值count为{}", uuid, count);
//log.error("[uuid:{}]调用动态库生成SQB操作失败返回值count为{}", uuid, count);
LogUtil.WriteLog_Error("[uuid:" + uuid + "]-调用动态库生成最小费额SQB操作失败返回值count为" +count, "CreateSqbByDllServiceImpl");
//更新sqlite任务表的任务状态为2-报错
DatabaseUtil.insertSqlite(sqliteDriverName, sqliteUrl, Constant.ERROR_UPDATE_BUSINESS_HANDLER + businessTask.getId());
throw new PPException(MessageEnum.SQB.getCode(), MessageEnum.SQB.getMessage());
}
} else {
log.info("[uuid:{}]查询的NAME_TABLE中GBSTATIONID={}的数据为null,跳过本次处理", uuid, exId);
//log.info("[uuid:{}]查询的NAME_TABLE中GBSTATIONID={}的数据为null,跳过本次处理", uuid, exId);
LogUtil.WriteLog_MinFee("[uuid:" + uuid + "]-查询的NAME_TABLE中GBSTATIONID=" +exId+"的数据为null,跳过本次处理", "CreateSqbByDllServiceImpl");
String nowString = DateTimeUtil.getFormateString(new Date(), Constant.YYYY_MM_DD_HH_MM_SS);
String sqlParam = Constant.UPDATE_BUSINESS_HANDLER + nowString + Constant.PUBLISH_TIME + nowString + Constant.WHERE_ID + businessTask.getId();
if (DatabaseUtil.insertSqlite(sqliteDriverName, sqliteUrl, sqlParam) > 0) {
log.info("[uuid:{}]更新business_tasks_info调用动态库任务列表成功id为:{}", uuid, businessTask.getId());
//log.info("[uuid:{}]更新business_tasks_info调用动态库任务列表成功id为:{}", uuid, businessTask.getId());
LogUtil.WriteLog_MinFee("[uuid:" + uuid + "]-更新business_tasks_info调用动态库任务列表成功id为" +businessTask.getId(), "CreateSqbByDllServiceImpl");
}
}

@ -3,10 +3,10 @@ package com.nm.gsgl.service.impl;
import com.nm.gsgl.common.Constant;
import com.nm.gsgl.common.utils.DatabaseUtil;
import com.nm.gsgl.common.utils.FileUtil;
import com.nm.gsgl.common.utils.LogUtil;
import com.nm.gsgl.common.utils.PathUtil;
import com.nm.gsgl.entity.FileInfo;
import com.nm.gsgl.service.DelOverdueService;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
@ -20,7 +20,6 @@ import java.util.List;
* @description:
*/
@Service
@Slf4j
public class DelOverdueServiceImpl implements DelOverdueService {
/**
* zip
@ -61,38 +60,47 @@ public class DelOverdueServiceImpl implements DelOverdueService {
*/
@Override
public void deleteOverFile(String uuid) {
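//清理顺序:下载的zip文件、zip备份、生成的zlib文件、dbf备份,最后清理NFSFilePath下的超期文件/空文件夹以及sqlite中超期的business_tasks_info记录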
log.info("[uuid:{}]开始删除超期的zip下载文件路径{}", uuid, NFSFilePath + PathUtil.downFile);
//log.info("[uuid:{}]开始删除超期的zip下载文件路径{}", uuid, NFSFilePath + PathUtil.downFile);
LogUtil.WriteLog_DeleteOverFile("[uuid:" + uuid + "]-开始删除超期的zip下载文件路径" + NFSFilePath + PathUtil.downFile, "DelOverdueServiceImpl");
List<FileInfo> fileInfos = new ArrayList<>();
FileUtil.obtainFileInfo(NFSFilePath + PathUtil.downFile, fileInfos);
FileUtil.deleteOverdueFile(fileInfos, Integer.parseInt(downloadTime));
log.info("[uuid:{}]删除超期的zip下载文件路径{}完成", uuid, NFSFilePath + PathUtil.downFile);
//log.info("[uuid:{}]删除超期的zip下载文件路径{}完成", uuid, NFSFilePath + PathUtil.downFile);
LogUtil.WriteLog_DeleteOverFile("[uuid:" + uuid + "]-删除超期的zip下载文件路径" + NFSFilePath + PathUtil.downFile+"完成", "DelOverdueServiceImpl");
log.info("[uuid:{}]开始删除下载的zip文件备份文件路径{}", uuid, NFSFilePath + PathUtil.downFileBackup);
//log.info("[uuid:{}]开始删除下载的zip文件备份文件路径{}", uuid, NFSFilePath + PathUtil.downFileBackup);
LogUtil.WriteLog_DeleteOverFile("[uuid:" + uuid + "]-开始删除下载的zip文件备份文件路径" + NFSFilePath + PathUtil.downFileBackup, "DelOverdueServiceImpl");
List<FileInfo> fileInfos2 = new ArrayList<>();
FileUtil.obtainFileInfo(NFSFilePath + PathUtil.downFileBackup, fileInfos2);
FileUtil.deleteOverdueFile(fileInfos2, Integer.parseInt(zipTime));
log.info("[uuid:{}]删除下载的zip文件备份文件路径{}完成", uuid, NFSFilePath + PathUtil.downFileBackup);
//log.info("[uuid:{}]删除下载的zip文件备份文件路径{}完成", uuid, NFSFilePath + PathUtil.downFileBackup);
LogUtil.WriteLog_DeleteOverFile("[uuid:" + uuid + "]-删除下载的zip文件备份文件路径" + NFSFilePath + PathUtil.downFile+"完成", "DelOverdueServiceImpl");
log.info("[uuid:{}]开始删除zlib文件路径{}", uuid, NFSFilePath + PathUtil.dbfWrtPth);
//log.info("[uuid:{}]开始删除zlib文件路径{}", uuid, NFSFilePath + PathUtil.dbfWrtPth);
LogUtil.WriteLog_DeleteOverFile("[uuid:" + uuid + "]-开始删除zlib文件路径" + NFSFilePath + PathUtil.dbfWrtPth, "DelOverdueServiceImpl");
List<FileInfo> fileInfos3 = new ArrayList<>();
FileUtil.obtainFileInfo(NFSFilePath + PathUtil.dbfWrtPth, fileInfos3);
FileUtil.deleteOverdueFile(fileInfos3, Integer.parseInt(zlibLWTime));
log.info("[uuid:{}]删除zlib文件路径{}完成", uuid, NFSFilePath + PathUtil.dbfWrtPth);
//log.info("[uuid:{}]删除zlib文件路径{}完成", uuid, NFSFilePath + PathUtil.dbfWrtPth);
LogUtil.WriteLog_DeleteOverFile("[uuid:" + uuid + "]-删除zlib文件路径" + NFSFilePath + PathUtil.dbfWrtPth+"完成", "DelOverdueServiceImpl");
log.info("[uuid:{}]开始删除dbf文件备份路径{}", uuid, NFSFilePath + PathUtil.dbfBackPth);
//log.info("[uuid:{}]开始删除dbf文件备份路径{}", uuid, NFSFilePath + PathUtil.dbfBackPth);
LogUtil.WriteLog_DeleteOverFile("[uuid:" + uuid + "]-开始删除dbf文件备份路径" + NFSFilePath + PathUtil.dbfBackPth, "DelOverdueServiceImpl");
List<FileInfo> fileInfos4 = new ArrayList<>();
FileUtil.obtainFileInfo(NFSFilePath + PathUtil.dbfBackPth, fileInfos4);
FileUtil.deleteOverdueFile(fileInfos4, Integer.parseInt(dbfTime));
log.info("[uuid:{}]删除dbf文件备份路径{}完成", uuid, NFSFilePath + PathUtil.dbfBackPth);
log.info("[uuid:{}]开始删除目录下的超过{}天文件及空的文件夹路径{}", uuid, dbfTime, NFSFilePath);
List<FileInfo> fileInfos5 = new ArrayList<>();
//log.info("[uuid:{}]删除dbf文件备份路径{}完成", uuid, NFSFilePath + PathUtil.dbfBackPth);
LogUtil.WriteLog_DeleteOverFile("[uuid:" + uuid + "]-删除dbf文件备份路径" + NFSFilePath + PathUtil.dbfBackPth+"完成", "DelOverdueServiceImpl");
LogUtil.WriteLog_DeleteOverFile("[uuid:" + uuid + "]-开始删除目录下的空的文件夹路径" + NFSFilePath, "DelOverdueServiceImpl");
FileUtil.deleteEmptyFolder(new File(NFSFilePath));
FileUtil.obtainFileInfo(NFSFilePath, fileInfos5);
FileUtil.deleteOverdueFile(fileInfos5, Integer.parseInt(dbfTime));
log.info("[uuid:{}]删除目录下的超过{}天文件及空的文件夹{}完成", uuid, dbfTime, NFSFilePath);
LogUtil.WriteLog_DeleteOverFile("[uuid:" + uuid + "]-删除目录下的空的文件夹路径" + NFSFilePath+"完成", "DelOverdueServiceImpl");
//删除超期的sqlite任务表信息
log.info("[uuid:{}]开始删除超期的sqlite任务表business_tasks_info信息", uuid);
//log.info("[uuid:{}]开始删除超期的sqlite任务表business_tasks_info信息", uuid);
LogUtil.WriteLog_DeleteOverFile("[uuid:" + uuid + "]-开始删除超期的sqlite任务表business_tasks_info信息,sqlite数据保留天数为" +retainDays , "DelOverdueServiceImpl");
DatabaseUtil.insertSqlite(sqliteDriverName, sqliteUrl, Constant.DEL_BUSINESS_OVERDUE.replace(Constant.STR_AAAAA, retainDays));
log.info("[uuid:{}]删除超期的sqlite任务表business_tasks_info信息完成", uuid);
//log.info("[uuid:{}]删除超期的sqlite任务表business_tasks_info信息完成", uuid);
LogUtil.WriteLog_DeleteOverFile("[uuid:" + uuid + "]-删除超期的sqlite任务表business_tasks_info信息完成" , "DelOverdueServiceImpl");
}
}

@ -7,12 +7,12 @@ import com.nm.gsgl.common.exception.PPException;
import com.nm.gsgl.common.utils.DatabaseUtil;
import com.nm.gsgl.common.utils.FTPUtil;
import com.nm.gsgl.common.utils.FileUtil;
import com.nm.gsgl.common.utils.LogUtil;
import com.nm.gsgl.common.utils.PathUtil;
import com.nm.gsgl.entity.ZipFileInfo;
import com.nm.gsgl.entity.intermediary.DownloadNotify;
import com.nm.gsgl.entity.intermediary.ParamNotify;
import com.nm.gsgl.service.DownloadZipService;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
@ -26,7 +26,7 @@ import java.util.List;
* @description:
*/
@Service
@Slf4j
//@Slf4j
public class DownloadZipServiceImpl implements DownloadZipService {
/**
* FTP
@ -83,7 +83,8 @@ public class DownloadZipServiceImpl implements DownloadZipService {
*/
@Override
public void downloadZip(String uuid, ZipFileInfo zipFileInfo, HttpServletResponse response) {
log.info("[uuid:{}]-开始下载zip文件", uuid);
//log.info("[uuid:{}]-开始下载zip文件", uuid);
LogUtil.WriteLog_Info("[uuid:" + uuid + "]-开始下载zip文件" , "DownloadZipServiceImpl");
String localPath = PathUtil.downFile;
FileUtil.fileCreat(localPath);
String fileName = zipFileInfo.getFileName();
@ -92,10 +93,12 @@ public class DownloadZipServiceImpl implements DownloadZipService {
//设置文件路径
FileUtil.fileInput(response, fileName, new File(downFilePath));
} else {
log.info("[uuid:{}]-开始从FTP上下载zip文件", uuid);
//log.info("[uuid:{}]-开始从FTP上下载zip文件", uuid);
LogUtil.WriteLog_Info("[uuid:" + uuid + "]-开始从FTP上下载zip文件" , "DownloadZipServiceImpl");
//根据中介库类型查询相应的表格从FTP上下载到下载目录然后传给前端
if (zipFileInfo.getMiddleDatabase() == 1) {
log.info("[uuid:{}]-zip文件在华软FTP", uuid);
//log.info("[uuid:{}]-zip文件在华软FTP", uuid);
LogUtil.WriteLog_Info("[uuid:" + uuid + "]-zip文件在华软FTP" , "DownloadZipServiceImpl");
//华软
List<DownloadNotify> downloadNotifys = DatabaseUtil.selectDownloadNotify(driverName, HRConnectionStringOracle, HROracleName, HROraclePass,
"SELECT ID,SYSTEMID,TABLENAME,BATCHNO,VERSION,PROTOCOL_TYPE,CREATETIME,RECORDNUM,UPDATEFLAG,UPDATETIME,DOMAIN1,DOMAIN2,DOMAIN3,DOMAIN4 FROM TF_DOWNLOAD_NOTIFY where ID="+zipFileInfo.getId());
@ -124,7 +127,8 @@ public class DownloadZipServiceImpl implements DownloadZipService {
} else {
//迈道
log.info("[uuid:{}]-zip文件在迈道FTP", uuid);
//log.info("[uuid:{}]-zip文件在迈道FTP", uuid);
LogUtil.WriteLog_Info("[uuid:" + uuid + "]-zip文件在迈道FTP" , "DownloadZipServiceImpl");
String mdIdVersion = String.valueOf(zipFileInfo.getId());
String mdId = mdIdVersion.substring(0, 2);
String mdVersion = mdIdVersion.substring(2);
@ -147,6 +151,7 @@ public class DownloadZipServiceImpl implements DownloadZipService {
}
}
log.info("[uuid:{}]-下载zip文件完成", uuid);
//log.info("[uuid:{}]-下载zip文件完成", uuid);
LogUtil.WriteLog_Info("[uuid:" + uuid + "]-下载zip文件完成" , "DownloadZipServiceImpl");
}
}

@ -1,17 +1,10 @@
package com.nm.gsgl.service.impl;
import com.baomidou.mybatisplus.core.conditions.update.UpdateWrapper;
import com.nm.gsgl.common.Constant;
import com.nm.gsgl.common.config.MyEnvironmentPostProcessor;
import com.nm.gsgl.common.utils.DatabaseUtil;
import com.nm.gsgl.common.utils.DateTimeUtil;
import com.nm.gsgl.common.utils.LogUtil;
import com.nm.gsgl.entity.intermediary.AppAliveStatus;
import com.nm.gsgl.mapper.AppAliveStatusMapper;
import com.nm.gsgl.service.DualMachineService;
import lombok.extern.slf4j.Slf4j;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Isolation;
import org.springframework.transaction.annotation.Transactional;
@ -28,32 +21,32 @@ import java.util.Date;
public class DualMachineServiceImpl implements DualMachineService {
@Resource
private AppAliveStatusMapper appAliveStatusMapper;
private static final String driverName = MyEnvironmentPostProcessor.driverClassName;
public static Logger log = LoggerFactory.getLogger("BlackCard");
/**
* 华软中介库数据库相关信息
*/
@Value("${HRConnectionStringOracle}")
private String HRConnectionStringOracle;
@Value("${HROracleName}")
private String HROracleName;
@Value("${HROraclePass}")
private String HROraclePass;
/**
* 迈道中介库数据库相关信息
*/
@Value("${MDConnectionStringOracle}")
private String MDConnectionStringOracle;
@Value("${MDOracleName}")
private String MDOracleName;
@Value("${MDOraclePass}")
private String MDOraclePass;
//@Resource
//private TomcatWebUtil tomcatWebUtil;
private static final String appNameMain = "ParamDownload";
private static final String appNameSpare = "ParamDownloadSpare";
//private static final String driverName = MyEnvironmentPostProcessor.driverClassName;
////public static Logger log = LoggerFactory.getLogger("BlackCard");
///**
// * 华软中介库数据库相关信息
// */
//@Value("${HRConnectionStringOracle}")
//private String HRConnectionStringOracle;
//@Value("${HROracleName}")
//private String HROracleName;
//@Value("${HROraclePass}")
//private String HROraclePass;
///**
// * 迈道中介库数据库相关信息
// */
//@Value("${MDConnectionStringOracle}")
//private String MDConnectionStringOracle;
//@Value("${MDOracleName}")
//private String MDOracleName;
//@Value("${MDOraclePass}")
//private String MDOraclePass;
//
////@Resource
////private TomcatWebUtil tomcatWebUtil;
//
//private static final String appNameMain = "ParamDownload";
//private static final String appNameSpare = "ParamDownloadSpare";
/**
@ -69,7 +62,8 @@ public class DualMachineServiceImpl implements DualMachineService {
@Override
public boolean validateDual(String uuid, int machineId) {
AppAliveStatus appAliveStatus = appAliveStatusMapper.selectById(1);
log.info("[uuid:{}]查询APP_ALIVE_STATUS中APPID为1的数据{}", uuid,appAliveStatus);
//log.info("[uuid:{}]查询APP_ALIVE_STATUS中APPID为1的数据{}", uuid,appAliveStatus);
LogUtil.WriteLog_ValidateDual("[uuid:" + uuid + "]-查询APP_ALIVE_STATUS中APPID为1的数据" + appAliveStatus, "DualMachineServiceImpl");
Date date = new Date();
//Date mTime = appAliveStatus.getMTime();
Date sTime = appAliveStatus.getSTime();
@ -81,14 +75,17 @@ public class DualMachineServiceImpl implements DualMachineService {
UpdateWrapper<AppAliveStatus> updateWrapper = new UpdateWrapper<>();
updateWrapper.eq("APPID", 1);
if (appAliveStatus.getALiveId() == machineId) {
log.info("[uuid:{}]当前的应用的ID为{},当前活跃的机器为:{}开始下载程序,并更新程序存活时间", uuid,machineId, appAliveStatus.getALiveId());
//log.info("[uuid:{}]当前的应用的ID为{},当前活跃的机器为:{}开始下载程序,并更新程序存活时间", uuid,machineId, appAliveStatus.getALiveId());
LogUtil.WriteLog_ValidateDual("[uuid:" + uuid + "]-当前的应用的ID为" + machineId+"当前活跃的机器为:"+appAliveStatus.getALiveId()+"开始下载程序,并更新程序存活时间", "DualMachineServiceImpl");
updateWrapper.set("MTIME", date);
appAliveStatusMapper.update(null, updateWrapper);
return true;
} else {
log.info("[uuid:{}]当前活跃的机器为:{},开始验证是否超时", uuid, appAliveStatus.getALiveId());
//log.info("[uuid:{}]当前活跃的机器为:{},开始验证是否超时", uuid, appAliveStatus.getALiveId());
LogUtil.WriteLog_ValidateDual("[uuid:" + uuid + "]-开始验证是否超时,当前活跃的机器为" + appAliveStatus.getALiveId(), "DualMachineServiceImpl");
if ((time2 + ((long) deadTime * deadCount * 1000) < time)) {
log.info("[uuid:{}]修改APP_ALIVE_STATUS表中活跃主机ALIVEID为{},并更新备用机存活时间", uuid, 1);
//log.info("[uuid:{}]修改APP_ALIVE_STATUS表中活跃主机ALIVEID为{},并更新备用机存活时间", uuid, 1);
LogUtil.WriteLog_ValidateDual("[uuid:" + uuid + "]-修改APP_ALIVE_STATUS表中活跃主机ALIVEID为" + 1+"并更新备用机存活时间", "DualMachineServiceImpl");
updateWrapper.set("MTIME", date);
updateWrapper.set("ALIVEID", 1);
appAliveStatusMapper.update(null, updateWrapper);
@ -97,7 +94,8 @@ public class DualMachineServiceImpl implements DualMachineService {
}
updateWrapper.set("MTIME", date);
appAliveStatusMapper.update(null, updateWrapper);
log.info("[uuid:{}]当前活跃的机器为{}不是ALIVEID=1的程序更新主程序存活时间程序休眠等待下次轮询", uuid, appAliveStatus.getALiveId());
//log.info("[uuid:{}]当前活跃的机器为{}不是ALIVEID=1的程序更新主程序存活时间程序休眠等待下次轮询", uuid, appAliveStatus.getALiveId());
LogUtil.WriteLog_ValidateDual("[uuid:" + uuid + "]-当前活跃的机器为" + appAliveStatus.getALiveId()+"不是ALIVEID=1的程序更新主程序存活时间程序休眠等待下次轮询", "DualMachineServiceImpl");
return false;
}
@ -114,6 +112,7 @@ public class DualMachineServiceImpl implements DualMachineService {
@Transactional(isolation = Isolation.READ_COMMITTED)
public boolean validateDualSpare(String uuid, int machineId) {
AppAliveStatus appAliveStatus = appAliveStatusMapper.selectById(1);
LogUtil.WriteLog_ValidateDual("[uuid:" + uuid + "]-查询APP_ALIVE_STATUS中APPID为1的数据" + appAliveStatus, "DualMachineServiceImpl");
Date date = new Date();
Date mTime = appAliveStatus.getMTime();
int deadCount = appAliveStatus.getDeadCount();
@ -123,14 +122,19 @@ public class DualMachineServiceImpl implements DualMachineService {
UpdateWrapper<AppAliveStatus> updateWrapper = new UpdateWrapper<>();
updateWrapper.eq("APPID", 1);
if (appAliveStatus.getALiveId() == machineId) {
log.info("[uuid:{}]当前的应用的ID为{},当前活跃的机器为:{}开始下载程序,并更新程序存活时间", uuid,machineId, appAliveStatus.getALiveId());
//log.info("[uuid:{}]当前的应用的ID为{},当前活跃的机器为:{}开始下载程序,并更新程序存活时间", uuid,machineId, appAliveStatus.getALiveId());
LogUtil.WriteLog_ValidateDual("[uuid:" + uuid + "]-当前的应用的ID为" + machineId+"当前活跃的机器为:"+appAliveStatus.getALiveId()+"开始下载程序,并更新程序存活时间", "DualMachineServiceImpl");
updateWrapper.set("STIME", date);
appAliveStatusMapper.update(null, updateWrapper);
return true;
} else {
log.info("[uuid:{}]当前活跃的机器为:{},开始验证是否超时", uuid, appAliveStatus.getALiveId());
//log.info("[uuid:{}]当前活跃的机器为:{},开始验证是否超时", uuid, appAliveStatus.getALiveId());
LogUtil.WriteLog_ValidateDual("[uuid:" + uuid + "]-开始验证是否超时,当前活跃的机器为" + appAliveStatus.getALiveId(), "DualMachineServiceImpl");
if ((time1 + ((long) deadTime * deadCount * 1000) < time)) {
log.info("[uuid:{}]修改APP_ALIVE_STATUS表中活跃主机ALIVEID为{},并更新备用机存活时间", uuid, 2);
//log.info("[uuid:{}]修改APP_ALIVE_STATUS表中活跃主机ALIVEID为{},并更新备用机存活时间", uuid, 2);
LogUtil.WriteLog_ValidateDual("[uuid:" + uuid + "]-修改APP_ALIVE_STATUS表中活跃主机ALIVEID为" + 2+"并更新备用机存活时间", "DualMachineServiceImpl");
updateWrapper.set("STIME", date);
updateWrapper.set("ALIVEID", 2);
appAliveStatusMapper.update(null, updateWrapper);
@ -139,145 +143,146 @@ public class DualMachineServiceImpl implements DualMachineService {
}
updateWrapper.set("STIME", date);
appAliveStatusMapper.update(null, updateWrapper);
log.info("[uuid:{}]当前活跃的机器为{}不是ALIVEID=2的程序程序休眠更新备用机存活时间等待下次轮询", uuid, appAliveStatus.getALiveId());
//log.info("[uuid:{}]当前活跃的机器为{}不是ALIVEID=2的程序程序休眠更新备用机存活时间等待下次轮询", uuid, appAliveStatus.getALiveId());
LogUtil.WriteLog_ValidateDual("[uuid:" + uuid + "]-当前活跃的机器为" + appAliveStatus.getALiveId()+"不是ALIVEID=2的程序更新主程序存活时间程序休眠等待下次轮询", "DualMachineServiceImpl");
return false;
}
/**
* 验证机器是否启动,保证双机方案的正常运转
*
* @param uuid uuid
* @param machineId 机器ID
* @author shuguang
* @date 2023-02-18 19:58
*/
@Override
public boolean validateDualHR(String uuid, int machineId) {
//查询表 APP_ALIVE_STATUS中的主备状态---(华软和迈道中介库)
return HRAndMDMain(uuid, HRConnectionStringOracle, HROracleName, HROraclePass,machineId);
}
/**
* 验证机器是否启动,保证双机方案的正常运转(主程序调用)--迈道
*
* @param uuid uuid
* @param machineId 机器ID
* @author shuguang
* @date 2023-02-18 19:58
*/
@Override
public boolean validateDualMD(String uuid, int machineId) {
//查询表 APP_ALIVE_STATUS中的主备状态---(华软和迈道中介库)
return HRAndMDMain(uuid, MDConnectionStringOracle, MDOracleName, MDOraclePass,machineId);
}
/**
* 查询华软或迈道中介库数据表查看当前应用(主机)
*
* @param uuid uuid
* @param connectionStringOracle 华软或迈道中介库连接地址
* @param oracleName 华软或迈道数据库名称
* @param oraclePass 华软或迈道数据库密码
* @return boolean
* @author shuguang
* @date 2023-02-20 14:16
*/
private boolean HRAndMDMain(String uuid, String connectionStringOracle, String oracleName, String oraclePass,int machineId) {
AppAliveStatus appAliveStatus = DatabaseUtil.selectAppAliveStatus(driverName, connectionStringOracle, oracleName, oraclePass, Constant.SELECT_APP_ALIVE_STATUS);
Date date = new Date();
String nowDateStr = DateTimeUtil.getFormateString(date, Constant.YYYY_MM_DD_HH_MM_SS);
String sqlParam = Constant.UPDATE_APP_ALIVE_STATUS_MTIME.replace(Constant.STR_AAAAA, nowDateStr);
DatabaseUtil.upOrInsertSql(driverName, connectionStringOracle, oracleName, oraclePass, sqlParam);
if (appAliveStatus.getALiveId() == machineId) {
log.info("[uuid:{}]当前的应用的ID为{},当前活跃的机器为:{}开始下载程序,并更新程序存活时间", uuid,machineId, appAliveStatus.getALiveId());
return true;
} else {
log.info("[uuid:{}]当前活跃的机器为:{},开始验证是否超时", uuid, appAliveStatus.getALiveId());
Date mTime = appAliveStatus.getMTime();
int deadCount = appAliveStatus.getDeadCount();
int deadTime = appAliveStatus.getDeadTime();
long time = date.getTime();
long time1 = mTime.getTime();
if ((time1 + ((long) deadTime * deadCount * 1000) < time)) {
log.info("[uuid:{}]修改APP_ALIVE_STATUS表中活跃主机ALIVEID为{},并更新备用机存活时间", uuid, 1);
DatabaseUtil.upOrInsertSql(driverName, connectionStringOracle, oracleName, oraclePass, Constant.UPDATE_APP_ALIVE_STATUS_ALIVEID_1);
//启动主机
//tomcatWebUtil.startWebApp(appNameMain);
return true;
}
}
log.info("[uuid:{}]当前活跃的机器为{}不是ALIVEID=1的程序程序休眠等待下次轮询", uuid, appAliveStatus.getALiveId());
return false;
}
/**
* 验证机器是否启动,保证双机方案的正常运转(备用程序调用)--迈道
*
* @param uuid uuid
* @param machineId 机器ID
* @author shuguang
* @date 2023-02-18 19:58
*/
@Override
public boolean validateDualSpareMD(String uuid, int machineId) {
//查询表 APP_ALIVE_STATUS中的主备状态---(华软和迈道中介库)
return HRAndMDSpare(uuid, MDConnectionStringOracle, MDOracleName, MDOraclePass,machineId);
}
/**
* 验证机器是否启动,保证双机方案的正常运转(备用程序调用)
*
* @param uuid uuid
* @param machineId 机器ID 主机默认为1,备用机默认为2
* @author shuguang
* @date 2023-02-18 19:58
*/
@Override
public boolean validateDualSpareHR(String uuid, int machineId) {
//查询表 APP_ALIVE_STATUS中的主备状态---(华软和迈道中介库)
return HRAndMDSpare(uuid, HRConnectionStringOracle, HROracleName, HROraclePass,machineId);
}
/**
* 查询华软或迈道中介库数据表查看当前应用(备用机)
*
* @param uuid uuid
* @param connectionStringOracle 华软或迈道中介库连接地址
* @param oracleName 华软或迈道数据库名称
* @param oraclePass 华软或迈道数据库密码
* @return boolean
* @author shuguang
* @date 2023-02-20 14:16
*/
private boolean HRAndMDSpare(String uuid, String connectionStringOracle, String oracleName, String oraclePass,int machineId) {
AppAliveStatus appAliveStatus = DatabaseUtil.selectAppAliveStatus(driverName, connectionStringOracle, oracleName, oraclePass, Constant.SELECT_APP_ALIVE_STATUS);
Date date = new Date();
String nowDateStr = DateTimeUtil.getFormateString(date, Constant.YYYY_MM_DD_HH_MM_SS);
String sqlParam = Constant.UPDATE_APP_ALIVE_STATUS_STIME.replace(Constant.STR_AAAAA, nowDateStr);
DatabaseUtil.upOrInsertSql(driverName, connectionStringOracle, oracleName, oraclePass, sqlParam);
if (appAliveStatus.getALiveId() == machineId) {
log.info("[uuid:{}]当前的应用的ID为:{},当前活跃的机器为:{},开始下载程序,并更新程序存活时间", uuid,machineId, appAliveStatus.getALiveId());
return true;
} else {
log.info("[uuid:{}]当前活跃的机器为:{},开始验证是否超时", uuid, appAliveStatus.getALiveId());
Date mTime = appAliveStatus.getMTime();
int deadCount = appAliveStatus.getDeadCount();
int deadTime = appAliveStatus.getDeadTime();
long time = date.getTime();
long time1 = mTime.getTime();
if ((time1 + ((long) deadTime * deadCount * 1000) < time)) {
log.info("[uuid:{}]修改APP_ALIVE_STATUS表中活跃主机ALIVEID为{},并更新备用机存活时间", uuid, 2);
DatabaseUtil.upOrInsertSql(driverName, connectionStringOracle, oracleName, oraclePass, Constant.UPDATE_APP_ALIVE_STATUS_ALIVEID_2);
//启动备用机
//tomcatWebUtil.startWebApp(appNameSpare);
return true;
}
}
log.info("[uuid:{}]当前活跃的机器为:{},不是ALIVEID=2的程序程序休眠等待下次轮询", uuid, appAliveStatus.getALiveId());
return false;
}
///**
// * 验证机器是否启动,保证双机方案的正常运转
// *
// * @param uuid uuid
// * @param machineId 机器ID
// * @author shuguang
// * @date 2023-02-18 19:58
// */
//@Override
//public boolean validateDualHR(String uuid, int machineId) {
// //查询表 APP_ALIVE_STATUS中的主备状态---(华软和迈道中介库)
// return HRAndMDMain(uuid, HRConnectionStringOracle, HROracleName, HROraclePass,machineId);
//
//}
//
//
///**
// * 验证机器是否启动,保证双机方案的正常运转(主程序调用)--迈道
// *
// * @param uuid uuid
// * @param machineId 机器ID
// * @author shuguang
// * @date 2023-02-18 19:58
// */
//@Override
//public boolean validateDualMD(String uuid, int machineId) {
// //查询表 APP_ALIVE_STATUS中的主备状态---(华软和迈道中介库)
// return HRAndMDMain(uuid, MDConnectionStringOracle, MDOracleName, MDOraclePass,machineId);
//
//}
//
///**
// * 查询华软或迈道中介库数据表查看当前应用(主机)
// *
// * @param uuid uuid
// * @param connectionStringOracle 华软或迈道中介库连接地址
// * @param oracleName 华软或迈道数据库名称
// * @param oraclePass 华软或迈道数据库密码
// * @return boolean
// * @author shuguang
// * @date 2023-02-20 14:16
// */
//private boolean HRAndMDMain(String uuid, String connectionStringOracle, String oracleName, String oraclePass,int machineId) {
// AppAliveStatus appAliveStatus = DatabaseUtil.selectAppAliveStatus(driverName, connectionStringOracle, oracleName, oraclePass, Constant.SELECT_APP_ALIVE_STATUS);
// Date date = new Date();
// String nowDateStr = DateTimeUtil.getFormateString(date, Constant.YYYY_MM_DD_HH_MM_SS);
// String sqlParam = Constant.UPDATE_APP_ALIVE_STATUS_MTIME.replace(Constant.STR_AAAAA, nowDateStr);
// DatabaseUtil.upOrInsertSql(driverName, connectionStringOracle, oracleName, oraclePass, sqlParam);
// if (appAliveStatus.getALiveId() == machineId) {
// log.info("[uuid:{}]当前的应用的ID为{},当前活跃的机器为:{}开始下载程序,并更新程序存活时间", uuid,machineId, appAliveStatus.getALiveId());
// return true;
// } else {
// log.info("[uuid:{}]当前活跃的机器为:{},开始验证是否超时", uuid, appAliveStatus.getALiveId());
// Date mTime = appAliveStatus.getMTime();
// int deadCount = appAliveStatus.getDeadCount();
// int deadTime = appAliveStatus.getDeadTime();
// long time = date.getTime();
// long time1 = mTime.getTime();
// if ((time1 + ((long) deadTime * deadCount * 1000) < time)) {
// log.info("[uuid:{}]修改APP_ALIVE_STATUS表中活跃主机ALIVEID为{},并更新备用机存活时间", uuid, 1);
// DatabaseUtil.upOrInsertSql(driverName, connectionStringOracle, oracleName, oraclePass, Constant.UPDATE_APP_ALIVE_STATUS_ALIVEID_1);
// //启动主机
// //tomcatWebUtil.startWebApp(appNameMain);
// return true;
// }
// }
// log.info("[uuid:{}]当前活跃的机器为{}不是ALIVEID=1的程序程序休眠等待下次轮询", uuid, appAliveStatus.getALiveId());
// return false;
//}
//
///**
// * 验证机器是否启动,保证双机方案的正常运转(备用程序调用)--迈道
// *
// * @param uuid uuid
// * @param machineId 机器ID
// * @author shuguang
// * @date 2023-02-18 19:58
// */
//@Override
//public boolean validateDualSpareMD(String uuid, int machineId) {
// //查询表 APP_ALIVE_STATUS中的主备状态---(华软和迈道中介库)
// return HRAndMDSpare(uuid, MDConnectionStringOracle, MDOracleName, MDOraclePass,machineId);
//}
///**
// * 验证机器是否启动,保证双机方案的正常运转(备用程序调用)
// *
// * @param uuid uuid
// * @param machineId 机器ID 主机默认为1备用机默认为2
// * @author shuguang
// * @date 2023-02-18 19:58
// */
//@Override
//public boolean validateDualSpareHR(String uuid, int machineId) {
// //查询表 APP_ALIVE_STATUS中的主备状态---(华软和迈道中介库)
// return HRAndMDSpare(uuid, HRConnectionStringOracle, HROracleName, HROraclePass,machineId);
//}
//
///**
// * 查询华软或迈道中介库数据表查看当前应用(备用机)
// *
// * @param uuid uuid
// * @param connectionStringOracle 华软或迈道中介库连接地址
// * @param oracleName 华软或迈道数据库名称
// * @param oraclePass 华软或迈道数据库密码
// * @return boolean
// * @author shuguang
// * @date 2023-02-20 14:16
// */
//private boolean HRAndMDSpare(String uuid, String connectionStringOracle, String oracleName, String oraclePass,int machineId) {
// AppAliveStatus appAliveStatus = DatabaseUtil.selectAppAliveStatus(driverName, connectionStringOracle, oracleName, oraclePass, Constant.SELECT_APP_ALIVE_STATUS);
// Date date = new Date();
// String nowDateStr = DateTimeUtil.getFormateString(date, Constant.YYYY_MM_DD_HH_MM_SS);
// String sqlParam = Constant.UPDATE_APP_ALIVE_STATUS_STIME.replace(Constant.STR_AAAAA, nowDateStr);
// DatabaseUtil.upOrInsertSql(driverName, connectionStringOracle, oracleName, oraclePass, sqlParam);
// if (appAliveStatus.getALiveId() == machineId) {
// log.info("[uuid:{}]当前的应用的ID为:{},当前活跃的机器为:{},开始下载程序,并更新程序存活时间", uuid,machineId, appAliveStatus.getALiveId());
// return true;
// } else {
// log.info("[uuid:{}]当前活跃的机器为:{},开始验证是否超时", uuid, appAliveStatus.getALiveId());
// Date mTime = appAliveStatus.getMTime();
// int deadCount = appAliveStatus.getDeadCount();
// int deadTime = appAliveStatus.getDeadTime();
// long time = date.getTime();
// long time1 = mTime.getTime();
// if ((time1 + ((long) deadTime * deadCount * 1000) < time)) {
// log.info("[uuid:{}]修改APP_ALIVE_STATUS表中活跃主机ALIVEID为{},并更新备用机存活时间", uuid, 2);
// DatabaseUtil.upOrInsertSql(driverName, connectionStringOracle, oracleName, oraclePass, Constant.UPDATE_APP_ALIVE_STATUS_ALIVEID_2);
// //启动备用机
// //tomcatWebUtil.startWebApp(appNameSpare);
// return true;
// }
// }
// log.info("[uuid:{}]当前活跃的机器为:{},不是ALIVEID=2的程序程序休眠等待下次轮询", uuid, appAliveStatus.getALiveId());
// return false;
//}
}

@ -4,17 +4,15 @@ import com.nm.gsgl.common.Constant;
import com.nm.gsgl.common.config.MyEnvironmentPostProcessor;
import com.nm.gsgl.common.utils.DatabaseUtil;
import com.nm.gsgl.common.utils.FileUtil;
import com.nm.gsgl.common.utils.LogUtil;
import com.nm.gsgl.common.utils.PathUtil;
import com.nm.gsgl.entity.sqlite.BusinessTasks;
import com.nm.gsgl.service.CreateDbService;
import com.nm.gsgl.service.CreateDbfByDllService;
import com.nm.gsgl.service.DualMachineService;
import com.nm.gsgl.service.EctService;
import com.nm.gsgl.service.FtpDownloadService;
import com.nm.gsgl.service.SqliteBackupsService;
import com.nm.gsgl.service.ZipToSqbService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
@ -37,8 +35,6 @@ public class EctServiceImpl implements EctService {
@Resource
private CreateDbfByDllService createDbfByDllService;
@Resource
private CreateDbService createDbService;
@Resource
private ZipToSqbService zipToSqbService;
@Value("${machineId}")
@ -70,7 +66,7 @@ public class EctServiceImpl implements EctService {
private String NFSFilePath;
@Resource
private SqliteBackupsService sqliteBackupsService;
public static Logger log = LoggerFactory.getLogger("BlackCard");
//public static Logger log = LoggerFactory.getLogger("BlackCard");
/**
*
*
@ -83,7 +79,9 @@ public class EctServiceImpl implements EctService {
//双机方案
//获取本机的机器ID,本机的机器ID 1-主机2-备用机
int mId = Integer.parseInt(machineId);
log.info("[uuid:{}]获取当前机器的id为{}", uuid, mId);
//log.info("[uuid:{}]获取当前机器的id为{}", uuid, mId);
LogUtil.WriteLog_HRBlackCard("[uuid:" + uuid + "]-获取当前机器的id为" + mId, "EctServiceImpl");
//验证是否当前服务程序处理事件
boolean validateHR;
if (mId == 1) {
@ -92,7 +90,8 @@ public class EctServiceImpl implements EctService {
validateHR = dualMachineService.validateDualSpare(uuid, mId);
}
//如果是当前服务处理,继续下一步,否则程序定时任务不做处理,等待下次轮询
log.info("[uuid:{}]验证是否当前服务程序处理事件{}", uuid, validateHR);
//log.info("[uuid:{}]验证是否当前服务程序处理事件{}", uuid, validateHR);
LogUtil.WriteLog_HRBlackCard("[uuid:" + uuid + "]-验证是否当前服务程序处理事件" + validateHR, "EctServiceImpl");
if (validateHR) {
//开始下载扫描迈道,华软下载任务
ftpDownloadService.getHrMdTask(uuid);
@ -115,19 +114,11 @@ public class EctServiceImpl implements EctService {
} else if (Constant.DLL_SQB_LIST.contains(protocolType)) {
zipToSqbService.createSqbByDll(uuid, businessTask);
isFinished = true;
//最小费额---单独线程处理
//} else if (Constant.STR_SQB_250.equals(protocolType)) {
// createSqbByDllService.createMinFeeSqbByDll(uuid, businessTask);
// isFinished = true;
//入库操作
//}
//else if (Constant.INTO_DB_LIST.contains(protocolType)) {
// createDbService.zipToDataBase(uuid, businessTask);
// isFinished = true;
}
if (isFinished) {
//更新中介库华软的下载状态
log.info("[uuid:{}]-更新中介库TF_DOWNLOAD_NOTIFY(id为:{})下载状态为1-已下载", uuid, id);
//log.info("[uuid:{}]-更新中介库TF_DOWNLOAD_NOTIFY(id为:{})下载状态为1-已下载", uuid, id);
LogUtil.WriteLog_HRBlackCard("[uuid:" + uuid + "]-更新中介库TF_DOWNLOAD_NOTIFY(id为:" + id+")下载状态为1-已下载", "EctServiceImpl");
DatabaseUtil.updateSql(driverName, HRConnectionStringOracle, HROracleName, HROraclePass, Constant.UPDATE_TF_DOWNLOAD_NOTIFY_HR + id, uuid);
//删除下载的zip文件
FileUtil.delete(new File(NFSFilePath + PathUtil.downFile + businessTask.getFileName()));
@ -153,7 +144,8 @@ public class EctServiceImpl implements EctService {
//双机方案
//获取本机的机器ID,本机的机器ID 1-主机2-备用机
int mId = Integer.parseInt(machineId);
log.info("[uuid:{}]获取当前机器的id为{}", uuid, mId);
//log.info("[uuid:{}]获取当前机器的id为{}", uuid, mId);
LogUtil.WriteLog_MDBlackCard("[uuid:" + uuid + "]-获取当前机器的id为" + mId, "EctServiceImpl");
//验证迈道
boolean validateMD;
if (mId == 1) {
@ -183,14 +175,14 @@ public class EctServiceImpl implements EctService {
if (isFinished) {
//更新迈道中介库下载任务信息
log.info("[uuid:{}]更新中介库T_PARAM_NOTIFY(PARAM_ID为:{},VERSION为{})下载状态为1-已下载", uuid, type, version);
//log.info("[uuid:{}]更新中介库T_PARAM_NOTIFY(PARAM_ID为:{},VERSION为{})下载状态为1-已下载", uuid, type, version);
LogUtil.WriteLog_MDBlackCard("[uuid:" + uuid + "]-更新中介库T_PARAM_NOTIFY(id为:" + id+")下载状态为1-已下载", "EctServiceImpl");
DatabaseUtil.updateSql(driverName, MDConnectionStringOracle, MDOracleName, MDOraclePass,
Constant.UPDATE_T_PARAM_NOTIFY_MD + type + Constant.AND_VERSION_MD + version + Constant.SINGLE_LEAD, uuid);
//删除下载的zip文件
FileUtil.delete(new File(NFSFilePath + PathUtil.downFile + businessTask.getFileName()));
sqliteBackupsService.buildBackups(uuid);
} else {
log.info("[uuid:{}]数据处理失败中介下载任务id{}不更新", uuid, id);
}
}

@ -2,12 +2,11 @@ package com.nm.gsgl.service.impl;
import com.nm.gsgl.common.Constant;
import com.nm.gsgl.common.config.MyEnvironmentPostProcessor;
import com.nm.gsgl.common.enumeration.MessageEnum;
import com.nm.gsgl.common.exception.PPException;
import com.nm.gsgl.common.utils.DatabaseUtil;
import com.nm.gsgl.common.utils.DateTimeUtil;
import com.nm.gsgl.common.utils.FTPUtil;
import com.nm.gsgl.common.utils.FileUtil;
import com.nm.gsgl.common.utils.LogUtil;
import com.nm.gsgl.common.utils.MD5Util;
import com.nm.gsgl.common.utils.PathUtil;
import com.nm.gsgl.common.utils.StringUtil;
@ -15,7 +14,6 @@ import com.nm.gsgl.entity.intermediary.DownloadNotify;
import com.nm.gsgl.entity.intermediary.ParamNotify;
import com.nm.gsgl.entity.sqlite.BusinessTasks;
import com.nm.gsgl.service.FtpDownloadService;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
@ -30,7 +28,6 @@ import java.util.List;
* @description: FTP
*/
@Service
@Slf4j
public class FtpDownloadServiceImpl implements FtpDownloadService {
/**
* FTP
@ -92,14 +89,18 @@ public class FtpDownloadServiceImpl implements FtpDownloadService {
*/
@Override
public void getHrMdTask(String uuid) {
log.info("[uuid:{}]开始查询华软中介服务上的下载任务数据", uuid);
//log.info("[uuid:{}]开始查询华软中介服务上的下载任务数据", uuid);
LogUtil.WriteLog_HRZipDownloadFile("[uuid:" + uuid + "]-开始查询华软中介服务上的下载任务数据" , "FtpDownloadServiceImpl-getHrMdTask");
//查询下载任务数据
List<DownloadNotify> downloadNotifys = DatabaseUtil.selectDownloadNotify(driverName, HRConnectionStringOracle, HROracleName, HROraclePass, Constant.SELECT_TF_DOWNLOAD_NOTIFY_BLACK);
String localPath = NFSFilePath + PathUtil.downFile;
FileUtil.fileCreat(localPath);
log.info("[uuid:{}]查询华软中介服务上的下载任务数据条数为{}", uuid, downloadNotifys.size());
//log.info("[uuid:{}]查询华软中介服务上的下载任务数据条数为{}", uuid, downloadNotifys.size());
LogUtil.WriteLog_HRZipDownloadFile("[uuid:" + uuid + "]-查询华软中介服务上的下载任务数据条数为"+downloadNotifys.size() , "FtpDownloadServiceImpl-getHrMdTask");
if (downloadNotifys.size() == 0) {
log.info("[uuid:{}]华软中介服务上无下载任务", uuid);
//log.info("[uuid:{}]华软中介服务上无下载任务", uuid);
LogUtil.WriteLog_HRZipDownloadFile("[uuid:" + uuid + "]-华软中介服务无下载任务" , "FtpDownloadServiceImpl-getHrMdTask");
} else {
for (DownloadNotify downloadNotify : downloadNotifys) {
long start = System.currentTimeMillis();
@ -121,28 +122,42 @@ public class FtpDownloadServiceImpl implements FtpDownloadService {
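//domain1中保存的是zip文件的完整ftp路径;去掉服务器前缀(ftpUrl)和文件名后即得到FTP下载目录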
String fileName = domain1.substring(domain1.lastIndexOf("/") + 1);
String ftpFilePath = domain1.replace(ftpUrl, Constant.NULL_STRING).replace(fileName, Constant.NULL_STRING);
if (businessTasks.size() == 0 || !FileUtil.fileExists(localPath + fileName)) {
log.info("[uuid:{}]开始下载华软中介服务上的任务数据,protocolType为{}", uuid, protocolType);
//log.info("[uuid:{}]开始下载华软中介服务上的任务数据,protocolType为{}", uuid, protocolType);
LogUtil.WriteLog_HRZipDownloadFile("[uuid:" + uuid + "]-开始下载华软中介服务上的任务数据,protocolType为"+protocolType , "FtpDownloadServiceImpl-getHrMdTask");
boolean isDownload = false;
try {
isDownload = FTPUtil.downloadFile(HRFtpIP, HRFtpUsername, HRPassword, Integer.parseInt(HRPort), ftpFilePath, localPath, fileName);
} catch (Exception e) {
log.error("[uuid:{}]{}-下载zip文件({}){}失败,等待下次轮询", uuid, protocolType, id, tableName);
//log.error("[uuid:{}]{}-下载zip文件({}){}失败,等待下次轮询", uuid, protocolType, id, tableName);
LogUtil.WriteLog_HRZipDownloadFile("[uuid:" + uuid + "]-下载zip文件protocolType为"+protocolType+"id为"+id+"业务主表表名为"+tableName , "FtpDownloadServiceImpl-getHrMdTask");
//throw new PPException(MessageEnum.FTP文件下载失败.getCode(), MessageEnum.FTP文件下载失败.getMessage());
}
long end = System.currentTimeMillis();
log.info("[uuid:{}]下载耗时:{}毫秒", uuid, end - start);
//log.info("[uuid:{}]下载耗时:{}毫秒", uuid, end - start);
LogUtil.WriteLog_HRZipDownloadFile("[uuid:" + uuid + "]-下载耗时毫秒为"+(end - start) , "FtpDownloadServiceImpl-getHrMdTask");
if (isDownload) {
//校验md5
String downlMd5 = MD5Util.getMd5ByFilePath(localPath + fileName);
log.info("[uuid:{}]{}-下载zip文件({}){},原MD5值为{}", uuid, protocolType, id, tableName, md5);
log.info("[uuid:{}]{}-下载zip文件({}){},下载的MD5值为{}", uuid, protocolType, id, tableName, downlMd5);
//log.info("[uuid:{}]{}-下载zip文件({}){},原MD5值为{}", uuid, protocolType, id, tableName, md5);
//log.info("[uuid:{}]{}-下载zip文件({}){},下载的MD5值为{}", uuid, protocolType, id, tableName, downlMd5);
LogUtil.WriteLog_HRZipDownloadFile("[uuid:" + uuid + "]-下载zip文件protocolType为"+protocolType+"id为"+id+"业务主表表名为"+tableName +"原MD5值为"+md5,
"FtpDownloadServiceImpl-getHrMdTask");
LogUtil.WriteLog_HRZipDownloadFile("[uuid:" + uuid + "]-下载zip文件protocolType为"+protocolType+"id为"+id+"业务主表表名为"+tableName +"下载的MD5值为"+downlMd5,
"FtpDownloadServiceImpl-getHrMdTask");
if (StringUtils.isBlank(downlMd5) || !md5.equals(downlMd5)) {
log.info("[uuid:{}]{}-下载zip文件({}){},MD5校验失败", uuid, protocolType, id, tableName);
//log.info("[uuid:{}]{}-下载zip文件({}){},MD5校验失败", uuid, protocolType, id, tableName);
LogUtil.WriteLog_HRZipDownloadFile("[uuid:" + uuid + "]-下载zip文件protocolType为"+protocolType+"id为"+id+"业务主表表名为"+tableName +"MD5校验失败",
"FtpDownloadServiceImpl-getHrMdTask");
//throw new PPException(MessageEnum.MD5校验失败.getCode(), MessageEnum.MD5校验失败.getMessage());
} else {
log.info("[uuid:{}]{}-下载zip文件({}){},MD5校验成功", uuid, protocolType, id, tableName);
//log.info("[uuid:{}]{}-下载zip文件({}){},MD5校验成功", uuid, protocolType, id, tableName);
LogUtil.WriteLog_HRZipDownloadFile("[uuid:" + uuid + "]-下载zip文件protocolType为"+protocolType+"id为"+id+"业务主表表名为"+tableName +"MD5校验成功",
"FtpDownloadServiceImpl-getHrMdTask");
//获取新的版本号
String newVersion = StringUtil.getNewVersion(protocolType, version);
//插入sqlite数据库任务表中
@ -167,14 +182,18 @@ public class FtpDownloadServiceImpl implements FtpDownloadService {
String sqliteInsert = StringUtil.sqliteInsert(bt);
if (businessTasks.size() == 0) {
DatabaseUtil.insertSqlite(sqliteDriverName, sqliteUrl, sqliteInsert);
log.info("[uuid:{}]插入sqlite数据库business_tasks_info表中任务数据,id为{}protocolType为{}", uuid, id, protocolType);
//log.info("[uuid:{}]插入sqlite数据库business_tasks_info表中任务数据,id为{}protocolType为{}", uuid, id, protocolType);
LogUtil.WriteLog_HRZipDownloadFile("[uuid:" + uuid + "]-插入sqlite数据库business_tasks_info表中任务数据,id为"+id+"protocolType为"+protocolType,
"FtpDownloadServiceImpl-getHrMdTask");
}
}
}
} else {
log.info("[uuid:{}]sqlite数据库中已存在下载的记录同时下载路径下存在该文件{},跳过下载执行后面的处理程序,id为{}", uuid, fileName, id);
//log.info("[uuid:{}]sqlite数据库中已存在下载的记录同时下载路径下存在该文件{},跳过下载执行后面的处理程序,id为{}", uuid, fileName, id);
LogUtil.WriteLog_HRZipDownloadFile("[uuid:" + uuid + "]-sqlite数据库中已存在下载的记录同时下载路径下存在该文件"+fileName+"跳过下载执行后面的处理程序,id为"+id,
"FtpDownloadServiceImpl-getHrMdTask");
}
}
@ -193,15 +212,20 @@ public class FtpDownloadServiceImpl implements FtpDownloadService {
*/
@Override
public void getMdMdTask(String uuid) {
log.info("[uuid:{}]开始查询迈道中介服务上的下载任务数据", uuid);
//log.info("[uuid:{}]开始查询迈道中介服务上的下载任务数据", uuid);
LogUtil.WriteLog_MDBlackCard("[uuid:" + uuid + "]-开始查询迈道中介服务上的下载任务数据" , "FtpDownloadServiceImpl-getHrMdTask");
//查询下载任务数据
List<ParamNotify> paramNotifyList = DatabaseUtil.selectParamNotify(driverName, MDConnectionStringOracle, MDOracleName, MDOraclePass, Constant.SELECT_T_PARAM_NOTIFY);
String localPath = NFSFilePath + PathUtil.downFile;
FileUtil.fileCreat(localPath);
log.info("[uuid:{}]查询迈道中介服务上的下载任务数据条数为{}", uuid, paramNotifyList.size());
//log.info("[uuid:{}]查询迈道中介服务上的下载任务数据条数为{}", uuid, paramNotifyList.size());
LogUtil.WriteLog_MDBlackCard("[uuid:" + uuid + "]-查询迈道中介服务上的下载任务数据条数为"+paramNotifyList.size() , "FtpDownloadServiceImpl-getMdMdTask");
if (paramNotifyList.size() == 0) {
log.info("[uuid:{}]迈道中介服务上JSON无下载任务", uuid);
} else {
//log.info("[uuid:{}]迈道中介服务上JSON无下载任务", uuid);
LogUtil.WriteLog_MDBlackCard("[uuid:" + uuid + "]-迈道中介服务无下载任务" , "FtpDownloadServiceImpl-getMdMdTask");
} else {
for (ParamNotify paramNotify : paramNotifyList) {
String paramId = paramNotify.getPARAM_ID();
String paramPath = paramNotify.getPARAM_PATH();
@ -223,20 +247,31 @@ public class FtpDownloadServiceImpl implements FtpDownloadService {
isDownload = FTPUtil.downloadFile(MDFtpIP, MDFtpUsername, MDPassword, Integer.parseInt(MDPort), ftpFilePath, localPath, fileName);
} catch (Exception e) {
log.error("[uuid:{}]{}-下载zip文件({}){}失败,等待下次轮询{}", uuid, paramName, paramId + version, fileName, e.getMessage(), e);
//throw new PPException(MessageEnum.FTP文件下载失败.getCode(), MessageEnum.FTP文件下载失败.getMessage());
//log.error("[uuid:{}]{}-下载zip文件({}){}失败,等待下次轮询{}", uuid, paramName, paramId + version, fileName, e.getMessage(), e);
LogUtil.WriteLog_HRZipDownloadFile("[uuid:" + uuid + "]-下载zip文件paramName为"+paramName+"paramId为"+paramId+"fileName"+fileName , "FtpDownloadServiceImpl-getHrMdTask");
}
long end = System.currentTimeMillis();
if (isDownload) {
//校验md5
String downlMd5 = MD5Util.getMd5ByFilePath(localPath + fileName);
log.info("[uuid:{}]-下载zip文件({}){},原MD5值为{}", uuid, paramId, fileName, md5);
log.info("[uuid:{}]-下载zip文件({}){},下载的MD5值为{}", uuid, paramId, fileName, downlMd5);
//log.info("[uuid:{}]-下载zip文件({}){},原MD5值为{}", uuid, paramId, fileName, md5);
//log.info("[uuid:{}]-下载zip文件({}){},下载的MD5值为{}", uuid, paramId, fileName, downlMd5);
LogUtil.WriteLog_MDBlackCard("[uuid:" + uuid + "]-下载zip文件paramName为"+paramName+"paramId为"+paramId+"fileName"+fileName +"原MD5值为"+md5,
"FtpDownloadServiceImpl-getMdMdTask");
LogUtil.WriteLog_MDBlackCard("[uuid:" + uuid + "]-下载zip文件paramName为"+paramName+"paramId为"+paramId+"fileName"+fileName +"下载的MD5值为"+downlMd5,
"FtpDownloadServiceImpl-getMdMdTask");
if (StringUtils.isBlank(downlMd5) || !md5.equals(downlMd5)) {
log.info("[uuid:{}]{}-下载zip文件({}){},MD5校验失败", uuid, paramName, paramId, fileName);
//log.info("[uuid:{}]{}-下载zip文件({}){},MD5校验失败", uuid, paramName, paramId, fileName);
LogUtil.WriteLog_MDBlackCard("[uuid:" + uuid + "]-下载zip文件paramName为"+paramName+"paramId为"+paramId+"fileName"+fileName +"MD5校验失败",
"FtpDownloadServiceImpl-getMdMdTask");
//throw new PPException(MessageEnum.MD5校验失败.getCode(), MessageEnum.MD5校验失败.getMessage());
} else {
log.info("[uuid:{}]{}-下载zip文件({}){},MD5校验成功", uuid, paramName, paramId, fileName);
//log.info("[uuid:{}]{}-下载zip文件({}){},MD5校验成功", uuid, paramName, paramId, fileName);
LogUtil.WriteLog_MDBlackCard("[uuid:" + uuid + "]-下载zip文件paramName为"+paramName+"paramId为"+paramId+"fileName"+fileName +"MD5校验成功",
"FtpDownloadServiceImpl-getMdMdTask");
//获取新的版本号
String newVersion = StringUtil.getNewVersion(paramId + Constant.STR_01, version);
//插入sqlite数据库任务表中
@ -261,13 +296,17 @@ public class FtpDownloadServiceImpl implements FtpDownloadService {
if (businessTasks.size() == 0) {
DatabaseUtil.insertSqlite(sqliteDriverName, sqliteUrl, sqliteInsert);
log.info("[uuid:{}]插入sqlite数据库business_tasks_info表中任务数据,id为{}protocolType为{}", uuid, paramId + version, paramId);
//log.info("[uuid:{}]插入sqlite数据库business_tasks_info表中任务数据,id为{}protocolType为{}", uuid, paramId + version, paramId);
LogUtil.WriteLog_MDBlackCard("[uuid:" + uuid + "]-插入sqlite数据库business_tasks_info表中任务数据,id为"+paramId + version+"protocolType为"+paramId,
"FtpDownloadServiceImpl-getMdMdTask");
}
//先处理数据生成DBF或者入库后再更新中介库下载任务信息
}
}
} else {
log.info("[uuid:{}]sqlite数据库中已存在下载的记录,无需下载,跳过下载执行后面的处理程序,id为{}", uuid, paramId);
//log.info("[uuid:{}]sqlite数据库中已存在下载的记录,无需下载,跳过下载执行后面的处理程序,id为{}", uuid, paramId);
LogUtil.WriteLog_MDBlackCard("[uuid:" + uuid + "]-sqlite数据库中已存在下载的记录同时下载路径下存在该文件"+fileName+"跳过下载执行后面的处理程序,id为"+paramId,
"FtpDownloadServiceImpl-getMdMdTask");
}
}

@ -4,6 +4,7 @@ import com.nm.gsgl.common.Constant;
import com.nm.gsgl.common.config.MyEnvironmentPostProcessor;
import com.nm.gsgl.common.utils.DatabaseUtil;
import com.nm.gsgl.common.utils.FileUtil;
import com.nm.gsgl.common.utils.LogUtil;
import com.nm.gsgl.common.utils.PathUtil;
import com.nm.gsgl.entity.intermediary.AppAliveStatus;
import com.nm.gsgl.entity.sqlite.BusinessTasks;
@ -11,8 +12,6 @@ import com.nm.gsgl.mapper.AppAliveStatusMapper;
import com.nm.gsgl.service.CreateDbService;
import com.nm.gsgl.service.IntoDatabaseService;
import com.nm.gsgl.service.SqliteBackupsService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
@ -52,7 +51,7 @@ public class IntoDatabaseServiceImpl implements IntoDatabaseService {
private SqliteBackupsService sqliteBackupsService;
@Resource
private CreateDbService createDbService;
public static Logger insertDBLog = LoggerFactory.getLogger("InsertDB");
//public static Logger insertDBLog = LoggerFactory.getLogger("InsertDB");
/**
* nmgmpayadmin
*
@ -66,7 +65,9 @@ public class IntoDatabaseServiceImpl implements IntoDatabaseService {
//双机方案
//获取本机的机器ID,本机的机器ID 1-主机2-备用机
int mId = Integer.parseInt(machineId);
insertDBLog.info("[uuid:{}]获取当前机器的id为{}", uuid, mId);
//insertDBLog.info("[uuid:{}]获取当前机器的id为{}", uuid, mId);
LogUtil.WriteLog_InsertDB("[uuid:" + uuid + "]-获取当前机器的id为" +mId, "IntoDatabaseServiceImpl");
AppAliveStatus appAliveStatus = appAliveStatusMapper.selectById(1);
int aLiveId = appAliveStatus.getALiveId();
if (mId == aLiveId) {
@ -81,13 +82,136 @@ public class IntoDatabaseServiceImpl implements IntoDatabaseService {
//是否处理完成
boolean isFinished = false;
if (Constant.INTO_DB_LIST.contains(protocolType)) {
String fileName = businessTask.getFileName();
String fileZip = NFSFilePath + PathUtil.downFile + fileName;
String dirPath="";
switch (protocolType) {
//ETC拆分文件下载
case "237":
dirPath = "ETCCLEARSUM237";
break;
//其他交易省域拆分文件下载
case "240":
dirPath = "OTHERCLEARSUM240";
break;
//ETC拆分结果以通行省门架汇总记录拆分下载
case "241":
dirPath = "GANTRYPASSID241";
break;
//ETC拆分结果以出口交易正常记账拆分下载
case "242":
dirPath = "OUTPORTETC242";
break;
//ETC拆分结果以出口交易争议记账拆分下载
case "243":
dirPath = "OUTPORTETCNOPAY243";
break;
//其他交易拆分结果(以出口交易拆分)下载(新增)
case "244":
dirPath = "OTHEROUTPORT244";
break;
//其他交易拆分结果(以通行省门架汇总记录拆分)下载
case "245":
dirPath = "OTHERGANTRYPASSID245";
break;
//出口其他交易(以通行省门架汇总记录拆分)下载
case "246":
dirPath = "OUTOTHERGANTRY246";
break;
//服务方退费数据
case "302":
dirPath = "SERVERREFUNDTABLE302";
break;
//服务方补交数据
case "304":
dirPath = "SERVERLATERPAYTABLE304";
break;
//新增加的306
case "306":
dirPath = "CTREFUNDLATERPAYSUM306";
break;
//其他交易冲正下载
case "501":
dirPath = "OTHERTRADETABLE501";
break;
//新增加的502、503、604、605、804
case "502":
dirPath = "CTOTHERCLEAR502";
break;
case "503":
dirPath = "CTOTHERCLEARSUM503";
break;
case "604":
dirPath = "CTGATHERDETAILTABLE604";
break;
case "605":
dirPath = "CTPAYERDETAILTABLE605";
break;
case "804":
dirPath = "CHECKRESULTINFOTABLE804";
break;
//绿通查验结果下载
case "803":
dirPath = "CHECKRESULTTABLENEW803";
break;
//etc通行交易上传逾期拆分结果下载
case "901":
dirPath = "OVERTIMEPASSPROV901";
break;
//etc通行交易争议不记账拆分结果下载
case "902":
dirPath = "DISPUTEPASSPROV902";
break;
//etc通行特殊交易拆分文件下载
case "903":
dirPath = "CLEARSUM903";
break;
//出口未上传追偿协调数据下载
case "904":
dirPath = "NOOUTPORTS2DOWN904";
break;
//未收到拆分结果追偿协调数据下载
case "905":
dirPath = "NOSPLITS2DOWN905";
break;
//etc通行交易上传逾期拆分结果出口省应付明细下载
case "917":
dirPath = "OVERTIMEEXPROV917";
break;
//etc通行交易争议不记账拆分结果出口省应付明细下载
case "918":
dirPath = "DISPUTEEXPROV918";
break;
//出口交易记录未上传追偿数据(发起省待处理)下载
case "922":
dirPath = "NOOUTPORTS3DOWN922";
break;
//出口交易记录未上传追偿结果文件下载
case "923":
dirPath = "NOOUTPORTRESULT923";
break;
//未收到拆分结果追偿数据(发起省待处理)下载
case "924":
dirPath = "NOSPLITS3DOWN924";
break;
//未收到拆分结果追偿结果文件下载(新增)
case "925":
dirPath = "NOSPLITRESULT925";
break;
}
//备份zip文件
String downFileBackup = NFSFilePath + PathUtil.downZipInsertBackup + dirPath + File.separator;
FileUtil.fileCreat(downFileBackup);
FileUtil.copyFile(fileZip, downFileBackup + fileName);
//入库操作
createDbService.zipToDataBase(uuid, businessTask);
isFinished = true;
}
if (isFinished) {
//更新中介库华软的下载状态
insertDBLog.info("[uuid:{}]-更新中介库TF_DOWNLOAD_NOTIFY(id为:{})下载状态为1-已下载", uuid, id);
//insertDBLog.info("[uuid:{}]-更新中介库TF_DOWNLOAD_NOTIFY(id为:{})下载状态为1-已下载", uuid, id);
LogUtil.WriteLog_InsertDB("[uuid:" + uuid + "]-更新中介库TF_DOWNLOAD_NOTIFY(id为:" +id+")下载状态为1-已下载", "IntoDatabaseServiceImpl");
DatabaseUtil.updateSql(driverName, HRConnectionStringOracle, HROracleName, HROraclePass, Constant.UPDATE_TF_DOWNLOAD_NOTIFY_HR + id, uuid);
//删除下载的zip文件
FileUtil.delete(new File(NFSFilePath + PathUtil.downFile + businessTask.getFileName()));

@ -1,11 +1,10 @@
package com.nm.gsgl.service.impl;
import com.baomidou.mybatisplus.core.conditions.update.UpdateWrapper;
import com.nm.gsgl.common.utils.LogUtil;
import com.nm.gsgl.entity.intermediary.AppAliveStatus;
import com.nm.gsgl.mapper.AppAliveStatusMapper;
import com.nm.gsgl.service.MinDualMachineService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Isolation;
import org.springframework.transaction.annotation.Transactional;
@ -22,7 +21,7 @@ import java.util.Date;
public class MinDualMachineServiceImpl implements MinDualMachineService {
@Resource
private AppAliveStatusMapper appAliveStatusMapper;
public static Logger log = LoggerFactory.getLogger("MinFee");
//public static Logger log = LoggerFactory.getLogger("MinFee");
/**
* ()
*
@ -36,7 +35,8 @@ public class MinDualMachineServiceImpl implements MinDualMachineService {
@Override
public boolean validateDual(String uuid, int machineId) {
AppAliveStatus appAliveStatus = appAliveStatusMapper.selectById(1);
log.info("[uuid:{}]查询APP_ALIVE_STATUS中APPID为1的数据{}", uuid,appAliveStatus);
//log.info("[uuid:{}]查询APP_ALIVE_STATUS中APPID为1的数据{}", uuid,appAliveStatus);
LogUtil.WriteLog_MinFee("[uuid:" + uuid + "]-查询APP_ALIVE_STATUS中APPID为1的数据" +appAliveStatus, "MinDualMachineServiceImpl");
Date date = new Date();
//Date mTime = appAliveStatus.getMTime();
Date sTime = appAliveStatus.getSTime();
@ -48,14 +48,16 @@ public class MinDualMachineServiceImpl implements MinDualMachineService {
UpdateWrapper<AppAliveStatus> updateWrapper = new UpdateWrapper<>();
updateWrapper.eq("APPID", 1);
if (appAliveStatus.getALiveId() == machineId) {
log.info("[uuid:{}]当前的应用的ID为{},当前活跃的机器为:{}开始下载程序,并更新程序存活时间", uuid,machineId, appAliveStatus.getALiveId());
//log.info("[uuid:{}]当前的应用的ID为{},当前活跃的机器为:{}开始下载程序,并更新程序存活时间", uuid,machineId, appAliveStatus.getALiveId());
LogUtil.WriteLog_MinFee("[uuid:" + uuid + "]-当前的应用的ID为" +machineId+",当前活跃的机器为:"+ appAliveStatus.getALiveId()+"开始下载程序,并更新程序存活时间", "MinDualMachineServiceImpl");
updateWrapper.set("MTIME", date);
appAliveStatusMapper.update(null, updateWrapper);
return true;
} else {
log.info("[uuid:{}]当前活跃的机器为:{},开始验证是否超时", uuid, appAliveStatus.getALiveId());
//log.info("[uuid:{}]当前活跃的机器为:{},开始验证是否超时", uuid, appAliveStatus.getALiveId());
if ((time2 + ((long) deadTime * deadCount * 1000) < time)) {
log.info("[uuid:{}]修改APP_ALIVE_STATUS表中活跃主机ALIVEID为{},并更新备用机存活时间", uuid, 1);
//log.info("[uuid:{}]修改APP_ALIVE_STATUS表中活跃主机ALIVEID为{},并更新备用机存活时间", uuid, 1);
LogUtil.WriteLog_MinFee("[uuid:" + uuid + "]-修改APP_ALIVE_STATUS表中活跃主机ALIVEID为" +1+"并更新备用机存活时间", "MinDualMachineServiceImpl");
updateWrapper.set("MTIME", date);
updateWrapper.set("ALIVEID", 1);
appAliveStatusMapper.update(null, updateWrapper);
@ -64,7 +66,8 @@ public class MinDualMachineServiceImpl implements MinDualMachineService {
}
updateWrapper.set("MTIME", date);
appAliveStatusMapper.update(null, updateWrapper);
log.info("[uuid:{}]当前活跃的机器为{}不是ALIVEID=1的程序更新主程序存活时间程序休眠等待下次轮询", uuid, appAliveStatus.getALiveId());
//log.info("[uuid:{}]当前活跃的机器为{}不是ALIVEID=1的程序更新主程序存活时间程序休眠等待下次轮询", uuid, appAliveStatus.getALiveId());
LogUtil.WriteLog_MinFee("[uuid:" + uuid + "]-当前活跃的机器为" +appAliveStatus.getALiveId()+"不是ALIVEID=1的程序更新主程序存活时间程序休眠等待下次轮询", "MinDualMachineServiceImpl");
return false;
}
@ -90,14 +93,17 @@ public class MinDualMachineServiceImpl implements MinDualMachineService {
UpdateWrapper<AppAliveStatus> updateWrapper = new UpdateWrapper<>();
updateWrapper.eq("APPID", 1);
if (appAliveStatus.getALiveId() == machineId) {
log.info("[uuid:{}]当前的应用的ID为{},当前活跃的机器为:{}开始下载程序,并更新程序存活时间", uuid,machineId, appAliveStatus.getALiveId());
//log.info("[uuid:{}]当前的应用的ID为{},当前活跃的机器为:{}开始下载程序,并更新程序存活时间", uuid,machineId, appAliveStatus.getALiveId());
LogUtil.WriteLog_MinFee("[uuid:" + uuid + "]-当前的应用的ID为" +machineId+",当前活跃的机器为:"+ appAliveStatus.getALiveId()+"开始下载程序,并更新程序存活时间", "MinDualMachineServiceImpl");
updateWrapper.set("STIME", date);
appAliveStatusMapper.update(null, updateWrapper);
return true;
} else {
log.info("[uuid:{}]当前活跃的机器为:{},开始验证是否超时", uuid, appAliveStatus.getALiveId());
//log.info("[uuid:{}]当前活跃的机器为:{},开始验证是否超时", uuid, appAliveStatus.getALiveId());
if ((time1 + ((long) deadTime * deadCount * 1000) < time)) {
log.info("[uuid:{}]修改APP_ALIVE_STATUS表中活跃主机ALIVEID为{},并更新备用机存活时间", uuid, 2);
//log.info("[uuid:{}]修改APP_ALIVE_STATUS表中活跃主机ALIVEID为{},并更新备用机存活时间", uuid, 2);
LogUtil.WriteLog_MinFee("[uuid:" + uuid + "]-修改APP_ALIVE_STATUS表中活跃主机ALIVEID为" +2+"并更新备用机存活时间", "MinDualMachineServiceImpl");
updateWrapper.set("STIME", date);
updateWrapper.set("ALIVEID", 2);
appAliveStatusMapper.update(null, updateWrapper);
@ -106,7 +112,8 @@ public class MinDualMachineServiceImpl implements MinDualMachineService {
}
updateWrapper.set("STIME", date);
appAliveStatusMapper.update(null, updateWrapper);
log.info("[uuid:{}]当前活跃的机器为{}不是ALIVEID=2的程序程序休眠更新备用机存活时间等待下次轮询", uuid, appAliveStatus.getALiveId());
//log.info("[uuid:{}]当前活跃的机器为{}不是ALIVEID=2的程序程序休眠更新备用机存活时间等待下次轮询", uuid, appAliveStatus.getALiveId());
LogUtil.WriteLog_MinFee("[uuid:" + uuid + "]-当前活跃的机器为" +appAliveStatus.getALiveId()+"不是ALIVEID=2的程序更新主程序存活时间程序休眠等待下次轮询", "MinDualMachineServiceImpl");
return false;
}
}
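For reference while reading these hunks: each removed slf4j call is replaced by a LogUtil.WriteLog_*(message, sourceClass) call. The LogUtil class itself does not appear in this diff; the sketch below is only a guess at its shape, assuming every WriteLog_* method appends one timestamped line to a per-module file under a logs/ directory — the project's actual implementation may differ.

package com.nm.gsgl.common.utils;

import java.io.BufferedWriter;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardOpenOption;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;

// Hypothetical sketch only: the real com.nm.gsgl.common.utils.LogUtil is not part of this diff.
public class LogUtil {

    private static final DateTimeFormatter TS = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss.SSS");

    public static void WriteLog_MinFee(String msg, String source)       { write("MinFee", msg, source); }
    public static void WriteLog_BackupSqlite(String msg, String source) { write("BackupSqlite", msg, source); }
    public static void WriteLog_HRBlackCard(String msg, String source)  { write("HRBlackCard", msg, source); }
    public static void WriteLog_MDBlackCard(String msg, String source)  { write("MDBlackCard", msg, source); }
    public static void WriteLog_Error(String msg, String source)        { write("Error", msg, source); }

    // Append "timestamp [source] message" to logs/<module>.log, creating the directory and file on first use.
    private static synchronized void write(String module, String msg, String source) {
        Path file = Paths.get("logs", module + ".log");
        try {
            if (file.getParent() != null) {
                Files.createDirectories(file.getParent());
            }
            try (BufferedWriter bw = Files.newBufferedWriter(file, StandardCharsets.UTF_8,
                    StandardOpenOption.CREATE, StandardOpenOption.APPEND)) {
                bw.write(LocalDateTime.now().format(TS) + " [" + source + "] " + msg);
                bw.newLine();
            }
        } catch (IOException e) {
            // Logging must never break the business flow; swallow and print the failure.
            e.printStackTrace();
        }
    }
}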

@ -4,6 +4,7 @@ import com.nm.gsgl.common.Constant;
import com.nm.gsgl.common.config.MyEnvironmentPostProcessor;
import com.nm.gsgl.common.utils.DatabaseUtil;
import com.nm.gsgl.common.utils.FileUtil;
import com.nm.gsgl.common.utils.LogUtil;
import com.nm.gsgl.common.utils.PathUtil;
import com.nm.gsgl.entity.sqlite.BusinessTasks;
import com.nm.gsgl.service.CreateSqbByDllService;
@ -11,8 +12,6 @@ import com.nm.gsgl.service.MinDualMachineService;
import com.nm.gsgl.service.MinFeeService;
import com.nm.gsgl.service.MinFtpDownloadService;
import com.nm.gsgl.service.SqliteBackupsService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
@ -56,7 +55,7 @@ public class MinFeeServiceImpl implements MinFeeService {
private String NFSFilePath;
@Resource
private SqliteBackupsService sqliteBackupsService;
public static Logger log = LoggerFactory.getLogger("MinFee");
//public static Logger log = LoggerFactory.getLogger("MinFee");
/**
* ()
*
@ -69,7 +68,8 @@ public class MinFeeServiceImpl implements MinFeeService {
//双机方案
//获取本机的机器ID,本机的机器ID 1-主机2-备用机
int mId = Integer.parseInt(machineId);
log.info("[uuid:{}]获取当前机器的id为{}", uuid, mId);
//log.info("[uuid:{}]获取当前机器的id为{}", uuid, mId);
LogUtil.WriteLog_MinFee("[uuid:" + uuid + "]-获取当前机器的id为" +mId, "MinFeeServiceImpl");
//验证是否当前服务程序处理事件
boolean validateHR;
if (mId == 1) {
@ -78,7 +78,8 @@ public class MinFeeServiceImpl implements MinFeeService {
validateHR = minDualMachineService.validateDualSpare(uuid, mId);
}
//如果是当前服务处理,继续下一步,否则程序定时任务不做处理,等待下次轮询
log.info("[uuid:{}]验证是否当前服务程序处理事件{}", uuid, validateHR);
//log.info("[uuid:{}]验证是否当前服务程序处理事件{}", uuid, validateHR);
LogUtil.WriteLog_MinFee("[uuid:" + uuid + "]-验证是否当前服务程序处理事件" +validateHR, "MinFeeServiceImpl");
if (validateHR) {
//开始下载扫描华软最小费额下载任务
minFtpDownloadService.getMinFeeHrMdTask(uuid);
@ -98,13 +99,15 @@ public class MinFeeServiceImpl implements MinFeeService {
}
if (isFinished) {
//更新中介库华软的下载状态
log.info("[uuid:{}]-更新中介库TF_DOWNLOAD_NOTIFY(id为:{})下载状态为1-已下载", uuid, id);
//log.info("[uuid:{}]-更新中介库TF_DOWNLOAD_NOTIFY(id为:{})下载状态为1-已下载", uuid, id);
LogUtil.WriteLog_MinFee("[uuid:" + uuid + "]-更新中介库TF_DOWNLOAD_NOTIFY(id为" +id+")下载状态为1-已下载", "MinFeeServiceImpl");
DatabaseUtil.updateSql(driverName, HRConnectionStringOracle, HROracleName, HROraclePass, Constant.UPDATE_TF_DOWNLOAD_NOTIFY_HR + id, uuid);
//删除下载的zip文件
FileUtil.delete(new File(NFSFilePath + PathUtil.downFile + businessTask.getFileName()));
sqliteBackupsService.buildBackups(uuid);
} else {
log.info("[uuid:{}]最小费额文件数据处理失败中介下载任务id{}不更新", uuid, id);
//log.info("[uuid:{}]最小费额文件数据处理失败中介下载任务id{}不更新", uuid, id);
LogUtil.WriteLog_MinFee("[uuid:" + uuid + "]-最小费额文件数据处理失败,中介下载任务(id为" +id+")不更新", "MinFeeServiceImpl");
}
}
}

@ -8,6 +8,7 @@ import com.nm.gsgl.common.utils.DatabaseUtil;
import com.nm.gsgl.common.utils.DateTimeUtil;
import com.nm.gsgl.common.utils.FTPUtil;
import com.nm.gsgl.common.utils.FileUtil;
import com.nm.gsgl.common.utils.LogUtil;
import com.nm.gsgl.common.utils.MD5Util;
import com.nm.gsgl.common.utils.PathUtil;
import com.nm.gsgl.common.utils.StringUtil;
@ -15,8 +16,6 @@ import com.nm.gsgl.entity.intermediary.DownloadNotify;
import com.nm.gsgl.entity.sqlite.BusinessTasks;
import com.nm.gsgl.service.MinFtpDownloadService;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
@ -31,7 +30,7 @@ import java.util.List;
*/
@Service
public class MinFtpDownloadServiceImpl implements MinFtpDownloadService {
public static Logger log = LoggerFactory.getLogger("MinFee");
//public static Logger log = LoggerFactory.getLogger("MinFee");
/**
* FTP
*/
@ -74,14 +73,17 @@ public class MinFtpDownloadServiceImpl implements MinFtpDownloadService {
*/
@Override
public void getMinFeeHrMdTask(String uuid) {
log.info("[uuid:{}]开始查询华软中介服务上的最小费额下载任务数据", uuid);
//log.info("[uuid:{}]开始查询华软中介服务上的最小费额下载任务数据", uuid);
LogUtil.WriteLog_MinFee("[uuid:" + uuid + "]-开始查询华软中介服务上的最小费额下载任务数据" , "MinFtpDownloadServiceImpl");
//查询下载任务数据
List<DownloadNotify> downloadNotifys = DatabaseUtil.selectDownloadNotify(driverName, HRConnectionStringOracle, HROracleName, HROraclePass, Constant.SELECT_TF_DOWNLOAD_NOTIFY_MIN_FEE);
String localPath = NFSFilePath + PathUtil.downFile;
FileUtil.fileCreat(localPath);
log.info("[uuid:{}]查询华软中介服务上的最小费额下载任务数据条数为{}", uuid, downloadNotifys.size());
//log.info("[uuid:{}]查询华软中介服务上的最小费额下载任务数据条数为{}", uuid, downloadNotifys.size());
LogUtil.WriteLog_MinFee("[uuid:" + uuid + "]-查询华软中介服务上的最小费额下载任务数据条数为"+downloadNotifys.size() , "MinFtpDownloadServiceImpl");
if (downloadNotifys.size() == 0) {
log.info("[uuid:{}]华软中介服务上无最小费额下载任务", uuid);
//log.info("[uuid:{}]华软中介服务上无最小费额下载任务", uuid);
LogUtil.WriteLog_MinFee("[uuid:" + uuid + "]-华软中介服务上无最小费额下载任务" , "MinFtpDownloadServiceImpl");
} else {
for (DownloadNotify downloadNotify : downloadNotifys) {
long start = System.currentTimeMillis();
@ -103,28 +105,38 @@ public class MinFtpDownloadServiceImpl implements MinFtpDownloadService {
String fileName = domain1.substring(domain1.lastIndexOf("/") + 1);
String ftpFilePath = domain1.replace(ftpUrl, Constant.NULL_STRING).replace(fileName, Constant.NULL_STRING);
if (businessTasks.size() == 0 || !FileUtil.fileExists(localPath + fileName)) {
log.info("[uuid:{}]开始下载华软中介服务上的最小费额任务数据,protocolType为{}", uuid, protocolType);
//log.info("[uuid:{}]开始下载华软中介服务上的最小费额任务数据,protocolType为{}", uuid, protocolType);
LogUtil.WriteLog_MinFee("[uuid:" + uuid + "]-开始下载华软中介服务上的最小费额任务数据,protocolType为"+protocolType , "MinFtpDownloadServiceImpl");
boolean isDownload;
try {
isDownload = FTPUtil.downloadFile(HRFtpIP, HRFtpUsername, HRPassword, Integer.parseInt(HRPort), ftpFilePath, localPath, fileName);
} catch (Exception e) {
log.error("[uuid:{}]{}-下载zip文件({}){}失败,等待下次轮询", uuid, protocolType, id, tableName);
//log.error("[uuid:{}]{}-下载zip文件({}){}失败,等待下次轮询", uuid, protocolType, id, tableName);
LogUtil.WriteLog_MinFee("[uuid:" + uuid + "]-下载zip文件protocolType为"+protocolType+"id为"+id+"业务主表表名为"+tableName , "MinFtpDownloadServiceImpl");
throw new PPException(MessageEnum.FTP.getCode(), MessageEnum.FTP.getMessage());
}
long end = System.currentTimeMillis();
log.info("[uuid:{}]下载耗时:{}毫秒", uuid, end - start);
//log.info("[uuid:{}]下载耗时:{}毫秒", uuid, end - start);
if (isDownload) {
//校验md5
String downlMd5 = MD5Util.getMd5ByFilePath(localPath + fileName);
log.info("[uuid:{}]{}-下载zip文件({}){},原MD5值为{}", uuid, protocolType, id, tableName, md5);
log.info("[uuid:{}]{}-下载zip文件({}){},下载的MD5值为{}", uuid, protocolType, id, tableName, downlMd5);
//log.info("[uuid:{}]{}-下载zip文件({}){},原MD5值为{}", uuid, protocolType, id, tableName, md5);
//log.info("[uuid:{}]{}-下载zip文件({}){},下载的MD5值为{}", uuid, protocolType, id, tableName, downlMd5);
LogUtil.WriteLog_MinFee("[uuid:" + uuid + "]-下载zip文件protocolType为"+protocolType+"id为"+id+"业务主表表名为"+tableName +"原MD5值为"+md5,
"MinFtpDownloadServiceImpl");
LogUtil.WriteLog_MinFee("[uuid:" + uuid + "]-下载zip文件protocolType为"+protocolType+"id为"+id+"业务主表表名为"+tableName +"下载的MD5值为"+downlMd5,
"MinFtpDownloadServiceImpl");
if (StringUtils.isBlank(downlMd5) || !md5.equals(downlMd5)) {
log.info("[uuid:{}]{}-下载zip文件({}){},MD5校验失败", uuid, protocolType, id, tableName);
//log.info("[uuid:{}]{}-下载zip文件({}){},MD5校验失败", uuid, protocolType, id, tableName);
LogUtil.WriteLog_MinFee("[uuid:" + uuid + "]-下载zip文件protocolType为"+protocolType+"id为"+id+"业务主表表名为"+tableName +"MD5校验失败",
"MinFtpDownloadServiceImpl");
//throw new PPException(MessageEnum.MD5校验失败.getCode(), MessageEnum.MD5校验失败.getMessage());
} else {
log.info("[uuid:{}]{}-下载zip文件({}){},MD5校验成功", uuid, protocolType, id, tableName);
//log.info("[uuid:{}]{}-下载zip文件({}){},MD5校验成功", uuid, protocolType, id, tableName);
LogUtil.WriteLog_MinFee("[uuid:" + uuid + "]-下载zip文件protocolType为"+protocolType+"id为"+id+"业务主表表名为"+tableName +"MD5校验成功",
"MinFtpDownloadServiceImpl");
//获取新的版本号
String newVersion = StringUtil.getNewVersion(protocolType, version);
//插入sqlite数据库任务表中
@ -149,14 +161,18 @@ public class MinFtpDownloadServiceImpl implements MinFtpDownloadService {
String sqliteInsert = StringUtil.sqliteInsert(bt);
if (businessTasks.size() == 0) {
DatabaseUtil.insertSqlite(sqliteDriverName, sqliteUrl, sqliteInsert);
log.info("[uuid:{}]插入sqlite数据库business_tasks_info表中任务数据,id为{}protocolType为{}", uuid, id, protocolType);
//log.info("[uuid:{}]插入sqlite数据库business_tasks_info表中任务数据,id为{}protocolType为{}", uuid, id, protocolType);
LogUtil.WriteLog_MinFee("[uuid:" + uuid + "]-插入sqlite数据库business_tasks_info表中任务数据,id为"+id+"protocolType为"+protocolType,
"MinFtpDownloadServiceImpl");
}
}
}
} else {
log.info("[uuid:{}]sqlite数据库中已存在下载的记录同时下载路径下存在该文件{},跳过下载执行后面的处理程序,id为{}", uuid, fileName, id);
//log.info("[uuid:{}]sqlite数据库中已存在下载的记录同时下载路径下存在该文件{},跳过下载执行后面的处理程序,id为{}", uuid, fileName, id);
LogUtil.WriteLog_MinFee("[uuid:" + uuid + "]-sqlite数据库中已存在下载的记录同时下载路径下存在该文件"+fileName+"跳过下载执行后面的处理程序,id为"+id,
"MinFtpDownloadServiceImpl");
}
}

@ -1,11 +1,10 @@
package com.nm.gsgl.service.impl;
import com.nm.gsgl.common.utils.FileUtil;
import com.nm.gsgl.common.utils.LogUtil;
import com.nm.gsgl.common.utils.PathUtil;
import com.nm.gsgl.service.SqliteBackupsService;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
/**
* @author: shuguang
@ -13,28 +12,28 @@ import org.springframework.transaction.annotation.Transactional;
* @description: sqlite
*/
@Service
@Slf4j
public class SqliteBackupsServiceImpl implements SqliteBackupsService {
/**
* sqlite
*
* @param uuid uuid
* @author shuguang
* @date 2023-02-21 9:01
*/
@Override
public boolean buildBackups(String uuid) {
public void buildBackups(String uuid) {
String sqlitePath = PathUtil.webPathSqlite+ "identifier.sqlite";
log.info("[uuid:{}]开始备份sqlite文件{}", uuid,sqlitePath);
if(!FileUtil.fileExists(sqlitePath)){
log.info("[uuid:{}]备份sqlite文件{}不存在", uuid,sqlitePath);
return false;
}
//log.info("[uuid:{}]开始备份sqlite文件{}", uuid,sqlitePath);
LogUtil.WriteLog_BackupSqlite("[uuid:" + uuid + "]-开始备份sqlite文件" + sqlitePath, "SqliteBackupsServiceImpl");
//if(!FileUtil.fileExists(sqlitePath)){
// //log.info("[uuid:{}]备份sqlite文件{}不存在", uuid,sqlitePath);
// LogUtil.WriteLog_BackupSqlite("[uuid:" + uuid + "]-备份sqlite文件不存在" + sqlitePath, "SqliteBackupsServiceImpl");
// return false;
//}
String sqliteBackupsPath = PathUtil.backUpPath+"/sqliteBackups/";
FileUtil.fileCreat(sqliteBackupsPath);
FileUtil.copyFile(sqlitePath, sqliteBackupsPath+"identifier.sqlite");
log.info("[uuid:{}]备份sqlite到目录{}", uuid,sqliteBackupsPath+"identifier.sqlite");
return false;
//log.info("[uuid:{}]备份sqlite到目录{}", uuid,sqliteBackupsPath+"identifier.sqlite");
LogUtil.WriteLog_BackupSqlite("[uuid:" + uuid + "]-备份sqlite到目录" + sqliteBackupsPath+"identifier.sqlite", "SqliteBackupsServiceImpl");
}
}

@ -11,6 +11,7 @@ import com.nm.gsgl.common.exception.PPException;
import com.nm.gsgl.common.utils.DatabaseUtil;
import com.nm.gsgl.common.utils.DateTimeUtil;
import com.nm.gsgl.common.utils.FileUtil;
import com.nm.gsgl.common.utils.LogUtil;
import com.nm.gsgl.common.utils.MD5Util;
import com.nm.gsgl.common.utils.PathUtil;
import com.nm.gsgl.common.utils.UnzipUtil;
@ -68,7 +69,7 @@ public class ZipToSqbServiceImpl implements ZipToSqbService {
@Resource
private BasicInfoMapper basicInfoMapper;
public static Logger log = LoggerFactory.getLogger("BlackCard");
//public static Logger log = LoggerFactory.getLogger("BlackCard");
/**
* SQB
*
@ -97,9 +98,9 @@ public class ZipToSqbServiceImpl implements ZipToSqbService {
//压缩文件存放-会被删除
if (!FileUtil.fileExists(localPath + fileName)) {
log.info("[uuid:{}]下载的zip文件{}不存在", uuid, localPath + fileName);
throw new PPException(MessageEnum.zip.getCode(), MessageEnum.zip.getMessage());
}
//log.info("[uuid:{}]下载的zip文件{}不存在", uuid, localPath + fileName);
LogUtil.WriteLog_Error("[uuid:" + uuid + "]-下载的zip文件不存在" + localPath + fileName, "ZipToSqbServiceImpl");
throw new PPException(MessageEnum.zip.getCode(), MessageEnum.zip.getMessage());
}
//调用动态库生成DBF文件
try {
//dbf文件名称
@ -171,7 +172,8 @@ public class ZipToSqbServiceImpl implements ZipToSqbService {
bfSqbName = "SQB_EMERCARINC";
break;
default:
log.error("[uuid:{}]-未找到相应的参数处理方法", uuid);
//log.error("[uuid:{}]-未找到相应的参数处理方法", uuid);
LogUtil.WriteLog_Error("[uuid:" + uuid + "]-未找到相应的参数处理方法" , "ZipToSqbServiceImpl");
throw new PPException(MessageEnum..getCode(), MessageEnum..getMessage());
}
FileUtil.fileCreat(sqbPath + dbfDir);
@ -195,15 +197,20 @@ public class ZipToSqbServiceImpl implements ZipToSqbService {
FileUtil.fileCreat(insertDbPath);
FileUtil.copyFile(localPath + fileName, insertDbPath + fileName);
}
log.info("[uuid:{}]调用动态库生成全量SQB文件中,协议类型为:{}zip文件名为{}", uuid, protocolType, fileName);
//log.info("[uuid:{}]调用动态库生成全量SQB文件中,协议类型为:{}zip文件名为{}", uuid, protocolType, fileName);
LogUtil.WriteLog_HRBlackCard("[uuid:" + uuid + "]-调用动态库生成全量SQB文件中,协议类型为" + protocolType+"zip文件名为" + fileName, "ZipToSqbServiceImpl");
countSqb = CMinFeeCreator.INSTANCE.MakeBlackAllSQB(etcType, version, format,
(localPath + fileName).replace("\\", "/"), sqbPathName.replace("\\", "/"),
sqbZlibPathName.replace("\\", "/"), bytMd5);
log.info("[uuid:{}]调用动态库生成全量SQB文件完成,返回值为:{}", uuid, countSqb);
//log.info("[uuid:{}]调用动态库生成全量SQB文件完成,返回值为:{}", uuid, countSqb);
LogUtil.WriteLog_HRBlackCard("[uuid:" + uuid + "]-调用动态库生成全量SQB文件完成,返回值为" + countSqb, "ZipToSqbServiceImpl");
} else {
log.info("[uuid:{}]调用动态库生成增量SQB文件中,协议类型为:{}zip文件名为{}", uuid, protocolType, fileName);
//log.info("[uuid:{}]调用动态库生成增量SQB文件中,协议类型为:{}zip文件名为{}", uuid, protocolType, fileName);
LogUtil.WriteLog_HRBlackCard("[uuid:" + uuid + "]-调用动态库生成增量SQB文件中,协议类型为" + protocolType+"zip文件名为" + fileName, "ZipToSqbServiceImpl");
//每天生成一个备份zip文件夹存放增量下载的zip文件
dayBackPath = NFSFilePath + PathUtil.downFileBackup + dbfDir + File.separator + dateTime + File.separator;
FileUtil.fileCreat(dayBackPath);
@ -218,7 +225,8 @@ public class ZipToSqbServiceImpl implements ZipToSqbService {
NewFileInfo newFileInfo = newFileInfoMapper.selectById(protocolType);
if(newFileInfo != null ){
Date updateTime = newFileInfo.getUpdateTime();
log.info("[uuid:{}]根据主键protocolType={}查询DIS_NEWFILEINFO_JAVA表中上次的生成时间为{}", uuid, protocolType, updateTime);
//log.info("[uuid:{}]根据主键protocolType={}查询DIS_NEWFILEINFO_JAVA表中上次的生成时间为{}", uuid, protocolType, updateTime);
LogUtil.WriteLog_HRBlackCard("[uuid:" + uuid + "]-根据主键protocolType=" + protocolType+"查询DIS_NEWFILEINFO_JAVA表中上次的生成时间为" + updateTime, "ZipToSqbServiceImpl");
int hours = DateTimeUtil.getHour(date);
if (updateTime != null) {
if (hours != DateTimeUtil.getHour(updateTime)) {
@ -237,7 +245,8 @@ public class ZipToSqbServiceImpl implements ZipToSqbService {
//如果是增量先查询上一版本的版本号
if (Constant.ONE_HOUR_ISSUED.contains(protocolType)) {
version = getVersion(protocolType, date, uuid);
log.info("[uuid:{}]最终增量本次版本号为{}", uuid, version);
//log.info("[uuid:{}]最终增量本次版本号为{}", uuid, version);
LogUtil.WriteLog_HRBlackCard("[uuid:" + uuid + "]-最终增量本次版本号为" + version, "ZipToSqbServiceImpl");
}
//增量目录名称
StringBuilder incDirLst = new StringBuilder();
@ -253,7 +262,8 @@ public class ZipToSqbServiceImpl implements ZipToSqbService {
zlibSqbName = bfSqbName + Constant.STR_ + formateString + Constant.STR_ + version + Constant.STR_ZLIB;
sqbZlibPathName = sqbPath + dbfDir + File.separator + zlibSqbName;
log.info("[uuid:{}]调用动态库生成增量SQB文件中,协议类型为:{}zip文件名为{}", uuid, protocolType, fileName);
//log.info("[uuid:{}]调用动态库生成增量SQB文件中,协议类型为:{}zip文件名为{}", uuid, protocolType, fileName);
LogUtil.WriteLog_HRBlackCard("[uuid:" + uuid + "]-调用动态库生成增量SQB文件中,协议类型为" + protocolType+"zip文件名为"+fileName , "ZipToSqbServiceImpl");
//获取最新的全量sqb文件全路径
//全量文件
@ -274,9 +284,13 @@ public class ZipToSqbServiceImpl implements ZipToSqbService {
//获取当前时间加载增量时间之前的全量文件名
Date tenDaysAgo = DateTimeUtil.addDateDays(new Date(), -(Integer.parseInt(incDays)));
String tenDaysVersion = DateTimeUtil.getFormateString(tenDaysAgo, Constant.YYMMDD) + "103";
log.info("[uuid:{}]获取{}天前全量SQB,修改时间为{}", uuid, incDays,tenDaysAgo);
//log.info("[uuid:{}]获取{}天前全量SQB,修改时间为{}", uuid, incDays,tenDaysAgo);
LogUtil.WriteLog_HRBlackCard("[uuid:" + uuid + "]-获取" + incDays+"天前全量SQB,修改时间为" +tenDaysAgo, "ZipToSqbServiceImpl");
if (fileInfos2.size() > 0) {
log.info("[uuid:{}]获取全量SQB为{}", uuid, fileInfos2);
//log.info("[uuid:{}]获取全量SQB为{}", uuid, fileInfos2);
LogUtil.WriteLog_HRBlackCard("[uuid:" + uuid + "]-获取全量SQB为" + fileInfos2, "ZipToSqbServiceImpl");
//对文件的最后修改时间进行排序
fileInfos2.sort(Comparator.comparing(FileInfo::getLastModified).reversed());
for (FileInfo fileInfo2 : fileInfos2) {
@ -289,37 +303,49 @@ public class ZipToSqbServiceImpl implements ZipToSqbService {
pblackAllFileName = fileInfos2.get(0).getFileName();
}
} else {
log.error("[uuid:{}]获取获取{}天前全量SQB文件失败", uuid,incDays);
LogUtil.WriteLog_Error("[uuid:" + uuid + "]-获取" + incDays+"天前全量SQB文件失败" , "ZipToSqbServiceImpl");
//更新sqlite任务表的任务状态为2-报错
DatabaseUtil.insertSqlite(sqliteDriverName, sqliteUrl, Constant.ERROR_UPDATE_BUSINESS_HANDLER + businessTask.getId());
throw new PPException(MessageEnum..getCode(), MessageEnum..getMessage());
}
log.info("[uuid:{}]获取获取{}天前全量SQB名称{}", uuid, incDays, pblackAllFileName);
//log.info("[uuid:{}]获取获取{}天前全量SQB名称{}", uuid, incDays, pblackAllFileName);
LogUtil.WriteLog_HRBlackCard("[uuid:" + uuid + "]-获取" + incDays+"天前全量SQB名称"+pblackAllFileName , "ZipToSqbServiceImpl");
}
//如果没有最新的全量SQB文件时不生成增量文件
if (StringUtils.isNotBlank(pblackAllFileName)) {
log.info("[uuid:{}]调用动态库生成增量入参,全量SQB文件全路径{}", uuid, (sqbQlFileDir + File.separator + pblackAllFileName).replace("\\", "/"));
log.info("[uuid:{}]调用动态库生成增量入参, 生成SQB文件全路径{}", uuid, sqbPathName.replace("\\", "/"));
log.info("[uuid:{}]调用动态库生成增量入参, 生成SQBZLIB文件全路径{}", uuid, sqbZlibPathName.replace("\\", "/"));
//log.info("[uuid:{}]调用动态库生成增量入参,全量SQB文件全路径{}", uuid, (sqbQlFileDir + File.separator + pblackAllFileName).replace("\\", "/"));
//log.info("[uuid:{}]调用动态库生成增量入参, 生成SQB文件全路径{}", uuid, sqbPathName.replace("\\", "/"));
//log.info("[uuid:{}]调用动态库生成增量入参, 生成SQBZLIB文件全路径{}", uuid, sqbZlibPathName.replace("\\", "/"));
LogUtil.WriteLog_HRBlackCard("[uuid:" + uuid + "]-调用动态库生成增量入参,全量SQB文件全路径" + (sqbQlFileDir + File.separator + pblackAllFileName).replace("\\", "/") , "ZipToSqbServiceImpl");
LogUtil.WriteLog_HRBlackCard("[uuid:" + uuid + "]-调用动态库生成增量入参,生成SQB文件全路径" + sqbPathName.replace("\\", "/") , "ZipToSqbServiceImpl");
LogUtil.WriteLog_HRBlackCard("[uuid:" + uuid + "]-调用动态库生成增量入参,生成SQBzlib文件全路径" + sqbZlibPathName.replace("\\", "/") , "ZipToSqbServiceImpl");
countSqb = CMinFeeCreator.INSTANCE.MakeBlackIncSQB(etcType, version, format, incDirLst.toString(),
(sqbQlFileDir + File.separator + pblackAllFileName).replace("\\", "/"),
sqbPathName.replace("\\", "/"), sqbZlibPathName.replace("\\", "/"), bytMd5);
log.info("[uuid:{}]调用动态库生成增量SQB文件完成,返回值为:{}", uuid, countSqb);
//log.info("[uuid:{}]调用动态库生成增量SQB文件完成,返回值为:{}", uuid, countSqb);
LogUtil.WriteLog_HRBlackCard("[uuid:" + uuid + "]-]调用动态库生成增量SQB文件完成,返回值为" + countSqb , "ZipToSqbServiceImpl");
}
} else {
log.info("[uuid:{}]该时间点{}已下发/或者该点不下发", uuid, protocolType);
//log.info("[uuid:{}]该时间点{}已下发/或者该点不下发", uuid, protocolType);
LogUtil.WriteLog_HRBlackCard("[uuid:" + uuid + "]-该时间点" + protocolType+"已下发/或者该点不下发" , "ZipToSqbServiceImpl");
//更新sqlite任务表的任务状态为3-不下发
DatabaseUtil.insertSqlite(sqliteDriverName, sqliteUrl,
Constant.NOT_DIS_UPDATE_BUSINESS_HANDLER + DateTimeUtil.getFormateString(new Date(), Constant.YYYY_MM_DD_HH_MM_SS) + Constant.WHERE_ID + businessTask.getId());
log.info("[uuid:{}]更新business_tasks_info任务表的下发状态为3-不下发id为:{}", uuid, businessTask.getId());
//log.info("[uuid:{}]更新business_tasks_info任务表的下发状态为3-不下发id为:{}", uuid, businessTask.getId());
LogUtil.WriteLog_HRBlackCard("[uuid:" + uuid + "]-更新business_tasks_info任务表的下发状态为3-不下发id为" + businessTask.getId() , "ZipToSqbServiceImpl");
}
}
log.info("[uuid:{}]调用动态库生成SQB返回值count为{}", uuid, countSqb);
//log.info("[uuid:{}]调用动态库生成SQB返回值count为{}", uuid, countSqb);
LogUtil.WriteLog_HRBlackCard("[uuid:" + uuid + "]-调用动态库生成SQB返回值count为" + countSqb , "ZipToSqbServiceImpl");
if (countSqb != 0) {
if (countSqb > 0) {
String formateStr = DateTimeUtil.getFormateString(date, Constant.YYYYMMDDHHMMSS);
log.info("[uuid:{}]开始生成SQB备份文件", uuid);
//log.info("[uuid:{}]开始生成SQB备份文件", uuid);
LogUtil.WriteLog_HRBlackCard("[uuid:" + uuid + "]-]开始生成SQB备份文件" , "ZipToSqbServiceImpl");
//sqb备份目录
FileUtil.fileCreat(NFSFilePath + PathUtil.sqbBackPath + dbfDir);
String newSqbBackUpName = formateStr + Constant.STR_ + bfSqbName + Constant.STR_ + formateString + Constant.STR_ + version + Constant.STR_SQB;
@ -328,8 +354,8 @@ public class ZipToSqbServiceImpl implements ZipToSqbService {
//获取生成SQB的zlib的文件名
String zlibFileNameMd5Sqb = FileUtil.zlibFileName(NFSFilePath + PathUtil.sqbPath + dbfDir, zlibSqbName.replace(Constant.STR_ZLIB, Constant.NULL_STRING));
log.info("[uuid:{}]获取zlib的文件名称为:{}", uuid, zlibFileNameMd5Sqb);
//log.info("[uuid:{}]获取zlib的文件名称为:{}", uuid, zlibFileNameMd5Sqb);
LogUtil.WriteLog_HRBlackCard("[uuid:" + uuid + "]-获取zlib的文件名称为" + zlibFileNameMd5Sqb, "ZipToSqbServiceImpl");
//更新CMS_NAME_TABLE_AUTO_JAVA表中的下发类型
UpdateWrapper<NameTableAuto> updateWrapper = new UpdateWrapper<>();
//ISHIGHWAY=1高速
@ -360,7 +386,9 @@ public class ZipToSqbServiceImpl implements ZipToSqbService {
updateWrapper.set("EMERCARINCFLAG", Integer.parseInt(protocolType));
break;
default:
log.error("[uuid:{}]-未找到相应的更新自动下发分中心相应状态方法", uuid);
//log.error("[uuid:{}]-未找到相应的更新自动下发分中心相应状态方法", uuid);
LogUtil.WriteLog_Error("[uuid:" + uuid + "]-未找到相应的更新自动下发分中心相应状态方法" , "ZipToSqbServiceImpl");
throw new PPException(MessageEnum..getCode(), MessageEnum..getMessage());
}
@ -371,9 +399,11 @@ public class ZipToSqbServiceImpl implements ZipToSqbService {
String nowString = DateTimeUtil.getFormateString(new Date(), Constant.YYYY_MM_DD_HH_MM_SS);
String sqlParam = Constant.UPDATE_BUSINESS_HANDLER + nowString + Constant.PUBLISH_TIME + nowString + Constant.WHERE_ID + businessTask.getId();
if (DatabaseUtil.insertSqlite(sqliteDriverName, sqliteUrl, sqlParam) > 0) {
log.info("[uuid:{}]更新business_tasks_info调用动态库任务列表成功id为:{}", uuid, businessTask.getId());
//log.info("[uuid:{}]更新business_tasks_info调用动态库任务列表成功id为:{}", uuid, businessTask.getId());
LogUtil.WriteLog_HRBlackCard("[uuid:" + uuid + "]-更新business_tasks_info调用动态库任务列表成功,id为" + businessTask.getId(), "ZipToSqbServiceImpl");
}
log.info("[uuid:{}]调用动态库生成SQB任务,下载协议类型:{},zip文件名为:{}执行成功", uuid, protocolType, fileName);
//log.info("[uuid:{}]调用动态库生成SQB任务,下载协议类型:{},zip文件名为:{}执行成功", uuid, protocolType, fileName);
LogUtil.WriteLog_HRBlackCard("[uuid:" + uuid + "]-调用动态库生成SQB任务,下载协议类型为" + protocolType+"zip文件名为"+fileName+"执行成功", "ZipToSqbServiceImpl");
} else {
//更新sqlite任务表的任务状态为2-报错(等待下次轮询)
DatabaseUtil.insertSqlite(sqliteDriverName, sqliteUrl, Constant.ERROR_UPDATE_BUSINESS_HANDLER + businessTask.getId());
@ -381,15 +411,17 @@ public class ZipToSqbServiceImpl implements ZipToSqbService {
+ ",SQB动态库返回值为" + countSqb);
}
} else {
log.info("[uuid:{}]该时间点{}已下发/或者该点不下发", uuid, protocolType);
LogUtil.WriteLog_HRBlackCard("[uuid:" + uuid + "]-该时间点" + protocolType+"已下发/或者该点不下发", "ZipToSqbServiceImpl");
//更新sqlite任务表的任务状态为3-不下发
DatabaseUtil.insertSqlite(sqliteDriverName, sqliteUrl,
Constant.NOT_DIS_UPDATE_BUSINESS_HANDLER + DateTimeUtil.getFormateString(new Date(), Constant.YYYY_MM_DD_HH_MM_SS) + Constant.WHERE_ID + businessTask.getId());
log.info("[uuid:{}]更新business_tasks_info任务表的下发状态为3-不下发id为:{}", uuid, businessTask.getId());
//log.info("[uuid:{}]更新business_tasks_info任务表的下发状态为3-不下发id为:{}", uuid, businessTask.getId());
LogUtil.WriteLog_HRBlackCard("[uuid:" + uuid + "]-更新business_tasks_info任务表的下发状态为3-不下发id为" + businessTask.getId(), "ZipToSqbServiceImpl");
}
} catch (Exception e) {
log.error("[uuid:{}]调用动态库生成SQB失败{}", uuid, e.getMessage(), e);
//log.error("[uuid:{}]调用动态库生成SQB失败{}", uuid, e.getMessage(), e);
LogUtil.WriteLog_Error("[uuid:" + uuid + "]调用动态库生成SQB失败" + e.getMessage(), "ZipToSqbServiceImpl");
//更新sqlite任务表的任务状态为2-报错
DatabaseUtil.insertSqlite(sqliteDriverName, sqliteUrl, Constant.ERROR_UPDATE_BUSINESS_HANDLER + businessTask.getId());
throw new PPException(MessageEnum.SQB.getCode(), MessageEnum.SQB.getMessage()
@ -425,7 +457,8 @@ public class ZipToSqbServiceImpl implements ZipToSqbService {
//压缩文件存放-会被删除
if (!FileUtil.fileExists(localPath + fileName)) {
log.info("[uuid:{}]下载的zip文件{}不存在", uuid, localPath + fileName);
//log.info("[uuid:{}]下载的zip文件{}不存在", uuid, localPath + fileName);
LogUtil.WriteLog_Error("[uuid:" + uuid + "]-下载的zip文件不存在" + localPath + fileName, "ZipToSqbServiceImpl");
throw new PPException(MessageEnum.zip.getCode(), MessageEnum.zip.getMessage());
}
//调用动态库生成DBF文件
@ -454,7 +487,8 @@ public class ZipToSqbServiceImpl implements ZipToSqbService {
bfSqbName = "SQB_SNBLACKINC";
break;
default:
log.error("[uuid:{}]-未找到相应的参数处理方法", uuid);
//log.error("[uuid:{}]-未找到相应的参数处理方法", uuid);
LogUtil.WriteLog_Error("[uuid:" + uuid + "]-未找到相应的参数处理方法" , "ZipToSqbServiceImpl");
throw new PPException(MessageEnum..getCode(), MessageEnum..getMessage());
}
FileUtil.fileCreat(sqbPath + dbfDir);
@ -472,15 +506,20 @@ public class ZipToSqbServiceImpl implements ZipToSqbService {
dayBackPath = NFSFilePath + PathUtil.downFileBackup + dbfDir + File.separator;
FileUtil.fileCreat(dayBackPath);
FileUtil.copyFile(localPath + fileName, dayBackPath + fileName);
log.info("[uuid:{}]调用动态库生成全量SQB文件中,协议类型为:{}zip文件名为{}", uuid, protocolType, fileName);
//log.info("[uuid:{}]调用动态库生成全量SQB文件中,协议类型为:{}zip文件名为{}", uuid, protocolType, fileName);
LogUtil.WriteLog_MDBlackCard("[uuid:" + uuid + "]-调用动态库生成全量SQB文件中,协议类型为" + protocolType+"zip文件名为" + fileName, "ZipToSqbServiceImpl");
countSqb = CMinFeeCreator.INSTANCE.MakeBlackAllSQB(etcType, version, format,
(localPath + fileName).replace("\\", "/"), sqbPathName.replace("\\", "/"),
sqbZlibPathName.replace("\\", "/"), bytMd5);
log.info("[uuid:{}]调用动态库生成全量SQB文件完成,返回值为:{}", uuid, countSqb);
//log.info("[uuid:{}]调用动态库生成全量SQB文件完成,返回值为:{}", uuid, countSqb);
LogUtil.WriteLog_MDBlackCard("[uuid:" + uuid + "]-调用动态库生成全量SQB文件完成,返回值为" + countSqb, "ZipToSqbServiceImpl");
} else {
log.info("[uuid:{}]调用动态库生成增量SQB文件中,协议类型为:{}zip文件名为{}", uuid, protocolType, fileName);
//log.info("[uuid:{}]调用动态库生成增量SQB文件中,协议类型为:{}zip文件名为{}", uuid, protocolType, fileName);
LogUtil.WriteLog_MDBlackCard("[uuid:" + uuid + "]-调用动态库生成增量SQB文件中,协议类型为" + protocolType+"zip文件名为" + fileName, "ZipToSqbServiceImpl");
//每天生成一个备份zip文件夹存放增量下载的zip文件
dayBackPath = NFSFilePath + PathUtil.downFileBackup + dbfDir + File.separator + dateTime + File.separator;
FileUtil.fileCreat(dayBackPath);
@ -495,7 +534,9 @@ public class ZipToSqbServiceImpl implements ZipToSqbService {
NewFileInfo newFileInfo = newFileInfoMapper.selectById(protocolType);
if(newFileInfo != null ){
Date updateTime = newFileInfo.getUpdateTime();
log.info("[uuid:{}]根据主键protocolType={}查询DIS_NEWFILEINFO_JAVA表中上次的生成时间为{}", uuid, protocolType, updateTime);
//log.info("[uuid:{}]根据主键protocolType={}查询DIS_NEWFILEINFO_JAVA表中上次的生成时间为{}", uuid, protocolType, updateTime);
LogUtil.WriteLog_MDBlackCard("[uuid:" + uuid + "]-根据主键protocolType=" + protocolType+"查询DIS_NEWFILEINFO_JAVA表中上次的生成时间为" + updateTime, "ZipToSqbServiceImpl");
int hours = DateTimeUtil.getHour(date);
if (updateTime != null) {
if (hours != DateTimeUtil.getHour(updateTime)) {
@ -514,7 +555,8 @@ public class ZipToSqbServiceImpl implements ZipToSqbService {
//如果是增量先查询上一版本的版本号
if (Constant.ONE_HOUR_ISSUED.contains(protocolType)) {
version = getVersion(protocolType, date, uuid);
log.info("[uuid:{}]最终增量本次版本号为{}", uuid, version);
//log.info("[uuid:{}]最终增量本次版本号为{}", uuid, version);
LogUtil.WriteLog_MDBlackCard("[uuid:" + uuid + "]-最终增量本次版本号为" + version, "ZipToSqbServiceImpl");
}
//增量目录名称
StringBuilder incDirLst = new StringBuilder();
@ -530,7 +572,8 @@ public class ZipToSqbServiceImpl implements ZipToSqbService {
zlibSqbName = bfSqbName + Constant.STR_ + formateString + Constant.STR_ + version + Constant.STR_ZLIB;
sqbZlibPathName = sqbPath + dbfDir + File.separator + zlibSqbName;
log.info("[uuid:{}]调用动态库生成增量SQB文件中,协议类型为:{}zip文件名为{}", uuid, protocolType, fileName);
//log.info("[uuid:{}]调用动态库生成增量SQB文件中,协议类型为:{}zip文件名为{}", uuid, protocolType, fileName);
LogUtil.WriteLog_MDBlackCard("[uuid:" + uuid + "]-调用动态库生成增量SQB文件中,协议类型为" + protocolType+"zip文件名为"+fileName , "ZipToSqbServiceImpl");
//获取最新的全量sqb文件全路径
//全量文件
@ -551,9 +594,13 @@ public class ZipToSqbServiceImpl implements ZipToSqbService {
//获取当前时间加载增量时间之前的全量文件名
Date tenDaysAgo = DateTimeUtil.addDateDays(new Date(), -(Integer.parseInt(incDays)));
String tenDaysVersion = DateTimeUtil.getFormateString(tenDaysAgo, Constant.YYMMDD) + "103";
log.info("[uuid:{}]获取{}天前全量SQB,修改时间为{}", uuid, incDays,tenDaysAgo);
//log.info("[uuid:{}]获取{}天前全量SQB,修改时间为{}", uuid, incDays,tenDaysAgo);
LogUtil.WriteLog_MDBlackCard("[uuid:" + uuid + "]-获取" + incDays+"天前全量SQB,修改时间为" +tenDaysAgo, "ZipToSqbServiceImpl");
if (fileInfos2.size() > 0) {
log.info("[uuid:{}]获取全量SQB为{}", uuid, fileInfos2);
//log.info("[uuid:{}]获取全量SQB为{}", uuid, fileInfos2);
LogUtil.WriteLog_MDBlackCard("[uuid:" + uuid + "]-获取全量SQB为" + fileInfos2, "ZipToSqbServiceImpl");
//对文件的最后修改时间进行排序
fileInfos2.sort(Comparator.comparing(FileInfo::getLastModified).reversed());
for (FileInfo fileInfo2 : fileInfos2) {
@ -566,37 +613,50 @@ public class ZipToSqbServiceImpl implements ZipToSqbService {
pblackAllFileName = fileInfos2.get(0).getFileName();
}
} else {
log.error("[uuid:{}]获取获取{}天前全量SQB文件失败", uuid,incDays);
//log.error("[uuid:{}]获取获取{}天前全量SQB文件失败", uuid,incDays);
LogUtil.WriteLog_Error("[uuid:" + uuid + "]-获取" + incDays+"天前全量SQB文件失败" , "ZipToSqbServiceImpl");
//更新sqlite任务表的任务状态为2-报错
DatabaseUtil.insertSqlite(sqliteDriverName, sqliteUrl, Constant.ERROR_UPDATE_BUSINESS_HANDLER + businessTask.getId());
throw new PPException(MessageEnum..getCode(), MessageEnum..getMessage());
}
log.info("[uuid:{}]获取获取{}天前全量SQB名称{}", uuid, incDays, pblackAllFileName);
//log.info("[uuid:{}]获取获取{}天前全量SQB名称{}", uuid, incDays, pblackAllFileName);
LogUtil.WriteLog_MDBlackCard("[uuid:" + uuid + "]-获取" + incDays+"天前全量SQB名称"+pblackAllFileName , "ZipToSqbServiceImpl");
}
//如果没有最新的全量SQB文件时不生成增量文件
if (StringUtils.isNotBlank(pblackAllFileName)) {
log.info("[uuid:{}]调用动态库生成增量入参,全量SQB文件全路径{}", uuid, (sqbQlFileDir + File.separator + pblackAllFileName).replace("\\", "/"));
log.info("[uuid:{}]调用动态库生成增量入参, 生成SQB文件全路径{}", uuid, sqbPathName.replace("\\", "/"));
log.info("[uuid:{}]调用动态库生成增量入参, 生成SQBZLIB文件全路径{}", uuid, sqbZlibPathName.replace("\\", "/"));
//log.info("[uuid:{}]调用动态库生成增量入参,全量SQB文件全路径{}", uuid, (sqbQlFileDir + File.separator + pblackAllFileName).replace("\\", "/"));
//log.info("[uuid:{}]调用动态库生成增量入参, 生成SQB文件全路径{}", uuid, sqbPathName.replace("\\", "/"));
//log.info("[uuid:{}]调用动态库生成增量入参, 生成SQBZLIB文件全路径{}", uuid, sqbZlibPathName.replace("\\", "/"));
LogUtil.WriteLog_MDBlackCard("[uuid:" + uuid + "]-调用动态库生成增量入参,全量SQB文件全路径" + (sqbQlFileDir + File.separator + pblackAllFileName).replace("\\", "/") , "ZipToSqbServiceImpl");
LogUtil.WriteLog_MDBlackCard("[uuid:" + uuid + "]-调用动态库生成增量入参,生成SQB文件全路径" + sqbPathName.replace("\\", "/") , "ZipToSqbServiceImpl");
LogUtil.WriteLog_MDBlackCard("[uuid:" + uuid + "]-调用动态库生成增量入参,生成SQBzlib文件全路径" + sqbZlibPathName.replace("\\", "/") , "ZipToSqbServiceImpl");
countSqb = CMinFeeCreator.INSTANCE.MakeBlackIncSQB(etcType, version, format, incDirLst.toString(),
(sqbQlFileDir + File.separator + pblackAllFileName).replace("\\", "/"),
sqbPathName.replace("\\", "/"), sqbZlibPathName.replace("\\", "/"), bytMd5);
log.info("[uuid:{}]调用动态库生成增量SQB文件完成,返回值为:{}", uuid, countSqb);
//log.info("[uuid:{}]调用动态库生成增量SQB文件完成,返回值为:{}", uuid, countSqb);
LogUtil.WriteLog_MDBlackCard("[uuid:" + uuid + "]-]调用动态库生成增量SQB文件完成,返回值为" + countSqb , "ZipToSqbServiceImpl");
}
} else {
log.info("[uuid:{}]该时间点{}已下发/或者该点不下发", uuid, protocolType);
//log.info("[uuid:{}]该时间点{}已下发/或者该点不下发", uuid, protocolType);
LogUtil.WriteLog_MDBlackCard("[uuid:" + uuid + "]-该时间点" + protocolType+"已下发/或者该点不下发" , "ZipToSqbServiceImpl");
//更新sqlite任务表的任务状态为3-不下发
DatabaseUtil.insertSqlite(sqliteDriverName, sqliteUrl,
Constant.NOT_DIS_UPDATE_BUSINESS_HANDLER + DateTimeUtil.getFormateString(new Date(), Constant.YYYY_MM_DD_HH_MM_SS) + Constant.WHERE_ID + businessTask.getId());
log.info("[uuid:{}]更新business_tasks_info任务表的下发状态为3-不下发id为:{}", uuid, businessTask.getId());
//log.info("[uuid:{}]更新business_tasks_info任务表的下发状态为3-不下发id为:{}", uuid, businessTask.getId());
LogUtil.WriteLog_MDBlackCard("[uuid:" + uuid + "]-更新business_tasks_info任务表的下发状态为3-不下发id为" + businessTask.getId() , "ZipToSqbServiceImpl");
}
}
log.info("[uuid:{}]调用动态库生成SQB返回值count为{}", uuid, countSqb);
//log.info("[uuid:{}]调用动态库生成SQB返回值count为{}", uuid, countSqb);
LogUtil.WriteLog_MDBlackCard("[uuid:" + uuid + "]-调用动态库生成SQB返回值count为" + countSqb , "ZipToSqbServiceImpl");
if (countSqb != 0) {
if (countSqb > 0) {
String formateStr = DateTimeUtil.getFormateString(date, Constant.YYYYMMDDHHMMSS);
log.info("[uuid:{}]开始生成SQB备份文件", uuid);
//log.info("[uuid:{}]开始生成SQB备份文件", uuid);
LogUtil.WriteLog_MDBlackCard("[uuid:" + uuid + "]-]开始生成SQB备份文件" , "ZipToSqbServiceImpl");
//sqb备份目录
FileUtil.fileCreat(NFSFilePath + PathUtil.sqbBackPath + dbfDir);
String newSqbBackUpName = formateStr + Constant.STR_ + bfSqbName + Constant.STR_ + formateString + Constant.STR_ + version + Constant.STR_SQB;
@ -605,7 +665,9 @@ public class ZipToSqbServiceImpl implements ZipToSqbService {
//获取生成SQB的zlib的文件名
String zlibFileNameMd5Sqb = FileUtil.zlibFileName(NFSFilePath + PathUtil.sqbPath + dbfDir, zlibSqbName.replace(Constant.STR_ZLIB, Constant.NULL_STRING));
log.info("[uuid:{}]获取zlib的文件名称为:{}", uuid, zlibFileNameMd5Sqb);
//log.info("[uuid:{}]获取zlib的文件名称为:{}", uuid, zlibFileNameMd5Sqb);
LogUtil.WriteLog_MDBlackCard("[uuid:" + uuid + "]-获取zlib的文件名称为" + zlibFileNameMd5Sqb, "ZipToSqbServiceImpl");
//更新CMS_NAME_TABLE_AUTO_JAVA表中的下发类型
UpdateWrapper<NameTableAuto> updateWrapper = new UpdateWrapper<>();
@ -621,10 +683,11 @@ public class ZipToSqbServiceImpl implements ZipToSqbService {
updateWrapper.set("BLACKINCFLAG", Integer.parseInt(protocolType));
break;
default:
log.error("[uuid:{}]-未找到相应的更新自动下发分中心相应状态方法", uuid);
//log.error("[uuid:{}]-未找到相应的更新自动下发分中心相应状态方法", uuid);
LogUtil.WriteLog_Error("[uuid:" + uuid + "]-未找到相应的更新自动下发分中心相应状态方法" , "ZipToSqbServiceImpl");
throw new PPException(MessageEnum..getCode(), MessageEnum..getMessage());
}
//入库新文件的信息表
intoNewFileInfo(protocolType, zlibFileNameMd5Sqb, dbfDir, version, basicType, dayBackPath, fileName);
//更新自动下发分中心相应状态
@ -632,9 +695,11 @@ public class ZipToSqbServiceImpl implements ZipToSqbService {
String nowString = DateTimeUtil.getFormateString(new Date(), Constant.YYYY_MM_DD_HH_MM_SS);
String sqlParam = Constant.UPDATE_BUSINESS_HANDLER + nowString + Constant.PUBLISH_TIME + nowString + Constant.WHERE_ID + businessTask.getId();
if (DatabaseUtil.insertSqlite(sqliteDriverName, sqliteUrl, sqlParam) > 0) {
log.info("[uuid:{}]更新business_tasks_info调用动态库任务列表成功id为:{}", uuid, businessTask.getId());
//log.info("[uuid:{}]更新business_tasks_info调用动态库任务列表成功id为:{}", uuid, businessTask.getId());
LogUtil.WriteLog_MDBlackCard("[uuid:" + uuid + "]-更新business_tasks_info调用动态库任务列表成功,id为" + businessTask.getId(), "ZipToSqbServiceImpl");
}
log.info("[uuid:{}]调用动态库生成SQB任务,下载协议类型:{},zip文件名为:{}执行成功", uuid, protocolType, fileName);
//log.info("[uuid:{}]调用动态库生成SQB任务,下载协议类型:{},zip文件名为:{}执行成功", uuid, protocolType, fileName);
LogUtil.WriteLog_MDBlackCard("[uuid:" + uuid + "]-调用动态库生成SQB任务,下载协议类型为" + protocolType+"zip文件名为"+fileName+"执行成功", "ZipToSqbServiceImpl");
} else {
//更新sqlite任务表的任务状态为2-报错(等待下次轮询)
DatabaseUtil.insertSqlite(sqliteDriverName, sqliteUrl, Constant.ERROR_UPDATE_BUSINESS_HANDLER + businessTask.getId());
@ -642,15 +707,17 @@ public class ZipToSqbServiceImpl implements ZipToSqbService {
+ ",SQB动态库返回值为" + countSqb);
}
} else {
log.info("[uuid:{}]该时间点{}已下发/或者该点不下发", uuid, protocolType);
LogUtil.WriteLog_MDBlackCard("[uuid:" + uuid + "]-该时间点" + protocolType+"已下发/或者该点不下发", "ZipToSqbServiceImpl");
//更新sqlite任务表的任务状态为3-不下发
DatabaseUtil.insertSqlite(sqliteDriverName, sqliteUrl,
Constant.NOT_DIS_UPDATE_BUSINESS_HANDLER + DateTimeUtil.getFormateString(new Date(), Constant.YYYY_MM_DD_HH_MM_SS) + Constant.WHERE_ID + businessTask.getId());
log.info("[uuid:{}]更新business_tasks_info任务表的下发状态为3-不下发id为:{}", uuid, businessTask.getId());
//log.info("[uuid:{}]更新business_tasks_info任务表的下发状态为3-不下发id为:{}", uuid, businessTask.getId());
LogUtil.WriteLog_MDBlackCard("[uuid:" + uuid + "]-更新business_tasks_info任务表的下发状态为3-不下发id为" + businessTask.getId(), "ZipToSqbServiceImpl");
}
} catch (Exception e) {
log.error("[uuid:{}]调用动态库生成SQB失败{}", uuid, e.getMessage(), e);
//log.error("[uuid:{}]调用动态库生成SQB失败{}", uuid, e.getMessage(), e);
LogUtil.WriteLog_Error("[uuid:" + uuid + "]调用动态库生成SQB失败" + e.getMessage(), "ZipToSqbServiceImpl");
//更新sqlite任务表的任务状态为2-报错
DatabaseUtil.insertSqlite(sqliteDriverName, sqliteUrl, Constant.ERROR_UPDATE_BUSINESS_HANDLER + businessTask.getId());
throw new PPException(MessageEnum.SQB.getCode(), MessageEnum.SQB.getMessage()
@ -675,9 +742,9 @@ public class ZipToSqbServiceImpl implements ZipToSqbService {
basicInfoDownLists.addAll(basicInfoDownList);
}
}
log.info("[uuid:{}]解析zip文件{}生成json转对象完成集合size为{}", uuid, fileName, basicInfoDownLists.size());
//log.info("[uuid:{}]解析zip文件{}生成json转对象完成集合size为{}", uuid, fileName, basicInfoDownLists.size());
String formateString = DateTimeUtil.getFormateString(new Date(), Constant.YYYY_MM_DD_HH_MM_SS);
log.info("[uuid:{}]BasicInfo入库写入BASICINFO_TABLE表中的同时也要生成SQBzip文件名称为{}", uuid, fileName);
//log.info("[uuid:{}]BasicInfo入库写入BASICINFO_TABLE表中的同时也要生成SQBzip文件名称为{}", uuid, fileName);
//入库操作
int count = 0;
if (!basicInfoDownLists.isEmpty()) {
@ -697,9 +764,13 @@ public class ZipToSqbServiceImpl implements ZipToSqbService {
}
}
}
log.info("[uuid:{}]插入数据库表BASICINFO_TABLE完成{}条数据", uuid, count);
//log.info("[uuid:{}]插入数据库表BASICINFO_TABLE完成{}条数据", uuid, count);
LogUtil.WriteLog_HRBlackCard("[uuid:" + uuid + "]-插入数据库表BASICINFO_TABLE完成" + count+"条数据", "ZipToSqbServiceImpl");
} catch (IOException e) {
log.error("[uuid:{}]{}入库BASICINFO_TABLE失败{}", uuid, fileName, e.getMessage(), e);
//log.error("[uuid:{}]{}入库BASICINFO_TABLE失败{}", uuid, fileName, e.getMessage(), e);
LogUtil.WriteLog_Error("[uuid:" + uuid + "]入库BASICINFO_TABLE失败" + e.getMessage(), "ZipToSqbServiceImpl");
throw new PPException(MessageEnum..getCode(), MessageEnum..getMessage());
}
@ -710,7 +781,7 @@ public class ZipToSqbServiceImpl implements ZipToSqbService {
*
*/
private List<String> UnzipFile(String uuid, String dbfDir, String fileName, int type) {
log.info("[uuid:{}]解析zip文件F开始{}", uuid, fileName);
//log.info("[uuid:{}]解析zip文件F开始{}", uuid, fileName);
//被解压的压缩文件
String fileZip = NFSFilePath + PathUtil.downFile + fileName;
String dateTime = fileName.substring(fileName.lastIndexOf(Constant.STR_) + 1).replace(Constant.STR_ZIP, Constant.NULL_STRING).substring(0, 8);
@ -730,9 +801,9 @@ public class ZipToSqbServiceImpl implements ZipToSqbService {
String destDir = NFSFilePath + PathUtil.unZip + dbfDir + File.separator;
FileUtil.fileCreat(destDir);
//解压之前先清空解压目标目录
log.info("[uuid:{}]清空解压目标目录,{}", uuid, destDir);
//log.info("[uuid:{}]清空解压目标目录,{}", uuid, destDir);
FileUtil.removeFiles(new File(destDir));
log.info("[uuid:{}]解析zip文件{}生成json转对象开始", uuid, fileName);
//log.info("[uuid:{}]解析zip文件{}生成json转对象开始", uuid, fileName);
//解压缩文件
UnzipUtil.dealUnZip(fileZip, destDir);
return FileUtil.getJsonPaths(destDir);
@ -807,7 +878,7 @@ public class ZipToSqbServiceImpl implements ZipToSqbService {
if (newFileInfo == null) {
newVersion =versionPre + 104;
} else {
log.info("[uuid:{}]获取上一版本号为{}", uuid, newFileInfo.getVersion());
//log.info("[uuid:{}]获取上一版本号为{}", uuid, newFileInfo.getVersion());
if (newFileInfo.getVersion().substring(0, 6).equals(versionPre)) {
newVersion = String.valueOf(Integer.parseInt(newFileInfo.getVersion()) + 1);
} else {

@ -0,0 +1,108 @@
package com.nm.gsgl.test;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.List;
/**
* @author: shuguang
* @date: 20240523 16:51
* @description:
*/
public class ContrastTest {
public static void main(String[] args) {
// 数据库连接信息
String db1Url = "jdbc:oracle:thin:@192.168.101.70:1521/XE";
String db2Url = "jdbc:oracle:thin:@10.15.100.5:1521/TORCL";
String username = "nmgmpayadmin";
String password = "nmgh90[]";
//String query = "SELECT * FROM CT_ETC_CLEAR_SUM WHERE PROCESSTIME>=to_date('2024-05-01 00:00:00','yyyy-mm-dd hh24:mi:ss') and PROCESSTIME<=to_date('2024-05-16 00:00:00','yyyy-mm-dd hh24:mi:ss') ORDER BY PROCESSTIME ";
//String query = "SELECT SERPROVINCEID,CLEARDATE,PROCESSTIME,PAYERAMOUNT,PAYERMESSAGECOUNT,RECEIVERAMOUNT FROM CT_OTHER_CLEAR_SUM_1 WHERE PROCESSTIME>=to_date('2024-05-01 00:00:00','yyyy-mm-dd hh24:mi:ss') and PROCESSTIME<=to_date('2024-05-16 00:00:00','yyyy-mm-dd hh24:mi:ss') ORDER BY PROCESSTIME ";
//String query = "SELECT count(*) FROM CT_GANTRYPASSID WHERE EXTIME>=to_date('2024-05-02 00:00:00','yyyy-mm-dd hh24:mi:ss') and EXTIME<=to_date('2024-05-03 00:00:00','yyyy-mm-dd hh24:mi:ss') ";
//String query = "SELECT ID,SPLITTIME,EXTIME FROM CT_OUT_PORT_ETC_NOPAY WHERE EXTIME>=to_date('2024-05-02 00:00:00','yyyy-mm-dd hh24:mi:ss') and EXTIME<=to_date('2024-05-03 00:00:00','yyyy-mm-dd hh24:mi:ss') ORDER BY EXTIME";
//String query = "SELECT ID,SPLITTIME,ENTIME FROM CT_OTHER_OUT_PORT WHERE ENTIME>=to_date('2024-05-02 00:00:00','yyyy-mm-dd hh24:mi:ss') and ENTIME<=to_date('2024-05-03 00:00:00','yyyy-mm-dd hh24:mi:ss') ORDER BY EXTIME";
//String query = "SELECT ID,SPLITTIME FROM CT_OTHER_GANTRYPASSID WHERE SPLITTIME>=to_date('2024-05-02 00:00:00','yyyy-mm-dd hh24:mi:ss') and SPLITTIME<=to_date('2024-05-03 00:00:00','yyyy-mm-dd hh24:mi:ss') ORDER BY ID " ;
//String query = "SELECT ID,SPLITTIME FROM CT_OUTOTHER_GANTRY WHERE SPLITTIME>=to_date('2024-05-02 00:00:00','yyyy-mm-dd hh24:mi:ss') and SPLITTIME<=to_date('2024-05-03 00:00:00','yyyy-mm-dd hh24:mi:ss') ORDER BY ID" ;
//String query = "SELECT ID,REFUNDID,PROCESSTIME FROM CT_SERVERREFUND_TABLE WHERE PROCESSTIME>=to_date('2024-05-02 00:00:00','yyyy-mm-dd hh24:mi:ss') and PROCESSTIME<=to_date('2024-05-03 00:00:00','yyyy-mm-dd hh24:mi:ss') ORDER BY PROCESSTIME" ;
//String query = "SELECT ID,RESTITUTIONID,PROCESSTIME FROM CT_SERVERLATERPAY_TABLE WHERE PROCESSTIME>=to_date('2024-05-02 00:00:00','yyyy-mm-dd hh24:mi:ss') and PROCESSTIME<=to_date('2024-05-03 00:00:00','yyyy-mm-dd hh24:mi:ss') ORDER BY PROCESSTIME" ;
//604数据不一致正式库无数据
//String query = "SELECT PASSID,VEHICLEID,RECEIVERID,PAYERID,FEE,RECEIVETIME FROM CT_GATHERDETAIL_TABLE WHERE RECEIVETIME>=to_date('2024-05-02 00:00:00','yyyy-mm-dd hh24:mi:ss') and RECEIVETIME<=to_date('2024-05-03 00:00:00','yyyy-mm-dd hh24:mi:ss') ORDER BY RECEIVETIME" ;
//605数据不一致 正式库无数据
//String query = "SELECT PASSID,VEHICLEID,ORIGIN,PAYERID,PAYFEE,PAYTIME FROM CT_PAYERDETAIL_TABLE WHERE PAYTIME>=to_date('2024-05-02 00:00:00','yyyy-mm-dd hh24:mi:ss') and PAYTIME<=to_date('2024-05-03 00:00:00','yyyy-mm-dd hh24:mi:ss') ORDER BY PAYTIME" ;
//804数据不一致 条数一致 CHECKID不一致
//String query = "SELECT CHECKID FROM CHECKRESULT_INFO_TABLE WHERE CHECKTIME>=to_date('2024-05-02 00:00:00','yyyy-mm-dd hh24:mi:ss') and CHECKTIME<=to_date('2024-05-03 00:00:00','yyyy-mm-dd hh24:mi:ss') ORDER BY CHECKTIME" ;
//803数据不一致 条数一致 CHECKID不一致
//String query = "SELECT CHECKID,VEHICLEID,CHECKTIME FROM CHECKRESULT_TABLE_NEW WHERE CHECKTIME>=to_date('2024-05-02 00:00:00','yyyy-mm-dd hh24:mi:ss') and CHECKTIME<=to_date('2024-05-03 00:00:00','yyyy-mm-dd hh24:mi:ss') ORDER BY CHECKTIME" ;
//String query = "SELECT ID,EXTIME FROM BSDZ_DISPUTE_PASSPROV WHERE EXTIME>=to_date('2024-05-02 00:00:00','yyyy-mm-dd hh24:mi:ss') and EXTIME<=to_date('2024-05-03 00:00:00','yyyy-mm-dd hh24:mi:ss') ORDER BY EXTIME" ;
//String query = "SELECT SPLITDATE,SERPROVINCEID FROM BSDZ_CLEAR_SUM WHERE SPLITDATE>=to_date('2024-05-02 00:00:00','yyyy-mm-dd hh24:mi:ss') and SPLITDATE<=to_date('2024-05-03 00:00:00','yyyy-mm-dd hh24:mi:ss') ORDER BY SPLITDATE" ;
//904数据不一致 条数不一致
//String query = "SELECT ID,PROVEXTIME FROM BSDZ_NOOUTPORT_S2_DOWN WHERE PROVEXTIME>=to_date('2024-05-02 00:00:00','yyyy-mm-dd hh24:mi:ss') and PROVEXTIME<=to_date('2024-05-03 00:00:00','yyyy-mm-dd hh24:mi:ss') ORDER BY PROVEXTIME" ;
//String query = "SELECT ID,SPLITDATE FROM BSDZ_NOSPLIT_S2_DOWN WHERE SPLITDATE>=to_date('2024-05-11 00:00:00','yyyy-mm-dd hh24:mi:ss') and SPLITDATE<=to_date('2024-05-13 00:00:00','yyyy-mm-dd hh24:mi:ss') ORDER BY SPLITDATE" ;
//String query = "SELECT ID,EXTIME FROM BSDZ_DISPUTE_EXPROV WHERE EXTIME>=to_date('2024-05-11 00:00:00','yyyy-mm-dd hh24:mi:ss') and EXTIME<=to_date('2024-05-12 00:00:00','yyyy-mm-dd hh24:mi:ss') ORDER BY EXTIME" ;
//922数据不一致 条数不一致
//String query = "SELECT ID,PROVEXTIME FROM BSDZ_NOOUTPORT_S3_DOWN WHERE PROVEXTIME>=to_date('2024-05-02 00:00:00','yyyy-mm-dd hh24:mi:ss') and PROVEXTIME<=to_date('2024-05-03 00:00:00','yyyy-mm-dd hh24:mi:ss') ORDER BY PROVEXTIME" ;
//923数据不一致 条数不一致
String query = "SELECT ID,PROVEXTIME FROM BSDZ_NOOUTPORT_RESULT WHERE PROVEXTIME>=to_date('2024-05-02 00:00:00','yyyy-mm-dd hh24:mi:ss') and PROVEXTIME<=to_date('2024-05-03 00:00:00','yyyy-mm-dd hh24:mi:ss') ORDER BY PROVEXTIME" ;
//String query = "SELECT ID,SPLITDATE FROM BSDZ_NOSPLIT_S3_DOWN WHERE SPLITDATE>=to_date('2024-05-02 00:00:00','yyyy-mm-dd hh24:mi:ss') and SPLITDATE<=to_date('2024-05-03 00:00:00','yyyy-mm-dd hh24:mi:ss') ORDER BY SPLITDATE" ;
//String query = "SELECT ID,SPLITDATE FROM BSDZ_NOSPLIT_RESULT WHERE SPLITDATE>=to_date('2024-05-08 00:00:00','yyyy-mm-dd hh24:mi:ss') and SPLITDATE<=to_date('2024-05-09 00:00:00','yyyy-mm-dd hh24:mi:ss') ORDER BY SPLITDATE" ;
// 连接两个数据库
try (Connection db1Connection = DriverManager.getConnection(db1Url, username, password);
Connection db2Connection = DriverManager.getConnection(db2Url, username, password)) {
// 创建Statement对象
Statement db1Statement = db1Connection.createStatement();
Statement db2Statement = db2Connection.createStatement();
// 执行查询
ResultSet db1ResultSet = db1Statement.executeQuery(query);
ResultSet db2ResultSet = db2Statement.executeQuery(query);
// 将查询结果转换为List
List<String> db1Data = new ArrayList<>();
List<String> db2Data = new ArrayList<>();
while (db1ResultSet.next()) {
String row = convertResultSetToString(db1ResultSet);
db1Data.add(row);
}
while (db2ResultSet.next()) {
String row = convertResultSetToString(db2ResultSet);
db2Data.add(row);
}
System.out.println("db1Data的size"+db1Data.size());
System.out.println("db2Data的size"+db2Data.size());
// 比较数据
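// Note: List.equals is order- and format-sensitive, so this comparison only works because both databases run the identical query with an ORDER BY clause and rows are rendered the same way by convertResultSetToString.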
if (db1Data.equals(db2Data)) {
System.out.println("两个数据库中的数据一致。");
} else {
System.out.println("两个数据库中的数据不一致。");
}
} catch (SQLException e) {
e.printStackTrace();
}
}
// 将ResultSet转换为String
private static String convertResultSetToString(ResultSet rs) throws SQLException {
StringBuilder sb = new StringBuilder();
int columnCount = rs.getMetaData().getColumnCount();
for (int i = 1; i <= columnCount; i++) {
sb.append(rs.getObject(i));
if (i < columnCount) {
sb.append(", ");
}
}
return sb.toString();
}
}

@ -1,101 +1,101 @@
package com.nm.gsgl.test;
import com.alibaba.fastjson2.JSON;
import com.alibaba.fastjson2.JSONObject;
import com.alibaba.fastjson2.JSONReader;
import com.alibaba.fastjson2.JSONWriter;
import com.alibaba.fastjson2.TypeReference;
import com.nm.gsgl.common.Constant;
import com.nm.gsgl.common.utils.FileUtil;
import com.nm.gsgl.entity.business.db.OutPortEtc;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.io.FileUtils;
import org.springframework.util.ResourceUtils;
import java.io.File;
import java.util.ArrayList;
import java.util.List;
/**
* @author: shuguang
* @date: 20230207 16:21
* @description:
*/
@Slf4j
public class Test01 {
public static void main(String[] args) throws Exception {
//String s = "shuguang123";
//String decrypt = PassWordUtils.encrypt(s);
//System.out.println(decrypt);
//String formateString = DateTimeUtil.getFormateString(new Date(), Constant.YYYYMMDDHHMMSS);
//System.out.println(formateString);
//System.out.println(formateString.substring(0, 5));
//System.out.println(formateString.substring(0, 6));
//
//System.out.println(PassWordUtils.decrypt("TnpOQHhrejBoR1hAgGh0PGgtcFZSdXYs"));
//String fileZip = "D:\\桌面\\ETCTS_EXITPTSD_RES_150201_20230309231802011.zip";
String destDir = "D:\\桌面\\test\\";
//UnzipUtil.dealUnZip(fileZip, destDir);
List<String> jsonPaths = FileUtil.getJsonPaths(destDir);
List<OutPortEtc> outPortEtcLists = new ArrayList<>();
if (jsonPaths.size() > 0) {
for (String jsonPath : jsonPaths) {
File file2 = ResourceUtils.getFile(jsonPath);
String json = FileUtils.readFileToString(file2, Constant.UTF_8);
System.out.println(json);
System.out.println("===========================================");
//List<OutPortEtc> outPortEtcList = JSON.parseObject(json, new TypeReference<List<OutPortEtc>>(){}, JSONReader.Feature.IgnoreSetNullValue);
List<OutPortEtc> outPortEtcList = JSON.parseObject(json, new TypeReference<List<OutPortEtc>>(){}, JSONReader.Feature.DuplicateKeyValueAsArray);
for (OutPortEtc outPortEtc : outPortEtcList) {
System.out.println(JSONObject.toJSONString(outPortEtc.getSplitProvince(), JSONWriter.Feature.WriteMapNullValue));
//String s = JSONObject.toJSONString(outPortEtc, JSONWriter.Feature.NullAsDefaultValue);
//System.out.println(s);
}
//JSONWriter.Feature.WriteMapNullValue,
System.out.println(outPortEtcList);
outPortEtcLists.addAll(outPortEtcList);
}
}
//System.out.println("-----------------------------------");
//System.out.println(outPortEtcLists);
////String fileName = jsonPaths.get(0).substring(jsonPaths.get(0).lastIndexOf("\\")+1);
////System.out.println(fileName);
//int count = 0;
for (OutPortEtc outPortEtc : outPortEtcLists) {
// //List<SplitProvince> splitProvince =outPortEtc.getSplitProvince();
// String s1 = JSON.toJSONString(outPortEtc.getSplitProvince(), JSONWriter.Feature.WriteMapNullValue);
// //String s1 = JSON.toJSONString(outPortEtc.getSplitProvince(), JSONWriter.Feature.WriteMapNullValue,JSONWriter.Feature.IgnoreNoneSerializable);
// String s1 = JSON.toJSONString(JSON.toJSONString(outPortEtc.getSplitProvince(), String.valueOf(SerializerFeature.WriteMapNullValue)), String.valueOf(SerializerFeature.WriteMapNullValue));
// System.out.println("*********************************");
// System.out.println(s1);
// System.out.println(outPortEtc.getSplitProvince());
// ////先查询ID是否有记录
// //QueryWrapper<OutPortEtc> queryWrapper = new QueryWrapper<>();
// //queryWrapper.eq("ID", outPortEtc.getId());
// //boolean exists = outPortEtcMapper.exists(queryWrapper);
// //if (!exists) {
// // outPortEtc.setExTime(outPortEtc.getExTime().replace(Constant.STR_T, Constant.STR_SPACE));
// // outPortEtc.setEnTime(outPortEtc.getEnTime().replace(Constant.STR_T, Constant.STR_SPACE));
// // outPortEtc.setAccountTime(outPortEtc.getAccountTime().replace(Constant.STR_T, Constant.STR_SPACE));
// // outPortEtc.setReceiveTime(outPortEtc.getReceiveTime().replace(Constant.STR_T, Constant.STR_SPACE));
// // outPortEtc.setBasicFilename(fileName);
// // outPortEtcMapper.insert(outPortEtc);
// // count++;
// //} else {
// // log.info("数据库表中存在ID={}的数据", outPortEtc.getId());
// //}
System.out.println("################################");
// System.out.println(outPortEtc);
System.out.println(JSONObject.toJSONString(outPortEtc.getSplitProvince(), JSONWriter.Feature.WriteMapNullValue));
}
}
}
//package com.nm.gsgl.test;
//
//import com.alibaba.fastjson2.JSON;
//import com.alibaba.fastjson2.JSONObject;
//import com.alibaba.fastjson2.JSONReader;
//import com.alibaba.fastjson2.JSONWriter;
//import com.alibaba.fastjson2.TypeReference;
//import com.nm.gsgl.common.Constant;
//import com.nm.gsgl.common.utils.FileUtil;
//import com.nm.gsgl.entity.business.db.OutPortEtc;
//import lombok.extern.slf4j.Slf4j;
//import org.apache.commons.io.FileUtils;
//import org.springframework.util.ResourceUtils;
//
//import java.io.File;
//import java.util.ArrayList;
//import java.util.List;
//
///**
// * @author: shuguang
// * @date: 2023年02月07日 16:21
// * @description:
// */
//@Slf4j
//public class Test01 {
//
// public static void main(String[] args) throws Exception {
// //String s = "shuguang123";
// //String decrypt = PassWordUtils.encrypt(s);
// //System.out.println(decrypt);
// //String formateString = DateTimeUtil.getFormateString(new Date(), Constant.YYYYMMDDHHMMSS);
// //System.out.println(formateString);
// //System.out.println(formateString.substring(0, 5));
// //System.out.println(formateString.substring(0, 6));
// //
// //System.out.println(PassWordUtils.decrypt("TnpOQHhrejBoR1hAgGh0PGgtcFZSdXYs"));
// //String fileZip = "D:\\桌面\\ETCTS_EXITPTSD_RES_150201_20230309231802011.zip";
// String destDir = "D:\\桌面\\test\\";
// //UnzipUtil.dealUnZip(fileZip, destDir);
// List<String> jsonPaths = FileUtil.getJsonPaths(destDir);
// List<OutPortEtc> outPortEtcLists = new ArrayList<>();
//
// if (jsonPaths.size() > 0) {
// for (String jsonPath : jsonPaths) {
// File file2 = ResourceUtils.getFile(jsonPath);
// String json = FileUtils.readFileToString(file2, Constant.UTF_8);
// System.out.println(json);
// System.out.println("===========================================");
//
// //List<OutPortEtc> outPortEtcList = JSON.parseObject(json, new TypeReference<List<OutPortEtc>>(){}, JSONReader.Feature.IgnoreSetNullValue);
// List<OutPortEtc> outPortEtcList = JSON.parseObject(json, new TypeReference<List<OutPortEtc>>(){}, JSONReader.Feature.DuplicateKeyValueAsArray);
// for (OutPortEtc outPortEtc : outPortEtcList) {
// System.out.println(JSONObject.toJSONString(outPortEtc.getSplitProvince(), JSONWriter.Feature.WriteMapNullValue));
// //String s = JSONObject.toJSONString(outPortEtc, JSONWriter.Feature.NullAsDefaultValue);
// //System.out.println(s);
// }
// //JSONWriter.Feature.WriteMapNullValue,
// System.out.println(outPortEtcList);
//
//
// outPortEtcLists.addAll(outPortEtcList);
// }
// }
//
//
// //System.out.println("-----------------------------------");
// //System.out.println(outPortEtcLists);
// ////String fileName = jsonPaths.get(0).substring(jsonPaths.get(0).lastIndexOf("\\")+1);
// ////System.out.println(fileName);
// //int count = 0;
// for (OutPortEtc outPortEtc : outPortEtcLists) {
// // //List<SplitProvince> splitProvince =outPortEtc.getSplitProvince();
// // String s1 = JSON.toJSONString(outPortEtc.getSplitProvince(), JSONWriter.Feature.WriteMapNullValue);
// // //String s1 = JSON.toJSONString(outPortEtc.getSplitProvince(), JSONWriter.Feature.WriteMapNullValue,JSONWriter.Feature.IgnoreNoneSerializable);
// // String s1 = JSON.toJSONString(JSON.toJSONString(outPortEtc.getSplitProvince(), String.valueOf(SerializerFeature.WriteMapNullValue)), String.valueOf(SerializerFeature.WriteMapNullValue));
// // System.out.println("*********************************");
// // System.out.println(s1);
// // System.out.println(outPortEtc.getSplitProvince());
// // ////先查询ID是否有记录
// // //QueryWrapper<OutPortEtc> queryWrapper = new QueryWrapper<>();
// // //queryWrapper.eq("ID", outPortEtc.getId());
// // //boolean exists = outPortEtcMapper.exists(queryWrapper);
// // //if (!exists) {
// // // outPortEtc.setExTime(outPortEtc.getExTime().replace(Constant.STR_T, Constant.STR_SPACE));
// // // outPortEtc.setEnTime(outPortEtc.getEnTime().replace(Constant.STR_T, Constant.STR_SPACE));
// // // outPortEtc.setAccountTime(outPortEtc.getAccountTime().replace(Constant.STR_T, Constant.STR_SPACE));
// // // outPortEtc.setReceiveTime(outPortEtc.getReceiveTime().replace(Constant.STR_T, Constant.STR_SPACE));
// // // outPortEtc.setBasicFilename(fileName);
// // // outPortEtcMapper.insert(outPortEtc);
// // // count++;
// // //} else {
// // // log.info("数据库表中存在ID={}的数据", outPortEtc.getId());
// // //}
// System.out.println("################################");
// // System.out.println(outPortEtc);
// System.out.println(JSONObject.toJSONString(outPortEtc.getSplitProvince(), JSONWriter.Feature.WriteMapNullValue));
// }
//
// }
//
//}

@ -1,70 +1,70 @@
package com.nm.gsgl.test;
import com.nm.gsgl.common.utils.DbfWriterAndReadUtil;
import com.nm.gsgl.common.utils.UuidUtil;
import lombok.extern.slf4j.Slf4j;
import java.io.BufferedWriter;
import java.io.FileNotFoundException;
import java.io.FileWriter;
import java.io.IOException;
import java.time.LocalDateTime;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import java.util.stream.Stream;
/**
* @author: shuguang
 * @date: 2023-07-27 16:02
* @description:
*/
@Slf4j
public class Test04 {
public static void main(String[] args) throws IOException {
////String s = Integer.toBinaryString(4);
        ////convert decimal to binary
//String s = Integer.toBinaryString(24);
//System.out.println(s);
String uuid = UuidUtil.getUuid();
log.info("[uuid:{}]当前时间={}", uuid, LocalDateTime.now());
long start = System.currentTimeMillis();
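        // read both incremental card-blacklist DBF exports and report their record counts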
String dbfPath= "D:\\桌面\\增量\\20230727110001_CARDBLACKLISTINC.DBF";
List<Map<String, Object>> mapList = DbfWriterAndReadUtil.readDbf(dbfPath, "UTF-8");
System.out.println("=========================");
System.out.println(mapList.size());
String dbfPath2= "D:\\桌面\\增量\\20230727110647_TB_CARDBLACKINC_20230727110002_230727114.DBF";
List<Map<String, Object>> mapList2 = DbfWriterAndReadUtil.readDbf(dbfPath2, "UTF-8");
System.out.println("=========================");
System.out.println(mapList2.size());
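        // merge the two record lists and drop exact duplicate rows via distinct()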
List<Map<String, Object>> mapList3 = Stream.of(mapList,mapList2)
.flatMap(Collection::stream)
.distinct()
.collect(Collectors.toList());
System.out.println("=========================");
System.out.println(mapList3.size());
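        // write the merged records to a plain-text file, one tab-separated record per line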
        try (BufferedWriter writer = new BufferedWriter(new FileWriter("D:\\桌面\\增量\\test.txt"))) {
            for (Map<String, Object> maps : mapList3) {
                StringBuilder t = new StringBuilder();
                for (String key : maps.keySet()) {
                    t.append(maps.get(key)).append("\t");
                }
                // drop the trailing tab so each record becomes one tab-separated line
                writer.write(t.length() > 0 ? t.substring(0, t.length() - 1) : "");
                writer.newLine();
            }
            writer.flush();
        } catch (IOException e) {
            e.printStackTrace();
        }
long end = System.currentTimeMillis();
log.info("[uuid:{}]执行完成,耗时:{}毫秒", uuid, end - start);
}
}
//package com.nm.gsgl.test;
//
//import com.nm.gsgl.common.utils.DbfWriterAndReadUtil;
//import com.nm.gsgl.common.utils.UuidUtil;
//import lombok.extern.slf4j.Slf4j;
//
//import java.io.BufferedWriter;
//import java.io.FileNotFoundException;
//import java.io.FileWriter;
//import java.io.IOException;
//import java.time.LocalDateTime;
//import java.util.Collection;
//import java.util.List;
//import java.util.Map;
//import java.util.stream.Collectors;
//import java.util.stream.Stream;
//
///**
// * @author: shuguang
// * @date: 2023-07-27 16:02
// * @description:
// */
//@Slf4j
//public class Test04 {
// public static void main(String[] args) throws IOException {
// ////String s = Integer.toBinaryString(4);
//        ////convert decimal to binary
// //String s = Integer.toBinaryString(24);
// //System.out.println(s);
// String uuid = UuidUtil.getUuid();
// log.info("[uuid:{}]当前时间={}", uuid, LocalDateTime.now());
// long start = System.currentTimeMillis();
// String dbfPath= "D:\\桌面\\增量\\20230727110001_CARDBLACKLISTINC.DBF";
// List<Map<String, Object>> mapList = DbfWriterAndReadUtil.readDbf(dbfPath, "UTF-8");
// System.out.println("=========================");
// System.out.println(mapList.size());
// String dbfPath2= "D:\\桌面\\增量\\20230727110647_TB_CARDBLACKINC_20230727110002_230727114.DBF";
// List<Map<String, Object>> mapList2 = DbfWriterAndReadUtil.readDbf(dbfPath2, "UTF-8");
// System.out.println("=========================");
// System.out.println(mapList2.size());
// List<Map<String, Object>> mapList3 = Stream.of(mapList,mapList2)
// .flatMap(Collection::stream)
// .distinct()
// .collect(Collectors.toList());
// System.out.println("=========================");
// System.out.println(mapList3.size());
// FileWriter outFile = null;
// BufferedWriter writer = null;
//
// try {
// outFile = new FileWriter("D:\\桌面\\增量\\test.txt");
// writer= new BufferedWriter(outFile);
// for (Map<String, Object> maps : mapList3) {
// String t = "";
// for (String key : maps.keySet()) {
// t += maps.get(key) + "\t\n";
// }
// t = t.substring(0, t.length() - 1);
// writer.write(t);
// writer.newLine();
// }
// writer.flush();
// writer.close();
// } catch (IOException e) {
// e.printStackTrace();
// }
// long end = System.currentTimeMillis();
// log.info("[uuid:{}]执行完成,耗时:{}毫秒", uuid, end - start);
// }
//}

@ -1,118 +1,118 @@
package com.nm.gsgl.test;
import com.alibaba.fastjson2.JSON;
import com.alibaba.fastjson2.TypeReference;
import com.nm.gsgl.common.Constant;
import com.nm.gsgl.common.utils.DatabaseUtil;
import com.nm.gsgl.common.utils.DateTimeUtil;
import com.nm.gsgl.common.utils.FileUtil;
import com.nm.gsgl.common.utils.UnzipUtil;
import com.nm.gsgl.common.utils.UuidUtil;
import com.nm.gsgl.entity.intodb.mysql.CardBlackStatus;
import com.trkf.PasswordEncryption.PassWordUtils;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang3.StringUtils;
import org.springframework.util.ResourceUtils;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Map;
/**
* @author: shuguang
 * @date: 2023-02-06 9:33
* @description:
*/
@Slf4j
public class TestDemo {
public static void main(String[] args) throws IOException {
String uuid = UuidUtil.getUuid();
long start = System.currentTimeMillis();
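        // connection settings for the MySQL database this test writes to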
String IntoDbConnectionStringMySQL="jdbc:mysql://10.15.110.122:3306/paramstatus?useUnicode=true&characterEncoding=utf-8&useSSL=true&serverTimezone=GMT";
String IntoDbMySQLClassName="com.mysql.cj.jdbc.Driver";
String IntoDbMySQLName="root";
String IntoDbMySQLPass= PassWordUtils.decrypt("bXoxXEJrejBoQUhjgGl0WmJQcFU6dUc2");
String sqlParam = "insert into etc_cardblack_status(CARDID,CREATIONTIME,INSERTTIME,DT,ISSUERID,STATUS,TYPE) values(?,?,?,?,?,?,?)";
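        // local test zip containing the card-blacklist JSON files to import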
String fileName = "D:\\桌面\\BASIC_CARDBLACKLISTDOWN_RES_15_20230205003904592.zip";
        //target directory for extraction
String destDir = "D:\\桌面\\unzip\\";
List<String> jsonPathList = UnzipFile(uuid, fileName,destDir);
//List<Map<String, Object>> mapLists = jsonToMapList(jsonPathList);
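        // DT column value: today's date, stamped on every inserted row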
Date date = new Date();
String dt = DateTimeUtil.getFormateString(date, Constant.YYYY_MM_DD);
if (jsonPathList.size() > 0) {
int count = 0;
for (String jsonPath : jsonPathList) {
File file2 = ResourceUtils.getFile(jsonPath);
String json = FileUtils.readFileToString(file2, Constant.UTF_8);
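                // each JSON file holds an array of card-blacklist records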
List<Map<String, Object>> mapLists = JSON.parseObject(json, new TypeReference<List<Map<String, Object>>>() {
});
List<CardBlackStatus> lists = new ArrayList<>();
if (mapLists.size() > 0) {
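                    // map each JSON record onto a CardBlackStatus entity; blank fields become null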
for (Map<String, Object> mapList : mapLists) {
CardBlackStatus op = new CardBlackStatus();
op.setCardId(StringUtils.isNotBlank((CharSequence) mapList.get("cardId")) ? (String) mapList.get("cardId") : null);
op.setCreationTime(StringUtils.isNotBlank((CharSequence) mapList.get("creationTime")) ? ((String) mapList.get("creationTime")).replace(Constant.STR_T, Constant.STR_SPACE) : null);
op.setInsertTime(StringUtils.isNotBlank((CharSequence) mapList.get("insertTime")) ? ((String) mapList.get("insertTime")).replace(Constant.STR_T, Constant.STR_SPACE) : null);
op.setDT(dt);
op.setIssuerId(StringUtils.isNotBlank((CharSequence) mapList.get("issuerId")) ? (String) mapList.get("issuerId") : null);
op.setStatus((mapList.get("status") != null) ? Integer.valueOf(String.valueOf(mapList.get("status"))) : null);
op.setType((mapList.get("type") != null) ? Integer.valueOf(String.valueOf(mapList.get("type"))) : null);
lists.add(op);
}
}
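                // bulk-insert this file's records, then delete the processed JSON file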
log.info("[uuid:{}]lists中{}条数据", uuid, lists.size());
DatabaseUtil.insertDb(IntoDbMySQLClassName, IntoDbConnectionStringMySQL, IntoDbMySQLName, IntoDbMySQLPass, lists,sqlParam);
FileUtil.fileDelete(jsonPath);
count++;
log.info("[uuid:{}]计数{}", uuid,count);
}
}
long end = System.currentTimeMillis();
log.info("[uuid:{}]插入数据库完成,耗时{}ms",uuid, (end - start ));
}
    /**
     * Unzip the file into the target directory and return the paths of the extracted JSON files
     */
private static List<String> UnzipFile(String uuid, String fileName, String destDir) {
log.info("[uuid:{}]解析zip文件插入数据库开始{}", uuid, fileName);
FileUtil.fileCreat(destDir);
        //clear the unzip target directory before extracting
        log.info("[uuid:{}]clearing unzip target directory {}", uuid, destDir);
FileUtil.removeFiles(new File(destDir));
log.info("[uuid:{}]解析zip文件{}生成json转对象开始", uuid, fileName);
//解压缩文件
UnzipUtil.dealUnZip(fileName, destDir);
return FileUtil.getJsonPaths(destDir);
}
    /**
     * Convert the JSON files into a list of maps
     *
     * @param jsonPathList list of JSON file paths
     * @return java.util.List<java.util.Map<java.lang.String, java.lang.Object>>
     * @author shuguang
     * @date 2023-03-27 8:38
     */
private static List<Map<String, Object>> jsonToMapList(List<String> jsonPathList) throws IOException {
List<Map<String, Object>> mapLists = new ArrayList<>();
if (jsonPathList.size() > 0) {
for (String jsonPath : jsonPathList) {
File file2 = ResourceUtils.getFile(jsonPath);
String json = FileUtils.readFileToString(file2, Constant.UTF_8);
List<Map<String, Object>> mapList = JSON.parseObject(json, new TypeReference<List<Map<String, Object>>>() {
});
mapLists.addAll(mapList);
}
}
return mapLists;
}
}
//package com.nm.gsgl.test;
//
//import com.alibaba.fastjson2.JSON;
//import com.alibaba.fastjson2.TypeReference;
//import com.nm.gsgl.common.Constant;
//import com.nm.gsgl.common.utils.DatabaseUtil;
//import com.nm.gsgl.common.utils.DateTimeUtil;
//import com.nm.gsgl.common.utils.FileUtil;
//import com.nm.gsgl.common.utils.UnzipUtil;
//import com.nm.gsgl.common.utils.UuidUtil;
//import com.nm.gsgl.entity.intodb.mysql.CardBlackStatus;
//import com.trkf.PasswordEncryption.PassWordUtils;
//import lombok.extern.slf4j.Slf4j;
//import org.apache.commons.io.FileUtils;
//import org.apache.commons.lang3.StringUtils;
//import org.springframework.util.ResourceUtils;
//
//import java.io.File;
//import java.io.IOException;
//import java.util.ArrayList;
//import java.util.Date;
//import java.util.List;
//import java.util.Map;
//
///**
// * @author: shuguang
// * @date: 2023-02-06 9:33
// * @description:
// */
//@Slf4j
//public class TestDemo {
// public static void main(String[] args) throws IOException {
// String uuid = UuidUtil.getUuid();
// long start = System.currentTimeMillis();
// String IntoDbConnectionStringMySQL="jdbc:mysql://10.15.110.122:3306/paramstatus?useUnicode=true&characterEncoding=utf-8&useSSL=true&serverTimezone=GMT";
// String IntoDbMySQLClassName="com.mysql.cj.jdbc.Driver";
// String IntoDbMySQLName="root";
// String IntoDbMySQLPass= PassWordUtils.decrypt("bXoxXEJrejBoQUhjgGl0WmJQcFU6dUc2");
// String sqlParam = "insert into etc_cardblack_status(CARDID,CREATIONTIME,INSERTTIME,DT,ISSUERID,STATUS,TYPE) values(?,?,?,?,?,?,?)";
//
// String fileName = "D:\\桌面\\BASIC_CARDBLACKLISTDOWN_RES_15_20230205003904592.zip";
//        //target directory for extraction
// String destDir = "D:\\桌面\\unzip\\";
// List<String> jsonPathList = UnzipFile(uuid, fileName,destDir);
// //List<Map<String, Object>> mapLists = jsonToMapList(jsonPathList);
// Date date = new Date();
// String dt = DateTimeUtil.getFormateString(date, Constant.YYYY_MM_DD);
//
// if (jsonPathList.size() > 0) {
// int count = 0;
// for (String jsonPath : jsonPathList) {
// File file2 = ResourceUtils.getFile(jsonPath);
// String json = FileUtils.readFileToString(file2, Constant.UTF_8);
// List<Map<String, Object>> mapLists = JSON.parseObject(json, new TypeReference<List<Map<String, Object>>>() {
// });
// List<CardBlackStatus> lists = new ArrayList<>();
// if (mapLists.size() > 0) {
// for (Map<String, Object> mapList : mapLists) {
// CardBlackStatus op = new CardBlackStatus();
// op.setCardId(StringUtils.isNotBlank((CharSequence) mapList.get("cardId")) ? (String) mapList.get("cardId") : null);
// op.setCreationTime(StringUtils.isNotBlank((CharSequence) mapList.get("creationTime")) ? ((String) mapList.get("creationTime")).replace(Constant.STR_T, Constant.STR_SPACE) : null);
// op.setInsertTime(StringUtils.isNotBlank((CharSequence) mapList.get("insertTime")) ? ((String) mapList.get("insertTime")).replace(Constant.STR_T, Constant.STR_SPACE) : null);
// op.setDT(dt);
// op.setIssuerId(StringUtils.isNotBlank((CharSequence) mapList.get("issuerId")) ? (String) mapList.get("issuerId") : null);
// op.setStatus((mapList.get("status") != null) ? Integer.valueOf(String.valueOf(mapList.get("status"))) : null);
// op.setType((mapList.get("type") != null) ? Integer.valueOf(String.valueOf(mapList.get("type"))) : null);
// lists.add(op);
// }
// }
// log.info("[uuid:{}]lists中{}条数据", uuid, lists.size());
// DatabaseUtil.insertDb(IntoDbMySQLClassName, IntoDbConnectionStringMySQL, IntoDbMySQLName, IntoDbMySQLPass, lists,sqlParam);
// FileUtil.fileDelete(jsonPath);
// count++;
// log.info("[uuid:{}]计数{}", uuid,count);
// }
// }
// long end = System.currentTimeMillis();
// log.info("[uuid:{}]插入数据库完成,耗时{}ms",uuid, (end - start ));
//
// }
//
//
// /**
// * Unzip the file
// */
// private static List<String> UnzipFile(String uuid, String fileName, String destDir) {
// log.info("[uuid:{}]解析zip文件插入数据库开始{}", uuid, fileName);
// FileUtil.fileCreat(destDir);
//        //clear the unzip target directory before extracting
//        log.info("[uuid:{}]clearing unzip target directory {}", uuid, destDir);
// FileUtil.removeFiles(new File(destDir));
// log.info("[uuid:{}]解析zip文件{}生成json转对象开始", uuid, fileName);
// //解压缩文件
// UnzipUtil.dealUnZip(fileName, destDir);
// return FileUtil.getJsonPaths(destDir);
// }
// /**
// * Convert the JSON files into a list of maps
// *
// * @param jsonPathList list of JSON file paths
// * @return java.util.List<java.util.Map < java.lang.String, java.lang.Object>>
// * @author shuguang
// * @date 2023-03-27 8:38
// */
// private static List<Map<String, Object>> jsonToMapList(List<String> jsonPathList) throws IOException {
// List<Map<String, Object>> mapLists = new ArrayList<>();
// if (jsonPathList.size() > 0) {
// for (String jsonPath : jsonPathList) {
// File file2 = ResourceUtils.getFile(jsonPath);
// String json = FileUtils.readFileToString(file2, Constant.UTF_8);
// List<Map<String, Object>> mapList = JSON.parseObject(json, new TypeReference<List<Map<String, Object>>>() {
// });
// mapLists.addAll(mapList);
// }
// }
// return mapLists;
// }
//}

@ -1,301 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<!-- Log levels from low to high: TRACE < DEBUG < INFO < WARN < ERROR < FATAL. If the level is set to WARN, nothing below WARN is output. -->
<!-- scan: when true, the configuration file is reloaded if it changes; the default is true. -->
<!-- scanPeriod: interval for checking whether the configuration file has changed; the unit defaults to milliseconds if none is given. Only effective when scan is true. The default interval is 1 minute. -->
<!-- debug: when true, logback prints its internal status messages so its runtime state can be watched; the default is false. -->
<configuration scan="false" scanPeriod="10 seconds" debug="true">
<springProperty scope="context" name="logLevel" source="spring.logback.level"/>
<springProperty scope="context" name="APP_NAME" source="spring.logback.appName"/>
<springProperty scope="context" name="myMaxHistory" source="spring.logback.myMaxHistory"/>
<springProperty scope="context" name="myFileSize" source="spring.logback.myFileSize"/>
<springProperty scope="context" name="myTotalSizeCap" source="spring.logback.myTotalSizeCap"/>
<springProperty scope="context" name="logPath" source="spring.logback.logPath"/>
<contextName>${APP_NAME}</contextName>
    <!-- Colored logs -->
    <!-- Converter classes that colored output depends on -->
<conversionRule conversionWord="clr" converterClass="org.springframework.boot.logging.logback.ColorConverter"/>
<conversionRule conversionWord="wex"
converterClass="org.springframework.boot.logging.logback.WhitespaceThrowableProxyConverter"/>
<conversionRule conversionWord="wEx"
converterClass="org.springframework.boot.logging.logback.ExtendedWhitespaceThrowableProxyConverter"/>
    <!--Colored log output pattern-->
<property name="CONSOLE_LOG_PATTERN"
value="%clr(%d{yyyy-MM-dd HH:mm:ss.SSS}){faint} %clr(%level){blue} %clr(${PID}){magenta} %clr([%thread]){orange} %clr(%logger){cyan} %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}"/>
    <!--Plain (non-colored) log output pattern-->
<property name="PATTERN"
value="%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level 服务名称:${APP_NAME} %logger{36} - %msg%n"/>
    <!--Dev log file path: a logs directory alongside src; missing parent directories are created automatically-->
<property name="DEV_FILE_PATH" value="${logPath}/${APP_NAME}/"/>
    <!-- Production log file path -->
<property name="PRO_FILE_PATH" value="${logPath}/${APP_NAME}/"/>
    <!-- Console output -->
<appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
<encoder>
<!--<pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%level] [%thread] [%class:%line] %logger{50} - %m %n</pattern>-->
            <!--Output format: %d date, %thread thread name, %-5level level padded to 5 characters, %msg log message, %n newline-->
<pattern>${PATTERN}</pattern>
<!-- <charset>UTF-8</charset>-->
</encoder>
<filter class="ch.qos.logback.classic.filter.ThresholdFilter">
<level>INFO</level>
</filter>
</appender>
    <!-- Roll the output log file daily -->
<appender name="fileAppender" class="ch.qos.logback.core.rolling.RollingFileAppender">
<encoder>
            <!--Output format: %d date, %thread thread, %-5level level padded to 5 characters, %logger{36} fully qualified class name limited to 36 characters, %msg log message, %n newline-->
<pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n</pattern>
</encoder>
        <!--Rolling policy: roll by time and size-->
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
            <!-- Rollover daily; archived file name pattern -->
<fileNamePattern>${DEV_FILE_PATH}/output-%d{yyyy-MM-dd}.%i.log</fileNamePattern>
<maxHistory>${myMaxHistory}</maxHistory>
<maxFileSize>${myFileSize}</maxFileSize>
<totalSizeCap>${myTotalSizeCap}</totalSizeCap>
</rollingPolicy>
</appender>
<appender name="INFO" class="ch.qos.logback.core.rolling.RollingFileAppender">
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<fileNamePattern>${DEV_FILE_PATH}/log_info_%d{yyyy-MM-dd_HH}.%i.txt</fileNamePattern>
            <!-- When the total size exceeds 2 GB, the oldest files within the 30-day window are deleted first; files outside the 30-day window are not removed. -->
<maxHistory>${myMaxHistory}</maxHistory>
<maxFileSize>${myFileSize}</maxFileSize>
<totalSizeCap>${myTotalSizeCap}</totalSizeCap>
</rollingPolicy>
<append>true</append>
<encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
<pattern>${PATTERN}</pattern>
<charset>utf-8</charset>
</encoder>
<filter class="ch.qos.logback.classic.filter.LevelFilter">
<level>INFO</level>
<onMatch>ACCEPT</onMatch>
<onMismatch>DENY</onMismatch>
</filter>
</appender>
<appender name="ERROR" class="ch.qos.logback.core.rolling.RollingFileAppender">
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<fileNamePattern>${DEV_FILE_PATH}/log_error_%d{yyyy-MM-dd_HH}.%i.txt</fileNamePattern>
            <!-- When the total size exceeds 1 GB, the oldest files within the 10-day window are deleted first; files outside the 10-day window are not removed. -->
<maxHistory>${myMaxHistory}</maxHistory>
<maxFileSize>${myFileSize}</maxFileSize>
<totalSizeCap>${myTotalSizeCap}</totalSizeCap>
</rollingPolicy>
<append>true</append>
<encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
<pattern>${PATTERN}</pattern>
<charset>utf-8</charset>
</encoder>
<filter class="ch.qos.logback.classic.filter.LevelFilter">
<level>ERROR</level>
<onMatch>ACCEPT</onMatch>
<onMismatch>DENY</onMismatch>
</filter>
</appender>
    <!-- Blacklist log: DBF and SQB file generation -->
<appender name="BlackCard-Log"
class="ch.qos.logback.core.rolling.RollingFileAppender">
<!-- <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">-->
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<FileNamePattern>${DEV_FILE_PATH}BlackCard_%d{yyyy-MM-dd_HH}.%i.txt</FileNamePattern>
            <!-- When the total size exceeds 1 GB, the oldest files within the 10-day window are deleted first; files outside the 10-day window are not removed. -->
<maxHistory>${myMaxHistory}</maxHistory>
<maxFileSize>${myFileSize}</maxFileSize>
<totalSizeCap>${myTotalSizeCap}</totalSizeCap>
</rollingPolicy>
<append>true</append>
<encoder>
<pattern>${PATTERN}</pattern>
<charset>utf-8</charset>
</encoder>
<filter class="ch.qos.logback.classic.filter.LevelFilter">
<level>INFO</level>
<onMatch>ACCEPT</onMatch>
<onMismatch>DENY</onMismatch>
</filter>
</appender>
<logger name="BlackCard" additivity="false">
<appender-ref ref="BlackCard-Log"/>
</logger>
    <!--Asynchronous output-->
    <appender name="BlackCard_ASYNC_LOG" class="ch.qos.logback.classic.AsyncAppender">
        <!-- Do not lose logs: by default, once the queue is 80% full, TRACE, DEBUG and INFO events are discarded -->
        <discardingThreshold>0</discardingThreshold>
        <!-- Change the default queue depth; this value affects performance. Default is 256 -->
        <queueSize>2048</queueSize>
        <includeCallerData>true</includeCallerData>
        <!-- Attach the downstream appender; at most one can be attached -->
        <appender-ref ref="BlackCard-Log"/>
    </appender>
    <!-- Log for database-import file processing -->
<appender name="InsertDB-Log"
class="ch.qos.logback.core.rolling.RollingFileAppender">
<!-- <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">-->
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<FileNamePattern>${DEV_FILE_PATH}InsertDB_%d{yyyy-MM-dd_HH}.%i.txt</FileNamePattern>
            <!-- When the total size exceeds 1 GB, the oldest files within the 10-day window are deleted first; files outside the 10-day window are not removed. -->
<maxHistory>${myMaxHistory}</maxHistory>
<maxFileSize>${myFileSize}</maxFileSize>
<totalSizeCap>${myTotalSizeCap}</totalSizeCap>
</rollingPolicy>
<append>true</append>
<encoder>
<pattern>${PATTERN}</pattern>
<charset>utf-8</charset>
</encoder>
<filter class="ch.qos.logback.classic.filter.LevelFilter">
<level>INFO</level>
<onMatch>ACCEPT</onMatch>
<onMismatch>DENY</onMismatch>
</filter>
</appender>
<logger name="InsertDB" additivity="false">
<appender-ref ref="InsertDB-Log"/>
</logger>
    <!--Asynchronous output-->
    <appender name="InsertDB_ASYNC_LOG" class="ch.qos.logback.classic.AsyncAppender">
        <!-- Do not lose logs: by default, once the queue is 80% full, TRACE, DEBUG and INFO events are discarded -->
        <discardingThreshold>0</discardingThreshold>
        <!-- Change the default queue depth; this value affects performance. Default is 256 -->
        <queueSize>2048</queueSize>
        <includeCallerData>true</includeCallerData>
        <!-- Attach the downstream appender; at most one can be attached -->
        <appender-ref ref="InsertDB-Log"/>
    </appender>
    <!-- Log for minimum-fee file processing -->
<appender name="MinFee-Log"
class="ch.qos.logback.core.rolling.RollingFileAppender">
<!-- <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">-->
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<FileNamePattern>${DEV_FILE_PATH}MinFee_%d{yyyy-MM-dd_HH}.%i.txt</FileNamePattern>
            <!-- When the total size exceeds 1 GB, the oldest files within the 10-day window are deleted first; files outside the 10-day window are not removed. -->
<maxHistory>${myMaxHistory}</maxHistory>
<maxFileSize>${myFileSize}</maxFileSize>
<totalSizeCap>${myTotalSizeCap}</totalSizeCap>
</rollingPolicy>
<append>true</append>
<encoder>
<pattern>${PATTERN}</pattern>
<charset>utf-8</charset>
</encoder>
<filter class="ch.qos.logback.classic.filter.LevelFilter">
<level>INFO</level>
<onMatch>ACCEPT</onMatch>
<onMismatch>DENY</onMismatch>
</filter>
</appender>
<logger name="MinFee" additivity="false">
<appender-ref ref="MinFee-Log"/>
</logger>
    <!--Asynchronous output-->
    <appender name="MinFee_ASYNC_LOG" class="ch.qos.logback.classic.AsyncAppender">
        <!-- Do not lose logs: by default, once the queue is 80% full, TRACE, DEBUG and INFO events are discarded -->
        <discardingThreshold>0</discardingThreshold>
        <!-- Change the default queue depth; this value affects performance. Default is 256 -->
        <queueSize>2048</queueSize>
        <includeCallerData>true</includeCallerData>
        <!-- Attach the downstream appender; at most one can be attached -->
        <appender-ref ref="MinFee-Log"/>
    </appender>
    <!-- Log for calls to the automatic distribution interface -->
<appender name="CallDisInterface-Log"
class="ch.qos.logback.core.rolling.RollingFileAppender">
<!-- <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">-->
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<FileNamePattern>${DEV_FILE_PATH}CallDisInterface_%d{yyyy-MM-dd_HH}.%i.txt</FileNamePattern>
            <!-- When the total size exceeds 1 GB, the oldest files within the 10-day window are deleted first; files outside the 10-day window are not removed. -->
<maxHistory>${myMaxHistory}</maxHistory>
<maxFileSize>${myFileSize}</maxFileSize>
<totalSizeCap>${myTotalSizeCap}</totalSizeCap>
</rollingPolicy>
<append>true</append>
<encoder>
<pattern>${PATTERN}</pattern>
<charset>utf-8</charset>
</encoder>
<filter class="ch.qos.logback.classic.filter.LevelFilter">
<level>INFO</level>
<onMatch>ACCEPT</onMatch>
<onMismatch>DENY</onMismatch>
</filter>
</appender>
<logger name="CallDisInterface" additivity="false">
<appender-ref ref="CallDisInterface-Log"/>
</logger>
    <!--Asynchronous output-->
    <appender name="CallDisInterface_ASYNC_LOG" class="ch.qos.logback.classic.AsyncAppender">
        <!-- Do not lose logs: by default, once the queue is 80% full, TRACE, DEBUG and INFO events are discarded -->
        <discardingThreshold>0</discardingThreshold>
        <!-- Change the default queue depth; this value affects performance. Default is 256 -->
        <queueSize>2048</queueSize>
        <includeCallerData>true</includeCallerData>
        <!-- Attach the downstream appender; at most one can be attached -->
        <appender-ref ref="CallDisInterface-Log"/>
    </appender>
    <!--Asynchronous output-->
    <appender name="ERROR_ASYNC_LOG" class="ch.qos.logback.classic.AsyncAppender">
        <!-- Do not lose logs: by default, once the queue is 80% full, TRACE, DEBUG and INFO events are discarded -->
        <discardingThreshold>0</discardingThreshold>
        <!-- Change the default queue depth; this value affects performance. Default is 256 -->
        <queueSize>1024</queueSize>
        <includeCallerData>true</includeCallerData>
        <!-- Attach the downstream appender; at most one can be attached -->
        <appender-ref ref="ERROR"/>
    </appender>
    <!--Asynchronous output-->
    <appender name="INFO_ASYNC_LOG" class="ch.qos.logback.classic.AsyncAppender">
        <!-- Do not lose logs: by default, once the queue is 80% full, TRACE, DEBUG and INFO events are discarded -->
        <discardingThreshold>0</discardingThreshold>
        <!-- Change the default queue depth; this value affects performance. Default is 256 -->
        <queueSize>2048</queueSize>
        <includeCallerData>true</includeCallerData>
        <!-- Attach the downstream appender; at most one can be attached -->
        <appender-ref ref="INFO"/>
    </appender>
<root level="${logLevel}">
<appender-ref ref="STDOUT"/>
<appender-ref ref="fileAppender"/>
<appender-ref ref="INFO_ASYNC_LOG"/>
<appender-ref ref="ERROR_ASYNC_LOG"/>
<appender-ref ref="BlackCard_ASYNC_LOG"/>
<appender-ref ref="InsertDB_ASYNC_LOG"/>
<appender-ref ref="MinFee_ASYNC_LOG"/>
<appender-ref ref="CallDisInterface_ASYNC_LOG"/>
</root>
</configuration>