Background
業務需求:提供數據導出功能。數據爲傳感器監測數據,存儲在MySQL。需要用戶傳入傳感器的IDs,起始時間和結束時間,由於數據量較大,所以這裏通過定時任務實現,然後把每個傳感器數據寫一個csv文件,最終把所有的文件壓縮成一個zip,再把壓縮包的存儲路徑寫入數據庫,用戶請求下載數據時從數據庫查詢獲取壓縮包的存儲路徑,最終把壓縮包返回給用戶。下面是主要代碼(springboot+swagger2)
Controller
package com.cloudansys.monitor.solution.export.controller;
import com.cloudansys.monitor.base.BaseController;
import com.cloudansys.monitor.common.CSVUtils;
import com.cloudansys.monitor.solution.export.entity.TaskExportDTO;
import com.cloudansys.monitor.solution.export.entity.TaskInitDTO;
import com.cloudansys.monitor.solution.export.service.ExportService;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiImplicitParam;
import io.swagger.annotations.ApiOperation;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.compress.utils.IOUtils;
import org.apache.commons.io.FileUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.*;
import javax.servlet.http.HttpServletResponse;
import java.io.File;
import java.io.OutputStream;
import java.util.List;
@Slf4j
@Api(tags = "數據導出")
@RestController
@RequestMapping("/export")
public class ExportController extends BaseController {

    @Autowired
    private ExportService service;

    /**
     * Submits an asynchronous data-export task; a scheduled job processes it later.
     *
     * @param exportDTO sensor ids plus start/end time of the requested range
     * @return the id of the created task (init id), used later for download
     */
    @ApiOperation("提交數據導出任務")
    @PostMapping("/submitExportJob")
    public Integer submitExportJob(@RequestBody TaskExportDTO exportDTO) {
        log.info("exportDTO: {}", exportDTO);
        return this.service.submitJob(exportDTO);
    }

    /**
     * Streams the zip archive of a finished export task to the client.
     * Responds with 404 when no archive path is recorded yet (task still
     * running, failed, or unknown id).
     */
    @ApiOperation("根據任務初始化ID進行下載")
    @ApiImplicitParam(name = "init_id", value = "任務初始化ID")
    @GetMapping("/downloadFile/{init_id}")
    public void downloadFile(@PathVariable Integer init_id, HttpServletResponse response) {
        String filePath = this.service.getPathByInitID(init_id);
        log.info("filePath: {}", filePath);
        // BUGFIX: a null/empty path previously reached new File(null) inside
        // CSVUtils.downloadZipFile and blew up with a NullPointerException.
        if (filePath == null || filePath.isEmpty()) {
            response.setStatus(HttpServletResponse.SC_NOT_FOUND);
            return;
        }
        CSVUtils.downloadZipFile(filePath, response);
    }

    /**
     * Lists all export tasks belonging to one user.
     * NOTE(review): this is a read operation exposed via POST; kept as-is for
     * backward compatibility with existing clients.
     */
    @ApiOperation("獲取用戶數據導出任務")
    @PostMapping("/getExportJob/{user_id}")
    public List<TaskInitDTO> getExportJob(@PathVariable Integer user_id) {
        log.info("user_id: {}", user_id);
        return this.service.getExportJob(user_id);
    }
}
定時任務
package com.cloudansys.monitor.solution.export.schedule;
import com.cloudansys.monitor.common.CSVUtils;
import com.cloudansys.monitor.common.CacheHandler;
import com.cloudansys.monitor.common.ZipUtils;
import com.cloudansys.monitor.entity.FileBean;
import com.cloudansys.monitor.solution.data.entity.TargetPrimaryData;
import com.cloudansys.monitor.solution.data.service.TargetPrimaryDataService;
import com.cloudansys.monitor.solution.export.entity.TaskExportDTO;
import com.cloudansys.monitor.solution.export.entity.TaskInitDTO;
import com.cloudansys.monitor.solution.export.service.ExportService;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.CommandLineRunner;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.scheduling.annotation.EnableScheduling;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
@Slf4j
@Component
@EnableScheduling
public class ScheduleExportJob implements ScheduleJob, CommandLineRunner {
@Autowired
private JdbcTemplate jdbcTemplate;
@Autowired
private ExportService exportService;
@Autowired
private TargetPrimaryDataService targetService;
@Autowired
private CacheHandler cacheHandler;
@Override
public void run(String... args) {
// log.info("定時任務開始 . . .");
exec();
}
// {秒} {分} {時} {日} {月} {星期} {年份(可爲空)}
@Scheduled(cron = "* */10 * * * ?")
private void exec() {
// 壓縮目錄和生成的壓縮文件
String srcFile = "data/data-targets";
String zipFile = "data/data-targets.zip";
String sql_1 = "SELECT id FROM t_task_init WHERE status = 0;";
String sql_2 = "SELECT target_ids, start_time, end_time FROM t_task_export WHERE init_id = ?;";
// log.debug("sql :{} ", sql_1);
// log.debug("sql_2 :{} ", sql_2);
// 獲取需要執行的任務的 init_ids
List<Integer> init_ids = this.jdbcTemplate.queryForList(sql_1, Integer.class);
// 首先判斷有沒有需要執行的數據導出任務
if (null == init_ids || init_ids.size() == 0) {
return;
}
// 遍歷 init_ids 一個一個執行任務
init_ids.forEach(init_id -> {
List<TaskExportDTO> exportDTOs = this.jdbcTemplate.query(sql_2, new Object[]{init_id},
(resultSet, i) -> {
TaskExportDTO exportDTO = new TaskExportDTO();
exportDTO.setInit_id(init_id);
exportDTO.setTarget_ids(resultSet.getString(++i));
exportDTO.setSTime(resultSet.getDate(++i));
exportDTO.setETime(resultSet.getDate(++i));
return exportDTO;
});
// sql_2 查詢出來的總是隻有一個 TaskExportDTO
TaskExportDTO exportDTO = exportDTOs.get(0);
// 開始執行 init_id 的任務,並更新任務狀態
TaskInitDTO initDTO = new TaskInitDTO();
initDTO.setId(init_id);
initDTO.setStatus(1);
this.exportService.updateTaskInit(initDTO);
// 遍歷測點ID,查詢數據,每個測點的數據生成一個以測點編號命名的 csv 文件,都放在 data/data-targets 目錄
String[] target_ids = exportDTO.getTarget_ids().split(",");
Date sTime = exportDTO.getSTime();
Date eTime = exportDTO.getETime();
for (String target_id : target_ids) {
Integer targetId = Integer.valueOf(target_id);
List<TargetPrimaryData> targetPrimaryData = this.targetService.getByTargetId(targetId, sTime, eTime);
// 構建該測點數據生成 csv 文件的文件頭 [參數1,參數2,數據時間,. . .]
String[] param = targetPrimaryData.get(0).getParam();
List<Object> head = new ArrayList<>();
for (String p : param) {
head.add(p);
}
head.add("數據時間");
List<List<Object>> dataList = new ArrayList<>();
for (TargetPrimaryData target : targetPrimaryData) {
Double[] data = target.getData();
List<Object> list = new ArrayList<>();
for (Double d : data) {
list.add(d);
}
list.add(target.getTime());
dataList.add(list);
}
FileBean fileBean = new FileBean();
fileBean.setFileID(targetId);
// 從緩存中獲取該 targetId 對應的 targetCode
String targetCode = cacheHandler.getTargetCode(targetId);
fileBean.setFileName(targetCode);
fileBean.setFilePath("data/data-targets");
// 將該 targetId 測點數據寫入 csv 文件,文件名爲 targetCode
CSVUtils.createCSVFile(head, dataList, fileBean);
// log.info("================csv 文件創建結束!");
}
// 一個數據導出任務所需的數據處理結束後,生成的 csv 數據文件都在 data/data-targets 目錄下
// 然後 把 data-targets 目錄壓縮成 zip
ZipUtils.doCompress(srcFile, zipFile);
log.info("================csv 文件壓縮結束!");
// 最終把壓縮包的路徑存儲到數據庫中,並更新任務狀態
initDTO.setStatus(2);
initDTO.setETime(new Date());
initDTO.setPath(zipFile);
this.exportService.updateTaskInit(initDTO);
log.info("================csv 文件路徑存儲結束!");
});
}
}
CSV工具類
經測試三個字段,1千萬條數據寫成csv文件僅3秒
package com.cloudansys.monitor.common;
import com.cloudansys.monitor.entity.FileBean;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.compress.utils.IOUtils;
import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVPrinter;
import org.apache.commons.io.FileUtils;
import javax.servlet.http.HttpServletResponse;
import java.io.File;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.Writer;
import java.net.URLEncoder;
import java.nio.charset.StandardCharsets;
import java.time.Instant;
import java.time.temporal.ChronoUnit;
import java.util.List;
@SuppressWarnings("ResultOfMethodCallIgnored")
@Slf4j
public class CSVUtils {
/**
* @param head csv 文件頭
* @param dataList 要寫入 csv 文件中的數據
* @param fileBean 文件實體類
* @return 返回生成的 csv 文件的路徑
*/
public static String createCSVFile(List<Object> head, List<List<Object>> dataList, FileBean fileBean) {
Instant start_time = Instant.now();
File csvFile = null;
try {
csvFile = new File(fileBean.getFilePath() + File.separator + fileBean.getFileName() + ".csv");
File parent = csvFile.getParentFile();
if (parent != null && !parent.exists()) {
parent.mkdirs();
}
csvFile.createNewFile();
FileWriter writer = new FileWriter(csvFile);
CSVPrinter printer = CSVFormat.EXCEL.print(writer);
// 寫入文件頭部
printer.printRecord(head);
// 寫入文件內容
for (List<Object> row : dataList) {
printer.printRecord(row);
}
printer.flush();
printer.close();
} catch (IOException e) {
e.printStackTrace();
}
Instant end_time = Instant.now();
long seconds = ChronoUnit.SECONDS.between(start_time, end_time);
// log.info("耗時:{}", seconds);
// System.out.println("寫入成功!");
return csvFile.getPath();
}
/**
* 壓縮 csv 文件成 zip,並提供下載
*
* @param filePath 要進行壓縮的文件目錄
* @param response 下載請求響應對象
*/
public static void downloadZipFile(String filePath, HttpServletResponse response) {
File file = new File(filePath);
if (!file.exists()) {
return;
}
OutputStream os = null;
try {
os = response.getOutputStream();
response.reset();
response.setHeader("Content-Disposition", "attachment;filename=" + file.getName());
response.setContentType("application/octet-stream");
os.write(FileUtils.readFileToByteArray(file));
} catch (Exception e) {
e.printStackTrace();
} finally {
IOUtils.closeQuietly(os);
}
}
}
ZIP壓縮工具
package com.cloudansys.monitor.common;
import lombok.extern.slf4j.Slf4j;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;
@Slf4j
public class ZipUtils {
private ZipUtils() {
}
public static void doCompress(String srcFile, String zipFile) {
try {
doCompress(new File(srcFile), new File(zipFile));
} catch (IOException e) {
log.info("doCompress——文件壓縮出錯了!");
e.printStackTrace();
}
}
/**
* 文件壓縮
*
* @param srcFile 目錄或者單個文件
* @param zipFile 壓縮後的ZIP文件
*/
private static void doCompress(File srcFile, File zipFile) throws IOException {
ZipOutputStream out = null;
try {
out = new ZipOutputStream(new FileOutputStream(zipFile));
doCompress(srcFile, out);
} catch (Exception e) {
throw e;
} finally {
out.close();//記得關閉資源
}
}
static void doCompress(String fileName, ZipOutputStream out) throws IOException {
doCompress(new File(fileName), out);
}
private static void doCompress(File file, ZipOutputStream out) throws IOException {
doCompress(file, out, "");
}
private static void doCompress(File inFile, ZipOutputStream out, String dir) throws IOException {
if (inFile.isDirectory()) {
File[] files = inFile.listFiles();
if (files != null && files.length > 0) {
for (File file : files) {
String name = inFile.getName();
if (!"".equals(dir)) {
name = dir + "/" + name;
}
ZipUtils.doCompress(file, out, name);
}
}
} else {
ZipUtils.doZip(inFile, out, dir);
}
}
private static void doZip(File inFile, ZipOutputStream out, String dir) throws IOException {
String entryName;
if (!"".equals(dir)) {
entryName = dir + "/" + inFile.getName();
} else {
entryName = inFile.getName();
}
ZipEntry entry = new ZipEntry(entryName);
out.putNextEntry(entry);
int len;
byte[] buffer = new byte[1024];
FileInputStream fis = new FileInputStream(inFile);
while ((len = fis.read(buffer)) > 0) {
out.write(buffer, 0, len);
out.flush();
}
out.closeEntry();
fis.close();
}
}