高併發量的數據處理方案

需求描述:

有5000臺左右的設備,每臺設備每隔一分鐘就會向服務器端上報設備的信息(如設備所在位置經緯度等),現在需要服務端對這些上報請求傳輸的數據進行處理,並持久化到數據庫中;

需求就這樣簡單,但服務端要處理的併發還是不小的,平均每秒鐘都會有將近100個請求過來,遇到這樣的情況,你會怎麼做呢?

我的解決方案是,使用了緩存+批處理操作,代碼如下:

package com.jimu.data.servlet;

import org.apache.log4j.Logger;
import com.jimu.data.conn.JimuDataBakConn;

import java.io.IOException;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.util.HashMap;
import java.util.LinkedList;

import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;


public class GpsRecordServlet extends HttpServlet{

	private static final long serialVersionUID = 1L;
	// Logger for this servlet.
	private static final Logger logger = Logger.getLogger(GpsRecordServlet.class);
	// Number of records buffered before a batch flush to the database.
	private static int cacheSize = 2000;
	// Which buffer is currently active (1 or 2). Guarded by LOCK.
	private static int switchFlag = 1;
	// Guards switchFlag and both cache lists. LinkedList is not thread-safe,
	// and servlet containers invoke doGet concurrently, so every read/mutation
	// of the buffers and the flag must happen inside this lock.
	private static final Object LOCK = new Object();

	// Double-buffered caches: when the active buffer reaches cacheSize, its
	// contents are snapshotted and flushed to the database, then it is cleared
	// and the other buffer becomes active.
	private static LinkedList<HashMap<String,Object>> cacheList1 = new LinkedList<HashMap<String,Object>>();
	private static LinkedList<HashMap<String,Object>> cacheList2 = new LinkedList<HashMap<String,Object>>();

	//127.0.0.1:8080/jimudata/recordgps.html?tsn=9303ZH201508080107&lng=23.175&lat=113.245&tim=1436756925668
	/**
	 * Accepts one GPS report per request, appends it to the active buffer,
	 * and triggers a batch flush once the buffer reaches {@code cacheSize}.
	 * The flush itself runs outside the lock so slow DB writes do not block
	 * other request threads from caching into the alternate buffer.
	 */
	public void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
		response.setContentType("text/html;charset=UTF-8");
        request.setCharacterEncoding("UTF-8");

        String termSn = request.getParameter("tsn");		// device serial number
        String longitude = request.getParameter("lng");		// longitude
        String latitude = request.getParameter("lat");		// latitude
        String gpsTime = request.getParameter("tim");		// GPS timestamp

        // Snapshot taken under the lock; flushed to the DB after releasing it.
        LinkedList<HashMap<String,Object>> toFlush = null;
        synchronized (LOCK) {
        	if(switchFlag == 1){
        		int size = cacheRecord(termSn,longitude,latitude,gpsTime,cacheList1);
        		if(size >= cacheSize){
        			switchFlag = 2;
        			// Detach the full buffer atomically so no concurrently
        			// cached record can be lost between save and clear.
        			toFlush = new LinkedList<HashMap<String,Object>>(cacheList1);
        			cacheList1.clear();
        		}
        	}else{
        		int size = cacheRecord(termSn,longitude,latitude,gpsTime,cacheList2);
        		if(size >= cacheSize){
        			switchFlag = 1;
        			toFlush = new LinkedList<HashMap<String,Object>>(cacheList2);
        			cacheList2.clear();
        		}
        	}
        }
        if(toFlush != null){
        	saveCacheList(toFlush);
        }
	}

	/** POST is handled identically to GET. */
	public void doPost(HttpServletRequest request, HttpServletResponse response)
			throws ServletException, IOException {
		doGet(request, response);
	}

	/**
	 * Wraps one report into a map and appends it to the given buffer.
	 * NOTE: the key is "termSn" — it must match the key read back in
	 * saveCacheList (the original code wrote "ternSn" and read "termSn",
	 * so term_sn was always persisted as the empty string).
	 *
	 * @return the buffer size after the append
	 */
	public int cacheRecord(String termSn,String longitude,String latitude,String gpsTime,LinkedList<HashMap<String,Object>> cacheList){
		HashMap<String,Object> itemMap = new HashMap<String, Object>();
		itemMap.put("termSn",termSn);
		itemMap.put("longitude",longitude);
		itemMap.put("latitude",latitude);
		itemMap.put("gpsTime",gpsTime);
		cacheList.add(itemMap);
		return cacheList.size();
	}

	/**
	 * Persists the given buffer to term_gps_record with JDBC batch inserts
	 * (flushing every {@code batchSize} rows). Callers pass a detached
	 * snapshot, so no synchronization is needed here.
	 */
	public void saveCacheList(LinkedList<HashMap<String,Object>> cacheList){
		if(cacheList == null || cacheList.isEmpty()){
			return;
		}
		Connection conn = null;
		PreparedStatement ps = null;
		try{
			conn = JimuDataBakConn.getConn();
			String sql = " INSERT INTO term_gps_record (term_sn,longitude,latitude,gps_time,create_time) VALUES(?,?,?,?,now()) ";
			ps = conn.prepareStatement(sql);
			// Rows accumulated per executeBatch round-trip.
			final int batchSize = 500;
			int count = 0;
			for (HashMap<String,Object> itemMap: cacheList) {
				String termSn = itemMap.get("termSn") == null ? "":itemMap.get("termSn").toString();
				ps.setString(1,termSn);
				String longitude = itemMap.get("longitude") == null ? "":itemMap.get("longitude").toString();
				ps.setString(2,longitude);
				String latitude = itemMap.get("latitude") == null ? "":itemMap.get("latitude").toString();
				ps.setString(3,latitude);
				String gpsTime = itemMap.get("gpsTime") == null ? "":itemMap.get("gpsTime").toString();
				ps.setString(4,gpsTime);

				ps.addBatch();
			    if(++count % batchSize == 0) {
			        ps.executeBatch();		// flush a full batch
			        ps.clearBatch();		// drop the accumulated statements
			    }
			}
			ps.executeBatch(); 				// insert remaining records
		}catch (Exception e) {
			// Log with the stack trace instead of printStackTrace()+info.
			logger.error("批處理出現異常", e);
		}finally{
			// Close in reverse acquisition order; guard against nulls from a
			// failed getConn()/prepareStatement().
			if(ps != null){
				try{ps.close();}catch(Exception ignored){}
			}
			if(conn != null){
				try{conn.close();}catch(Exception ignored){}
			}
		}
	}

	/**
	 * On application shutdown, drains whatever is still buffered into the
	 * database. Snapshots are detached under the lock so a late in-flight
	 * request cannot race with the final flush.
	 */
	public void destroy(){
		System.out.println("tomcat即將關閉,保存緩存中數據!");
		LinkedList<HashMap<String,Object>> rest1;
		LinkedList<HashMap<String,Object>> rest2;
		synchronized (LOCK) {
			rest1 = new LinkedList<HashMap<String,Object>>(cacheList1);
			cacheList1.clear();
			rest2 = new LinkedList<HashMap<String,Object>>(cacheList2);
			cacheList2.clear();
		}
		saveCacheList(rest1);
		saveCacheList(rest2);
	}

}

如果有不足之處,歡迎指正,如果您有更好的解決方案,可以和大家探討分享,共同完善!

發佈了144 篇原創文章 · 獲贊 29 · 訪問量 35萬+
發表評論
所有評論
還沒有人評論,想成為第一個評論的人麼? 請在上方評論欄輸入並且點擊發布.
相關文章