import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
/**
 * Bulk-loads a SQL script into a local SQLite database.
 *
 * <p>{@link #readBatchSQL(InputStream)} reads a UTF-8 SQL script, groups the
 * statements into batches of 500, and hands each batch to a fixed worker pool.
 * All workers share one SQLite connection, so the actual batch execution is
 * serialized via a synchronized method. The tool is single-use: the pool is
 * shut down and the connection closed at the end of {@code readBatchSQL}.
 */
public class BatchTool {
    /** Statements per batch handed to one worker task. */
    private static final int BATCH_SIZE = 500;

    /** Table DDL, executed once when the connection is first opened. */
    private static final String DDL = "CREATE TABLE IF NOT EXISTS pbeijing_point (OBJECTID INTEGER,NAME TEXT,ADDRESS TEXT,PHONE TEXT,FAX TEXT,TYPE TEXT,CITYCODE TEXT,URL TEXT,EMAIL TEXT,NAME2 TEXT,X INTEGER,Y INTEGER)";

    /** Shared, lazily created connection; access goes through {@link #getConnection()}. */
    private Connection jCon = null;

    /** Worker pool: 20 threads, each runs one batch of up to {@code BATCH_SIZE} statements. */
    private final ExecutorService service = Executors.newFixedThreadPool(20);

    /**
     * Returns the shared SQLite connection, creating it (and the target table)
     * on first use.
     *
     * @return the open connection, or {@code null} when the JDBC driver cannot
     *         be loaded or the database cannot be opened — callers must check.
     */
    public synchronized Connection getConnection() {
        if (jCon == null) {
            Statement state = null;
            try {
                Class.forName("org.sqlite.JDBC");
                jCon = DriverManager.getConnection("jdbc:sqlite:c:\\newD.db");
                state = jCon.createStatement();
                state.executeUpdate(DDL);
            } catch (SQLException e) {
                e.printStackTrace();
            } catch (ClassNotFoundException e) {
                e.printStackTrace();
            } finally {
                // Close the DDL statement; the original leaked it.
                if (state != null) {
                    try {
                        state.close();
                    } catch (SQLException e) {
                        e.printStackTrace();
                    }
                }
            }
        }
        return jCon;
    }

    /**
     * Reads a UTF-8 SQL script, dispatches the statements to the worker pool
     * in batches of {@code BATCH_SIZE}, then shuts the pool down and closes
     * the connection. Statements are recognized by a {@code ';'} anywhere in
     * the accumulated text; trailing text without a {@code ';'} is dropped,
     * matching the original behavior.
     *
     * @param is the SQL script; closed by this method via the wrapping reader
     * @throws IOException if reading the script fails
     */
    public void readBatchSQL(InputStream is) throws IOException {
        BufferedReader reader = new BufferedReader(new InputStreamReader(is, "UTF-8"));
        try {
            StringBuilder statement = new StringBuilder(); // current (possibly multi-line) statement
            StringBuilder batch = new StringBuilder();     // completed statements for the next task
            int count = 0;                                 // statements accumulated in 'batch'
            String line;
            while ((line = reader.readLine()) != null) {
                // Keep a separator between joined lines so tokens from two
                // physical lines cannot fuse (the original concatenated them
                // directly, turning e.g. "INSERT" + "INTO" into "INSERTINTO").
                if (statement.length() > 0) {
                    statement.append('\n');
                }
                statement.append(line);
                if (statement.indexOf(";") != -1) { // statement complete
                    batch.append(statement);
                    statement.setLength(0); // reset
                    count++;
                }
                if (count >= BATCH_SIZE) { // batch full — hand it to a worker
                    service.execute(new SQLiteBatchHandler(batch.toString()));
                    batch.setLength(0); // reset
                    count = 0;          // reset
                }
            }
            // Flush the remaining, partially filled batch.
            if (batch.length() > 0) {
                System.out.println("finalSQL:" + batch);
                service.execute(new SQLiteBatchHandler(batch.toString()));
            }
        } finally {
            reader.close(); // the original leaked the reader
        }
        // Single-use tool: drain the pool, then close the shared connection.
        service.shutdown();
        try {
            service.awaitTermination(1, TimeUnit.HOURS);
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt(); // preserve the interrupt status
            e.printStackTrace();
        }
        try {
            Connection con = getConnection();
            if (con != null) { // may be null if the driver/DB never opened
                con.close();
            }
        } catch (SQLException e) {
            e.printStackTrace();
        }
    }

    /**
     * Splits a batch string into individual statements.
     *
     * @param batchSQl semicolon-separated statements, may be {@code null}
     * @return the statements, or {@code null} when the input is {@code null}
     */
    private static String[] splitSQL(String batchSQl) {
        if (batchSQl != null) {
            return batchSQl.split(";");
        }
        return null;
    }

    /**
     * Executes one batch inside a transaction, rolling back on failure.
     * Synchronized because all worker threads share a single connection and
     * SQLite's commit fails while another statement is still open.
     *
     * @param batch semicolon-separated SQL statements
     */
    private synchronized void executeBatchUpdate(String batch) {
        Connection con = getConnection();
        if (con == null) {
            // No connection available — report and drop instead of NPE-ing
            // (the original dereferenced null here and again in its catch).
            System.out.println("執行失敗:" + batch);
            return;
        }
        Statement ste = null;
        try {
            con.setAutoCommit(false);
            ste = con.createStatement();
            String[] sqls = splitSQL(batch);
            for (String sql : sqls) {
                if (sql != null && sql.trim().length() > 0) {
                    ste.addBatch(sql);
                }
            }
            ste.executeBatch();
            // Close before commit: SQLite rejects commit with open statements.
            ste.close();
            ste = null;
            con.commit();
        } catch (Exception e) {
            e.printStackTrace();
            System.out.println("執行失敗:" + batch);
            try {
                con.rollback(); // undo the failed batch
            } catch (SQLException e1) {
                e1.printStackTrace();
            }
        } finally {
            if (ste != null) {
                try {
                    ste.close();
                } catch (SQLException e) {
                    e.printStackTrace();
                }
            }
        }
    }

    /**
     * Worker task: executes one batch of SQL statements.
     */
    private class SQLiteBatchHandler implements Runnable {
        /** The semicolon-separated statements this task will execute. */
        private final String batch;

        public SQLiteBatchHandler(String sql) {
            this.batch = sql;
        }

        @Override
        public void run() {
            // The original slept 50 ms here for no stated reason; removed.
            if (batch != null && batch.length() > 0) {
                executeBatchUpdate(batch);
            }
        }
    }

    public static void main(String[] args) throws FileNotFoundException, IOException {
        BatchTool s = new BatchTool();
        InputStream in = new FileInputStream(new File("c:\\poi.sql"));
        try {
            s.readBatchSQL(in);
        } finally {
            in.close();
        }
    }
}
// Source article: "SQLite之大數據量批量入庫" (SQLite: bulk inserting large data volumes).
// NOTE(review): the lines below were blog-page boilerplate (comment-section chrome)
// pasted along with the code; wrapped as a comment so the file compiles.