// Adapted from: https://blog.csdn.net/zuochang_liu/article/details/97398209
SparkSession ss = SparkSession.builder().config(conf).enableHiveSupport().getOrCreate()
//讀取方式1
String sql = "(select * from xxx) as tmp; //注意這裏的sql格式,該sql也可以直接是一個表名
Dataset<Row> df = session.read().format("jdbc")
.option("url", jdbcURL)
.option("driver", driver)
.option("dbtable", sql)
.option("user", username)
.option("password", password)
.load();
// Read method 2: use the read().jdbc(url, table, properties) overload,
// bundling the credentials and driver into a java.util.Properties object.
Properties connectionProperties = new Properties();
connectionProperties.put("user", username);
connectionProperties.put("password", password);
connectionProperties.put("driver", driver);
// Signature for reference: session.read().jdbc(url, table, properties)
// (the original had this as a bare, non-compiling statement)
df = session.read().jdbc(jdbcURL, sql, connectionProperties);
//寫入方式1:
String saveMode = "Overwrite";
df.write().mode(saveMode).jdbc(jdbcURL, tablename, connectionProperties);
// Write method 2: per-partition manual JDBC insert — gives full control
// over batching and commit granularity, unlike the DataFrameWriter sink.
// Renamed from "sql" to avoid redeclaring the read-side local above.
final String insertSql = "insert into tab_xxx (c1,c2,c3) values(?,?,?)";
final int columnCount = 3; // number of columns bound per row
final int batchSize = 1000; // rows per executeBatch/commit
df.javaRDD().foreachPartition(new VoidFunction<Iterator<Row>>() {
private static final long serialVersionUID = -834520661839866305L;

@Override
public void call(Iterator<Row> rows) throws Exception {
// Modern JDBC drivers self-register; Class.forName kept for older ones.
Class.forName(driver);
// try-with-resources guarantees the statement and connection are
// closed even if an exception escapes mid-partition (the original
// leaked pstmt on failure). The redundant casts are gone too.
try (Connection conn = DriverManager.getConnection(url, username, password);
     PreparedStatement pstmt = conn.prepareStatement(insertSql)) {
conn.setAutoCommit(false); // commit in batches, not per row
try {
int pending = 0;
while (rows.hasNext()) {
Row row = rows.next();
for (int i = 0; i < columnCount; i++) {
pstmt.setObject(i + 1, row.get(i));
}
pstmt.addBatch(); // one round-trip per batch, not per row
if (++pending % batchSize == 0) {
pstmt.executeBatch();
conn.commit();
}
}
pstmt.executeBatch(); // flush the final partial batch
conn.commit();
} catch (Exception e) {
conn.rollback(); // don't leave a half-committed partition behind
throw e;
}
}
}
});
}