因業務服務提升,單個庫已經扛不住日活10w,每天訂單380w+的數據, 故引入mysql讀寫分離;
首先項目架構 springboot+mybatis ;
由於引用的druid 的連接池 ,所以第一步要禁用springboot默認加載的tomcat連接池
@EnableAutoConfiguration(exclude={DataSourceAutoConfiguration.class})
@ServletComponentScan
@EnableAspectJAutoProxy
@SpringBootApplication(exclude = { org.springframework.boot.autoconfigure.thymeleaf.ThymeleafAutoConfiguration.class })
public class ApplicationRun extends SpringBootServletInitializer
接下來就配置引用的數據源 .properties 文件引入主庫鏈接以及從庫鏈接以及最小連接數,最大連接數等等。
################數據寫庫配置##############
# 連接池實現類型(DataSourceConfiguration 中的 @Value("${mysql.datasource.type}") 依賴此配置,缺失會導致啓動失敗)
mysql.datasource.type=com.alibaba.druid.pool.DruidDataSource
mysql.datasource.write.url=jdbc:mysql://127.0.0.1/test?useUnicode=true&characterEncoding=UTF-8&autoReconnect=true
mysql.datasource.write.username=root
mysql.datasource.write.password=root123
mysql.datasource.write.driverClassName=com.mysql.jdbc.Driver
mysql.datasource.write.minIdle=5
mysql.datasource.write.maxActive=100
mysql.datasource.write.initialSize=10
mysql.datasource.write.maxWait=60000
mysql.datasource.write.timeBetweenEvictionRunsMillis=60000
mysql.datasource.write.minEvictableIdleTimeMillis=300000
mysql.datasource.write.validationQuery=select 'x'
mysql.datasource.write.testWhileIdle=true
mysql.datasource.write.testOnBorrow=false
mysql.datasource.write.testOnReturn=false
mysql.datasource.write.poolPreparedStatements=true
mysql.datasource.write.maxPoolPreparedStatementPerConnectionSize=50
mysql.datasource.write.removeAbandoned=true
mysql.datasource.write.filters=stat
################數據讀庫配置##############
mysql.datasource.read.url=jdbc:mysql://127.0.0.1/test2?useUnicode=true&characterEncoding=UTF-8&autoReconnect=true
mysql.datasource.read.username=root
mysql.datasource.read.password=root123
mysql.datasource.read.driverClassName=com.mysql.jdbc.Driver
mysql.datasource.read.minIdle=5
mysql.datasource.read.maxActive=100
mysql.datasource.read.initialSize=10
mysql.datasource.read.maxWait=60000
mysql.datasource.read.timeBetweenEvictionRunsMillis=60000
mysql.datasource.read.minEvictableIdleTimeMillis=300000
mysql.datasource.read.validationQuery=select 'x'
mysql.datasource.read.testWhileIdle=true
mysql.datasource.read.testOnBorrow=false
mysql.datasource.read.testOnReturn=false
mysql.datasource.read.poolPreparedStatements=true
mysql.datasource.read.maxPoolPreparedStatementPerConnectionSize=50
mysql.datasource.read.removeAbandoned=true
mysql.datasource.read.filters=stat
配置加載重寫數據源
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.autoconfigure.jdbc.DataSourceBuilder;
import org.springframework.boot.autoconfigure.jdbc.DataSourceProperties;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Primary;
import javax.sql.DataSource;
/**
 * Master/slave (write/read) DataSource configuration.
 *
 * Both pools are populated from the "mysql.datasource.write" /
 * "mysql.datasource.read" property prefixes; the concrete pool implementation
 * (e.g. com.alibaba.druid.pool.DruidDataSource) is selected via the
 * "mysql.datasource.type" property.
 */
@Configuration
@EnableConfigurationProperties(DataSourceProperties.class)
public class DataSourceConfiguration {

    static final Log log = LogFactory.getLog(DataSourceConfiguration.class);

    /** Pool implementation class taken from mysql.datasource.type. */
    @Value("${mysql.datasource.type}")
    private Class<? extends DataSource> dataSourceType;

    /**
     * Write (master) data source; marked @Primary so unqualified
     * DataSource injections receive this pool.
     *
     * @return pooled DataSource bound to the mysql.datasource.write.* properties
     */
    @Bean(name = "writeDataSource")
    @Primary
    @ConfigurationProperties(prefix = "mysql.datasource.write")
    public DataSource writeDataSource() {
        log.info("-------------------- writeDataSource init ---------------------");
        return DataSourceBuilder.create().type(dataSourceType).build();
    }

    /**
     * Read (slave) data source.
     *
     * @return pooled DataSource bound to the mysql.datasource.read.* properties
     */
    @Bean(name = "readDataSource")
    @ConfigurationProperties(prefix = "mysql.datasource.read")
    public DataSource readDataSourceOne() {
        log.info("-------------------- read01 DataSourceOne init ---------------------");
        return DataSourceBuilder.create().type(dataSourceType).build();
    }
}
設置 本地線程,數據源上下文;引用threadlocal
package com.mp.chengxin.config.datasourceconfig;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
/*
* 本地線程,數據源上下文
*/
/*
 * Thread-local holder for the current data-source routing key (read vs. write).
 * The key set here is consumed by the routing DataSource's
 * determineCurrentLookupKey() to pick the target pool.
 */
public class DataSourceContextHolder {

    static final Log log = LogFactory.getLog(DataSourceContextHolder.class);

    // One routing key per thread; never shared across threads.
    private static final ThreadLocal<String> local = new ThreadLocal<String>();

    public static ThreadLocal<String> getLocal() {
        return local;
    }

    /** Route subsequent statements on this thread to the read (slave) library. */
    public static void setRead() {
        local.set(DataSourceType.read.getType());
        log.info("數據庫切換到讀庫...");
    }

    /** Route subsequent statements on this thread to the write (master) library. */
    public static void setWrite() {
        local.set(DataSourceType.write.getType());
        log.info("數據庫切換到寫庫...");
    }

    /** @return the routing key for this thread, or null if none was set. */
    public static String getReadOrWrite() {
        return local.get();
    }

    /** Clear the key — important for pooled threads to avoid stale routing. */
    public static void clear() {
        local.remove();
    }
}
設置主從庫枚舉類
/**
 * Routing keys for the master/slave data sources. The {@code type} value is
 * the lookup key registered in the routing DataSource's target map, so it
 * must match what determineCurrentLookupKey() returns.
 */
public enum DataSourceType {

    read("read", "從庫"),
    write("write", "主庫");

    private String type;
    private String name;

    DataSourceType(String type, String name) {
        this.type = type;
        this.name = name;
    }

    /** @return the routing lookup key ("read" or "write") */
    public String getType() {
        return type;
    }

    /**
     * @deprecated enum constants are process-wide singletons; mutating them
     * corrupts routing for every thread. Kept only for backward compatibility.
     */
    @Deprecated
    public void setType(String type) {
        this.type = type;
    }

    /** @return the human-readable name (主庫/從庫) */
    public String getName() {
        return name;
    }

    /** @deprecated see {@link #setType(String)} — do not mutate enum state. */
    @Deprecated
    public void setName(String name) {
        this.name = name;
    }
}
設置動態路由: 切面攔截後會注入對應的數據源
/**
 * MyBatis wiring on top of a routing DataSource that dispatches each statement
 * to the write (master) or read (slave) pool based on the key placed in
 * DataSourceContextHolder by the service-layer aspect.
 */
@Configuration
@AutoConfigureAfter(DataSourceConfiguration.class)
@MapperScan(basePackages = "com.*.*.mapper")
public class MyBatisConfiguration {

    @Autowired
    @Qualifier("writeDataSource")
    private DataSource writeDataSource;

    @Autowired
    @Qualifier("readDataSource")
    private DataSource readDataSource;

    /**
     * Pagination plugin registered on the SqlSessionFactory.
     * FIX: this field was referenced below but never declared, which did not
     * compile. NOTE(review): adjust the type/qualifier to the actual plugin
     * bean used in this project.
     */
    @Autowired
    private Interceptor paginationInterceptor;

    /**
     * SqlSessionFactory backed by the routing proxy so every MyBatis statement
     * picks its pool at execution time.
     *
     * @throws Exception if mapper resources cannot be resolved
     */
    @Bean(name = "sqlSessionFactory")
    public SqlSessionFactory sqlSessionFactoryBean() throws Exception {
        SqlSessionFactoryBean factory = new SqlSessionFactoryBean();
        factory.setDataSource(roundRobinDataSouceProxy());
        factory.setTypeAliasesPackage("com.*.*.model");
        // Location of the mapper XML files.
        Resource[] resources =
                new PathMatchingResourcePatternResolver().getResources("classpath:com/*/*Mapper.xml");
        factory.setMapperLocations(resources);
        factory.setPlugins(new Interceptor[]{paginationInterceptor});
        // FIX: dropped the catch/printStackTrace/rethrow wrapper — it only
        // duplicated the stack trace; the declared exception propagates as-is.
        return factory.getObject();
    }

    /**
     * Routing DataSource holding both pools. The keys registered here MUST
     * match the values returned by determineCurrentLookupKey(), otherwise the
     * switch cannot find the target pool.
     */
    @Bean(name = "roundRobinDataSouceProxy")
    public AbstractRoutingDataSource roundRobinDataSouceProxy() {
        Map<Object, Object> targetDataSources = new HashMap<Object, Object>();
        targetDataSources.put(DataSourceType.write.getType(), writeDataSource);
        targetDataSources.put(DataSourceType.read.getType(), readDataSource);

        AbstractRoutingDataSource proxy = new AbstractRoutingDataSource() {
            /**
             * Returns the lookup key of the DataSource to use for the current
             * call. FIX: returning null (instead of throwing NPE) lets Spring
             * fall back to the configured default (write) pool, so code paths
             * that never pass through the routing aspect — schedulers, startup
             * tasks — still work instead of crashing.
             */
            @Override
            protected Object determineCurrentLookupKey() {
                String typeKey = DataSourceContextHolder.getReadOrWrite();
                if (typeKey == null) {
                    // No explicit routing decision: use the default (write) pool.
                    return null;
                }
                if (typeKey.equals(DataSourceType.write.getType())) {
                    System.err.println("使用數據庫write.............");
                    return DataSourceType.write.getType();
                }
                // Anything else routes to the read pool.
                return DataSourceType.read.getType();
            }
        };
        proxy.setDefaultTargetDataSource(writeDataSource); // default = master
        proxy.setTargetDataSources(targetDataSources);
        return proxy;
    }

    @Bean
    public SqlSessionTemplate sqlSessionTemplate(SqlSessionFactory sqlSessionFactory) {
        return new SqlSessionTemplate(sqlSessionFactory);
    }

    /**
     * Transaction manager bound to the routing proxy so transactions span the
     * routed connection. FIX: call the @Bean method directly instead of a
     * static SpringUtil context lookup — @Configuration CGLIB proxying
     * guarantees the same singleton is returned.
     */
    @Bean
    public PlatformTransactionManager annotationDrivenTransactionManager() {
        return new DataSourceTransactionManager(roundRobinDataSouceProxy());
    }
}
下面最關鍵的一步 切面攔截, service層攔截,這樣可以保證寫事務的一致性
/**
* 在service層決定數據源
*
* 必須在事務AOP之前執行,所以實現Ordered,order的值越小,越先執行
* 如果一旦開始切換到寫庫,則之後的讀都會走寫庫
*
* @author Jfei
*
*/
@Aspect
@EnableAspectJAutoProxy(exposeProxy=true,proxyTargetClass=true)
@Component
public class DataSourceAopInService implements PriorityOrdered {
static final Log log = LogFactory.getLog(DataSourceAopInService.class);
@Before("apiAspect() ")
public void setReadDataSourceType() {
//如果已經開啓寫事務了,那之後的所有讀都從寫庫讀
// if(!DataSourceType.write.getType().equals(DataSourceContextHolder.getReadOrWrite())){
// DataSourceContextHolder.setRead();
// }
DataSourceContextHolder.setRead();
}
//
// @Before("apiAspect()")
// public void setWriteDataSourceType() {
// DataSourceContextHolder.setWrite();
// }
@Override
public int getOrder() {
/**
* 值越小,越優先執行
* 要優於事務的執行
* 在啓動類中加上了@EnableTransactionManagement(order = 10)
*/
return 1;
}
}