1.說明:
springboot連接多數據源是大家經常要用到的操作,下面我用springboot連接兩個mysql和一個hive,採用的連接池是druid,學會這個後大家可以連接各種多數據源了,採用的操作方式是mybatis,也大致講述使用jdbc的操作方式
2.步驟(表演來了:我用的是2.1.8版本的springboot)
1.導入相應的jar包
<dependency>
<groupId>mysql</groupId>
<artifactId>mysql-connector-java</artifactId>
<version>8.0.11</version>
</dependency>
<dependency>
<groupId>com.alibaba</groupId>
<artifactId>druid-spring-boot-starter</artifactId>
<version>1.1.10</version>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-jdbc</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-tomcat</artifactId>
</dependency>
<dependency>
<groupId>org.apache.hive</groupId>
<artifactId>hive-jdbc</artifactId>
<version>1.1.0</version>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
<version>2.6.0</version>
</dependency>
<!-- tk.mapper -->
<dependency>
<groupId>tk.mybatis</groupId>
<artifactId>mapper-spring-boot-starter</artifactId>
<version>2.1.5</version>
</dependency>
2.配置yml
spring:
datasource:
#使用druid連接池
type: com.alibaba.druid.pool.DruidDataSource
# 自定義的主數據源配置信息
primary:
datasource:
#druid相關配置
druid:
#監控統計攔截的filters
filters: stat
driverClassName: com.mysql.cj.jdbc.Driver
#配置基本屬性
url: jdbc:mysql://127.0.0.1:3306/test?useUnicode=true&characterEncoding=UTF-8&allowMultiQueries=true&autoReconnect=true&useSSL=false&serverTimezone=Asia/Shanghai
username: root
password: KaiHan@123
#配置初始化大小/最小/最大
initialSize: 1
minIdle: 1
maxActive: 10
#獲取連接等待超時時間
maxWait: 60000
#間隔多久進行一次檢測,檢測需要關閉的空閒連接
timeBetweenEvictionRunsMillis: 60000
#一個連接在池中最小生存的時間
minEvictableIdleTimeMillis: 300000
validationQuery: SELECT 'x'
testWhileIdle: true
testOnBorrow: false
testOnReturn: false
#打開PSCache,並指定每個連接上PSCache的大小。oracle設爲true,mysql設爲false。分庫分表較多推薦設置爲false
poolPreparedStatements: false
maxPoolPreparedStatementPerConnectionSize: 10
# 自定義的從數據源配置信息
back:
datasource:
#druid相關配置
druid:
#監控統計攔截的filters
filters: stat
driverClassName: com.mysql.cj.jdbc.Driver
#配置基本屬性
url: jdbc:mysql://127.0.0.1:3306/test?useUnicode=true&characterEncoding=UTF-8&allowMultiQueries=true&autoReconnect=true&useSSL=false&serverTimezone=Asia/Shanghai
username: root
password: 12311
#配置初始化大小/最小/最大
initialSize: 1
minIdle: 1
maxActive: 10
#獲取連接等待超時時間
maxWait: 60000
#間隔多久進行一次檢測,檢測需要關閉的空閒連接
timeBetweenEvictionRunsMillis: 60000
#一個連接在池中最小生存的時間
minEvictableIdleTimeMillis: 300000
validationQuery: SELECT 'x'
testWhileIdle: true
testOnBorrow: false
testOnReturn: false
#打開PSCache,並指定每個連接上PSCache的大小。oracle設爲true,mysql設爲false。分庫分表較多推薦設置爲false
poolPreparedStatements: false
maxPoolPreparedStatementPerConnectionSize: 10
# 自定義的hive數據源配置信息
hive:
datasource:
#druid相關配置
druid:
#監控統計攔截的filters
filters: stat
driverClassName: org.apache.hive.jdbc.HiveDriver
#配置基本屬性
url: jdbc:hive2://192.168.1.1:10000/test
username: root
password: Herbert@123
#配置初始化大小/最小/最大
initialSize: 1
minIdle: 1
maxActive: 10
#獲取連接等待超時時間
maxWait: 60000
#間隔多久進行一次檢測,檢測需要關閉的空閒連接
timeBetweenEvictionRunsMillis: 60000
#一個連接在池中最小生存的時間
minEvictableIdleTimeMillis: 300000
validationQuery: SELECT 'x'
testWhileIdle: true
testOnBorrow: false
testOnReturn: false
#打開PSCache,並指定每個連接上PSCache的大小。oracle設爲true,mysql設爲false。分庫分表較多推薦設置爲false
poolPreparedStatements: false
maxPoolPreparedStatementPerConnectionSize: 10
3.主數據源配置
import com.alibaba.druid.pool.DruidDataSource;
import lombok.Data;
import org.apache.ibatis.session.SqlSessionFactory;
import org.mybatis.spring.SqlSessionFactoryBean;
import org.mybatis.spring.annotation.MapperScan;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Primary;
import org.springframework.core.io.support.PathMatchingResourcePatternResolver;
import org.springframework.jdbc.datasource.DataSourceTransactionManager;
import javax.sql.DataSource;
import java.sql.SQLException;
/**
* @Description: 主數據源配置類
* 前綴爲primary.datasource.druid的配置信息
*/
@Data
@Configuration
@tk.mybatis.spring.annotation.MapperScan(basePackages = PrimaryDataBaseConfig.PACKAGE)
@ConfigurationProperties(prefix = "primary.datasource.druid")
@MapperScan(basePackages = PrimaryDataBaseConfig.PACKAGE, sqlSessionFactoryRef = "primarySqlSessionFactory")
public class PrimaryDataBaseConfig {
/**
* dao層的包路徑
*/
static final String PACKAGE = "com.mao.mysqlhive.demomh.mapper.primary";
/**
* mapper文件的相對路徑
*/
private static final String MAPPER_LOCATION = "classpath:mappers/primary/*Mapper.xml";
private String filters;
private String url;
private String username;
private String password;
private String driverClassName;
private int initialSize;
private int minIdle;
private int maxActive;
private long maxWait;
private long timeBetweenEvictionRunsMillis;
private long minEvictableIdleTimeMillis;
private String validationQuery;
private boolean testWhileIdle;
private boolean testOnBorrow;
private boolean testOnReturn;
private boolean poolPreparedStatements;
private int maxPoolPreparedStatementPerConnectionSize;
/**
* 主數據源使用@Primary註解進行標識
*/
@Primary
@Bean(name = "primaryDataSource")
public DataSource primaryDataSource() throws SQLException {
DruidDataSource druid = new DruidDataSource();
// 監控統計攔截的filters
druid.setFilters(filters);
// 配置基本屬性
druid.setDriverClassName(driverClassName);
druid.setUsername(username);
druid.setPassword(password);
druid.setUrl(url);
//初始化時建立物理連接的個數
druid.setInitialSize(initialSize);
//最大連接池數量
druid.setMaxActive(maxActive);
//最小連接池數量
druid.setMinIdle(minIdle);
//獲取連接時最大等待時間,單位毫秒。
druid.setMaxWait(maxWait);
//間隔多久進行一次檢測,檢測需要關閉的空閒連接
druid.setTimeBetweenEvictionRunsMillis(timeBetweenEvictionRunsMillis);
//一個連接在池中最小生存的時間
druid.setMinEvictableIdleTimeMillis(minEvictableIdleTimeMillis);
//用來檢測連接是否有效的sql
druid.setValidationQuery(validationQuery);
//建議配置爲true,不影響性能,並且保證安全性。
druid.setTestWhileIdle(testWhileIdle);
//申請連接時執行validationQuery檢測連接是否有效
druid.setTestOnBorrow(testOnBorrow);
druid.setTestOnReturn(testOnReturn);
//是否緩存preparedStatement,也就是PSCache,oracle設爲true,mysql設爲false。分庫分表較多推薦設置爲false
druid.setPoolPreparedStatements(poolPreparedStatements);
// 打開PSCache時,指定每個連接上PSCache的大小
druid.setMaxPoolPreparedStatementPerConnectionSize(maxPoolPreparedStatementPerConnectionSize);
return druid;
}
/**
* 創建該數據源的事務管理
*/
@Primary
@Bean(name = "primaryTransactionManager")
public DataSourceTransactionManager primaryTransactionManager() throws SQLException {
return new DataSourceTransactionManager(primaryDataSource());
}
/**
* 創建Mybatis的連接會話工廠實例
*/
@Primary
@Bean(name = "primarySqlSessionFactory")
public SqlSessionFactory primarySqlSessionFactory(@Qualifier("primaryDataSource") DataSource primaryDataSource) throws Exception {
final SqlSessionFactoryBean sessionFactory = new SqlSessionFactoryBean();
sessionFactory.setDataSource(primaryDataSource); // 設置數據源bean
sessionFactory.setMapperLocations(new PathMatchingResourcePatternResolver()
.getResources(PrimaryDataBaseConfig.MAPPER_LOCATION)); // 設置mapper文件路徑
return sessionFactory.getObject();
}
4.從數據源配置
import com.alibaba.druid.pool.DruidDataSource;
import lombok.Data;
import org.apache.ibatis.session.SqlSessionFactory;
import org.mybatis.spring.SqlSessionFactoryBean;
import org.mybatis.spring.annotation.MapperScan;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.io.support.PathMatchingResourcePatternResolver;
import org.springframework.jdbc.datasource.DataSourceTransactionManager;
import javax.sql.DataSource;
import java.sql.SQLException;
/**
 * Secondary ("back") MySQL data source configuration.
 * All Druid pool settings are bound from the "back.datasource.druid"
 * prefix via @ConfigurationProperties plus Lombok-generated setters.
 *
 * NOTE(fix): the original annotated each field with @Value, but the
 * placeholders were shifted one field down (url received driverClassName,
 * username received url, password received username, driverClassName
 * received password), so the pool could never connect. Binding through
 * @ConfigurationProperties — as the primary config already does — removes
 * that whole class of error. The original also mapper-scanned
 * PrimaryDataBaseConfig.PACKAGE here (copy-paste error) and once more
 * without a sqlSessionFactoryRef; a single scan of this config's own
 * package, pinned to backSqlSessionFactory, replaces both.
 */
@Data
@Configuration
@ConfigurationProperties(prefix = "back.datasource.druid")
@tk.mybatis.spring.annotation.MapperScan(basePackages = BackDataBaseConfig.PACKAGE, sqlSessionFactoryRef = "backSqlSessionFactory")
public class BackDataBaseConfig {
    /**
     * Package containing the DAO (mapper) interfaces for this data source.
     */
    static final String PACKAGE = "com.mao.mysqlhive.demomh.mapper.back";
    /**
     * Classpath pattern locating this data source's mapper XML files.
     */
    private static final String MAPPER_LOCATION = "classpath:mappers/back/*Mapper.xml";
    // ---- Druid pool settings, bound from back.datasource.druid.* ----
    private String filters;
    private String url;
    private String username;
    private String password;
    private String driverClassName;
    private int initialSize;
    private int minIdle;
    private int maxActive;
    private long maxWait;
    private long timeBetweenEvictionRunsMillis;
    private long minEvictableIdleTimeMillis;
    private String validationQuery;
    private boolean testWhileIdle;
    private boolean testOnBorrow;
    private boolean testOnReturn;
    private boolean poolPreparedStatements;
    private int maxPoolPreparedStatementPerConnectionSize;
    /**
     * The secondary data source (no @Primary — inject it with
     * @Qualifier("backDataSource")).
     */
    @Bean(name = "backDataSource")
    public DataSource backDataSource() throws SQLException {
        DruidDataSource druid = new DruidDataSource();
        // Monitoring/stat interceptor filters (e.g. "stat").
        druid.setFilters(filters);
        // Basic connection properties.
        druid.setDriverClassName(driverClassName);
        druid.setUsername(username);
        druid.setPassword(password);
        druid.setUrl(url);
        // Physical connections created eagerly at startup.
        druid.setInitialSize(initialSize);
        // Pool size bounds.
        druid.setMaxActive(maxActive);
        druid.setMinIdle(minIdle);
        // Maximum wait (ms) when borrowing a connection.
        druid.setMaxWait(maxWait);
        // Idle-connection eviction cycle and minimum idle lifetime.
        druid.setTimeBetweenEvictionRunsMillis(timeBetweenEvictionRunsMillis);
        druid.setMinEvictableIdleTimeMillis(minEvictableIdleTimeMillis);
        // SQL used to validate connections, and when validation runs.
        druid.setValidationQuery(validationQuery);
        druid.setTestWhileIdle(testWhileIdle);
        druid.setTestOnBorrow(testOnBorrow);
        druid.setTestOnReturn(testOnReturn);
        // PSCache: recommended true for Oracle, false for MySQL.
        druid.setPoolPreparedStatements(poolPreparedStatements);
        druid.setMaxPoolPreparedStatementPerConnectionSize(maxPoolPreparedStatementPerConnectionSize);
        return druid;
    }
    /**
     * Transaction manager bound to the back data source.
     */
    @Bean(name = "backTransactionManager")
    public DataSourceTransactionManager backTransactionManager() throws SQLException {
        return new DataSourceTransactionManager(backDataSource());
    }
    /**
     * MyBatis session factory for the back data source.
     */
    @Bean(name = "backSqlSessionFactory")
    public SqlSessionFactory backSqlSessionFactory(@Qualifier("backDataSource") DataSource backDataSource) throws Exception {
        final SqlSessionFactoryBean sessionFactory = new SqlSessionFactoryBean();
        sessionFactory.setDataSource(backDataSource);
        sessionFactory.setMapperLocations(new PathMatchingResourcePatternResolver()
                .getResources(BackDataBaseConfig.MAPPER_LOCATION));
        return sessionFactory.getObject();
    }
}
5.hive數據源配置
import com.alibaba.druid.pool.DruidDataSource;
import lombok.Data;
import org.apache.ibatis.session.SqlSessionFactory;
import org.mybatis.spring.SqlSessionFactoryBean;
import org.mybatis.spring.annotation.MapperScan;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.io.support.PathMatchingResourcePatternResolver;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.jdbc.datasource.DataSourceTransactionManager;
import javax.sql.DataSource;
import java.sql.SQLException;
/**
 * Hive data source configuration (HiveServer2 via JDBC).
 * All Druid pool settings are bound from the "hive.datasource.druid"
 * prefix via @ConfigurationProperties plus Lombok-generated setters.
 *
 * NOTE(fix): the original had two defects. (1) Its @Value placeholders
 * were shifted one field down (url received driverClassName, username
 * received url, password received username, driverClassName received
 * password), so the pool could never connect; @ConfigurationProperties
 * binding removes that error. (2) The transaction-manager bean reused the
 * name "hiveDruidDataSource" already taken by the DataSource bean, which
 * either fails startup or silently overrides the data source; it is now
 * named "hiveTransactionManager".
 */
@Data
@Configuration
@ConfigurationProperties(prefix = "hive.datasource.druid")
@MapperScan(basePackages = HiveDataBaseConfig.PACKAGE, sqlSessionFactoryRef = "hiveSqlSessionFactory")
public class HiveDataBaseConfig {
    /**
     * Package containing the DAO (mapper) interfaces for this data source.
     */
    static final String PACKAGE = "com.mao.mysqlhive.demomh.mapper.hive";
    /**
     * Classpath pattern locating this data source's mapper XML files.
     */
    private static final String MAPPER_LOCATION = "classpath:mappers/hive/*Mapper.xml";
    // ---- Druid pool settings, bound from hive.datasource.druid.* ----
    private String filters;
    private String url;
    private String username;
    private String password;
    private String driverClassName;
    private int initialSize;
    private int minIdle;
    private int maxActive;
    private long maxWait;
    private long timeBetweenEvictionRunsMillis;
    private long minEvictableIdleTimeMillis;
    private String validationQuery;
    private boolean testWhileIdle;
    private boolean testOnBorrow;
    private boolean testOnReturn;
    private boolean poolPreparedStatements;
    private int maxPoolPreparedStatementPerConnectionSize;
    /**
     * The Hive data source (inject with @Qualifier("hiveDruidDataSource")).
     */
    @Bean(name = "hiveDruidDataSource")
    public DataSource hiveDataSource() throws SQLException {
        DruidDataSource druid = new DruidDataSource();
        // Monitoring/stat interceptor filters (e.g. "stat").
        druid.setFilters(filters);
        // Basic connection properties (HiveDriver + jdbc:hive2:// URL).
        druid.setDriverClassName(driverClassName);
        druid.setUsername(username);
        druid.setPassword(password);
        druid.setUrl(url);
        // Physical connections created eagerly at startup.
        druid.setInitialSize(initialSize);
        // Pool size bounds.
        druid.setMaxActive(maxActive);
        druid.setMinIdle(minIdle);
        // Maximum wait (ms) when borrowing a connection.
        druid.setMaxWait(maxWait);
        // Idle-connection eviction cycle and minimum idle lifetime.
        druid.setTimeBetweenEvictionRunsMillis(timeBetweenEvictionRunsMillis);
        druid.setMinEvictableIdleTimeMillis(minEvictableIdleTimeMillis);
        // SQL used to validate connections, and when validation runs.
        druid.setValidationQuery(validationQuery);
        druid.setTestWhileIdle(testWhileIdle);
        druid.setTestOnBorrow(testOnBorrow);
        druid.setTestOnReturn(testOnReturn);
        // PSCache settings (typically disabled for non-Oracle sources).
        druid.setPoolPreparedStatements(poolPreparedStatements);
        druid.setMaxPoolPreparedStatementPerConnectionSize(maxPoolPreparedStatementPerConnectionSize);
        return druid;
    }
    /**
     * JdbcTemplate over the Hive pool — the recommended way to query Hive.
     */
    @Bean(name = "hiveJdbcTemplate")
    public JdbcTemplate hiveJdbcTemplate(@Qualifier("hiveDruidDataSource") DataSource dataSource) {
        return new JdbcTemplate(dataSource);
    }
    /**
     * Transaction manager bound to the Hive data source.
     */
    @Bean(name = "hiveTransactionManager")
    public DataSourceTransactionManager hiveTransactionManager() throws SQLException {
        return new DataSourceTransactionManager(hiveDataSource());
    }
    /**
     * MyBatis session factory for the Hive data source.
     */
    @Bean(name = "hiveSqlSessionFactory")
    public SqlSessionFactory hiveSqlSessionFactory(@Qualifier("hiveDruidDataSource") DataSource hiveDataSource) throws Exception {
        final SqlSessionFactoryBean sessionFactory = new SqlSessionFactoryBean();
        sessionFactory.setDataSource(hiveDataSource);
        sessionFactory.setMapperLocations(new PathMatchingResourcePatternResolver()
                .getResources(HiveDataBaseConfig.MAPPER_LOCATION));
        return sessionFactory.getObject();
    }
}
6.hive引用,建議用 JdbcTemplate
@Autowired
@Qualifier("hiveJdbcTemplate")
private JdbcTemplate hiveJdbcTemplate;