结合jdbcTemplate动态注入数据源
maven配置
<!-- Maven dependencies for the dynamic data-source sample:
     Druid connection pool, MySQL JDBC driver, Spring Web, MyBatis-Plus.
     NOTE(review): no <version> elements — assumes versions are managed by a
     parent POM / dependency BOM; confirm. This fragment belongs inside a
     <dependencies> element of the POM. -->
<dependency>
<groupId>com.alibaba</groupId>
<artifactId>druid-spring-boot-starter</artifactId>
</dependency>
<dependency>
<groupId>mysql</groupId>
<artifactId>mysql-connector-java</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-web</artifactId>
</dependency>
<dependency>
<groupId>com.baomidou</groupId>
<artifactId>mybatis-plus-boot-starter</artifactId>
</dependency>
数据库设计
-- Registry of externally-configurable JDBC data sources; each row maps to one
-- DruidDataSource built at runtime by BaseQueryTool.
-- NOTE(review): `password` is stored as plain varchar(64) — presumably
-- plaintext; consider encrypting at rest. The `source_type` column comment
-- lists only "1 mysql" while the Java entity mentions "2 mongoDB" — confirm
-- which is authoritative.
CREATE TABLE `data_source` (
`id` bigint(11) NOT NULL AUTO_INCREMENT,
`source_name` varchar(32) DEFAULT NULL COMMENT '数据源名称',
`source_code` varchar(12) DEFAULT NULL COMMENT '数据源编码',
`source_type` tinyint(2) DEFAULT NULL COMMENT '数据源类型(1 mysql)',
`url` varchar(128) DEFAULT NULL COMMENT 'URL',
`user_name` varchar(64) DEFAULT NULL COMMENT '用户名',
`password` varchar(64) DEFAULT NULL COMMENT '密码',
`state` tinyint(2) NOT NULL COMMENT '状态(0停用 1启用)',
`create_by` varchar(20) DEFAULT NULL COMMENT '创建人',
`create_time` datetime DEFAULT NULL COMMENT '创建时间',
`update_by` varchar(20) DEFAULT NULL COMMENT '修改人',
`update_time` datetime DEFAULT NULL COMMENT '修改时间',
PRIMARY KEY (`id`)
) ENGINE=InnoDB AUTO_INCREMENT=5 DEFAULT CHARSET=utf8mb4 COMMENT='数据源';
LocalCacheUtil
/**
 * Process-local cache of Druid connection pools, keyed by data-source name.
 *
 * <p>Backed by a {@link ConcurrentHashMap}, so individual get/set calls are
 * thread-safe; callers composing a get-then-set (as BaseQueryTool does) must
 * tolerate a benign race where two threads build the same pool.
 */
public class LocalCacheUtil {

    private static final ConcurrentMap<String, DruidDataSource> cacheRepository =
            new ConcurrentHashMap<String, DruidDataSource>();

    /**
     * Caches a data source under the given key.
     *
     * @param key        cache key (the data-source name); blank keys are rejected
     * @param dataSource pool to cache; null is rejected (the original code would
     *                   have thrown NPE from ConcurrentHashMap.put)
     * @return true if the entry was stored, false if key or dataSource is unusable
     */
    public static boolean set(String key, DruidDataSource dataSource) {
        if (isBlank(key) || dataSource == null) {
            return false;
        }
        // NOTE(review): an existing pool under the same key is replaced but not
        // closed — if entries can be overwritten, the old pool's connections
        // leak. Consider closing the displaced DruidDataSource; verify no other
        // component still holds a reference before doing so.
        cacheRepository.put(key, dataSource);
        return true;
    }

    /**
     * Looks up a cached data source.
     *
     * @param key cache key (the data-source name)
     * @return the cached pool, or null when the key is blank or not present
     */
    public static DruidDataSource get(String key) {
        if (isBlank(key)) {
            return null;
        }
        // ConcurrentHashMap.get already returns null on a miss; the original
        // null-re-check-and-return-null branch was redundant.
        return cacheRepository.get(key);
    }

    /** @return true when the key is null, empty, or whitespace-only. */
    private static boolean isBlank(String key) {
        return key == null || key.trim().isEmpty();
    }
}
JobDataSourceDO
@Data
@TableName("data_source")
// Persistence entity for the `data_source` table: one row describes one
// externally-configured JDBC data source consumed by BaseQueryTool.
// Getters/setters/equals/hashCode are generated by Lombok's @Data.
public class JobDataSourceDO implements Serializable {
    private static final long serialVersionUID = 1L;
    /**
     * Auto-increment primary key.
     */
    @TableId(value = "ID", type = IdType.AUTO)
    private Long id;
    /**
     * Data-source display name; also used as the runtime cache key in
     * LocalCacheUtil / BaseQueryTool.
     */
    private String sourceName;
    /**
     * Data-source code (short unique identifier).
     */
    private String sourceCode;
    /**
     * Data-source type (1 = MySQL, 2 = MongoDB).
     * NOTE(review): the DDL column comment only lists "1 mysql" — confirm
     * whether MongoDB is actually supported.
     */
    private Integer sourceType;
    /**
     * JDBC connection URL.
     */
    private String url;
    /**
     * Database user name.
     */
    private String userName;
    /**
     * Database password (stored as-is; presumably plaintext — verify).
     */
    private String password;
    /**
     * Status (0 = disabled, 1 = enabled).
     */
    private Integer state;
    /**
     * Created by (operator name).
     */
    private String createBy;
    /**
     * Creation timestamp. NOTE(review): legacy java.util.Date — migrating to
     * java.time would change the public getter/setter types, so left as-is.
     */
    private Date createTime;
    /**
     * Last modified by (operator name).
     */
    private String updateBy;
    /**
     * Last modification timestamp (legacy java.util.Date, see createTime).
     */
    private Date updateTime;
}
BaseQueryTool
/**
 * Runs ad-hoc SQL against a dynamically registered data source.
 *
 * <p>Per data-source name, both the Druid pool (via {@link LocalCacheUtil})
 * and the {@link JdbcTemplate} are created once and cached for reuse.
 */
public class BaseQueryTool {
    protected static final Logger logger = LoggerFactory.getLogger(BaseQueryTool.class);

    /** JdbcTemplate cache keyed by data-source name; shared across instances. */
    private static final ConcurrentMap<String, JdbcTemplate> cacheJdbcTemplate =
            new ConcurrentHashMap<String, JdbcTemplate>();

    private DruidDataSource druidDataSource;
    private JdbcTemplate jdbcTemplate;

    /**
     * Resolves (creating and caching on first use) the pool and JdbcTemplate
     * for the given data-source definition.
     *
     * <p>NOTE(review): the cache key is sourceName only — if the URL or
     * credentials change for the same name, a stale pool is reused until the
     * process restarts. Confirm upstream invalidates the caches on update.
     *
     * @param jobDatasource data-source definition (url, userName, password, sourceName)
     */
    BaseQueryTool(JobDataSourceDO jobDatasource) {
        logger.info("获取数据源-------------------{}", JSON.toJSONString(jobDatasource));
        try {
            DruidDataSource cached = LocalCacheUtil.get(jobDatasource.getSourceName());
            if (cached == null) {
                cached = getDataSource(jobDatasource);
                LocalCacheUtil.set(jobDatasource.getSourceName(), cached);
            }
            druidDataSource = cached;
            // computeIfAbsent is atomic, fixing the original check-then-act race
            // where two threads could each build and publish a JdbcTemplate.
            this.jdbcTemplate = cacheJdbcTemplate.computeIfAbsent(
                    jobDatasource.getSourceName(),
                    name -> {
                        JdbcTemplate template = new JdbcTemplate();
                        template.setDataSource(druidDataSource);
                        return template;
                    });
            logger.info("开始获取数据源连接数-------------------,{},{}",
                    druidDataSource.getInitialSize(), druidDataSource.getActiveCount());
        } catch (Exception e) {
            // Pass the throwable itself so SLF4J logs the stack trace —
            // JSON.toJSONString(e) discarded it. Catch Exception rather than
            // Throwable so JVM Errors still propagate.
            logger.error("获取数据源-------------------", e);
        }
        logger.info("获取数据源-------------------");
    }

    /**
     * Builds a Druid pool for a MySQL source from the definition's
     * URL/credentials, with fixed pool sizing and idle-validation settings.
     */
    private DruidDataSource getDataSource(JobDataSourceDO jobDatasource) {
        DruidDataSource dataSource = new DruidDataSource();
        dataSource.setUsername(jobDatasource.getUserName());
        dataSource.setPassword(jobDatasource.getPassword());
        dataSource.setUrl(jobDatasource.getUrl());
        // Hard-coded MySQL driver: sourceType is ignored here — only type 1
        // (MySQL) is actually supported by this pool factory.
        dataSource.setDriverClassName("com.mysql.cj.jdbc.Driver");
        dataSource.setInitialSize(5);
        dataSource.setMinIdle(5);
        dataSource.setMaxActive(50);
        dataSource.setMaxWait(60000);
        dataSource.setTimeBetweenEvictionRunsMillis(60000);
        dataSource.setMinEvictableIdleTimeMillis(30000);
        dataSource.setValidationQuery("SELECT 1 FROM DUAL");
        dataSource.setTestWhileIdle(true);
        dataSource.setTestOnBorrow(false);
        dataSource.setTestOnReturn(false);
        dataSource.setPoolPreparedStatements(true);
        return dataSource;
    }

    /**
     * Executes the given SQL and maps every row to a JSONObject keyed by
     * column label (honors SQL aliases, unlike getColumnName).
     *
     * <p>SECURITY(review): querySql is executed verbatim — if it can contain
     * user-controlled text this is a SQL-injection vector; callers must
     * validate/whitelist it.
     *
     * @param querySql SQL SELECT statement to run
     * @return one JSONObject per row; empty array for an empty result set
     */
    public JSONArray getArrByQuerySql(String querySql) {
        return jdbcTemplate.query(querySql, new ResultSetExtractor<JSONArray>() {
            @Override
            public JSONArray extractData(ResultSet resultSet) throws SQLException, DataAccessException {
                ResultSetMetaData meta = resultSet.getMetaData();
                int columnCount = meta.getColumnCount();
                JSONArray rows = new JSONArray();
                // The original wrapped this loop in `catch (Exception e) {}`,
                // silently returning a partial result on failure; SQLException
                // is already declared, so let errors propagate instead.
                while (resultSet.next()) {
                    JSONObject row = new JSONObject();
                    for (int i = 1; i <= columnCount; i++) {
                        row.put(meta.getColumnLabel(i), resultSet.getObject(i));
                    }
                    rows.add(row);
                }
                return rows;
            }
        });
    }
}
使用
// Build an in-memory data-source definition from the persisted job definition.
JobDataSourceDO dataSourceDO = new JobDataSourceDO();
dataSourceDO.setPassword(queryJobDefinitionByJobcode.getPassword());
dataSourceDO.setUserName(queryJobDefinitionByJobcode.getUserName());
dataSourceDO.setUrl(queryJobDefinitionByJobcode.getUrl());
dataSourceDO.setSourceType(queryJobDefinitionByJobcode.getSourceType());
dataSourceDO.setSourceName(queryJobDefinitionByJobcode.getSourceName());
// NOTE(review): dataSourceDO is populated but never visibly handed to
// baseQueryTool in this snippet — presumably baseQueryTool was constructed
// earlier as `new BaseQueryTool(dataSourceDO)`; confirm, otherwise the query
// below runs against a different (or uninitialized) data source.
JSONArray array = baseQueryTool.getArrByQuerySql(respBO.getRunSql());