phoenix整合springboot採用druid作爲連接池,運行一段時間後報Connection is null or closed.

問題描述

項目高訪問量的時候,基本上線不到三天就會一直輸出下面的錯誤

org.springframework.jdbc.UncategorizedSQLException: StatementCallback; uncategorized SQLException for SQL [select TO_CHAR(captured_time, 'yyyy-MM-dd HH:mm:ss'),camera_id from ST_PEOPLE_FEATURE_WIFI where captured_time>=CAST(TO_DATE('2019-07-31 17:51:55') AS DATE) and captured_time<=CAST(TO_DATE('2019-09-29 17:51:55') AS DATE) and cluster_id=13137900 order by captured_time desc limit 20]; SQL state [null]; error code [0]; java.lang.IllegalArgumentException: Connection is null or closed.; nested exception is java.sql.SQLException: java.lang.IllegalArgumentException: Connection is null or closed.
at org.springframework.jdbc.support.AbstractFallbackSQLExceptionTranslator.translate(AbstractFallbackSQLExceptionTranslator.java:89)
at org.springframework.jdbc.support.AbstractFallbackSQLExceptionTranslator.translate(AbstractFallbackSQLExceptionTranslator.java:81)
at org.springframework.jdbc.support.AbstractFallbackSQLExceptionTranslator.translate(AbstractFallbackSQLExceptionTranslator.java:81)
at org.springframework.jdbc.core.JdbcTemplate.translateException(JdbcTemplate.java:1444)
at org.springframework.jdbc.core.JdbcTemplate.execute(JdbcTemplate.java:388)
at org.springframework.jdbc.core.JdbcTemplate.query(JdbcTemplate.java:452)
at org.springframework.jdbc.core.JdbcTemplate.query(JdbcTemplate.java:462)
at org.springframework.jdbc.core.JdbcTemplate.queryForList(JdbcTemplate.java:490)
at com.suncreate.query.jdbc.service.impl.GeneralSQLServiceImpl.getLists(GeneralSQLServiceImpl.java:40)
at com.suncreate.service.impl.GeneralRestPhoenixServiceImpl.ResultForQuery(GeneralRestPhoenixServiceImpl.java:33)
at com.suncreate.controller.PlatlogGeneralRestApi.ResultForSql(PlatlogGeneralRestApi.java:119)
at sun.reflect.GeneratedMethodAccessor69.invoke(Unknown Source)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.springframework.web.method.support.InvocableHandlerMethod.doInvoke(InvocableHandlerMethod.java:189)
at org.springframework.web.method.support.InvocableHandlerMethod.invokeForRequest(InvocableHandlerMethod.java:138)
at org.springframework.web.servlet.mvc.method.annotation.ServletInvocableHandlerMethod.invokeAndHandle(ServletInvocableHandlerMethod.java:102)
at org.springframework.web.servlet.mvc.method.annotation.RequestMappingHandlerAdapter.invokeHandlerMethod(RequestMappingHandlerAdapter.java:895)
at org.springframework.web.servlet.mvc.method.annotation.RequestMappingHandlerAdapter.handleInternal(RequestMappingHandlerAdapter.java:800)
at org.springframework.web.servlet.mvc.method.AbstractHandlerMethodAdapter.handle(AbstractHandlerMethodAdapter.java:87)
at org.springframework.web.servlet.DispatcherServlet.doDispatch(DispatcherServlet.java:1038)
at org.springframework.web.servlet.DispatcherServlet.doService(DispatcherServlet.java:942)
at org.springframework.web.servlet.FrameworkServlet.processRequest(FrameworkServlet.java:1005)
at org.springframework.web.servlet.FrameworkServlet.doGet(FrameworkServlet.java:897)
at javax.servlet.http.HttpServlet.service(HttpServlet.java:634)
at org.springframework.web.servlet.FrameworkServlet.service(FrameworkServlet.java:882)
at javax.servlet.http.HttpServlet.service(HttpServlet.java:741)
at org.apache.catalina.core.ApplicationFilterChain.internalDoFilter(ApplicationFilterChain.java:231)
at org.apache.catalina.core.ApplicationFilterChain.doFilter(ApplicationFilterChain.java:166)
at org.apache.tomcat.websocket.server.WsFilter.doFilter(WsFilter.java:53)
at org.apache.catalina.core.ApplicationFilterChain.internalDoFilter(ApplicationFilterChain.java:193)
at org.apache.catalina.core.ApplicationFilterChain.doFilter(ApplicationFilterChain.java:166)
at org.springframework.boot.actuate.web.trace.servlet.HttpTraceFilter.doFilterInternal(HttpTraceFilter.java:90)
at org.springframework.web.filter.OncePerRequestFilter.doFilter(OncePerRequestFilter.java:107)
at org.apache.catalina.core.ApplicationFilterChain.internalDoFilter(ApplicationFilterChain.java:193)
at org.apache.catalina.core.ApplicationFilterChain.doFilter(ApplicationFilterChain.java:166)
at org.springframework.web.filter.RequestContextFilter.doFilterInternal(RequestContextFilter.java:99)
at org.springframework.web.filter.OncePerRequestFilter.doFilter(OncePerRequestFilter.java:107)
at org.apache.catalina.core.ApplicationFilterChain.internalDoFilter(ApplicationFilterChain.java:193)
at org.apache.catalina.core.ApplicationFilterChain.doFilter(ApplicationFilterChain.java:166)
at org.springframework.web.filter.FormContentFilter.doFilterInternal(FormContentFilter.java:92)
at org.springframework.web.filter.OncePerRequestFilter.doFilter(OncePerRequestFilter.java:107)
at org.apache.catalina.core.ApplicationFilterChain.internalDoFilter(ApplicationFilterChain.java:193)
at org.apache.catalina.core.ApplicationFilterChain.doFilter(ApplicationFilterChain.java:166)
at org.springframework.web.filter.HiddenHttpMethodFilter.doFilterInternal(HiddenHttpMethodFilter.java:93)
at org.springframework.web.filter.OncePerRequestFilter.doFilter(OncePerRequestFilter.java:107)
at org.apache.catalina.core.ApplicationFilterChain.internalDoFilter(ApplicationFilterChain.java:193)
at org.apache.catalina.core.ApplicationFilterChain.doFilter(ApplicationFilterChain.java:166)
at org.springframework.boot.actuate.metrics.web.servlet.WebMvcMetricsFilter.filterAndRecordMetrics(WebMvcMetricsFilter.java:117)
at org.springframework.boot.actuate.metrics.web.servlet.WebMvcMetricsFilter.doFilterInternal(WebMvcMetricsFilter.java:106)
at org.springframework.web.filter.OncePerRequestFilter.doFilter(OncePerRequestFilter.java:107)
at org.apache.catalina.core.ApplicationFilterChain.internalDoFilter(ApplicationFilterChain.java:193)
at org.apache.catalina.core.ApplicationFilterChain.doFilter(ApplicationFilterChain.java:166)
at org.springframework.web.filter.CharacterEncodingFilter.doFilterInternal(CharacterEncodingFilter.java:200)
at org.springframework.web.filter.OncePerRequestFilter.doFilter(OncePerRequestFilter.java:107)
at org.apache.catalina.core.ApplicationFilterChain.internalDoFilter(ApplicationFilterChain.java:193)
at org.apache.catalina.core.ApplicationFilterChain.doFilter(ApplicationFilterChain.java:166)
at org.apache.catalina.core.StandardWrapperValve.invoke(StandardWrapperValve.java:200)
at org.apache.catalina.core.StandardContextValve.invoke(StandardContextValve.java:96)
at org.apache.catalina.authenticator.AuthenticatorBase.invoke(AuthenticatorBase.java:490)
at org.apache.catalina.core.StandardHostValve.invoke(StandardHostValve.java:139)
at org.apache.catalina.valves.ErrorReportValve.invoke(ErrorReportValve.java:92)
at org.apache.catalina.core.StandardEngineValve.invoke(StandardEngineValve.java:74)
at org.apache.catalina.connector.CoyoteAdapter.service(CoyoteAdapter.java:343)
at org.apache.coyote.http11.Http11Processor.service(Http11Processor.java:408)
at org.apache.coyote.AbstractProcessorLight.process(AbstractProcessorLight.java:66)
at org.apache.coyote.AbstractProtocol$ConnectionHandler.process(AbstractProtocol.java:834)
at org.apache.tomcat.util.net.NioEndpoint$SocketProcessor.doRun(NioEndpoint.java:1415)
at org.apache.tomcat.util.net.SocketProcessorBase.run(SocketProcessorBase.java:49)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
at org.apache.tomcat.util.threads.TaskThread$WrappingRunnable.run(TaskThread.java:61)
at java.lang.Thread.run(Thread.java:748)
Caused by: java.sql.SQLException: java.lang.IllegalArgumentException: Connection is null or closed.
at org.apache.phoenix.query.ConnectionQueryServicesImpl.metaDataCoprocessorExec(ConnectionQueryServicesImpl.java:1295)
at org.apache.phoenix.query.ConnectionQueryServicesImpl.metaDataCoprocessorExec(ConnectionQueryServicesImpl.java:1257)
at org.apache.phoenix.query.ConnectionQueryServicesImpl.getTable(ConnectionQueryServicesImpl.java:1474)
at org.apache.phoenix.schema.MetaDataClient.updateCache(MetaDataClient.java:597)
at org.apache.phoenix.schema.MetaDataClient.updateCache(MetaDataClient.java:518)
at org.apache.phoenix.compile.FromCompiler$BaseColumnResolver.createTableRef(FromCompiler.java:573)
at org.apache.phoenix.compile.FromCompiler$SingleTableColumnResolver.(FromCompiler.java:391)
at org.apache.phoenix.compile.FromCompiler.getResolverForQuery(FromCompiler.java:228)
at org.apache.phoenix.compile.FromCompiler.getResolverForQuery(FromCompiler.java:206)
at org.apache.phoenix.jdbc.PhoenixStatement$ExecutableSelectStatement.compilePlan(PhoenixStatement.java:468)
at org.apache.phoenix.jdbc.PhoenixStatement$ExecutableSelectStatement.compilePlan(PhoenixStatement.java:442)
at org.apache.phoenix.jdbc.PhoenixStatement$1.call(PhoenixStatement.java:300)
at org.apache.phoenix.jdbc.PhoenixStatement$1.call(PhoenixStatement.java:290)
at org.apache.phoenix.call.CallRunner.run(CallRunner.java:53)
at org.apache.phoenix.jdbc.PhoenixStatement.executeQuery(PhoenixStatement.java:289)
at org.apache.phoenix.jdbc.PhoenixStatement.executeQuery(PhoenixStatement.java:283)
at org.apache.phoenix.jdbc.PhoenixStatement.executeQuery(PhoenixStatement.java:1706)
at com.alibaba.druid.filter.FilterChainImpl.statement_executeQuery(FilterChainImpl.java:2363)
at com.alibaba.druid.filter.FilterAdapter.statement_executeQuery(FilterAdapter.java:2481)
at com.alibaba.druid.filter.FilterEventAdapter.statement_executeQuery(FilterEventAdapter.java:302)
at com.alibaba.druid.filter.FilterChainImpl.statement_executeQuery(FilterChainImpl.java:2360)
at com.alibaba.druid.proxy.jdbc.StatementProxyImpl.executeQuery(StatementProxyImpl.java:211)
at com.alibaba.druid.pool.DruidPooledStatement.executeQuery(DruidPooledStatement.java:140)
at org.springframework.jdbc.core.JdbcTemplate$1QueryStatementCallback.doInStatement(JdbcTemplate.java:439)
at org.springframework.jdbc.core.JdbcTemplate.execute(JdbcTemplate.java:376)
... 68 common frames omitted
Caused by: java.lang.IllegalArgumentException: Connection is null or closed.
at org.apache.hadoop.hbase.client.HTable.(HTable.java:307)
at org.apache.hadoop.hbase.client.ConnectionManager$HConnectionImplementation.getTable(ConnectionManager.java:768)
at org.apache.hadoop.hbase.client.ConnectionManager$HConnectionImplementation.getTable(ConnectionManager.java:763)
at org.apache.phoenix.query.HTableFactory$HTableFactoryImpl.getTable(HTableFactory.java:51)
at org.apache.phoenix.query.ConnectionQueryServicesImpl.getTable(ConnectionQueryServicesImpl.java:428)
at org.apache.phoenix.query.ConnectionQueryServicesImpl.metaDataCoprocessorExec(ConnectionQueryServicesImpl.java:1274)
... 92 common frames omitted
at org.apache.catalina.valves.ErrorReportValve.invoke(ErrorReportValve.java:92)
at org.apache.catalina.core.StandardEngineValve.invoke(StandardEngineValve.java:74)
at org.apache.catalina.connector.CoyoteAdapter.service(CoyoteAdapter.java:343)
at org.apache.coyote.http11.Http11Processor.service(Http11Processor.java:408)
at org.apache.coyote.AbstractProcessorLight.process(AbstractProcessorLight.java:66)
at org.apache.coyote.AbstractProtocol$ConnectionHandler.process(AbstractProtocol.java:834)
at org.apache.tomcat.util.net.NioEndpoint$SocketProcessor.doRun(NioEndpoint.java:1415)
at org.apache.tomcat.util.net.SocketProcessorBase.run(SocketProcessorBase.java:49)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
at org.apache.tomcat.util.threads.TaskThread$WrappingRunnable.run(TaskThread.java:61)
at java.lang.Thread.run(Thread.java:748)
Caused by: java.sql.SQLException: java.lang.IllegalArgumentException: Connection is null or closed.
at org.apache.phoenix.query.ConnectionQueryServicesImpl.metaDataCoprocessorExec(ConnectionQueryServicesImpl.java:1295)
at org.apache.phoenix.query.ConnectionQueryServicesImpl.metaDataCoprocessorExec(ConnectionQueryServicesImpl.java:1257)
at org.apache.phoenix.query.ConnectionQueryServicesImpl.getTable(ConnectionQueryServicesImpl.java:1474)
at org.apache.phoenix.schema.MetaDataClient.updateCache(MetaDataClient.java:597)
at org.apache.phoenix.schema.MetaDataClient.updateCache(MetaDataClient.java:518)
at org.apache.phoenix.compile.FromCompiler$BaseColumnResolver.createTableRef(FromCompiler.java:573)
at org.apache.phoenix.compile.FromCompiler$SingleTableColumnResolver.(FromCompiler.java:391)
at org.apache.phoenix.compile.FromCompiler.getResolverForQuery(FromCompiler.java:228)
at org.apache.phoenix.compile.FromCompiler.getResolverForQuery(FromCompiler.java:206)
at org.apache.phoenix.jdbc.PhoenixStatement$ExecutableSelectStatement.compilePlan(PhoenixStatement.java:468)
at org.apache.phoenix.jdbc.PhoenixStatement$ExecutableSelectStatement.compilePlan(PhoenixStatement.java:442)
at org.apache.phoenix.jdbc.PhoenixStatement$1.call(PhoenixStatement.java:300)
at org.apache.phoenix.jdbc.PhoenixStatement$1.call(PhoenixStatement.java:290)
at org.apache.phoenix.call.CallRunner.run(CallRunner.java:53)
at org.apache.phoenix.jdbc.PhoenixStatement.executeQuery(PhoenixStatement.java:289)
at org.apache.phoenix.jdbc.PhoenixStatement.executeQuery(PhoenixStatement.java:283)
at org.apache.phoenix.jdbc.PhoenixStatement.executeQuery(PhoenixStatement.java:1706)
at com.alibaba.druid.filter.FilterChainImpl.statement_executeQuery(FilterChainImpl.java:2363)
at com.alibaba.druid.filter.FilterAdapter.statement_executeQuery(FilterAdapter.java:2481)
at com.alibaba.druid.filter.FilterEventAdapter.statement_executeQuery(FilterEventAdapter.java:302)
at com.alibaba.druid.filter.FilterChainImpl.statement_executeQuery(FilterChainImpl.java:2360)
at com.alibaba.druid.proxy.jdbc.StatementProxyImpl.executeQuery(StatementProxyImpl.java:211)
at com.alibaba.druid.pool.DruidPooledStatement.executeQuery(DruidPooledStatement.java:140)
at org.springframework.jdbc.core.JdbcTemplate$1QueryStatementCallback.doInStatement(JdbcTemplate.java:439)
at org.springframework.jdbc.core.JdbcTemplate.execute(JdbcTemplate.java:376)
... 68 common frames omitted
Caused by: java.lang.IllegalArgumentException: Connection is null or closed.
at org.apache.hadoop.hbase.client.HTable.(HTable.java:307)
at org.apache.hadoop.hbase.client.ConnectionManager$HConnectionImplementation.getTable(ConnectionManager.java:786)
at org.apache.hadoop.hbase.client.ConnectionManager$HConnectionImplementation.getTable(ConnectionManager.java:768)
at org.apache.hadoop.hbase.client.ConnectionManager$HConnectionImplementation.getTable(ConnectionManager.java:763)
at org.apache.phoenix.query.HTableFactory$HTableFactoryImpl.getTable(HTableFactory.java:51)
at org.apache.phoenix.query.ConnectionQueryServicesImpl.getTable(ConnectionQueryServicesImpl.java:428)
at org.apache.phoenix.query.ConnectionQueryServicesImpl.metaDataCoprocessorExec(ConnectionQueryServicesImpl.java:1274)
... 92 common frames omitted

問題排查

源碼排查原因

數據庫連接示意圖如下:

                                                              

druid連接池內部有很多phoenix連接,phoenix去查詢hbase的時候,通過hconnection訪問hbase,一個進程內部只有一個hconnection實例。

上述追溯源碼

HTable報錯的方法

@InterfaceAudience.Private
  public HTable(TableName tableName, final ClusterConnection connection,
      final ConnectionConfiguration tableConfig,
      final RpcRetryingCallerFactory rpcCallerFactory,
      final RpcControllerFactory rpcControllerFactory,
      final ExecutorService pool) throws IOException {
    if (connection == null || connection.isClosed()) {
      throw new IllegalArgumentException("Connection is null or closed.");
    }
 .........................省略 

   this.finishSetup();
  }

ConnectionManager$HConnectionImplementation報錯代碼    

@Override
    public HTableInterface getTable(TableName tableName, ExecutorService pool) throws IOException {
      if (managed) {
        throw new NeedUnmanagedConnectionException();
      }
      return new HTable(tableName, this, connectionConfig, rpcCallerFactory, rpcControllerFactory, pool);
    }

通過上述源碼發現Connection is null or closed錯誤,是由於上述圖第三層ConnectionManager維護的hbase連接關閉,或者爲null導致。

ConnectionManager問題排查

這個時候,通常開始上jmap了,內存查看工具,查看HConnection的實例

[root@stk04 ~]# jmap -histo:live 219470 | grep HConnection
1264:             2            320  org.apache.hadoop.hbase.client.ConnectionManager$HConnectionImplementation
3063:             2             48  org.apache.hadoop.hbase.client.ConnectionManager$HConnectionImplementation$MasterServiceState
6851:             1             16  org.apache.phoenix.query.HConnectionFactory$HConnectionFactoryImpl
 

通過命令居然發現了兩個HConnection實例,這不符合"單實例"的預期。

想查看這兩個實例的具體內容,有兩種方式,這裏採用jvisualvm查看具體實例的值。

上圖中的左邊的#1,#2代表這兩個實例,查看第一個發現closed已經被關閉了,所以纔會有第二個實例。

但是爲什麼關閉的連接沒有被垃圾回收????,查看一下實例的引用,如下圖:

原來還被phoenixConnection引用,沒有被釋放啊。那druid爲什麼沒有刪除這個不可用的phoenix連接呢???

druid如何剔除異常連接

druid有剔除異常連接的機制:https://www.bookstack.cn/read/Druid/452caf873b3a56bf.md

但是默認的實現並沒有支持phoenix數據庫,druid的監控頁面也不支持phoenix,看來druid還沒有開始支持phoenix啊。

自己實現一下

    /**
     * 當網絡斷開或者數據庫服務器Crash時,連接池裏面會存在“不可用連接”,連接池需要
     * 一種機制剔除這些“不可用連接”。在Druid和JBoss連接池中,剔除“不可用連接”的機
     * 制稱爲ExceptionSorter,實現的原理是根據異常類型/Code/Reason/Message來識
     * 別“不可用連接”。沒有類似ExceptionSorter的連接池,在數據庫重啓或者網絡中斷之
     * 後,不能恢復工作,所以ExceptionSorter是連接池是否穩定的重要標誌。
     * @author mgguan   2019-10-17 15:00:00
     */
    class PhoenixExceptionSorter implements ExceptionSorter {

        @Override
        public boolean isExceptionFatal(SQLException e) {
            if (e.getMessage().contains("Connection is null or closed")) {
                LOG.error("剔除phoenix不可用的連接", e);
                return true;
            }
            return false;
        }

        @Override
        public void configFromProperties(Properties properties) {

        }
    }

這樣,項目就穩定運行了,如下圖是上線一段時間後日志的採集效果:

說明druid的異常處理機制生效了。

完整的集成代碼

package com.suncreate.query.jdbc.config;

import java.sql.SQLException;
import java.util.Properties;

import javax.annotation.PostConstruct;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.stereotype.Component;

import com.alibaba.druid.pool.DruidDataSource;
import com.alibaba.druid.pool.ExceptionSorter;
import com.alibaba.druid.util.StringUtils;
import com.suncreate.query.jdbc.service.impl.GeneralSQLServiceImpl;
import com.suncreate.utils.FusionInsightLogin;
@Component
public class PhoenixDataSourceConfig {

    private static final Logger LOG = LoggerFactory.getLogger(PhoenixDataSourceConfig.class);

    @Autowired
    private FusionInsightLogin login;

    @Value("${spring.datasource.phoenix.driver-class-name:org.apache.phoenix.jdbc.PhoenixDriver}")
    private String driverClassName;
    @Value("${spring.datasource.phoenix.password:passwd}")
    private String password;
    @Value("${spring.datasource.phoenix.username:zx_test}")
    private String username;

    @Value("${spring.datasource.phoenix.url:phoenix}")
    private String url;

    @PostConstruct
    public void init() {
        if (url.equals("phoenix") || StringUtils.isEmpty(url)) {
            url = "jdbc:phoenix:" + login.getConf().get("hbase.zookeeper.quorum");
        }

        if (StringUtils.isEmpty(driverClassName)) {
            driverClassName = "org.apache.phoenix.jdbc.PhoenixDriver";
        }
        LOG.info("目前使用的jdbc-phoenix的driver:" + driverClassName);
        LOG.info("目前使用的jdbc-phoenix的url:" + url);
        LOG.info("目前使用的jdbc-phoenix的用戶名:" + username);
        LOG.info("目前使用的jdbc-phoenix的密碼:" + password);
    }

    @Bean(name = "phoenixDruidDataSource")
    public DruidDataSource druidDataSource() {
        DruidDataSource dds = new DruidDataSource();
        try {
            dds.setDriverClassName(driverClassName);
            dds.setUrl(url);
            dds.setUsername(username);
            dds.setPassword(password);
            dds.setInitialSize(10);
            dds.setMaxActive(60);
            dds.setMinIdle(10);
            dds.setMaxWait(6000);
            dds.setQueryTimeout(60);
            dds.setPoolPreparedStatements(true);
            dds.setMaxOpenPreparedStatements(50);
            dds.setValidationQuery("SELECT 1");
            dds.setTestOnBorrow(false);
            dds.setTestOnReturn(false);
            dds.setTestWhileIdle(true);
            dds.setTimeBetweenEvictionRunsMillis(60000);
            dds.setMinEvictableIdleTimeMillis(25200000);
            dds.setRemoveAbandoned(true);
            dds.setRemoveAbandonedTimeout(24 * 3600);
            dds.setLogAbandoned(false);
            dds.setFilters("stat");
            // 設置剔除異常連接機制
            dds.setExceptionSorter(new PhoenixExceptionSorter());
            
        } catch (SQLException e) {
            e.printStackTrace();
        }
        return dds;
    }

    @Bean(name = "phoenixJdbcTemplate")
    public JdbcTemplate phoenixJdbcTemplate(@Qualifier("phoenixDruidDataSource") DruidDataSource dataSource) {
        return new JdbcTemplate(dataSource);
    }
    

    /**
     * 當網絡斷開或者數據庫服務器Crash時,連接池裏面會存在“不可用連接”,連接池需要
     * 一種機制剔除這些“不可用連接”。在Druid和JBoss連接池中,剔除“不可用連接”的機
     * 制稱爲ExceptionSorter,實現的原理是根據異常類型/Code/Reason/Message來識
     * 別“不可用連接”。沒有類似ExceptionSorter的連接池,在數據庫重啓或者網絡中斷之
     * 後,不能恢復工作,所以ExceptionSorter是連接池是否穩定的重要標誌。
     * @author mgguan   2019-10-17 15:00:00
     */
    class PhoenixExceptionSorter implements ExceptionSorter {

        @Override
        public boolean isExceptionFatal(SQLException e) {
            if (e.getMessage().contains("Connection is null or closed")) {
                LOG.error("剔除phoenix不可用的連接", e);
                return true;
            }
            return false;
        }

        @Override
        public void configFromProperties(Properties properties) {

        }
    }
}
 

 

 

發表評論
所有評論
還沒有人評論,想成為第一個評論的人麼? 請在上方評論欄輸入並且點擊發布.
相關文章