暂无图片
暂无图片
暂无图片
暂无图片
暂无图片

Druid监控还可以这样用

重口味码农 2020-06-18
1154

一、基础监控配置

依赖

  1. <!-- Druid -->

  2. <dependency>

  3. <groupId>com.alibaba</groupId>

  4. <artifactId>druid-spring-boot-starter</artifactId>

  5. <version>1.1.10</version>

  6. </dependency>

1、纯配置文件方式

普通的单数据源项目中,通过在配置文件中配置的方式即可实现监控(但特殊的需求如SpringAOP还是需要通过部分编码)

  1. spring:

  2. datasource:

  3. type: com.alibaba.druid.pool.DruidDataSource

  4. druid:

  5. filters: stat,wall,slf4j # 配置监控统计拦截的filters,去掉后监控界面sql无法统计,'wall'用于防火墙

  6. max-active: 20 #最大连接池数量 maxIdle已经不再使用

  7. initial-size: 5 #初始化时建立物理连接的个数

  8. max-wait: 60000

  9. min-idle: 5 #最小连接池数量

  10. time-between-eviction-runs-millis: 60000 #既作为检测的间隔时间又作为testWhileIdle执行的依据

  11. min-evictable-idle-time-millis: 300000 #销毁线程时检测当前连接的最后活动时间和当前时间差大于该值时,关闭当前连接

  12. validation-query: select 'x' #用来检测连接是否有效的sql

  13. #申请连接的时候检测,如果空闲时间大于timeBetweenEvictionRunsMillis,执行validationQuery检测连接是否有效。

  14. test-while-idle: true

  15. test-on-borrow: false #申请连接时会执行validationQuery检测连接是否有效,开启会降低性能,Druid中默认为false

  16. test-on-return: false #归还连接时会执行validationQuery检测连接是否有效,开启会降低性能,Druid中默认为false

  17. pool-prepared-statements: false # 是否缓存preparedStatement,也就是PSCache 官方建议MySQL下建议关闭

  18. max-open-prepared-statements: 20

  19. max-pool-prepared-statement-per-connection-size: 20 #当值大于0时poolPreparedStatements会自动修改为true

  20. # 通过connectProperties属性来打开mergeSql功能;慢SQL记录(配置慢SQL的定义时间)

  21. connection-properties: druid.stat.mergeSql=true;druid.stat.slowSqlMillis=5000

  22. use-global-data-source-stat: true # 合并多个DruidDataSource的监控数据


  23. # 设置监控配置

  24. web-stat-filter:

  25. enabled: true

  26. url-pattern: /*

  27. exclusions: "*.js,*.gif,*.jpg,*.png,*.css,*.ico,/druid/*"

  28. session-stat-enable: true

  29. session-stat-max-count: 100

  30. #设置视图拦截,访问druid监控页的账号和密码,默认没有

  31. stat-view-servlet:

  32. enabled: true

  33. url-pattern: /druid/*

  34. reset-enable: true

  35. login-username: admin

  36. login-password: admin

2、通过配置类配置方式

在某些特殊情况下,如系统中有多数据源、动态数据源等情况,Spring无法直接读取配置文件,我们会采用Config类的方式进行配置,这样比较灵活。

1)配置文件,同上,供配置类读取

  1. spring:

  2. datasource:

  3. type: com.alibaba.druid.pool.DruidDataSource

  4. druid:

  5. filters: stat,wall,slf4j # 配置监控统计拦截的filters,去掉后监控界面sql无法统计,'wall'用于防火墙

  6. max-active: 20 #最大连接池数量 maxIdle已经不再使用

  7. initial-size: 5 #初始化时建立物理连接的个数

  8. max-wait: 60000

  9. min-idle: 5 #最小连接池数量

  10. time-between-eviction-runs-millis: 60000 #既作为检测的间隔时间又作为testWhileIdle执行的依据

  11. min-evictable-idle-time-millis: 300000 #销毁线程时检测当前连接的最后活动时间和当前时间差大于该值时,关闭当前连接

  12. validation-query: select 'x' #用来检测连接是否有效的sql

  13. #申请连接的时候检测,如果空闲时间大于timeBetweenEvictionRunsMillis,执行validationQuery检测连接是否有效。

  14. test-while-idle: true

  15. test-on-borrow: false #申请连接时会执行validationQuery检测连接是否有效,开启会降低性能,Druid中默认为false

  16. test-on-return: false #归还连接时会执行validationQuery检测连接是否有效,开启会降低性能,Druid中默认为false

  17. pool-prepared-statements: false # 是否缓存preparedStatement,也就是PSCache 官方建议MySQL下建议关闭

  18. max-open-prepared-statements: 20

  19. max-pool-prepared-statement-per-connection-size: 20 #当值大于0时poolPreparedStatements会自动修改为true

  20. # 通过connectProperties属性来打开mergeSql功能;慢SQL记录(配置慢SQL的定义时间)

  21. connection-properties: druid.stat.mergeSql=true;druid.stat.slowSqlMillis=5000

  22. use-global-data-source-stat: true # 合并多个DruidDataSource的监控数据


  23. # 设置监控配置

  24. web-stat-filter:

  25. enabled: true

  26. url-pattern: /*

  27. exclusions: "*.js,*.gif,*.jpg,*.png,*.css,*.ico,/druid/*"

  28. session-stat-enable: true

  29. session-stat-max-count: 100

  30. #设置视图拦截,访问druid监控页的账号和密码,默认没有

  31. stat-view-servlet:

  32. enabled: true

  33. url-pattern: /druid/*

  34. reset-enable: true

  35. login-username: admin

  36. login-password: admin

2)配置信息类DruidDataSourceProperties

负责读入配置文件中的信息

  1. import org.springframework.boot.context.properties.ConfigurationProperties;

  2. import org.springframework.context.annotation.Configuration;


  3. /**

  4. * Druid配置信息类

  5. *

  6. * @author

  7. * @date

  8. */

  9. @ConfigurationProperties(prefix = "spring.datasource.druid")

  10. public class DruidDataSourceProperties {


  11. private String driverClassName;

  12. private String url;

  13. private String username;

  14. private String password;


  15. private int initialSize;

  16. private int minIdle;

  17. private int maxActive = 100;

  18. private long maxWait;

  19. private long timeBetweenEvictionRunsMillis;

  20. private long minEvictableIdleTimeMillis;

  21. private String validationQuery;

  22. private boolean testWhileIdle;

  23. private boolean testOnBorrow;

  24. private boolean testOnReturn;

  25. private boolean poolPreparedStatements;

  26. private int maxPoolPreparedStatementPerConnectionSize;


  27. private String filters;


  28. public int getInitialSize() {

  29. return initialSize;

  30. }


  31. public void setInitialSize(int initialSize) {

  32. this.initialSize = initialSize;

  33. }


  34. public int getMinIdle() {

  35. return minIdle;

  36. }


  37. public void setMinIdle(int minIdle) {

  38. this.minIdle = minIdle;

  39. }


  40. public int getMaxActive() {

  41. return maxActive;

  42. }


  43. public void setMaxActive(int maxActive) {

  44. this.maxActive = maxActive;

  45. }


  46. public long getMaxWait() {

  47. return maxWait;

  48. }


  49. public void setMaxWait(long maxWait) {

  50. this.maxWait = maxWait;

  51. }


  52. public long getTimeBetweenEvictionRunsMillis() {

  53. return timeBetweenEvictionRunsMillis;

  54. }


  55. public void setTimeBetweenEvictionRunsMillis(long timeBetweenEvictionRunsMillis) {

  56. this.timeBetweenEvictionRunsMillis = timeBetweenEvictionRunsMillis;

  57. }


  58. public long getMinEvictableIdleTimeMillis() {

  59. return minEvictableIdleTimeMillis;

  60. }


  61. public void setMinEvictableIdleTimeMillis(long minEvictableIdleTimeMillis) {

  62. this.minEvictableIdleTimeMillis = minEvictableIdleTimeMillis;

  63. }


  64. public String getValidationQuery() {

  65. return validationQuery;

  66. }


  67. public void setValidationQuery(String validationQuery) {

  68. this.validationQuery = validationQuery;

  69. }


  70. public boolean isTestWhileIdle() {

  71. return testWhileIdle;

  72. }


  73. public void setTestWhileIdle(boolean testWhileIdle) {

  74. this.testWhileIdle = testWhileIdle;

  75. }


  76. public boolean isTestOnBorrow() {

  77. return testOnBorrow;

  78. }


  79. public void setTestOnBorrow(boolean testOnBorrow) {

  80. this.testOnBorrow = testOnBorrow;

  81. }


  82. public boolean isTestOnReturn() {

  83. return testOnReturn;

  84. }


  85. public void setTestOnReturn(boolean testOnReturn) {

  86. this.testOnReturn = testOnReturn;

  87. }


  88. public boolean isPoolPreparedStatements() {

  89. return poolPreparedStatements;

  90. }


  91. public void setPoolPreparedStatements(boolean poolPreparedStatements) {

  92. this.poolPreparedStatements = poolPreparedStatements;

  93. }


  94. public int getMaxPoolPreparedStatementPerConnectionSize() {

  95. return maxPoolPreparedStatementPerConnectionSize;

  96. }


  97. public void setMaxPoolPreparedStatementPerConnectionSize(int maxPoolPreparedStatementPerConnectionSize) {

  98. this.maxPoolPreparedStatementPerConnectionSize = maxPoolPreparedStatementPerConnectionSize;

  99. }


  100. public String getFilters() {

  101. return filters;

  102. }


  103. public void setFilters(String filters) {

  104. this.filters = filters;

  105. }


  106. public String getDriverClassName() {

  107. return driverClassName;

  108. }


  109. public void setDriverClassName(String driverClassName) {

  110. this.driverClassName = driverClassName;

  111. }


  112. public String getUrl() {

  113. return url;

  114. }


  115. public void setUrl(String url) {

  116. this.url = url;

  117. }


  118. public String getUsername() {

  119. return username;

  120. }


  121. public void setUsername(String username) {

  122. this.username = username;

  123. }


  124. public String getPassword() {

  125. return password;

  126. }


  127. public void setPassword(String password) {

  128. this.password = password;

  129. }


  130. }

3)配置DataSource

类中引入刚才的配置信息类

  1. @Configuration

  2. @EnableConfigurationProperties({DruidDataSourceProperties.class})

  3. public class DataSourceConfig{


  4. @Autowired

  5. private DruidDataSourceProperties druidProperties;

  6. }

手动配置相关信息

  1. private void setDruidDataSourceConfig(DruidDataSource druidDataSource) {

  2. druidDataSource.setDriverClassName(druidProperties.getDriverClassName());

  3. druidDataSource.setUrl(druidProperties.getUrl());

  4. druidDataSource.setUsername(druidProperties.getUsername());

  5. druidDataSource.setPassword(druidProperties.getPassword());

  6. druidDataSource.setConnectionInitSqls(new ArrayList<String>(){{add(initSqls);}}); // initSqls为自定义的连接初始化SQL(需自行定义),不需要可去掉此行

  7. // 初始化大小,最小,最大

  8. druidDataSource.setInitialSize(druidProperties.getInitialSize());

  9. druidDataSource.setMinIdle(druidProperties.getMinIdle());

  10. druidDataSource.setMaxActive(druidProperties.getMaxActive());

  11. // 配置获取连接等待超时的时间

  12. druidDataSource.setMaxWait(druidProperties.getMaxWait());

  13. // 配置间隔多久才进行一次检测,检测需要关闭的空闲连接,单位是毫秒

  14. druidDataSource.setTimeBetweenEvictionRunsMillis(druidProperties.getTimeBetweenEvictionRunsMillis());

  15. // 配置一个连接在池中最小生存的时间,单位是毫秒

  16. druidDataSource.setMinEvictableIdleTimeMillis(druidProperties.getMinEvictableIdleTimeMillis());

  17. druidDataSource.setTestWhileIdle(druidProperties.isTestWhileIdle());

  18. druidDataSource.setTestOnBorrow(druidProperties.isTestOnBorrow());

  19. druidDataSource.setTestOnReturn(druidProperties.isTestOnReturn());

  20. // 打开PSCache,并且指定每个连接上PSCache的大小

  21. druidDataSource.setPoolPreparedStatements(druidProperties.isPoolPreparedStatements());

  22. druidDataSource.setMaxPoolPreparedStatementPerConnectionSize(druidProperties.getMaxPoolPreparedStatementPerConnectionSize());

  23. druidDataSource.setUseGlobalDataSourceStat(false);


  24. // 配置慢SQL信息,注意druid.timeBetweenLogStatsMillis为多久记录日志并清空一次信息,与setUseGlobalDataSourceStat互斥

  25. Properties properties = new Properties();

  26. properties.setProperty("druid.stat.mergeSql", "true");

  27. properties.setProperty("druid.stat.slowSqlMillis", "5000");

  28. properties.setProperty("druid.timeBetweenLogStatsMillis", "100000");

  29. druidDataSource.setConnectProperties(properties);


  30. try {

  31. druidDataSource.setFilters(druidProperties.getFilters());

  32. druidDataSource.init();

  33. } catch (SQLException e) {

  34. e.printStackTrace();

  35. }

  36. }

一般情况下,我们采用纯配置文件的方式使用即可。

配置完成后,启动访问http://localhost:8080/druid/index.html

如果配置了访问账号密码,需要登录后查看相关信息

二、扩展

1、Spring类的AOP监控

在以上信息配置完成后,有一项还无法查看,就是Spring监控,是因为Spring监控需要对方法做AOP拦截,需要额外配置。这个功能非常强大并且实用,以下是配置方法

新建druid-bean.xml,放入resource文件夹中

  1. <?xml version="1.0" encoding="UTF-8"?>

  2. <beans xmlns="http://www.springframework.org/schema/beans"

  3. xmlns:aop="http://www.springframework.org/schema/aop" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"

  4. xsi:schemaLocation="

  5. http://www.springframework.org/schema/beans

  6. http://www.springframework.org/schema/beans/spring-beans.xsd

  7. http://www.springframework.org/schema/aop

  8. http://www.springframework.org/schema/aop/spring-aop.xsd">


  9. <!-- 配置_Druid和Spring关联监控配置 -->

  10. <bean id="druid-stat-interceptor"

  11. class="com.alibaba.druid.support.spring.stat.DruidStatInterceptor"></bean>


  12. <!-- 方法名正则匹配拦截配置 -->

  13. <bean id="druid-stat-pointcut" class="org.springframework.aop.support.JdkRegexpMethodPointcut"

  14. scope="prototype">

  15. <property name="patterns">

  16. <list>

  17. <value>com.sogou.test.*.service.*</value>

  18. </list>

  19. </property>

  20. </bean>


  21. <aop:config proxy-target-class="true">

  22. <aop:advisor advice-ref="druid-stat-interceptor"

  23. pointcut-ref="druid-stat-pointcut" />

  24. </aop:config>


  25. </beans>

其中patterns处配置你需要切的位置,比如dao或service层,会对你配置方法进行监控

引入配置

  1. @SpringBootApplication

  2. @ImportResource(locations = { "classpath:druid-bean.xml" })

  3. public class Application {


  4. public static void main(String[] args) {

  5. TimeZone.setDefault(TimeZone.getTimeZone("Asia/Shanghai"));

  6. SpringApplication.run(Application.class, args);

  7. }


  8. }

配置完成后,即可在Spring监控中查看到对应方法的sql执行信息,可以方便地查看到哪个方法的sql执行有异常情况。

2、日志数据持久化

druid监控的数据,都是存储在缓存中,当应用重启或重新发布时数据会清空,页面上也有两个重置按钮,其中记录日志并重置会将当前日志打印。

当配置了druid.timeBetweenLogStatsMillis参数时,会每隔一段时间记录日志并重置统计信息,会将连接数、SQL信息都打印到日志中,但这样有个缺点是会将这段时间的SQL也打印出来,没有必要,可以通过自定义StatLogger的方式来自定义输出格式。

  1. public class DruidStatLogger extends DruidDataSourceStatLoggerAdapter implements DruidDataSourceStatLogger {


  2. private static final Log LOG = LogFactory.getLog(DruidDataSourceStatLoggerImpl.class);


  3. private Log logger = LOG;


  4. public DruidStatLogger() {

  5. this.configFromProperties(System.getProperties());

  6. }


  7. public boolean isLogEnable() {

  8. return logger.isInfoEnabled();

  9. }


  10. public void log(String value) {

  11. logger.info(value);

  12. }


  13. @Override

  14. public void log(DruidDataSourceStatValue druidDataSourceStatValue) {

  15. if (!isLogEnable()) {

  16. return;

  17. }

  18. Map<String, Object> map = new LinkedHashMap<>();


  19. map.put("dbType", druidDataSourceStatValue.getDbType());

  20. map.put("name", druidDataSourceStatValue.getName());

  21. map.put("activeCount", druidDataSourceStatValue.getActiveCount());


  22. if (druidDataSourceStatValue.getActivePeak() > 0) {

  23. map.put("activePeak", druidDataSourceStatValue.getActivePeak());

  24. map.put("activePeakTime", druidDataSourceStatValue.getActivePeakTime());

  25. }

  26. map.put("poolingCount", druidDataSourceStatValue.getPoolingCount());

  27. if (druidDataSourceStatValue.getPoolingPeak() > 0) {

  28. map.put("poolingPeak", druidDataSourceStatValue.getPoolingPeak());

  29. map.put("poolingPeakTime", druidDataSourceStatValue.getPoolingPeakTime());

  30. }

  31. map.put("connectCount", druidDataSourceStatValue.getConnectCount());

  32. map.put("closeCount", druidDataSourceStatValue.getCloseCount());


  33. if (druidDataSourceStatValue.getWaitThreadCount() > 0) {

  34. map.put("waitThreadCount", druidDataSourceStatValue.getWaitThreadCount());

  35. }


  36. if (druidDataSourceStatValue.getNotEmptyWaitCount() > 0) {

  37. map.put("notEmptyWaitCount", druidDataSourceStatValue.getNotEmptyWaitCount());

  38. }


  39. if (druidDataSourceStatValue.getNotEmptyWaitMillis() > 0) {

  40. map.put("notEmptyWaitMillis", druidDataSourceStatValue.getNotEmptyWaitMillis());

  41. }


  42. if (druidDataSourceStatValue.getLogicConnectErrorCount() > 0) {

  43. map.put("logicConnectErrorCount", druidDataSourceStatValue.getLogicConnectErrorCount());

  44. }


  45. if (druidDataSourceStatValue.getPhysicalConnectCount() > 0) {

  46. map.put("physicalConnectCount", druidDataSourceStatValue.getPhysicalConnectCount());

  47. }


  48. if (druidDataSourceStatValue.getPhysicalCloseCount() > 0) {

  49. map.put("physicalCloseCount", druidDataSourceStatValue.getPhysicalCloseCount());

  50. }


  51. if (druidDataSourceStatValue.getPhysicalConnectErrorCount() > 0) {

  52. map.put("physicalConnectErrorCount", druidDataSourceStatValue.getPhysicalConnectErrorCount());

  53. }


  54. if (druidDataSourceStatValue.getExecuteCount() > 0) {

  55. map.put("executeCount", druidDataSourceStatValue.getExecuteCount());

  56. }


  57. if (druidDataSourceStatValue.getErrorCount() > 0) {

  58. map.put("errorCount", druidDataSourceStatValue.getErrorCount());

  59. }


  60. if (druidDataSourceStatValue.getCommitCount() > 0) {

  61. map.put("commitCount", druidDataSourceStatValue.getCommitCount());

  62. }


  63. if (druidDataSourceStatValue.getRollbackCount() > 0) {

  64. map.put("rollbackCount", druidDataSourceStatValue.getRollbackCount());

  65. }


  66. if (druidDataSourceStatValue.getPstmtCacheHitCount() > 0) {

  67. map.put("pstmtCacheHitCount", druidDataSourceStatValue.getPstmtCacheHitCount());

  68. }


  69. if (druidDataSourceStatValue.getPstmtCacheMissCount() > 0) {

  70. map.put("pstmtCacheMissCount", druidDataSourceStatValue.getPstmtCacheMissCount());

  71. }


  72. if (druidDataSourceStatValue.getStartTransactionCount() > 0) {

  73. map.put("startTransactionCount", druidDataSourceStatValue.getStartTransactionCount());

  74. map.put("transactionHistogram", (druidDataSourceStatValue.getTransactionHistogram()));

  75. }


  76. if (druidDataSourceStatValue.getConnectCount() > 0) {

  77. map.put("connectionHoldTimeHistogram", (druidDataSourceStatValue.getConnectionHoldTimeHistogram()));

  78. }


  79. if (druidDataSourceStatValue.getClobOpenCount() > 0) {

  80. map.put("clobOpenCount", druidDataSourceStatValue.getClobOpenCount());

  81. }


  82. if (druidDataSourceStatValue.getBlobOpenCount() > 0) {

  83. map.put("blobOpenCount", druidDataSourceStatValue.getBlobOpenCount());

  84. }


  85. if (druidDataSourceStatValue.getSqlSkipCount() > 0) {

  86. map.put("sqlSkipCount", druidDataSourceStatValue.getSqlSkipCount());

  87. }

  88. if (!isLogEnable()) {

  89. return;

  90. }

  91. //Map<String, Object> map = new LinkedHashMap<String, Object>();

  92. myArrayList<Map<String, Object>> sqlList = new myArrayList<Map<String, Object>>();


  93. //有执行sql的话 只显示sql语句

  94. if (druidDataSourceStatValue.getSqlList().size() > 0) {

  95. for (JdbcSqlStatValue sqlStat : druidDataSourceStatValue.getSqlList()) {

  96. Map<String, Object> sqlStatMap = new LinkedHashMap<String, Object>();

  97. sqlStatMap.put("执行了sql语句:", sqlStat.getSql());

  98. sqlList.add(sqlStatMap);

  99. }

  100. String text = sqlList.toString();

  101. log(text);

  102. }

  103. //没有sql语句的话就显示最上面那些

  104. else{

  105. String text = map.toString();

  106. log(text);

  107. }

  108. }


  109. @Override

  110. public void configFromProperties(Properties properties) {

  111. String property = properties.getProperty("druid.stat.loggerName");

  112. if (property != null && property.length() > 0) {

  113. setLoggerName(property);

  114. }

  115. }


  116. @Override

  117. public void setLogger(Log log) {

  118. if (log == null) {

  119. throw new IllegalArgumentException("logger can not be null");

  120. }

  121. this.logger = log;

  122. }


  123. @Override

  124. public void setLoggerName(String loggerName) {

  125. logger = LogFactory.getLog(loggerName);

  126. }


  127. class myArrayList<E> extends ArrayList<E> {

  128. @Override

  129. public String toString() {

  130. Iterator<E> it = iterator();

  131. if (!it.hasNext()) {

  132. return "";

  133. }


  134. StringBuilder sb = new StringBuilder();

  135. for (; ; ) {

  136. E e = it.next();

  137. sb.append(e == this ? "(this Collection)" : e);

  138. if (!it.hasNext()) {

  139. return sb.toString();

  140. }

  141. sb.append(',').append(' ');

  142. }

  143. }

  144. }

  145. }

DataSource配置

  1. @Configuration

  2. public class DruidConfig {


  3. @ConfigurationProperties(prefix="spring.datasource")

  4. @Bean

  5. public DataSource druidDataSource()

  6. {

  7. DruidDataSource dataSource = new DruidDataSource();

  8. dataSource.setStatLogger(new DruidStatLogger());

  9. return dataSource;

  10. }


  11. @Bean

  12. public ServletRegistrationBean statViewServlet()

  13. {

  14. ServletRegistrationBean<StatViewServlet> bean=new ServletRegistrationBean<>(new StatViewServlet(),"/druid/*");


  15. //后台需要有人登录监控

  16. HashMap<String,String> initParameters=new HashMap<>();


  17. //增加配置

  18. initParameters.put("loginUsername","admin");

  19. initParameters.put("loginPassword","123456");


  20. //允许谁能访问

  21. initParameters.put("allow",""); // 为空则允许所有地址访问


  22. bean.setInitParameters(initParameters);//设置初始化参数

  23. return bean;

  24. }


  25. @Bean

  26. public FilterRegistrationBean webStatFilter()

  27. {

  28. FilterRegistrationBean bean=new FilterRegistrationBean();

  29. bean.setFilter(new WebStatFilter());


  30. HashMap<String,String> initParameters=new HashMap<>();


  31. initParameters.put("exclusions","*.js,*.css,/druid/*");


  32. bean.setInitParameters(initParameters);


  33. return bean;

  34. }


  35. }

可以设置每隔24小时记录日志并清空当前数据。

你会用了吗?


文章转载自重口味码农,如果涉嫌侵权,请发送邮件至:contact@modb.pro进行举报,并提供相关证据,一经查实,墨天轮将立刻删除相关内容。

评论