Spring Boot AOP-based multi-data-source switching (Hive, Impala, Oracle)
Published: 2019-05-26


pom file

<dependencies>
    <dependency>
        <groupId>com.baomidou</groupId>
        <artifactId>mybatis-plus-boot-starter</artifactId>
        <version>3.3.0</version>
    </dependency>
    <dependency>
        <groupId>org.springframework.data</groupId>
        <artifactId>spring-data-hadoop</artifactId>
        <version>2.4.0.RELEASE</version>
    </dependency>
    <dependency>
        <groupId>org.apache.phoenix</groupId>
        <artifactId>phoenix-core</artifactId>
        <version>4.14.0-cdh5.14.2</version>
    </dependency>
    <dependency>
        <groupId>org.apache.hbase</groupId>
        <artifactId>hbase-client</artifactId>
        <version>1.4.4</version>
    </dependency>
    <dependency>
        <groupId>org.apache.hive</groupId>
        <artifactId>hive-jdbc</artifactId>
        <version>2.1.1</version>
    </dependency>
    <dependency>
        <groupId>org.projectlombok</groupId>
        <artifactId>lombok</artifactId>
        <version>1.16.10</version>
    </dependency>
    <dependency>
        <groupId>org.springframework.boot</groupId>
        <artifactId>spring-boot-configuration-processor</artifactId>
        <optional>true</optional>
    </dependency>
    <dependency>
        <groupId>org.springframework.boot</groupId>
        <artifactId>spring-boot-starter-test</artifactId>
        <scope>test</scope>
    </dependency>
    <dependency>
        <groupId>org.springframework.boot</groupId>
        <artifactId>spring-boot-starter-aop</artifactId>
    </dependency>
    <dependency>
        <groupId>com.oracle</groupId>
        <artifactId>ojdbc6</artifactId>
        <version>11.2.0.2.0</version>
    </dependency>
    <dependency>
        <groupId>com.cloudera</groupId>
        <artifactId>ImpalaJDBC41</artifactId>
        <version>2.5.41</version>
    </dependency>
</dependencies>
<build>
    <resources>
        <resource>
            <directory>src/main/resources</directory>
        </resource>
        <resource>
            <directory>src/main/java</directory>
            <includes>
                <include>**/*.xml</include>
            </includes>
            <filtering>true</filtering>
        </resource>
    </resources>
</build>
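Note that com.oracle:ojdbc6 and com.cloudera:ImpalaJDBC41 are not published to Maven Central. As the properties file below notes, the Impala JDBC jar has to be packaged and installed into your own repository first, for example with `mvn install:install-file -Dfile=ImpalaJDBC41.jar -DgroupId=com.cloudera -DartifactId=ImpalaJDBC41 -Dversion=2.5.41 -Dpackaging=jar` (the file name here is just whatever your downloaded driver jar is called).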

jdbc.properties

# master
#spring.datasource.master.driver-class-name=oracle.jdbc.driver.OracleDriver
spring.datasource.master.jdbc-url=jdbc:oracle:thin:@172.22.7.127:1521:orcl
spring.datasource.master.username=orcl
spring.datasource.master.password=orcl

# slave
#spring.datasource.slave.driver-class-name=oracle.jdbc.driver.OracleDriver
spring.datasource.slave.jdbc-url=jdbc:oracle:thin:@172.22.7.127:1521:orcl
spring.datasource.slave.username=orcl
spring.datasource.slave.password=orcl

# impala (the driver jar is packaged/installed manually)
spring.datasource.impala.driver-class-name=com.cloudera.impala.jdbc41.Driver
spring.datasource.impala.jdbc-url=jdbc:impala://172.25.6.99:21050/impala;AuthMech=3;
spring.datasource.impala.username=impala
spring.datasource.impala.password=impala

## Hikari pool settings ------ keep the prefix identical to the driver settings above; the rest of the key
## must match a HikariConfig property name, because the values are bound through its getters/setters.
## Writing spring.datasource.impala.hikari.minimum-idle=5 adds an extra "hikari" segment, so the value is never bound.
## Minimum number of idle connections
spring.datasource.impala.minimum-idle=5
## Maximum time a connection may sit idle, default 600000 (10 minutes)
spring.datasource.impala.idle-timeout=180000
## Maximum pool size, default 10
spring.datasource.impala.maximum-pool-size=10
## Default auto-commit behaviour of connections returned from the pool, default: true
spring.datasource.impala.auto-commit=true
## Pool name
spring.datasource.impala.pool-name=MyHikariCP_Impala
## Maximum lifetime of a connection in the pool; 0 means unlimited, default 1800000 (30 minutes)
spring.datasource.impala.max-lifetime=1800000
## Connection timeout, default 30000 (30 seconds)
spring.datasource.impala.connection-timeout=30000
spring.datasource.impala.connection-test-query=SELECT 1

# hive
spring.datasource.hive.jdbc-url=jdbc:hive2://172.25.6.11:10000/hive
spring.datasource.hive.username=hive
spring.datasource.hive.password=hive
spring.datasource.hive.driver-class-name=org.apache.hive.jdbc.HiveDriver
# Pool properties can be added under the spring.datasource.hive prefix in the same way

Data source annotation

@Retention(RetentionPolicy.RUNTIME)
@Target({ElementType.METHOD, ElementType.TYPE})
public @interface DynamicRoute {

    /**
     * Name of the data source to route to
     *
     * @return data source name
     * @since 1.0.0
     */
    String value() default "";
}
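Since the target includes both METHOD and TYPE, the annotation can mark a single method or a whole class. A hypothetical class-level sketch (the service name is made up; the "hive" key is defined in the next section):

// Every public method of this service is routed to the data source registered under "hive".
@DynamicRoute("hive")
@Service
public class HiveQueryService {
    // mapper calls issued from these methods hit the Hive pool
}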

Data sources

public class DataSourceType {

    public static final String MASTER = "master";
    public static final String SLAVE = "slave";
    public static final String IMPALA = "impala";
    public static final String HIVE = "hive";
}

public class DynamicDataSource extends AbstractRoutingDataSource {

    @Override
    protected Object determineCurrentLookupKey() {
        return DataBaseContextHolder.getDataSourceType();
    }
}

Data source switching

public class DataBaseContextHolder {

    private static final ThreadLocal<String> contextHolder = new ThreadLocal<>();

    public static void setDataSourceType(String type) {
        if (type == null) {
            throw new NullPointerException();
        }
        System.out.println("Switching data source to " + type);
        contextHolder.set(type);
    }

    public static String getDataSourceType() {
        return contextHolder.get();
    }

    public static void clearDataSourceType() {
        contextHolder.remove();
    }
}
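For reference, the holder can also be driven by hand; this is exactly what the AOP advice further below does around each annotated method. A minimal sketch (the mapper call is hypothetical):

// Route the current thread to the Impala pool, run the query, then clean up.
DataBaseContextHolder.setDataSourceType(DataSourceType.IMPALA);
try {
    // any MyBatis call issued on this thread now goes to the "impala" data source,
    // e.g. someMapper.selectSomething();  (hypothetical mapper)
} finally {
    // always clear the ThreadLocal: servlet container threads are reused,
    // so a leftover value would leak into later requests
    DataBaseContextHolder.clearDataSourceType();
}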

Data source configuration

@Configuration
@PropertySource("classpath:config/jdbc.properties")
@MapperScan("com.xiaobu.mapper")
public class DataSourceConfig {

    /**
     * Custom String TypeHandler used by the project; its class is not shown in the
     * original post, so it is assumed here to be an injected bean.
     */
    @Autowired
    private TypeHandler<String> stringTypeCustomizeHandler;

    @Bean(name = "datasourceMaster")
    @Primary
    @ConfigurationProperties(prefix = "spring.datasource.master")
    public DataSource datasourceMaster() {
        return DataSourceBuilder.create().build();
    }

    @Bean(name = "datasourceSlave")
    @ConfigurationProperties(prefix = "spring.datasource.slave")
    public DataSource datasourceSlave() {
        return DataSourceBuilder.create().build();
    }

    /**
     * Any of the three ways of building the DataSource works here, because the
     * default pool type is HikariDataSource anyway.
     */
    @Bean(DataSourceConstants.DS_KEY_IMPALA)
    @ConfigurationProperties(prefix = "spring.datasource.impala")
    public DataSource impalaDataSource() {
        return DataSourceBuilder.create().type(HikariDataSource.class).build();
        // return DataSourceBuilder.create().build();
        // return new HikariDataSource();
    }

    @Bean(name = "hive")
    @ConfigurationProperties(prefix = "spring.datasource.hive")
    public DataSource hiveDataSource() {
        return DataSourceBuilder.create().build();
    }

    @Bean
    public DynamicDataSource dynamicDataSource(@Qualifier("datasourceMaster") DataSource ds1,
                                               @Qualifier("datasourceSlave") DataSource ds2,
                                               @Qualifier("impala") DataSource impalaDataSource,
                                               @Qualifier("hive") DataSource hiveDataSource) {
        Map<Object, Object> targetDataSource = new HashMap<>(16);
        targetDataSource.put(DataSourceType.MASTER, ds1);
        targetDataSource.put(DataSourceType.SLAVE, ds2);
        targetDataSource.put(DataSourceType.IMPALA, impalaDataSource);
        targetDataSource.put(DataSourceType.HIVE, hiveDataSource);
        DynamicDataSource dataSource = new DynamicDataSource();
        dataSource.setTargetDataSources(targetDataSource);
        dataSource.setDefaultTargetDataSource(ds1);
        return dataSource;
    }

    @Bean
    public SqlSessionFactory sqlSessionFactory(DynamicDataSource dynamicDataSource) throws Exception {
        MybatisSqlSessionFactoryBean sqlSessionFactory = new MybatisSqlSessionFactoryBean();
        ResourcePatternResolver resolver = new PathMatchingResourcePatternResolver();
        // MyBatis works against the routing data source, not against any single pool
        sqlSessionFactory.setDataSource(dynamicDataSource);
        MybatisConfiguration configuration = new MybatisConfiguration();
        configuration.setJdbcTypeForNull(JdbcType.NULL);
        configuration.setMapUnderscoreToCamelCase(true);
        configuration.setCacheEnabled(false);
        sqlSessionFactory.setConfiguration(configuration);
        TypeHandler[] typeHandlers = new TypeHandler[]{stringTypeCustomizeHandler};
        sqlSessionFactory.setTypeHandlers(typeHandlers);
        sqlSessionFactory.setMapperLocations(resolver.getResources("classpath*:com/xiaobu/mapper/xml/*.xml"));
        return sqlSessionFactory.getObject();
    }

    @Bean
    public DataSourceTransactionManager transactionManager(DynamicDataSource dynamicDataSource) {
        return new DataSourceTransactionManager(dynamicDataSource);
    }
}
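The impala bean name above comes from DataSourceConstants.DS_KEY_IMPALA, a class the post never shows. A minimal sketch that is consistent with the @Qualifier("impala") injection and the DataSourceType keys (an assumption, not the author's original class):

public class DataSourceConstants {

    /** Bean name / routing key of the Impala data source; assumed to equal "impala". */
    public static final String DS_KEY_IMPALA = "impala";
}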

AOP aspect

package com.xiaobu.aspect;

import com.xiaobu.dynamic.DataBaseContextHolder;
import com.xiaobu.dynamic.DynamicRoute;
import org.aspectj.lang.ProceedingJoinPoint;
import org.aspectj.lang.Signature;
import org.aspectj.lang.annotation.Around;
import org.aspectj.lang.annotation.Aspect;
import org.aspectj.lang.annotation.Pointcut;
import org.aspectj.lang.reflect.MethodSignature;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.context.annotation.EnableAspectJAutoProxy;
import org.springframework.stereotype.Component;

import java.lang.reflect.Method;

/**
 * @author xiaobu
 * @version JDK1.8.0_171
 * @date 2021/4/27 10:19
 * @description Switches the data source for classes or methods annotated with @DynamicRoute
 */
@Aspect
@Component
@EnableAspectJAutoProxy
public class DataSourceAspect {

    /**
     * Logger instance
     *
     * @since 1.0.0
     */
    private static final Logger LOG = LoggerFactory.getLogger(DataSourceAspect.class);

    /**
     * Intercepts the classes and methods carrying the annotation
     */
    @Pointcut("@within(com.xiaobu.dynamic.DynamicRoute) || @annotation(com.xiaobu.dynamic.DynamicRoute)")
    public void pointCut() {
        //
    }

    /**
     * Around advice: set the data source before the call, clear it afterwards
     *
     * @param point join point
     * @return result
     * @throws Throwable if any
     */
    @Around("pointCut()")
    public Object around(ProceedingJoinPoint point) throws Throwable {
        try {
            // resolve the intercepted method and read its routing annotation
            Method method = getCurrentMethod(point);
            DynamicRoute route = method.getAnnotation(DynamicRoute.class);
            if (route == null) {
                // only @within matched, so fall back to the class-level annotation
                route = point.getTarget().getClass().getAnnotation(DynamicRoute.class);
            }
            // switch the data source for the current thread
            DataBaseContextHolder.setDataSourceType(route.value());
            return point.proceed();
        } finally {
            LOG.info("Clearing data source type");
            DataBaseContextHolder.clearDataSourceType();
        }
    }

    /**
     * Resolves the concrete method behind the join point
     *
     * @param point join point
     * @return method
     */
    private Method getCurrentMethod(ProceedingJoinPoint point) {
        try {
            Signature sig = point.getSignature();
            MethodSignature msig = (MethodSignature) sig;
            Object target = point.getTarget();
            return target.getClass().getMethod(msig.getName(), msig.getParameterTypes());
        } catch (NoSuchMethodException e) {
            throw new RuntimeException(e);
        }
    }
}
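With everything wired up, switching is just a matter of annotating a service method. A minimal usage sketch; ReportService and ReportMapper are hypothetical names, not classes from the original project:

package com.xiaobu.service;

import com.xiaobu.dynamic.DataSourceType;
import com.xiaobu.dynamic.DynamicRoute;
import com.xiaobu.mapper.ReportMapper; // hypothetical MyBatis mapper under the scanned package
import org.springframework.stereotype.Service;

import java.util.List;
import java.util.Map;

@Service
public class ReportService {

    private final ReportMapper reportMapper;

    public ReportService(ReportMapper reportMapper) {
        this.reportMapper = reportMapper;
    }

    /** Runs against the Hive data source; the aspect clears the setting afterwards. */
    @DynamicRoute(DataSourceType.HIVE)
    public List<Map<String, Object>> listFromHive() {
        return reportMapper.selectReports();
    }

    /** No annotation: falls back to the default target, i.e. the master Oracle pool. */
    public List<Map<String, Object>> listFromMaster() {
        return reportMapper.selectReports();
    }
}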

Reprinted from: http://tkgai.baihongyu.com/
