用了多年的老SSH框架,想改造成读写分离架构,一个写库,其他为只读库,需要注意的是,只读库有一定延迟,根据事务大小不同,最小延迟200ms,如果保存完立即刷新列表,则有可能显示操作完之前的数据,建议操作成功后跳到中转页面,再返回列表读取数据,基本不会有延迟问题。
框架所有service基本都有事务注解,更新操作@Transactional,查询操作@Transactional(readOnly=true),所有基于此注解就可以实现读写分离,写事务使用写库,只读事务使用只读库,事务嵌套情况,如果使用了写库,则后面无论如何一直使用写库,保证数据一致性。
实现原理是使用AOP读取切面方法的注解(DynamicReadWriteAspect),使用ThreadLocal线程变量保存使用的是读库还是写库(DbContextHolder),使用自定义数据源来路由到对应的实际数据源(DynamicReadWriteDataSource)
例如,类注解为readOnly=true,方法不写注解会继承只读事务,方法写了@Transactional会使用写事务
@Transactional(readOnly=true)
public class AbcServiceImpl implements IAbcService{
@Transactional
public void save(Abc abc) {
abcDao.save(abc);
}
@Transactional
public void delete(java.lang.String ...id) {
for(java.lang.String i :id) {
abcDao.delete(i);
}
}
public Abc queryById(java.lang.String id) {
return abcDao.get(id);
}
public Page queryByPage(Page<Abc> page) {
return abcDao.findPage(page, filters);
}}
spring配置文件(其中数据源与AOP切面相关的bean为本次读写分离改造新增的部分):
<?xml version="1.0" encoding="UTF-8"?>
<beans xmlns="http://www.springframework.org/schema/beans"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xmlns:jee="http://www.springframework.org/schema/jee"
xmlns:tx="http://www.springframework.org/schema/tx"
xmlns:aop="http://www.springframework.org/schema/aop"
xmlns:context="http://www.springframework.org/schema/context"
xmlns:ehcache="http://ehcache-spring-annotations.googlecode.com/svn/schema/ehcache-spring"
xsi:schemaLocation="http://www.springframework.org/schema/beans
http://www.springframework.org/schema/beans/spring-beans-2.5.xsd
http://www.springframework.org/schema/tx
http://www.springframework.org/schema/tx/spring-tx-2.5.xsd
http://www.springframework.org/schema/jee
http://www.springframework.org/schema/jee/spring-jee-2.5.xsd
http://www.springframework.org/schema/context
http://www.springframework.org/schema/context/spring-context-2.5.xsd
http://ehcache-spring-annotations.googlecode.com/svn/schema/ehcache-spring
http://ehcache-spring-annotations.googlecode.com/svn/schema/ehcache-spring/ehcache-spring-1.1.xsd
http://www.springframework.org/schema/aop http://www.springframework.org/schema/aop/spring-aop-3.0.xsd"
><description>Spring公共配置文件</description>
<!-- 允许使用注解的方式注入对象 -->
<context:annotation-config />
<!-- 搜索bean功能 resource="classpath:applicationContext-action.xml" -->
<!-- 完成权限缓存
<import resource="applicationContext-init.xml"/>-->
<context:component-scan base-package="com.Development.Platform" >
</context:component-scan>
<context:component-scan base-package="javacommon.base"/>
<!--default-lazy-init="true" 定义受环境影响易变的变量 -->
<bean
class="org.springframework.beans.factory.config.PropertyPlaceholderConfigurer">
<property name="systemPropertiesModeName"
value="SYSTEM_PROPERTIES_MODE_OVERRIDE" />
<property name="ignoreResourceNotFound" value="true" />
<property name="locations">
<list>
<!-- 标准配置 -->
<value>classpath:application.properties</value>
<!-- 本地开发环境配置 -->
<!-- <value>classpath:application.local.properties</value> -->
<!-- 服务器生产环境配置 -->
<!-- <value>file:/var/myapp/application.server.properties</value> -->
</list>
</property>
</bean>
<!-- 写数据源配置,使用应用内的Druid数据库连接池 -->
<bean id="dataSourceWrite" class="com.alibaba.druid.pool.DruidDataSource" init-method="init" destroy-method="close">
<!-- Connection Info -->
<!-- 驱动名称 -->
<property name="driverClassName" value="${jdbc.driver}" />
<!-- JDBC连接串 -->
<property name="url" value="${jdbc.url}" />
<!-- 数据库用户名称 -->
<property name="username" value="${jdbc.username}" />
<!-- 数据库密码 -->
<property name="password" value="${jdbc.password}" />.....................连接池配置略......................
<!-- druid连接池多个数据库合并统计 -->
<property name="filters" value="mergeStat" />
</bean>
<!-- 读数据源1配置,使用应用内的Druid数据库连接池 -->
<bean id="dataSourceRead1" class="com.alibaba.druid.pool.DruidDataSource" init-method="init" destroy-method="close">
<!-- Connection Info -->
<!-- 驱动名称 -->
<property name="driverClassName" value="${jdbc.driver}" />
<!-- JDBC连接串 -->
<property name="url" value="${jdbc.url}" />
<!-- 数据库用户名称 -->
<property name="username" value="${jdbc.username}" />
<!-- 数据库密码 -->
<property name="password" value="${jdbc.password}" />...................连接池配置略......................
<!-- druid连接池多个数据库合并统计 -->
<property name="filters" value="mergeStat" />
</bean>
<!-- 读数据源2配置,使用应用内的Druid数据库连接池 -->
<bean id="dataSourceRead2" class="com.alibaba.druid.pool.DruidDataSource" init-method="init" destroy-method="close">
<!-- Connection Info -->
<!-- 驱动名称 -->
<property name="driverClassName" value="${jdbc.driver}" />
<!-- JDBC连接串 -->
<property name="url" value="${jdbc.url}" />
<!-- 数据库用户名称 -->
<property name="username" value="${jdbc.username}" />
<!-- 数据库密码 -->
<property name="password" value="${jdbc.password}" />...................连接池配置略......................
<!-- 连接池扩展 -->
<!-- druid连接池多个数据库合并统计 -->
<property name="filters" value="mergeStat" />
</bean>
<!-- 动态数据源配置,读写分离,使用了自定义数据源类-->
<bean id="dataSource" class="javacommon.base.datasource.DynamicReadWriteDataSource">
<property name="writeDataSource" ref="dataSourceWrite" />
<property name="readDataSources">
<list>
<ref bean="dataSourceRead1" />
<ref bean="dataSourceRead2" />
</list>
</property>
<property name="readDataSourcePollPattern" value="1" /><!--只读数据选择方式:1轮询方式、0随机 -->
<property name="defaultTargetDataSource" ref="dataSourceWrite" />
</bean>
<bean id="dataSourceAspect" class="javacommon.base.datasource.DynamicReadWriteAspect"/>
<aop:config>
<aop:pointcut id="txPointcut" expression="(@within(org.springframework.transaction.annotation.Transactional)
or @target(org.springframework.transaction.annotation.Transactional))"/>
<!-- 将切面应用到自定义的切面处理器上,-9999保证该切面优先级最高执行 -->
<aop:aspect ref="dataSourceAspect" order="-9999">
<aop:around method="processAround" pointcut-ref="txPointcut"/>
</aop:aspect>
</aop:config>
<!-- Hibernate配置 -->
<bean id="sessionFactory" class="org.springframework.orm.hibernate3.annotation.AnnotationSessionFactoryBean">
<property name="dataSource" ref="dataSource" />
<property name="namingStrategy">
<bean class="org.hibernate.cfg.ImprovedNamingStrategy" />
</property>
<property name="hibernateProperties">
<props>
<prop key="hibernate.dialect">${hibernate.dialect}</prop>
<prop key="hibernate.show_sql">${hibernate.show_sql}</prop>
<prop key="hibernate.format_sql">${hibernate.format_sql}</prop>
<!-- <prop key="hibernate.hbm2ddl.auto">update</prop> -->
</props>
</property>
<property name="packagesToScan" value="com.Development.Platform.*" />
</bean><!-- 事务管理器配置,单数据源事务 key="hibernate.hbm2ddl.auto">${hibernate.hbm2ddl_auto} -->
<bean id="transactionManager"
class="org.springframework.orm.hibernate3.HibernateTransactionManager">
<property name="sessionFactory" ref="sessionFactory" />
</bean>
<!-- 注解的方式配置事物 -->
<tx:annotation-driven transaction-manager="transactionManager" />
</beans>
使用到的几个类
public enum DbType {
WRITE, READ,
}
public class DbContextHolder {
private static final ThreadLocal<DbType> contextHolder = new ThreadLocal<DbType>();
public static void setDbType(DbType dbType) {
if (dbType == null) {
throw new NullPointerException();
}
contextHolder.set(dbType);
}public static DbType getDbType() {
return (DbType) contextHolder.get();
}public static void clearDbType() {
contextHolder.remove();
}
}
import java.lang.reflect.Method;
import java.util.Date;import javax.servlet.ServletContext;
import org.aspectj.lang.ProceedingJoinPoint;
import org.aspectj.lang.reflect.MethodSignature;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.transaction.interceptor.TransactionAttribute;
import org.springframework.transaction.interceptor.TransactionAttributeSource;
import org.springframework.web.context.ContextLoaderListener;
import org.springframework.web.context.WebApplicationContext;
import org.springframework.web.context.support.WebApplicationContextUtils;import com.googlecode.ehcache.annotations.Cacheable;
public class DynamicReadWriteAspect {
private static final Logger logger = LoggerFactory.getLogger(DynamicReadWriteAspect.class);
public Object processAround(ProceedingJoinPoint proceedingJoinPoint) throws Throwable {
try {
ServletContext servletContext = ContextLoaderListener.getCurrentWebApplicationContext().getServletContext();
WebApplicationContext applicationContext = WebApplicationContextUtils.getWebApplicationContext(servletContext);
TransactionAttributeSource transactionAttributeSource = (TransactionAttributeSource)applicationContext.getBean(TransactionAttributeSource.class);
Class<?> targetClass = proceedingJoinPoint.getTarget().getClass(); //当前调用的类
Class<?>[] params = ((MethodSignature) proceedingJoinPoint.getSignature()).getParameterTypes(); //当前参数
Method method = targetClass.getMethod(proceedingJoinPoint.getSignature().getName(), params); //当前调用的方法
Cacheable cacheable = method.getAnnotation(Cacheable.class);
if(DbContextHolder.getDbType() == null) { //如果数据源类型不为空,表示此线程之前已经读取了数据,为保证数据一致性,不切换数据源
//查找当前调用的方法是否有Transaction注解信息,,包括继承类上的注解
TransactionAttribute attrs = transactionAttributeSource.getTransactionAttribute(method, targetClass);
//如果readonly为true并且未启用缓存,则使用只读数据源,
if(attrs != null && attrs.isReadOnly() && cacheable==null) { //
DbContextHolder.setDbType(DbType.READ);
}
}
Date now = new Date();
logger.info("切面[" + now.getTime() + "]调用开始:分配给" + DbContextHolder.getDbType() + "库 " + method);
Object result = proceedingJoinPoint.proceed();
logger.info("切面[" + now.getTime() + "]调用结束,耗时"+ (new Date().getTime()-now.getTime())+" ms");
return result;
} finally {
DbContextHolder.clearDbType(); //结束后将数据源类型设为空
// logger.info("restore database connection");
}
}
}
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;import org.springframework.jdbc.datasource.lookup.AbstractRoutingDataSource;
public class DynamicReadWriteDataSource extends AbstractRoutingDataSource {
private Object writeDataSource; // 写数据源
private List<Object> readDataSources; // 多个读数据源
private int readDataSourceSize; // 读数据源个数
private int readDataSourcePollPattern = 1; // 获取读数据源方式,0:随机,1:轮询
private AtomicLong counter = new AtomicLong(0);
private static final Long MAX_POOL = Long.MAX_VALUE;
private final Lock lock = new ReentrantLock();
private final Random random = new Random(new Date().getTime());
@Override
public void afterPropertiesSet() {
if (this.writeDataSource == null) {
throw new IllegalArgumentException("Property 'writeDataSource' is required");
}
setDefaultTargetDataSource(writeDataSource);
Map<Object, Object> targetDataSources = new HashMap<Object, Object>();
targetDataSources.put(DbType.WRITE.name(), writeDataSource);
if (this.readDataSources == null) {
readDataSourceSize = 0;
} else {
for (int i = 0; i < readDataSources.size(); i++) {
targetDataSources.put(DbType.READ.name() + i, readDataSources.get(i));
}
readDataSourceSize = readDataSources.size();
}
setTargetDataSources(targetDataSources);
super.afterPropertiesSet();
}
@Override
protected Object determineCurrentLookupKey() {DbType dynamicDataSourceGlobal = DbContextHolder.getDbType();
if (dynamicDataSourceGlobal == null || dynamicDataSourceGlobal == DbType.WRITE || readDataSourceSize <= 0) {
return DbType.WRITE.name();
}
int index = 1;if (readDataSourcePollPattern == 1) {
// 轮询方式
long currValue = counter.incrementAndGet();
if ((currValue + 1) >= MAX_POOL) {
try {
lock.lock();
if ((currValue + 1) >= MAX_POOL) {
counter.set(0); //计数器超过Long最大值,重新设为0
}
} finally {
lock.unlock();
}
}
index = (int) (currValue % readDataSourceSize);
} else {
// 随机方式
// index = ThreadLocalRandom.current().nextInt(0, readDataSourceSize); //jdk7+支持,性能更好
index = random.nextInt(readDataSourceSize);
}
return dynamicDataSourceGlobal.name() + index;
}public void setWriteDataSource(Object writeDataSource) {
this.writeDataSource = writeDataSource;
}public void setReadDataSources(List<Object> readDataSources) {
this.readDataSources = readDataSources;
}
public void setReadDataSourcePollPattern(int readDataSourcePollPattern) {
this.readDataSourcePollPattern = readDataSourcePollPattern;
}}
参考文章
https://www.cnblogs.com/weixiaole/p/5230367.html
https://www.cnblogs.com/ouyanxia/p/7568908.html