Horizontal Table Sharding with ShardingSphere

Preface:

        The requirement: one table has grown too large (for example an order table) and queries are slow, so it needs to be split. Based on the day column of the base table (order.day = '2023-05-06', '2023-05-07', ...), the data is sharded by month into [order_202305, order_202306, ...], with each monthly table holding that month's data (roughly 30 days).

        Alternatively, you can shard at day granularity (e.g. order_20230506, order_20230507, ...), with each daily table holding a single day's data.
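
To make the mapping concrete: a day value maps to its monthly shard suffix by stripping the dashes and keeping the first six characters. A minimal sketch (the order table name is just the example above):


public class ShardSuffixExample {
    public static void main(String[] args) {
        // '2023-05-06' -> '202305' -> order_202305
        String day = "2023-05-06";
        String suffix = day.replace("-", "").substring(0, 6);
        System.out.println("order_" + suffix); // order_202305
    }
}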


1. Add the dependencies


<dependency>
    <groupId>org.apache.shardingsphere</groupId>
    <artifactId>sharding-jdbc-spring-boot-starter</artifactId>
    <version>4.1.1</version>
</dependency>

<!-- To avoid conflicts with sharding-jdbc, do not use the druid starter; use the plain druid artifact instead -->
<dependency>
    <groupId>com.alibaba</groupId>
    <artifactId>druid</artifactId>
    <version>1.2.4</version>
</dependency>

2. Configuration file


spring:
  shardingsphere:
    datasource:
      names: main
      main:
        type: com.alibaba.druid.pool.DruidDataSource
        # Database connection
        url: jdbc:postgresql://127.0.0.1:5555/postgres?autoReconnect=true&useUnicode=true&characterEncoding=utf-8&zeroDateTimeBehavior=convertToNull&createDatabaseIfNotExist=true&useSSL=false&serverTimezone=CTT
        username: *****
        password: *****
        driver-class-name: org.postgresql.Driver
      druid:
        # Initial number of connections
        initialSize: 5
        # Minimum number of idle connections
        minIdle: 10
        # Maximum number of active connections
        maxActive: 20
        # Maximum wait time (ms) when acquiring a connection
        maxWait: 60000
        # Interval (ms) between eviction runs that close idle connections
        timeBetweenEvictionRunsMillis: 60000
        # Minimum time (ms) a connection may stay idle in the pool
        minEvictableIdleTimeMillis: 300000
        # Maximum time (ms) a connection may stay idle in the pool
        maxEvictableIdleTimeMillis: 900000
        # Query used to validate connections
        validationQuery: SELECT 1
        testWhileIdle: true
        testOnBorrow: true
        testOnReturn: false
        filters: stat
    sharding:
      tables:
        qt_gen_plot: # logical table name
          actual-data-nodes: main.qt_gen_plot_20240219
          # sharding strategy
          table-strategy:
            standard:
              sharding-column: upload_time # sharding column
              precise-algorithm-class-name: com.qtw.commons.sharding.jdbc.algorithm.DatePreciseShardingTableAlgorithm
              range-algorithm-class-name: com.qtw.commons.sharding.jdbc.algorithm.DateRangeShardingTableAlgorithm

        # qt_gen_plot_week: # logical table name
        #   actual-data-nodes: main.qt_gen_plot_week_202310
        #   # sharding strategy
        #   table-strategy:
        #     standard:
        #       sharding-column: upload_time # sharding column
        #       precise-algorithm-class-name: com.qtw.commons.sharding.jdbc.algorithm.DatePreciseShardingTableAlgorithm
        #       range-algorithm-class-name: com.qtw.commons.sharding.jdbc.algorithm.DateRangeShardingTableAlgorithm

        qt_third_labour_cell: # logical table name
          actual-data-nodes: main.qt_third_labour_cell_202308
          # sharding strategy
          table-strategy:
            standard:
              sharding-column: day # sharding column
              precise-algorithm-class-name: com.qtw.commons.sharding.jdbc.algorithm.DateWorkNanotubePreciseShardingTableAlgorithm
              range-algorithm-class-name: com.qtw.commons.sharding.jdbc.algorithm.DateRangeShardingTableAlgorithm




# Custom settings (not part of ShardingSphere): switch and schedule for the automatic sharding refresh used by the code in step 4
operation-data:
  sharding:
    dynamic-refresh: true
    refresh-times:
      -
        table-name: qt_gen_plot
        refresh-time: "23:00:00"
        t-days: 1
        refresh-type: day
      # -
      #   table-name: qt_gen_plot_week
      #   refresh-time: "14:00:00"
      #   # Sunday to Saturday, 1-7
      #   t-days: 3
      #   refresh-type: week
      -
        table-name: qt_third_labour_cell
        refresh-time: "23:00:00"
        t-days: 1
        refresh-type: dayToMonth
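
The custom operation-data.sharding block is bound by two plain configuration classes. They are not shown in the post, so the sketch below is an assumption, with field names inferred from the getters used by DynamicShardingServiceImpl in step 4:


package com.qtw.config.conf;

import com.qtw.entity.dto.OperationDataShardingConfigRefreshTime;
import lombok.Data;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.stereotype.Component;

import java.util.List;

/**
 * Binds the custom "operation-data.sharding" block above (sketch, not the original class).
 */
@Data
@Component
@ConfigurationProperties(prefix = "operation-data.sharding")
public class OperationDataShardingConfig {

    /** Switch for the dynamic refresh of the sharding config. */
    private Boolean dynamicRefresh;

    /** One entry per sharded table: when and how to roll new shard tables. */
    private List<OperationDataShardingConfigRefreshTime> refreshTimes;
}

package com.qtw.entity.dto;

import lombok.Data;

/**
 * One refresh rule, e.g. table qt_gen_plot, refresh at "23:00:00", type "day" (sketch).
 */
@Data
public class OperationDataShardingConfigRefreshTime {

    /** Logical table name, e.g. qt_gen_plot. */
    private String tableName;

    /** Time of day after which the job may roll a new shard, HH:mm:ss. */
    private String refreshTime;

    /** Day offset (or weekday threshold in week mode) used when computing the end date. */
    private Integer tDays;

    /** day / week / dayToMonth. */
    private String refreshType;
}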

3. Configure the custom sharding algorithms

1. Precise matching - the code can be adapted to your actual needs


import org.apache.shardingsphere.api.sharding.standard.PreciseShardingAlgorithm;
import org.apache.shardingsphere.api.sharding.standard.PreciseShardingValue;

import java.util.Collection;

/**
 * qt_third_labour_cell - precise matching
 * Exact date match
 * <p>
 * What this class does:
 * an ordinary Mybatis-plus query against the logic table is routed here; after the value is matched,
 * the concrete shard table name is returned and the query then runs against that shard.
 * @author qtw
 */
public class DateWorkNanotubePreciseShardingTableAlgorithm implements PreciseShardingAlgorithm<String> {

    /**
     * @param collection           the candidate shard tables, from the configured start date up to the latest one, e.g. [qt_third_labour_cell_202402, qt_third_labour_cell_202401, ...]
     * @param preciseShardingValue the sharding value, e.g. (logicTableName=qt_third_labour_cell, columnName=day, value=2024-02-05)
     * @return the shard table name, e.g. qt_third_labour_cell_202402
     */
    @Override
    public String doSharding(Collection<String> collection, PreciseShardingValue<String> preciseShardingValue) {

        // Take the year-month from the day value, e.g. 2024-02-05 -> 202402
        String date = preciseShardingValue.getValue().replace("-", "");
        String yearMonth = date.substring(0, 6);

        // Find the shard table whose name ends with this year-month
        String tableName = null;
        for (String tmp : collection) {
            if (tmp.endsWith(yearMonth)) {
                tableName = tmp;
                break;
            }
        }

        // If no shard matches, fall back to the base (logic) table
        if (null == tableName) {
            String str = collection.iterator().next();
            tableName = str.substring(0, str.lastIndexOf("_"));
        }

        return tableName;
    }
}

/**
 * DatePreciseShardingTableAlgorithm
 * Exact date match (day-level shard tables)
 */
public class DatePreciseShardingTableAlgorithm implements PreciseShardingAlgorithm<String> {
    @Override
    public String doSharding(Collection<String> collection, PreciseShardingValue<String> preciseShardingValue) {

        // e.g. 2024-02-19 -> 20240219
        String date = preciseShardingValue.getValue().replace("-", "");

        String tableName = null;
        for (String tmp : collection) {
            if (tmp.endsWith(date)) {
                // The shard table's suffix matches this date
                tableName = tmp;
                break;
            }
        }

        // If no shard matches, fall back to the base (logic) table
        if (null == tableName) {
            String str = collection.iterator().next();
            tableName = str.substring(0, str.lastIndexOf("_"));
        }

        return tableName;
    }
}
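
As a quick sanity check (not part of the original post), the precise algorithm can be exercised directly. PreciseShardingValue is the ShardingSphere 4.1.1 API type the algorithms above implement against:


import com.qtw.commons.sharding.jdbc.algorithm.DateWorkNanotubePreciseShardingTableAlgorithm;
import org.apache.shardingsphere.api.sharding.standard.PreciseShardingValue;

import java.util.Arrays;
import java.util.Collection;

public class PreciseShardingDemo {
    public static void main(String[] args) {
        // Candidate shard tables, as sharding-jdbc would pass them in
        Collection<String> tables = Arrays.asList(
                "qt_third_labour_cell_202401", "qt_third_labour_cell_202402");

        // logic table, sharding column, sharding value (a day inside February 2024)
        PreciseShardingValue<String> value =
                new PreciseShardingValue<>("qt_third_labour_cell", "day", "2024-02-05");

        String target = new DateWorkNanotubePreciseShardingTableAlgorithm().doSharding(tables, value);
        System.out.println(target); // qt_third_labour_cell_202402
    }
}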

2. Range matching


import com.google.common.collect.Range;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.shardingsphere.api.sharding.standard.RangeShardingAlgorithm;
import org.apache.shardingsphere.api.sharding.standard.RangeShardingValue;

import java.util.ArrayList;
import java.util.Collection;
import java.util.List;

/**
 * DateRangeShardingTableAlgorithm
 * Range matching on the date column (BETWEEN / >= / <= queries)
 */
public class DateRangeShardingTableAlgorithm implements RangeShardingAlgorithm<String> {
    @Override
    public Collection<String> doSharding(Collection<String> collection, RangeShardingValue<String> rangeShardingValue) {

        // Narrow the candidate tables by date so unnecessary shards are not queried
        Range<String> range = rangeShardingValue.getValueRange();

        String start = null;
        if (range.hasLowerBound()) {
            start = range.lowerEndpoint().replace("-", "");
        }
        String end = null;
        if (range.hasUpperBound()) {
            end = range.upperEndpoint().replace("-", "");
        }

        if (null == start && null == end) {
            return collection;
        }

        // Compare each table's suffix against the bounds; one-sided ranges are handled with null checks,
        // and the bound is truncated to the suffix length so month-suffixed tables (yyyyMM) still match day-level values (yyyyMMdd).
        List<String> list = new ArrayList<>();
        for (String tableName : collection) {
            String suffix = tableName.substring(tableName.lastIndexOf("_") + 1);
            if (null != start && suffix.compareTo(start.substring(0, Math.min(start.length(), suffix.length()))) < 0) {
                continue;
            }
            if (null != end && suffix.compareTo(end.substring(0, Math.min(end.length(), suffix.length()))) > 0) {
                continue;
            }
            list.add(tableName);
        }

        // If nothing matches, fall back to the base (logic) table
        if (CollectionUtils.isEmpty(list)) {
            String str = collection.iterator().next();
            list.add(str.substring(0, str.lastIndexOf("_")));
        }

        return list;
    }
}
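
A similar sketch for the range algorithm (again, not from the original post). ShardingSphere hands the bounds of a BETWEEN / >= / <= condition to the algorithm as a Guava Range:


import com.google.common.collect.Range;
import com.qtw.commons.sharding.jdbc.algorithm.DateRangeShardingTableAlgorithm;
import org.apache.shardingsphere.api.sharding.standard.RangeShardingValue;

import java.util.Arrays;
import java.util.Collection;

public class RangeShardingDemo {
    public static void main(String[] args) {
        Collection<String> tables = Arrays.asList(
                "qt_gen_plot_20240218", "qt_gen_plot_20240219", "qt_gen_plot_20240220");

        // A BETWEEN '2024-02-19' AND '2024-02-20' condition on upload_time produces a closed range
        RangeShardingValue<String> value = new RangeShardingValue<>(
                "qt_gen_plot", "upload_time", Range.closed("2024-02-19", "2024-02-20"));

        Collection<String> targets = new DateRangeShardingTableAlgorithm().doSharding(tables, value);
        System.out.println(targets); // [qt_gen_plot_20240219, qt_gen_plot_20240220]
    }
}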

4. Configure automatic shard-table creation: via a scheduled job, an API call, etc.

        Note: if you do not want to create the shard tables from code, this step can be skipped and the shard tables can be created manually with SQL. The creation SQL is embedded in the code below; PostgreSQL is used here, so the statements may differ for MySQL.

        The table-creation SQL can also be found in this earlier post:

《数据库 - 开发常用SQL语句总结 - 持续更新》(CSDN blog)


package com.qtw.service.impl;

import cn.hutool.core.date.DatePattern;
import cn.hutool.core.date.DateTime;
import cn.hutool.core.date.DateUtil;
import com.qtw.config.conf.OperationDataShardingConfig;
import com.qtw.entity.dto.OperationDataShardingConfigRefreshTime;
import com.qtw.service.IDynamicShardingService;
import com.qtw.common.util.LocalDateUtils;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.shardingsphere.core.rule.ShardingRule;
import org.apache.shardingsphere.core.rule.TableRule;
import org.apache.shardingsphere.shardingjdbc.jdbc.core.datasource.ShardingDataSource;
import org.apache.shardingsphere.underlying.common.rule.DataNode;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.data.jpa.repository.Modifying;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;

import javax.annotation.Resource;
import javax.persistence.EntityManager;
import javax.sql.DataSource;
import java.lang.reflect.Field;
import java.lang.reflect.Modifier;
import java.math.BigInteger;
import java.text.ParseException;
import java.util.*;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.stream.Collectors;

/**
 * DynamicShardingServiceImpl
 *
 */
@Slf4j
@Service
public class DynamicShardingServiceImpl implements IDynamicShardingService {
    @Resource(name = "shardingDataSource")
    private final DataSource dataSource;

    private final EntityManager entityManager;

    private final static String SUB_TABLE_SEPARATOR = "_";

    private final OperationDataShardingConfig operationDataShardingConfig;

    /**
     * Whether dynamic refresh of the operational-data sharding config is enabled (from the config file)
     */
    @Value("${operation-data.sharding.dynamic-refresh}")
    Boolean dynamicRefresh;

    public DynamicShardingServiceImpl(DataSource dataSource, EntityManager entityManager,
                                      OperationDataShardingConfig operationDataShardingConfig) {
        this.dataSource = dataSource;
        this.entityManager = entityManager;
        this.operationDataShardingConfig = operationDataShardingConfig;
    }

    /**
     * Called by xxl-job: create the shard tables, sync data into them, and refresh the sharding-jdbc config
     */
    @Override
    @Transactional(rollbackFor = Throwable.class)
    public void syncAndRefreshOperationAnalyseSharding(String tableName) throws NoSuchFieldException, IllegalAccessException {
        log.info("Dynamic refresh of actualDataNodes START, param: {}", tableName);
        if (!dynamicRefresh) {
            log.info("Dynamic refresh is disabled");
            return;
        }

        List<OperationDataShardingConfigRefreshTime> tablesToRefresh = new ArrayList<>();
        List<OperationDataShardingConfigRefreshTime> refreshTimes = operationDataShardingConfig.getRefreshTimes();
        if (StringUtils.isNotBlank(tableName)) {
            for (OperationDataShardingConfigRefreshTime refreshTime : refreshTimes) {
                if (Objects.equals(tableName, refreshTime.getTableName())) {
                    tablesToRefresh.add(refreshTime);
                    log.info("Refreshing sharding config for table: {}", tableName);
                    break;
                }
            }
        } else {
            log.info("Refreshing sharding config for all configured tables");
            tablesToRefresh = refreshTimes;
        }

        ShardingDataSource shardingDataSource = (ShardingDataSource) dataSource;
        ShardingRule shardingRule = shardingDataSource.getRuntimeContext().getRule();
        for (OperationDataShardingConfigRefreshTime tableInfo : tablesToRefresh) {
            log.info("Dynamic refresh of actualDataNodes START, table info: {}", tableInfo);


            String dynamicTableName = tableInfo.getTableName();
            TableRule tableRule = shardingRule.getTableRule(dynamicTableName);

            String dataSourceName = tableRule.getActualDataNodes().get(0).getDataSourceName();
            String logicTableName = tableRule.getLogicTable();

            // Extract the sharding column
            Collection<String> shardingColumns = tableRule.getTableShardingStrategy().getShardingColumns();
            String shardingColumn = "";
            for (String sc : shardingColumns) {
                shardingColumn = sc;
                break;
            }

            // Compute the shard tables that still need to be created
            List<DataNode> newDataNodes = getDataNodes(dynamicTableName, dataSourceName, tableRule.getActualDataNodes(),
                    tableInfo.getRefreshTime(), tableInfo.getTDays(), tableInfo.getRefreshType());
            if (newDataNodes.isEmpty()) {
                log.info("No sub-tables need to be created or synced");
                continue;
            }
            log.info("Syncing data for the following nodes: {}", newDataNodes);
            // Create the shard tables and sync data into them
            createSubTableIfAbsent(newDataNodes, dynamicTableName, shardingColumn, tableInfo);
            // After the new shards exist, push the updated node list back into ShardingSphere
            dynamicRefreshDatasource(dataSourceName, tableRule, newDataNodes);
        }
        


        log.info("Job: dynamic refresh of actualDataNodes END");
    }

    

    /**
     * Create any missing shard tables and sync data into them
     */
    @Modifying
    private void createSubTableIfAbsent(List<DataNode> newDataNodes, String tableName, String shardingColumn,
                                        OperationDataShardingConfigRefreshTime tableInfo) {
        if (CollectionUtils.isNotEmpty(newDataNodes)) {
            List<String> subTableNames = newDataNodes.stream().map(DataNode::getTableName).collect(Collectors.toList());
            log.info("Creating new tables and syncing data");
            for (String subTableName : subTableNames) {
                String sql = String.format("SELECT COUNT(1) FROM pg_class where relname = '%s'", subTableName);
                try {
                    List resultList = entityManager.createNativeQuery(sql).getResultList();
                    log.info("Checked whether table {} already exists, result: {}", subTableName, resultList);
                    Integer result = ((BigInteger) resultList.get(0)).intValue();
                    if (result.equals(1)) {
                        log.info("Table {} already exists, skipping", subTableName);
                        continue;
                    }
                } catch (Exception e) {
                    log.error("Failed while checking whether table {} exists, ", subTableName, e);
                    throw e;
                }
                // Table does not exist yet: create it and backfill data from the base table
                String syncSql = "";
                String createSql = String.format("CREATE TABLE %s (LIKE %s INCLUDING ALL)", subTableName, tableName);
                String[] splits = subTableName.split(SUB_TABLE_SEPARATOR);
                // day: the table suffix is a single day (yyyyMMdd)
                if ("day".equals(tableInfo.getRefreshType())) {
                    String dateStr = splits[splits.length - 1];
                    Date subTableDate = DateUtil.parse(dateStr, DatePattern.PURE_DATE_PATTERN);
                    syncSql = String.format("INSERT INTO %s SELECT * FROM %s WHERE %s='%s'", subTableName,
                            tableName, shardingColumn, DateUtil.format(subTableDate, DatePattern.NORM_DATE_PATTERN));
                } else if ("week".equals(tableInfo.getRefreshType())) {
                    String weekStr = splits[splits.length - 1];
                    String subTableDate = weekStr.substring(0, 4) + "-" + weekStr.substring(4, 6);
                    syncSql = String.format("INSERT INTO %s SELECT * FROM %s WHERE %s='%s'", subTableName,
                            tableName, shardingColumn, subTableDate);
                } else if ("dayToMonth".equals(tableInfo.getRefreshType())) {
                    String monthStr = splits[splits.length - 1];
                    String subTableDate = monthStr.substring(0, 4) + "-" + monthStr.substring(4, 6);
                    syncSql = String.format("INSERT INTO %s SELECT * FROM %s WHERE %s like '%s'", subTableName,
                            tableName, shardingColumn, subTableDate + "%");
                }
                try {
                    // Create the shard table
                    entityManager.createNativeQuery(createSql).executeUpdate();
                    if (StringUtils.isNotBlank(syncSql)) {
                        // Copy the matching rows from the base table into the new shard table
                        entityManager.createNativeQuery(syncSql).executeUpdate();
                    }
                } catch (Exception e) {
                    log.error("Creation failed, createSql: {}, syncSql: {}", createSql, syncSql);
                    log.error("Error: ", e);
                }
            }
        }
    }

    /**
     * Compute the data nodes (shard tables) that still need to be created
     */
    private List<DataNode> getDataNodes(String tableName, String dataSourceName,
                                        List<DataNode> dataNodes, String refreshTime,
                                        Integer tDays, String refreshType) {
        List<DataNode> newDataNodes = new ArrayList<>();
        if ("day".equals(refreshType)) {
            Date startDate = DateUtil.date();
            startDate = DateUtil.parse(DateUtil.format(startDate, DatePattern.PURE_DATE_PATTERN), DatePattern.PURE_DATE_PATTERN);
            if (CollectionUtils.isNotEmpty(dataNodes)) {
                String latestTableName = dataNodes.get(dataNodes.size() - 1).getTableName();
                String[] nameStrs = latestTableName.split(SUB_TABLE_SEPARATOR);

                try {
                    startDate = DateUtil.parse(nameStrs[nameStrs.length - 1], DatePattern.PURE_DATE_PATTERN);
                } catch (Exception e) {
                    log.warn("Failed to parse the shard-table date, ", e);
                    return newDataNodes;
                }

                startDate = DateUtil.offsetDay(startDate, 1);
            }
            Date endDate = DateUtil.date();
            DateTime refreshTimeSet = DateUtil.parse(DateUtil.format(endDate, DatePattern.NORM_DATE_PATTERN)
                    + " " + refreshTime, DatePattern.NORM_DATETIME_PATTERN);
            if (endDate.before(refreshTimeSet)) {
                // If the current time is before the configured refresh time, use one day earlier as the end date
                endDate = DateUtil.parse(DateUtil.format(DateUtil.offsetDay(endDate, -1 * tDays),
                        DatePattern.PURE_DATE_PATTERN), DatePattern.PURE_DATE_PATTERN);
            } else {
                endDate = DateUtil.parse(DateUtil.format(DateUtil.offsetDay(endDate, -1 * tDays + 1),
                        DatePattern.PURE_DATE_PATTERN), DatePattern.PURE_DATE_PATTERN);
            }

            while (startDate.before(endDate)) {
                String subTableName = dataSourceName + "." + tableName + SUB_TABLE_SEPARATOR
                        + DateUtil.format(startDate, DatePattern.PURE_DATE_PATTERN);
                newDataNodes.add(new DataNode(subTableName));
                startDate = DateUtil.offsetDay(startDate, 1);
            }
        } else if ("week".equals(refreshType)) {
            // week suffix, yyyyww
            String stringNowWeek = LocalDateUtils.getStringNowWeek(new Date()).replace("-", "");
            if (CollectionUtils.isNotEmpty(dataNodes)) {
                // Latest existing shard table
                String latestTableName = dataNodes.get(dataNodes.size() - 1).getTableName();
                String[] nameStrs = latestTableName.split(SUB_TABLE_SEPARATOR);
                try {
                    // week suffix, yyyyww
                    stringNowWeek = nameStrs[nameStrs.length - 1];
                } catch (Exception e) {
                    log.warn("Failed to parse the shard-table date, ", e);
                    return newDataNodes;
                }
            }
            String endWeek = LocalDateUtils.getStringNowWeek(new Date()).replace("-", "");
            if (endWeek.compareTo(stringNowWeek) >= 0) {
                // If the configured weekday (t-days) has not been reached yet this week, only create shards up to the previous week
                int nowWeekDay = LocalDateUtils.getNowWeekDay(LocalDateUtils.format(new Date()));
                if (nowWeekDay <= tDays) {
                    // previous week
                    endWeek = LocalDateUtils.getStringNowWeek(LocalDateUtils.addDateWeeks(new Date(), -1))
                            .replace("-", "");
                }
            }
            while (stringNowWeek.compareTo(endWeek) < 0) {
                String subTableName = dataSourceName + "." + tableName + SUB_TABLE_SEPARATOR + stringNowWeek;
                newDataNodes.add(new DataNode(subTableName));
                stringNowWeek = LocalDateUtils.addDateWeeks(
                        stringNowWeek.substring(0, 4) + "-" + stringNowWeek.substring(4, 6), 1
                ).replace("-", "");
            }
        } else if ("dayToMonth".equals(refreshType)) {
            // Current system year-month (getLastYearMonthStr is a project-specific helper, not hutool's DateUtil)
            String stringNowMonth = com.citc.analyse.utils.DateUtil.getLastYearMonthStr(0).replace("-", "");

            if (CollectionUtils.isNotEmpty(dataNodes)) {
                // Latest existing shard table
                String latestTableName = dataNodes.get(dataNodes.size() - 1).getTableName();
                String[] nameStrs = latestTableName.split(SUB_TABLE_SEPARATOR);
                try {
                    // year-month suffix
                    stringNowMonth = nameStrs[nameStrs.length - 1];
                } catch (Exception e) {
                    log.warn("Failed to parse the shard-table date, ", e);
                    return newDataNodes;
                }
            }

            String endMonth = com.citc.analyse.utils.DateUtil.getLastYearMonthStr(0).replace("-", "");
            
            while (stringNowMonth.compareTo(endMonth) <= 0) {
                String subTableName = dataSourceName + "." + tableName + SUB_TABLE_SEPARATOR + stringNowMonth;
                newDataNodes.add(new DataNode(subTableName));

                // Advance the month by one and continue the loop (getYearMonthOperateNum is a project-specific date helper)
                try {
                    stringNowMonth = DateUtil.getYearMonthOperateNum(stringNowMonth.substring(0, 4) + "-" + stringNowMonth.substring(4, 6), 1).replace("-", "");
                } catch (ParseException e) {
                    log.error("Date conversion failed");
                }
            }
        }
        return newDataNodes;
    }

    /**
     * Dynamically refresh the data source: rewrite the TableRule internals via reflection
     */
    private void dynamicRefreshDatasource(String dataSourceName, TableRule tableRule, List<DataNode> newDataNodes)
            throws NoSuchFieldException, IllegalAccessException {
        // Merge the existing data nodes in at the front
        newDataNodes.addAll(0, tableRule.getActualDataNodes());

        Set<String> actualTables = Sets.newHashSet();
        Map<DataNode, Integer> dataNodeIndexMap = Maps.newHashMap();
        AtomicInteger index = new AtomicInteger(0);
        newDataNodes.forEach(dataNode -> {
            actualTables.add(dataNode.getTableName());
            if (index.intValue() == 0) {
                dataNodeIndexMap.put(dataNode, 0);
            } else {
                dataNodeIndexMap.put(dataNode, index.intValue());
            }
            index.incrementAndGet();
        });
        // Refresh actualDataNodes (a final field, so clear the FINAL modifier first;
        // note: this "modifiers" trick works on JDK 8/11 but no longer on JDK 12+)
        Field actualDataNodesField = TableRule.class.getDeclaredField("actualDataNodes");
        Field modifiersField = Field.class.getDeclaredField("modifiers");
        modifiersField.setAccessible(true);
        modifiersField.setInt(actualDataNodesField, actualDataNodesField.getModifiers() & ~Modifier.FINAL);
        actualDataNodesField.setAccessible(true);
        actualDataNodesField.set(tableRule, newDataNodes);
        // Refresh actualTables
        Field actualTablesField = TableRule.class.getDeclaredField("actualTables");
        actualTablesField.setAccessible(true);
        actualTablesField.set(tableRule, actualTables);
        // Refresh dataNodeIndexMap
        Field dataNodeIndexMapField = TableRule.class.getDeclaredField("dataNodeIndexMap");
        dataNodeIndexMapField.setAccessible(true);
        dataNodeIndexMapField.set(tableRule, dataNodeIndexMap);
        // Refresh datasourceToTablesMap
        Map<String, Collection<String>> datasourceToTablesMap = Maps.newHashMap();
        datasourceToTablesMap.put(dataSourceName, actualTables);
        Field datasourceToTablesMapField = TableRule.class.getDeclaredField("datasourceToTablesMap");
        datasourceToTablesMapField.setAccessible(true);
        datasourceToTablesMapField.set(tableRule, datasourceToTablesMap);
    }
}
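
The post triggers syncAndRefreshOperationAnalyseSharding from xxl-job; any scheduler will do. Below is a minimal sketch that uses plain Spring @Scheduled instead of xxl-job - the class name and cron expression are assumptions, and @EnableScheduling must be active in the application:


package com.qtw.job;

import com.qtw.service.IDynamicShardingService;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;

/**
 * Sketch of a scheduler for the dynamic sharding refresh (the post uses xxl-job instead).
 */
@Slf4j
@Component
@RequiredArgsConstructor
public class ShardingRefreshJob {

    private final IDynamicShardingService dynamicShardingService;

    /** Runs every day at 23:05; passing null refreshes every configured table. */
    @Scheduled(cron = "0 5 23 * * ?")
    public void refreshSharding() {
        try {
            dynamicShardingService.syncAndRefreshOperationAnalyseSharding(null);
        } catch (Exception e) {
            log.error("Failed to refresh the sharding config", e);
        }
    }
}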

5. Normal queries now read from the shard tables

        A plain Mybatis-plus query against the logic table goes through the ...PreciseShardingTableAlgorithm rules from step 3: the algorithm matches the sharding value, returns the concrete shard table name, and the query is then executed against that shard.
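
For illustration only (the entity and mapper names here are assumptions, not from the post): any ordinary Mybatis-plus query like the ones below is rewritten by sharding-jdbc to hit the matching shard table(s).


import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;

import javax.annotation.Resource;
import java.util.List;

public class LabourCellQueryExample {

    // Hypothetical Mybatis-plus mapper for the qt_third_labour_cell logic table
    @Resource
    private QtThirdLabourCellMapper qtThirdLabourCellMapper;

    public void query() {
        // Equality on the sharding column -> routed by the precise algorithm to qt_third_labour_cell_202402
        List<QtThirdLabourCell> oneDay = qtThirdLabourCellMapper.selectList(
                new LambdaQueryWrapper<QtThirdLabourCell>()
                        .eq(QtThirdLabourCell::getDay, "2024-02-05"));

        // Range on the sharding column -> routed by the range algorithm to ..._202401 and ..._202402
        List<QtThirdLabourCell> twoMonths = qtThirdLabourCellMapper.selectList(
                new LambdaQueryWrapper<QtThirdLabourCell>()
                        .between(QtThirdLabourCell::getDay, "2024-01-15", "2024-02-05"));
    }
}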


Related posts worth a look:

Spring Boot 集成 Sharding JDBC 分库分表 (Zhihu) - the configuration file there is a good reference

ShardingJdbc 分库分表简单实现 (CSDN blog)
