Using MyBatis foreach to Populate a MERGE INTO Statement

This article walks through the MERGE INTO statement as used from MyBatis, covering its syntax and a practical example. It also introduces the foreach tag for handling collection data, showing how to iterate over a List and insert or update its elements into a target table.

1. Syntax of the MERGE INTO statement

MERGE INTO schema.table alias
USING { schema.table | view | query } alias
ON ( condition )
WHEN MATCHED THEN
  UPDATE SET {clause}
WHEN NOT MATCHED THEN
  INSERT VALUES {clause};

Parameter notes:

INTO clause
        Specifies the target table of the update or insert.
USING clause
        Specifies the source of the records to update or insert; it can be a table, a view, or a subquery.
ON clause
        Specifies the join condition between the target table and the source (table, view, or subquery). Rows that match (already exist) are updated; rows that do not match are inserted. A concrete example follows.
merge_update_clause
        The UPDATE part of the statement.
merge_insert_clause
        The INSERT part of the statement.
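
Putting the pieces together, here is a minimal sketch of a complete statement (the EMP target table, the EMP_STAGE source table, and their columns are hypothetical; Oracle syntax assumed):

-- Hypothetical tables: EMP is the target, EMP_STAGE is the source
MERGE INTO EMP A
USING (SELECT EMP_ID, EMP_NAME, SALARY FROM EMP_STAGE) B
ON (A.EMP_ID = B.EMP_ID)
WHEN MATCHED THEN
  UPDATE SET A.EMP_NAME = B.EMP_NAME,
             A.SALARY   = B.SALARY
WHEN NOT MATCHED THEN
  INSERT (A.EMP_ID, A.EMP_NAME, A.SALARY)
  VALUES (B.EMP_ID, B.EMP_NAME, B.SALARY);

Rows of EMP_STAGE whose EMP_ID already exists in EMP update the matching row; all other rows are inserted.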

2. Using the MyBatis foreach tag

<foreach item="item" index="index" collection="list | array | Map key" open="(" separator="," close=")">
    #{item}  <!-- reference the parameter value here -->
</foreach>

The main attributes of the foreach tag are described below.

  1. item: the alias used for each element of the collection during iteration.
  2. index: the name of a variable that holds the current position in the iteration.
  3. open: the string the generated fragment starts with (for an IN clause, this is "(").
  4. separator: the string placed between iterations (for an IN clause, this is ",").
  5. close: the string the generated fragment ends with (for an IN clause, this is ")").


The collection attribute is required, and its value falls into one of three cases (a usage sketch follows this list):

  1. If a single parameter is passed and it is a List, the collection value is list.
  2. If a single parameter is passed and it is an array, the collection value is array.
  3. If multiple parameters are passed, they must be wrapped in a Map (a single parameter can also be wrapped this way). The Map keys are the parameter names, and the collection value is the key under which the List or array was put into the Map.
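
For example, a minimal sketch of an IN query over a single List parameter (the selectByIds statement id and the USER_INFO table are hypothetical):

<select id="selectByIds" resultType="java.util.HashMap">
    SELECT * FROM USER_INFO
    WHERE ID IN
    <foreach collection="list" item="id" index="index" open="(" separator="," close=")">
        #{id}
    </foreach>
</select>

If the same List were instead put into a Map under the key idList (or exposed via @Param("idList") on the mapper method), collection="idList" would be used rather than collection="list".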

3. Demo

The data source is a List<Object>. Each element of the list is turned into a one-row SELECT ... FROM dual, and these rows are combined with UNION ALL into an inline source set (aliased C below). That source set is then matched against the target table on the key columns: when the keys collide the row is updated, otherwise it is inserted.

<insert id="saveTableList">
	MERGE INTO TABLENAME A USING (
	<foreach collection="paramList" item="item" separator="union all">
		SELECT
			#{item.paramName} AS PARAM_NAME,
			#{item.paramValue} AS PARAM_VALUE,
			#{item.paramType} AS PARAM_TYPE
		FROM dual
	</foreach>
	) C
	ON (A.PARAM_NAME = C.PARAM_NAME AND A.PARAM_TYPE = C.PARAM_TYPE)
	WHEN MATCHED THEN
		UPDATE
			SET A.PARAM_VALUE = C.PARAM_VALUE
	WHEN NOT MATCHED THEN
		INSERT
			(A.PARAM_NAME, A.PARAM_VALUE, A.PARAM_TYPE)
		VALUES
			(C.PARAM_NAME, C.PARAM_VALUE, C.PARAM_TYPE)
</insert>
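
On the Java side, the statement above expects the collection to be visible under the name paramList. A minimal sketch of a matching mapper interface (the ParamMapper and ParamBean names are hypothetical; ParamBean is assumed to expose paramName, paramValue and paramType getters to match the #{item.*} references):

import java.util.List;

import org.apache.ibatis.annotations.Param;

public interface ParamMapper {

    // @Param exposes the List to the mapper XML under the name "paramList",
    // matching collection="paramList" in the foreach above.
    // ParamBean is a hypothetical POJO with paramName, paramValue, paramType fields.
    int saveTableList(@Param("paramList") List<ParamBean> paramList);
}

Because the whole MERGE is generated as a single statement, one mapper call upserts the entire list in a single round trip; very large lists may need to be split into batches to keep the generated SQL and the number of bind variables within database limits.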