mysql handler_delete_Handler_delete和Com_delete有什么关系

本文解析了MySQL中关于删除操作的状态变量Handler_delete与Com_delete的区别及联系,并通过具体示例展示了这两种变量如何随DELETE命令的执行而变化。

今天有人问, Handler_delete和Com_delete有什么关系?

Handler_delete:

Com_delete:

这两个都是 MySQL 中 SHOW STATUS 输出里的状态变量。

根据 MySQL 5.0 官方文档的解释:

Handler_delete:

The number of times that rows have been deleted from tables.

即从表中删除的行数(按行累计)。

Com_delete:

The Com_xxx statement counter variables indicate the number of times each xxx statement has been executed.

即 DELETE 语句的执行次数(按语句累计)。

下面我们来测试一下:

(root@sns-ptst4:)[test]> select * from t1;

+------+-------+

| name | n1 |

+------+-------+

| aaa | NULL |

| bbb | NULL |

| ccc | NULL |

| aaa | aaaaa |

+------+-------+

4 rows in set (0.00 sec)

(root@sns-ptst4:)[test]> show global status like '%delete%';

+---------------------+-------+

| Variable_name | Value |

+---------------------+-------+

| Com_delete | 0 |

| Com_delete_multi | 0 |

| Handler_delete | 0 |

| Innodb_rows_deleted | 0 |

+---------------------+-------+

4 rows in set (0.00 sec)

(root@sns-ptst4:)[test]> delete from t1;

Query OK, 4 rows affected (0.00 sec)

(root@sns-ptst4:)[test]> commit;

Query OK, 0 rows affected (0.00 sec)

(root@sns-ptst4:)[test]> show global status like '%delete%';

+---------------------+-------+

| Variable_name | Value |

+---------------------+-------+

| Com_delete | 1 |

| Com_delete_multi | 0 |

| Handler_delete | 4 |

| Innodb_rows_deleted | 4 |

+---------------------+-------+

4 rows in set (0.00 sec)

从两次SHOW STATUS的结果来看,官方文档的解释是正确的!

import json import time from contextlib import contextmanager import pymysql from adscommon.utils.utils import LiteUtil from loguru import logger from redis.exceptions import RedisError from sqlalchemy import Column, Integer, String from sqlalchemy.exc import SQLAlchemyError from sqlalchemy.ext.declarative import declarative_base from app.bean.mysql_client import mysql_client from app.bean.redis_client import redis_client pymysql.install_as_MySQLdb() Base = declarative_base() ID2HANDLER_KEY = "data-preprocessing-id2handler" class Id2Handler(Base): __tablename__ = 't_id2handler' id = Column(Integer, primary_key=True, autoincrement=True) event_id = Column(Integer, nullable=False, unique=True) handler_list = Column(String(5000), nullable=False) class BaseRepository: def __init__(self): self.session = mysql_client.get_session() self.redis = redis_client @contextmanager def session_scope(self): try: self.session.begin() yield self.session self.session.commit() except Exception as err: logger.error(f"mysql session err for {err.__class__.__name__}") self.session.rollback() raise finally: self.session.close() class IdHandlerRepository(BaseRepository): @LiteUtil.retry_wrapper(exception=SQLAlchemyError) def query_handler_by_event_id(self, event_id): id_handler = self.session.query(Id2Handler).filter_by(event_id=event_id).first() self.session.close() return id_handler @LiteUtil.retry_wrapper(exception=SQLAlchemyError) def query_all_data(self): id_handlers = self.session.query(Id2Handler).all() id_handler_list = [{"event_id": id_handler.event_id, "handler_list": id_handler.handler_list} for id_handler in id_handlers] return id_handler_list @LiteUtil.retry_wrapper(exception=SQLAlchemyError) def insert_or_update_id_handler(self, event_id, handler_list): query_data = self.query_handler_by_event_id(event_id) if not query_data: id_handler = Id2Handler(event_id=event_id, handler_list=handler_list) self.session.add(id_handler) self.session.flush() else: 
self.session.query(Id2Handler).filter_by(event_id=event_id).update({ Id2Handler.handler_list: handler_list }) self.session.close() @LiteUtil.retry_wrapper(exception=SQLAlchemyError) def delete_by_id(self, event_id): with self.session_scope(): self.session.query(Id2Handler).filter(Id2Handler.event_id == event_id).delete() class RedisRepository(BaseRepository): @LiteUtil.retry_wrapper(exception=RedisError) def get_id_handler(self): try: data = json.loads(self.redis.get(ID2HANDLER_KEY)) return data except Exception as e: logger.error(f"get config from redis error {e.__class__.__name__}") return {} @LiteUtil.retry_wrapper(exception=RedisError) def update_id_handler_config(self, data): result_json = json.dumps(data) self.redis.set(ID2HANDLER_KEY, result_json) @LiteUtil.retry_wrapper(exception=RedisError) def publish_message(self, channel, message): self.redis.publish(channel, message) @LiteUtil.retry_wrapper(exception=RedisError) def subscribe_channel(self, channel, config_cache): pubsub = self.redis.pubsub() pubsub.subscribe(channel) data_in_redis = self.get_id_handler() if data_in_redis: config_cache.clear() for key, value in data_in_redis.items(): config_cache[key] = value if self.redis.exists(ID2HANDLER_KEY): time.sleep(2) config = self.get_id_handler() config_cache.clear() for key, value in config.items(): config_cache[key] = value 这个repository的代码 import time from loguru import logger from app.service import repositories def convert_data_mysql2redis(data): result = {} for item in data: event_id = str(item['event_id']) handler_list = item['handler_list'].split(',') handler_list = [handler.strip() for handler in handler_list] result[event_id] = handler_list return result def is_need_refresh_config(config_in_mysql, config_in_redis): for key, new_list in config_in_mysql.items(): if key not in config_in_redis or set(new_list) != set(config_in_redis[key]): return True for key in config_in_redis: if key not in config_in_mysql: return True return False class ConfigManger: 
def __init__(self): self.redis_repo = repositories.RedisRepository() self.mysql_config_repo = repositories.IdHandlerRepository() def sync_all_config(self, timeout, config_cache): """同步所有的配置""" config = convert_data_mysql2redis(self.mysql_config_repo.query_all_data()) lock = self.redis_repo.redis.lock("data-preprocessing:sync_config_lock", timeout=timeout) if lock.acquire(blocking=False): try: self.redis_repo.update_id_handler_config(config) finally: lock.release() if config: for key, value in config.items(): config_cache[key] = value def sync_config_timely(self): data = self.mysql_config_repo.query_all_data() transformed_data = convert_data_mysql2redis(data) config_in_redis = self.redis_repo.get_id_handler() need_refresh = is_need_refresh_config(transformed_data, config_in_redis) lock = self.redis_repo.redis.lock("data-preprocessing:refresh_config", timeout=20 * 60) if need_refresh: with lock: self.redis_repo.update_id_handler_config(transformed_data) self.redis_repo.publish_message("test_channel", "messaged changed") def update_config(self, event_id, handler_list): """配置更新""" self.mysql_config_repo.insert_or_update_id_handler(event_id, handler_list) data_in_redis = self.redis_repo.get_id_handler() new_handler_list = [handler.strip() for handler in handler_list.split(",")] if event_id not in data_in_redis or set(data_in_redis.get(event_id)) != set(new_handler_list): data_in_redis[event_id] = new_handler_list self.redis_repo.update_id_handler_config(data_in_redis) # 发布消息 self.redis_repo.publish_message("test_channel", "message changed") def delete_config_by_id(self, event_id): """配置删除""" self.mysql_config_repo.delete_by_id(event_id) data_in_redis = self.redis_repo.get_id_handler() if str(event_id) in data_in_redis: data_in_redis.pop(str(event_id)) self.redis_repo.update_id_handler_config(data_in_redis) self.redis_repo.publish_message("test_channel", "message changed") def get_config(self): """获取配置""" data_in_redis = self.redis_repo.get_id_handler() if data_in_redis: 
return data_in_redis data_in_mysql = self.mysql_config_repo.query_all_data() if data_in_mysql: transformed_data = convert_data_mysql2redis(data_in_mysql) self.redis_repo.update_id_handler_config(transformed_data) return transformed_data return {} def subscribe(self, config): self.redis_repo.subscribe_channel("test_channel", config) 这个是get_config的代码, 我想要实现一下的需求 1.mysql负责配置的持久化,每次启动时会同时将mysql的配置同步到redis 2.其他服务优先从redis中获取配置 3.配置通过fastapi更新,如果发生了更新,mysql数据进行更新,同时推送最新的配置到redis,同时redis发布一条消息 4.订阅redis消息,如果发现消息更新,就将本地的配置更新 5.使用apscheduler启动定时器任务,20分钟查询一次mysql的最新配置,如果redis不一致,更新redis并发布消息表更。请帮忙改造下代码
08-01
import json import time from contextlib import contextmanager import pymysql from adscommon.utils.utils import LiteUtil from loguru import logger from redis.exceptions import RedisError from sqlalchemy import Column, Integer, String from sqlalchemy.exc import SQLAlchemyError from sqlalchemy.ext.declarative import declarative_base from app.bean.mysql_client import mysql_client from app.bean.redis_client import redis_client pymysql.install_as_MySQLdb() Base = declarative_base() ID2HANDLER_KEY = “data-preprocessing-id2handler” class Id2Handler(Base): tablename = ‘t_id2handler’ id = Column(Integer, primary_key=True, autoincrement=True) event_id = Column(Integer, nullable=False, unique=True) handler_list = Column(String(5000), nullable=False) class BaseRepository: def init(self): self.session = mysql_client.get_session() self.redis = redis_client @contextmanager def session_scope(self): try: self.session.begin() yield self.session self.session.commit() except Exception as err: logger.error(f"mysql session err for {err.__class__.__name__}") self.session.rollback() raise finally: self.session.close() class IdHandlerRepository(BaseRepository): @LiteUtil.retry_wrapper(exception=SQLAlchemyError) def query_handler_by_event_id(self, event_id): id_handler = self.session.query(Id2Handler).filter_by(event_id=event_id).first() self.session.close() return id_handler @LiteUtil.retry_wrapper(exception=SQLAlchemyError) def query_all_data(self): id_handlers = self.session.query(Id2Handler).all() id_handler_list = [{"event_id": id_handler.event_id, "handler_list": id_handler.handler_list} for id_handler in id_handlers] return id_handler_list @LiteUtil.retry_wrapper(exception=SQLAlchemyError) def insert_or_update_id_handler(self, event_id, handler_list): query_data = self.query_handler_by_event_id(event_id) if not query_data: id_handler = Id2Handler(event_id=event_id, handler_list=handler_list) self.session.add(id_handler) self.session.flush() else: 
self.session.query(Id2Handler).filter_by(event_id=event_id).update({ Id2Handler.handler_list: handler_list }) self.session.close() @LiteUtil.retry_wrapper(exception=SQLAlchemyError) def delete_by_id(self, event_id): with self.session_scope(): self.session.query(Id2Handler).filter(Id2Handler.event_id == event_id).delete() class RedisRepository(BaseRepository): @LiteUtil.retry_wrapper(exception=RedisError) def get_id_handler(self): try: data = json.loads(self.redis.get(ID2HANDLER_KEY)) return data except Exception as e: logger.error(f"get config from redis error {e.__class__.__name__}") return {} @LiteUtil.retry_wrapper(exception=RedisError) def update_id_handler_config(self, data): result_json = json.dumps(data) self.redis.set(ID2HANDLER_KEY, result_json) @LiteUtil.retry_wrapper(exception=RedisError) def publish_message(self, channel, message): self.redis.publish(channel, message) @LiteUtil.retry_wrapper(exception=RedisError) def subscribe_channel(self, channel, config_cache): pubsub = self.redis.pubsub() pubsub.subscribe(channel) data_in_redis = self.get_id_handler() if data_in_redis: config_cache.clear() for key, value in data_in_redis.items(): config_cache[key] = value if self.redis.exists(ID2HANDLER_KEY): time.sleep(2) config = self.get_id_handler() config_cache.clear() for key, value in config.items(): config_cache[key] = value 这个repository的代码 import time from loguru import logger from app.service import repositories def convert_data_mysql2redis(data): result = {} for item in data: event_id = str(item[‘event_id’]) handler_list = item[‘handler_list’].split(‘,’) handler_list = [handler.strip() for handler in handler_list] result[event_id] = handler_list return result def is_need_refresh_config(config_in_mysql, config_in_redis): for key, new_list in config_in_mysql.items(): if key not in config_in_redis or set(new_list) != set(config_in_redis[key]): return True for key in config_in_redis: if key not in config_in_mysql: return True return False class ConfigManger: 
def init(self): self.redis_repo = repositories.RedisRepository() self.mysql_config_repo = repositories.IdHandlerRepository() def sync_all_config(self, timeout, config_cache): """同步所有的配置""" config = convert_data_mysql2redis(self.mysql_config_repo.query_all_data()) lock = self.redis_repo.redis.lock("data-preprocessing:sync_config_lock", timeout=timeout) if lock.acquire(blocking=False): try: self.redis_repo.update_id_handler_config(config) finally: lock.release() if config: for key, value in config.items(): config_cache[key] = value def sync_config_timely(self): data = self.mysql_config_repo.query_all_data() transformed_data = convert_data_mysql2redis(data) config_in_redis = self.redis_repo.get_id_handler() need_refresh = is_need_refresh_config(transformed_data, config_in_redis) lock = self.redis_repo.redis.lock("data-preprocessing:refresh_config", timeout=20 * 60) if need_refresh: with lock: self.redis_repo.update_id_handler_config(transformed_data) self.redis_repo.publish_message("test_channel", "messaged changed") def update_config(self, event_id, handler_list): """配置更新""" self.mysql_config_repo.insert_or_update_id_handler(event_id, handler_list) data_in_redis = self.redis_repo.get_id_handler() new_handler_list = [handler.strip() for handler in handler_list.split(",")] if event_id not in data_in_redis or set(data_in_redis.get(event_id)) != set(new_handler_list): data_in_redis[event_id] = new_handler_list self.redis_repo.update_id_handler_config(data_in_redis) # 发布消息 self.redis_repo.publish_message("test_channel", "message changed") def delete_config_by_id(self, event_id): """配置删除""" self.mysql_config_repo.delete_by_id(event_id) data_in_redis = self.redis_repo.get_id_handler() if str(event_id) in data_in_redis: data_in_redis.pop(str(event_id)) self.redis_repo.update_id_handler_config(data_in_redis) self.redis_repo.publish_message("test_channel", "message changed") def get_config(self): """获取配置""" data_in_redis = self.redis_repo.get_id_handler() if data_in_redis: 
return data_in_redis data_in_mysql = self.mysql_config_repo.query_all_data() if data_in_mysql: transformed_data = convert_data_mysql2redis(data_in_mysql) self.redis_repo.update_id_handler_config(transformed_data) return transformed_data return {} def subscribe(self, config): self.redis_repo.subscribe_channel("test_channel", config) 这个是get_config的代码, 我想要实现一下的需求 1.mysql负责配置的持久化,每次启动时会同时将mysql的配置同步到redis 2.其他服务优先从redis中获取配置 3.配置通过fastapi更新,如果发生了更新,mysql数据进行更新,同时推送最新的配置到redis,同时redis发布一条消息 4.订阅redis消息,如果发现消息更新,就将本地的配置更新 5.使用apscheduler启动定时器任务,20分钟查询一次mysql的最新配置,如果redis不一致,更新redis并发布消息表更。请帮忙改造下代码 请帮忙使用fastapi启动一个接口,可以同时更新,新增,或者删除多个config
最新发布
08-01
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值