memory

From an application's point of view, memory is divided into roughly three storage areas: the heap, the stack, and the static area. When the system allocates memory, it searches for a free region large enough to satisfy the requested size, marks that region as in use, and returns the region's starting address to the caller.


When an application runs, what the system initially gives it is only a virtual address space, not actual physical storage. Physical memory must then be committed to the regions of address space the process has reserved before they can hold data. The system can also treat the paging (swap) file on disk as part of memory: the page file increases the amount of memory available to processes, but swapping pages in and out consumes a significant amount of CPU and I/O resources.
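The reserve-then-commit idea can be made concrete with a small sketch, assuming a Windows environment and the VirtualAlloc API; the 1 MB / 64 KB sizes and the choice to reserve and commit in two separate calls are made up for the example.

    #include <windows.h>
    #include <cstdio>
    #include <cstring>

    int main() {
        // Step 1: reserve 1 MB of address space only -- no physical storage is committed yet.
        void* region = VirtualAlloc(nullptr, 1 << 20, MEM_RESERVE, PAGE_NOACCESS);
        if (!region) return 1;

        // Step 2: commit physical storage (backed by RAM or the page file) for the first 64 KB.
        void* usable = VirtualAlloc(region, 64 * 1024, MEM_COMMIT, PAGE_READWRITE);
        if (!usable) return 1;

        // The committed pages can now hold data; touching uncommitted pages would fault.
        std::memset(usable, 0, 64 * 1024);
        std::printf("reserved at %p, committed at %p\n", region, usable);

        // Release both the committed storage and the reserved address space.
        VirtualFree(region, 0, MEM_RELEASE);
        return 0;
    }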


If several processes share one memory-mapped file, then when a process modifies the file's contents, the system creates a new view that holds a copy of the current contents; in other words, what actually gets modified is the copy in that view. Once the modification is finished, the system writes the contents of the new view back to the underlying file, and at that point all processes sharing the mapping see the modified data.
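For reference, here is a minimal sketch of two processes sharing data through a memory-mapped object, again assuming Windows. It shows plain shared access through a page-file-backed named mapping rather than the copy-on-write view behavior described above; the object name "Local\\demo_mapping" and the 4 KB size are invented, and error handling is reduced to early returns.

    #include <windows.h>
    #include <cstdio>
    #include <cstring>

    int main() {
        // Create (or open) a 4 KB named mapping backed by the system paging file.
        HANDLE mapping = CreateFileMappingA(INVALID_HANDLE_VALUE, nullptr,
                                            PAGE_READWRITE, 0, 4096,
                                            "Local\\demo_mapping");
        if (!mapping) return 1;

        // Map a view of the object into this process's address space.
        char* view = static_cast<char*>(
            MapViewOfFile(mapping, FILE_MAP_ALL_ACCESS, 0, 0, 4096));
        if (!view) return 1;

        // Another process that opens "Local\\demo_mapping" and maps a view
        // sees the same pages, so this write becomes visible to it.
        std::strcpy(view, "hello from process A");
        std::printf("wrote: %s\n", view);

        UnmapViewOfFile(view);
        CloseHandle(mapping);
        return 0;
    }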


 


Heap: from the application's point of view, a memory region that the system sets aside for the program. Every process has a default heap, and the program's dynamic memory requests, such as memory obtained with new or malloc, come from this region. When the process ends, the default heap is released. Access to the heap is essentially serialized: if, say, three threads allocate from the heap at the same time, they have to queue up. A programmer can also create a private heap instead of using the process's default heap; by overloading the new operator a class can allocate from the private heap, but the delete operator must then be overloaded as well.
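To illustrate that last point, here is a minimal sketch of a class that routes its allocations to a private Windows heap through overloaded operator new and operator delete. The class name Widget, the growable heap created with HeapCreate(0, 0, 0), and the decision never to destroy that heap are all simplifying assumptions for the example.

    #include <windows.h>
    #include <new>

    // A private heap created once for this translation unit (simplified: never destroyed).
    static HANDLE g_privateHeap = HeapCreate(0, 0, 0);  // growable, default sizes

    class Widget {
    public:
        // Allocate instances from the private heap instead of the default process heap.
        static void* operator new(std::size_t size) {
            void* p = HeapAlloc(g_privateHeap, 0, size);
            if (!p) throw std::bad_alloc();
            return p;
        }
        // If operator new is overloaded, operator delete must be overloaded to match.
        static void operator delete(void* p) noexcept {
            if (p) HeapFree(g_privateHeap, 0, p);
        }
    private:
        int data_[16]{};
    };

    int main() {
        Widget* w = new Widget();   // constructed in the private heap
        delete w;                   // returned to the private heap
        return 0;
    }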


 


Stack: a process also has a default stack region, but the stack is allocated and destroyed by the system and the compiler-generated code, not by the programmer. This stack exists because the process has a main thread, and the local variables declared in a thread's code all live on the stack. Every thread in the process has its own thread stack that stores that thread's local variables, and it is reclaimed by the system.
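A small, purely illustrative example of "one stack per thread": the same local variable ends up at a different address in each thread, because each thread's locals live on its own stack.

    #include <cstdio>
    #include <thread>

    void work(int id) {
        int local = id;                       // lives on THIS thread's stack
        std::printf("thread %d: &local = %p\n", id, static_cast<void*>(&local));
    }

    int main() {
        int mainLocal = 0;                    // lives on the main thread's stack
        std::printf("main: &mainLocal = %p\n", static_cast<void*>(&mainLocal));

        std::thread t1(work, 1), t2(work, 2); // each thread gets its own stack
        t1.join();
        t2.join();
        return 0;
    }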


 


Static area: all static and global variables in the program are stored in this region, and it is reclaimed by the system when the process ends.
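Putting the three areas together, the variables in the short sketch below (all of them invented for the example) are annotated with the region each one lives in.

    #include <cstdlib>

    int g_counter = 0;                // global: static area, lives for the whole process

    void demo() {
        static int s_calls = 0;       // static local: also in the static area
        int local = 0;                // local: on the calling thread's stack
        int* fromHeap = new int(42);  // the pointed-to int is on the heap
        (void)local; (void)s_calls;
        delete fromHeap;              // heap memory must be released explicitly
    }

    int main() {
        demo();
        return 0;
    }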


 


While we are at it, a word about new and malloc. In my view the differences come down to two points:


1. new is an operator: when it allocates memory, the compiler arranges for the constructor to be called, so the allocated region is initialized automatically. malloc is a function: it only allocates memory and performs no initialization.


2. new requires a type: on success it returns a pointer of that type, pointing at the allocated memory. malloc returns a void* pointer unless you cast it, which means the allocated region can hold data of any type.


Both allocate from the heap. After an object allocated with new is released, the pointer should be set to NULL rather than left dangling.
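The contrast, plus the reset-to-null habit, in one short sketch (the Point struct is invented for the example):

    #include <cstdio>
    #include <cstdlib>

    struct Point {
        int x, y;
        Point() : x(1), y(2) { std::puts("Point constructed"); }
        ~Point()             { std::puts("Point destroyed"); }
    };

    int main() {
        // new: typed pointer, constructor runs, memory comes from the heap.
        Point* p = new Point();
        std::printf("p->x = %d\n", p->x);     // initialized by the constructor
        delete p;                             // destructor runs, memory freed
        p = nullptr;                          // don't leave a dangling pointer

        // malloc: raw bytes, no constructor, returns void* that must be cast in C++.
        Point* q = static_cast<Point*>(std::malloc(sizeof(Point)));
        // q->x and q->y hold uninitialized garbage here -- no constructor was called.
        std::free(q);                         // no destructor is called either
        q = nullptr;
        return 0;
    }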