/*
 * f_chat.c — Fetion (企业即时通讯 / enterprise instant messaging) chat
 * module: sending/receiving SIP INVITE and MESSAGE for the libpurple plugin.
 */

#include "internal.h"

#include "accountopt.h"
#include "blist.h"
#include "conversation.h"
#include "dnsquery.h"
#include "debug.h"
#include "notify.h"
#include "privacy.h"
#include "prpl.h"
#include "plugin.h"
#include "util.h"
#include "version.h"
#include "network.h"
#include "xmlnode.h"
#include "request.h"
#include "imgstore.h"
#include "sslconn.h"

#include "sipmsg.h"
#include "dnssrv.h"
#include "ntlm.h"

#include "sipmsg.h"
#include "f_chat.h"


/*
 * Transaction callback fired when a response to our INVITE arrives:
 * acknowledge it by sending a SIP "A" (ACK) request back to the peer
 * named in the response's "T" header.
 */
void SendInvite_cb(struct fetion_account_data *sip, struct sipmsg *msg, struct transaction *tc)
{
	const gchar *to;
	gchar *fullto;

	to = sipmsg_find_header(msg, "T");
	g_return_if_fail(to != NULL);	/* malformed response: no "T" header */

	/* SIP header lines are CRLF-terminated: "\r\n", not "/r/n". */
	fullto = g_strdup_printf("T: %s\r\n", to);

	purple_debug_info("fetion:", "SendACK:\n");
	send_sip_request(sip->gc, "A", "", fullto, NULL, NULL, NULL, NULL);

	g_free(fullto);
}


void SendInvite(struct fetion_account_data *sip,const gchar *who)
{
 gchar *body,*hdr,*fullto;
 const gchar *my_ip;
 gint my_port;
 struct fetion_buddy * buddy = NULL;
 if(strncmp("sip:", who, 4)==0)
  fullto = g_strdup_printf("T: %s/r/n", who);
 else
  return;
 buddy = g_hash_table_lookup(sip->buddies,who);
 g_return_if_fail(buddy!=NULL);
 my_ip = purple_network_get_my_ip(sip->fd);
 my_port = purple_network_get_port_from_fd(sip->fd);
 purple_debug_info("fetion:","SendInvite:[%s:%d]/n",my_ip,my_port);
 hdr = g_strdup_printf("K: text/html-fragment/r/n"
   "K: multiparty/r/n"); 
 body = g_strdup_printf("v=0/r/n"
   "o=-0 0 IN %s:%d/r/n"
   "s=session/r/n"
   "c=IN IP4 %s:%d/r/n"
   "t=0 0/r/n"
   "m=message %d sip %s/r/n",
   my_ip,my_port,my_ip,my_port,my_port,sip->uri);

 purple_debug_info("fetion:","SendInvite:[%s]/n",body);
 send_sip_request(sip->gc,"I","",fullto,hdr,body,buddy->dialog,(TransCallback) SendInvite_cb);

 g_free(fullto);
 g_free(hdr);
 g_free(body);
}
void process_incoming_invite(struct fetion_account_data *sip,struct sipmsg *msg)
{
 const gchar * to,*callid; 
 gchar * body;
 const gchar *my_ip;
 gint my_port;
 struct group_chat *g_chat;
 struct fetion_buddy * buddy =NULL;
 my_ip = purple_network_get_my_ip(sip->fd);
 my_port = purple_network_get_port_from_fd(sip->fd);
 purple_debug_info("fetion:","Invite:[%s:%d]/n",my_ip,my_port);
 body = g_strdup_printf("v=0/r/n"
   "o=-0 0 IN %s:%d/r/n"
   "s=session/r/n"
   "c=IN IP4 %s:%d/r/n"
   "t=0 0/r/n"
   "m=message %d sip %s/r/n",
   my_ip,my_port,my_ip,my_port,my_port,sip->uri);

 purple_debug_info("fetion:","Invite:answer[%s]/n",body);
 send_sip_response(sip->gc,msg,200,"OK",body);

 callid = sipmsg_find_header(msg,"I");
 to = sipmsg_find_header(msg,"F");
 if(strncmp(to,"sip:TG",6)!=0)
 {
  buddy = g_hash_table_lookup(sip->buddies,to);
  if(buddy==NULL)
  {
   buddy = g_new0(struct fetion_buddy, 1);
   buddy->name = g_strdup(to);
   g_hash_table_insert(sip->buddies, buddy->name, buddy);
  }
  if(buddy->dialog==NULL)
   buddy->dialog = g_new0(struct sip_dialog,1);
  else
   g_free(buddy->dialog->callid);
  buddy->dialog->callid = g_strdup(callid);
 }
 else
 {
  g_chat = g_new0(struct group_chat,1);
  g_chat->chatid = sip->tg++;
  g_chat->callid = g_strdup(callid); 
  g_chat->groupname = g_strdup(to);
  g_hash_table_insert(sip->tempgroup, g_chat->groupname, g_chat);
  serv_got_joined_chat(sip->gc,g_chat->chatid,"Fetion Chat");
 }
 g_free(body);
}

void fetion_send_message(struct fetion_account_data *sip, const gchar *to, const gchar *msg, const gchar *type)
{
 gchar *hdr;
 gchar *fullto;
 gint  self_flag,sms_flag;
 struct fetion_buddy * buddy =NULL;
 PurplePresence *presence;
 PurpleBuddy *b;

 self_flag = 0;
 sms_flag = 0;
 buddy = g_hash_table_lookup(sip->buddies,to);
 if(buddy==NULL)
 {
  buddy = g_new0(struct fetion_buddy, 1);
  buddy->name = g_strdup(to);
  g_hash_table_insert(sip->buddies, buddy->name, buddy);
 }

 if(strcmp(sip->uri,to)!=0)
 {
  b = purple_find_buddy(sip->account,to);
  presence = purple_buddy_get_presence(b);
  if(buddy->dialog==NULL)
  {
   buddy->dialog = g_new0(struct sip_dialog,1);
   buddy->dialog->callid = gencallid();
  // if(purple_presence_is_online(presence))
   if(!purple_presence_is_status_primitive_active(presence, PURPLE_STATUS_MOBILE))
    SendInvite(sip,to);
  }
  //if(purple_presence_is_online(presence))
  if(!purple_presence_is_status_primitive_active(presence, PURPLE_STATUS_MOBILE))
   sms_flag = 0;
  else
   sms_flag = 1;

 }
 else
  self_flag = 1;

 

 if((sms_flag == 0) &&(self_flag!=1) &&(strncmp("sip:", to, 4)==0))
  fullto = g_strdup_printf("T: %s/r/n", to);
 else
  fullto = g_strdup_printf("T: %s/r/nN: SendSMS/r/n", to);


 purple_debug_info("fetion:sending ","to:[%s] msg:[%s]/n",to,msg);
 if(type)
  hdr = g_strdup_printf("C: %s/r/n", type);
 else
  hdr = g_strdup("C: text/plain/r/n");

 

 send_sip_request(sip->gc, "M", NULL, fullto, hdr, msg, buddy->dialog, NULL);
 g_free(hdr);
 g_free(fullto);
}

void process_incoming_message(struct fetion_account_data *sip, struct sipmsg *msg)
{
 const gchar * from;
 struct group_chat *g_chat=NULL;
 const gchar *contenttype;
 gboolean found = FALSE;

 from = sipmsg_find_header(msg,"F");
 if(!from) return;

 purple_debug(PURPLE_DEBUG_MISC, "fetion", "got message from %s: %s/n", from, msg->body);

 contenttype = sipmsg_find_header(msg, "C");
 if(!contenttype || !strncmp(contenttype, "text/plain", 10) || !strncmp(contenttype, "text/html-fragment", 18))
 {
  if(strncmp(from,"sip:TG",6)==0)
  {
   g_chat = g_hash_table_lookup(sip->tempgroup,from);
   g_return_if_fail(g_chat!=NULL);
   from = sipmsg_find_header(msg,"SO");
   g_return_if_fail(from!=NULL);
   serv_got_chat_in(sip->gc,g_chat->chatid,from,0,msg->body,time(NULL));
  }
  else
   serv_got_im(sip->gc, from, msg->body, 0, time(NULL));
  sipmsg_remove_header(msg,"C");
  sipmsg_remove_header(msg,"D");
  sipmsg_remove_header(msg,"K");
  sipmsg_remove_header(msg,"XI");
  send_sip_response(sip->gc, msg, 200, "OK", NULL);
  found = TRUE;
 }

 if(!found)
 {
  purple_debug_info("fetion", "got unknown mime-type/n");
  send_sip_response(sip->gc, msg, 415, "Unsupported media type", NULL);
 }
}


api接口代码如下:# !/usr/bin/env python # -*- coding: utf-8 -*- # 版权信息:华为技术有限公司,版本所有(C) 2025-2099 """ 功 能:供应链 SCM Agent -- interface/roles/order/supply_manager_assistant_app_langgraph-供应经理助手fastapi接口 """ import json import os import sys from time import time import uvicorn from aipaas.logger_factory import logger from fastapi import Request, FastAPI from fastapi.responses import StreamingResponse from langchain_core.messages import HumanMessage from infrastructure.auth_fastapi import SoaAuth from infrastructure.langfuse_telemetery.trace_langgraph import create_langfuse_callback from scm_agent.src.application.roles.order.supply_manager_assistant_graph.main_graph.graph import \ get_supply_manager_assistant_main_graph from scm_agent.src.common.agent_name import AgentName from scm_agent.src.common.constants import Status from scm_agent.src.infrastructures.agent_config_download.config_download import dowload_agent_config_langgraph from scm_agent.src.infrastructures.agent_config_read.read_yaml_config import read_project_config from scm_agent.src.infrastructures.agent_state.agent_state_helper import get_redis_key from scm_agent.src.infrastructures.app_postprocess.output_process import str_to_output_json from scm_agent.src.infrastructures.app_postprocess.output_process import str_to_stream_output_langgraph from scm_agent.src.infrastructures.memory.postgre_checkpointer.postgre_checkpointer import FrameworkAdapter from scm_agent.src.infrastructures.read_config import app_config from scm_agent.src.interface.input_output_parameters import SupplyManagerAssistantChatInput, ConfigUpdateInput, \ ConfigUpdateOutput os.environ['NO_PROXY'] = '127.0.0.1,localhost' fastapi_app = FastAPI(lifespan=FrameworkAdapter.lifespan_wrapper) env = os.environ.get("env") soa = SoaAuth(env_type=env, skip_soa_auth=False, only_check_token=True) agent_name = AgentName.SupplyManagerAssistantLangGraph project_config = {} common_prompt_config = {} def preload_agent_config(name): """ 预加载agent配置 Args: name: 
助手/技能名称,app_config中配置 """ global project_config global common_prompt_config dowload_agent_config_langgraph(name) # 读取项目配置文件 project_config = read_project_config(agent_name, f"{agent_name}.yaml").get(env) # 读取公共Prompt配置文件 common_prompt_config = read_project_config("common", "prompt_config.yaml") if 'PYCHARM_HOSTED' in os.environ or 'PYCHARM_DEBUG_PROCESS' in os.environ: logger.info("debug模式请在此打断点") # raise Exception("debug模式请在此打断点,注释此行即可") preload_agent_config(agent_name) async def generator(graph, supply_manager_assistant_chat_input, initial_state, config): yield str_to_stream_output_langgraph('<think>') yield str_to_stream_output_langgraph('**问题**') question = supply_manager_assistant_chat_input.question.strip() yield str_to_stream_output_langgraph('\n' + question) async for chunk in graph.astream( input=initial_state, stream_mode="custom", config=config, subgraphs=True ): yield str_to_stream_output_langgraph(chunk[1]) @fastapi_app.post('/roles/supply_manager_assistant_chat_langgraph') @soa.required async def supply_manager_assistant_chat(request: Request, supply_manager_assistant_chat_input: SupplyManagerAssistantChatInput): strategy = request.app.state.presist_param["checkpointer"] thread_id = get_redis_key(supply_manager_assistant_chat_input) user_id = supply_manager_assistant_chat_input.user_id session_id = supply_manager_assistant_chat_input.session_id langfuse_callback = create_langfuse_callback( user_id=user_id, session_id=session_id, trace_name=AgentName.SupplyManagerAssistantLangGraph ) config = { "configurable": {"thread_id": thread_id}, "metadata": { "user_id": user_id, "project_config": project_config, "common_prompt_config": common_prompt_config, "ctx_params": supply_manager_assistant_chat_input.ctxParams }, "callbacks": [langfuse_callback], } try: async with strategy as checkpointer: graph = get_supply_manager_assistant_main_graph(checkpointer) initial_state = {"messages": [HumanMessage(content=supply_manager_assistant_chat_input.question.strip())]} 
return StreamingResponse( generator(graph, supply_manager_assistant_chat_input, initial_state, config), media_type="text/event-stream", headers={"Cache-Control": "no-cache", "Connection": "keep-alive"} ) except Exception as e: logger.error(f"[supply_manager_assistant_chat] 执行失败: {e}", exc_info=True) return str_to_output_json(f'处理异常,异常原因:{e}') @fastapi_app.post('/config_update') @soa.required async def config_update(request: Request, config_update_input: ConfigUpdateInput): start_time = time() config_update_output = ConfigUpdateOutput() try: preload_agent_config(config_update_input.agent_name) config_update_output.status = Status.SUCCESS except Exception as e: config_update_output.error_message = "[SCM-Agent] Update config error." # 耗时统计 config_update_output.elapsed_time = str(time() - start_time) return config_update_output.to_dict() @fastapi_app.get('/health') @soa.required async def health(request: Request, ): return json.dumps({"success": True}, ensure_ascii=False) if __name__ == '__main__': uvicorn.run("supply_manager_assistant_app_langgraph:fastapi_app", host=app_config.get('host', '0.0.0.0'), # port=app_config.get('port', 8080), loop="asyncio", port=8080) # workers=app_config.get('workers', 4))
09-25
接口代码如下:# !/usr/bin/env python # -*- coding: utf-8 -*- # 版权信息:华为技术有限公司,版本所有(C) 2025-2099 """ 功 能:供应链 SCM Agent -- interface/roles/order/supply_manager_assistant_app_langgraph-供应经理助手fastapi接口 """ import json import os import sys from time import time import uvicorn from aipaas.logger_factory import logger from fastapi import Request, FastAPI from fastapi.responses import StreamingResponse from langchain_core.messages import HumanMessage from infrastructure.auth_fastapi import SoaAuth from infrastructure.langfuse_telemetery.trace_langgraph import create_langfuse_callback from scm_agent.src.application.roles.order.supply_manager_assistant_graph.main_graph.graph import \ get_supply_manager_assistant_main_graph from scm_agent.src.common.agent_name import AgentName from scm_agent.src.common.constants import Status from scm_agent.src.infrastructures.agent_config_download.config_download import dowload_agent_config_langgraph from scm_agent.src.infrastructures.agent_config_read.read_yaml_config import read_project_config from scm_agent.src.infrastructures.agent_state.agent_state_helper import get_redis_key from scm_agent.src.infrastructures.app_postprocess.output_process import str_to_output_json from scm_agent.src.infrastructures.app_postprocess.output_process import str_to_stream_output_langgraph from scm_agent.src.infrastructures.memory.postgre_checkpointer.postgre_checkpointer import FrameworkAdapter from scm_agent.src.infrastructures.read_config import app_config from scm_agent.src.interface.input_output_parameters import SupplyManagerAssistantChatInput, ConfigUpdateInput, \ ConfigUpdateOutput os.environ['NO_PROXY'] = '127.0.0.1,localhost' fastapi_app = FastAPI(lifespan=FrameworkAdapter.lifespan_wrapper) env = os.environ.get("env") soa = SoaAuth(env_type=env, skip_soa_auth=False, only_check_token=True) agent_name = AgentName.SupplyManagerAssistantLangGraph project_config = {} common_prompt_config = {} def preload_agent_config(name): """ 预加载agent配置 Args: name: 
助手/技能名称,app_config中配置 """ global project_config global common_prompt_config dowload_agent_config_langgraph(name) # 读取项目配置文件 project_config = read_project_config(agent_name, f"{agent_name}.yaml").get(env) # 读取公共Prompt配置文件 common_prompt_config = read_project_config("common", "prompt_config.yaml") if 'PYCHARM_HOSTED' in os.environ or 'PYCHARM_DEBUG_PROCESS' in os.environ: logger.info("debug模式请在此打断点") # raise Exception("debug模式请在此打断点,注释此行即可") preload_agent_config(agent_name) async def generator(graph, supply_manager_assistant_chat_input, initial_state, config): yield str_to_stream_output_langgraph('<think>') yield str_to_stream_output_langgraph('**问题**') question = supply_manager_assistant_chat_input.question.strip() yield str_to_stream_output_langgraph('\n' + question) async for chunk in graph.astream( input=initial_state, stream_mode="custom", config=config, subgraphs=True ): yield str_to_stream_output_langgraph(chunk[1]) @fastapi_app.post('/roles/supply_manager_assistant_chat_langgraph') @soa.required async def supply_manager_assistant_chat(request: Request, supply_manager_assistant_chat_input: SupplyManagerAssistantChatInput): # checkpointer = presit_param.get("checkpointer") checkpointer = request.app.state.presist_param.get("checkpointer") thread_id = get_redis_key(supply_manager_assistant_chat_input) user_id = supply_manager_assistant_chat_input.user_id session_id = supply_manager_assistant_chat_input.session_id langfuse_callback = create_langfuse_callback(user_id=user_id, session_id=session_id, trace_name=AgentName.SupplyManagerAssistantLangGraph) config = {"configurable": {"thread_id": thread_id}, "metadata": {"user_id": supply_manager_assistant_chat_input.user_id, "project_config": project_config, "common_prompt_config": common_prompt_config, "ctx_params": supply_manager_assistant_chat_input.ctxParams}, "callbacks": [langfuse_callback], } try: graph = get_supply_manager_assistant_main_graph(checkpointer) initial_state = {"messages": 
[HumanMessage(content=supply_manager_assistant_chat_input.question)]} return StreamingResponse( generator(graph, supply_manager_assistant_chat_input, initial_state, config), media_type="text/event-stream", headers={"Cache-Control": "no-cache", "Connection": "keep-alive"} ) except Exception as e: return str_to_output_json(f'处理异常,异常原因: {e}') @fastapi_app.post('/config_update') @soa.required async def config_update(request: Request, config_update_input: ConfigUpdateInput): start_time = time() config_update_output = ConfigUpdateOutput() try: preload_agent_config(config_update_input.agent_name) config_update_output.status = Status.SUCCESS except Exception as e: config_update_output.error_message = "[SCM-Agent] Update config error." # 耗时统计 config_update_output.elapsed_time = str(time() - start_time) return config_update_output.to_dict() @fastapi_app.get('/health') @soa.required async def health(request: Request, ): return json.dumps({"success": True}, ensure_ascii=False) if __name__ == '__main__': uvicorn.run("supply_manager_assistant_app_langgraph:fastapi_app", host=app_config.get('host', '0.0.0.0'), # port=app_config.get('port', 8080), loop="asyncio", port=8080) # workers=app_config.get('workers', 4)) postgre_checkpointer.py目前没问题了,但是现在接口代码报错如下:ERROR: Exception in ASGI application + Exception Group Traceback (most recent call last): | File "D:\code\iscp-app-aigc-ai\.venv\Lib\site-packages\uvicorn\protocols\http\httptools_impl.py", line 426, in run_asgi | result = await app( # type: ignore[func-returns-value] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | File "D:\code\iscp-app-aigc-ai\.venv\Lib\site-packages\uvicorn\middleware\proxy_headers.py", line 84, in __call__ | return await self.app(scope, receive, send) | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | File "D:\code\iscp-app-aigc-ai\.venv\Lib\site-packages\fastapi\applications.py", line 1054, in __call__ | await super().__call__(scope, receive, send) | File 
"D:\code\iscp-app-aigc-ai\.venv\Lib\site-packages\starlette\applications.py", line 123, in __call__ | await self.middleware_stack(scope, receive, send) | File "D:\code\iscp-app-aigc-ai\.venv\Lib\site-packages\starlette\middleware\errors.py", line 186, in __call__ | raise exc | File "D:\code\iscp-app-aigc-ai\.venv\Lib\site-packages\starlette\middleware\errors.py", line 164, in __call__ | await self.app(scope, receive, _send) | File "D:\code\iscp-app-aigc-ai\.venv\Lib\site-packages\starlette\middleware\exceptions.py", line 65, in __call__ | await wrap_app_handling_exceptions(self.app, conn)(scope, receive, send) | File "D:\code\iscp-app-aigc-ai\.venv\Lib\site-packages\starlette\_exception_handler.py", line 64, in wrapped_app | raise exc | File "D:\code\iscp-app-aigc-ai\.venv\Lib\site-packages\starlette\_exception_handler.py", line 53, in wrapped_app | await app(scope, receive, sender) | File "D:\code\iscp-app-aigc-ai\.venv\Lib\site-packages\starlette\routing.py", line 756, in __call__ | await self.middleware_stack(scope, receive, send) | File "D:\code\iscp-app-aigc-ai\.venv\Lib\site-packages\starlette\routing.py", line 776, in app | await route.handle(scope, receive, send) | File "D:\code\iscp-app-aigc-ai\.venv\Lib\site-packages\starlette\routing.py", line 297, in handle | await self.app(scope, receive, send) | File "D:\code\iscp-app-aigc-ai\.venv\Lib\site-packages\starlette\routing.py", line 77, in app | await wrap_app_handling_exceptions(app, request)(scope, receive, send) | File "D:\code\iscp-app-aigc-ai\.venv\Lib\site-packages\starlette\_exception_handler.py", line 64, in wrapped_app | raise exc | File "D:\code\iscp-app-aigc-ai\.venv\Lib\site-packages\starlette\_exception_handler.py", line 53, in wrapped_app | await app(scope, receive, sender) | File "D:\code\iscp-app-aigc-ai\.venv\Lib\site-packages\starlette\routing.py", line 75, in app | await response(scope, receive, send) | File "D:\code\iscp-app-aigc-ai\.venv\Lib\site-packages\starlette\responses.py", line 
258, in __call__ | async with anyio.create_task_group() as task_group: | File "D:\code\iscp-app-aigc-ai\.venv\Lib\site-packages\anyio\_backends\_asyncio.py", line 772, in __aexit__ | raise BaseExceptionGroup( | ExceptionGroup: unhandled errors in a TaskGroup (1 sub-exception) +-+---------------- 1 ---------------- | Traceback (most recent call last): | File "D:\code\iscp-app-aigc-ai\.venv\Lib\site-packages\starlette\responses.py", line 261, in wrap | await func() | File "D:\code\iscp-app-aigc-ai\.venv\Lib\site-packages\starlette\responses.py", line 250, in stream_response | async for chunk in self.body_iterator: | File "D:\code\iscp-app-aigc-ai\scm_agent\src\interface\roles\order\supply_manager_assistant_app_langgraph.py", line 71, in generator | async for chunk in graph.astream( | File "D:\code\iscp-app-aigc-ai\.venv\Lib\site-packages\langgraph\pregel\main.py", line 2883, in astream | async with AsyncPregelLoop( | File "D:\code\iscp-app-aigc-ai\.venv\Lib\site-packages\langgraph\pregel\_loop.py", line 1186, in __aenter__ | saved = await self.checkpointer.aget_tuple(self.checkpoint_config) | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | File "D:\code\iscp-app-aigc-ai\.venv\Lib\site-packages\langgraph\checkpoint\postgres\aio.py", line 192, in aget_tuple | async with self._cursor() as cur: | File "C:\Users\zwx1453293\AppData\Local\Programs\Python\Python311\Lib\contextlib.py", line 204, in __aenter__ | return await anext(self.gen) | ^^^^^^^^^^^^^^^^^^^^^ | File "D:\code\iscp-app-aigc-ai\.venv\Lib\site-packages\langgraph\checkpoint\postgres\aio.py", line 388, in _cursor | async with conn.cursor(binary=True, row_factory=dict_row) as cur: | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | File "D:\code\iscp-app-aigc-ai\.venv\Lib\site-packages\psycopg\connection_async.py", line 256, in cursor | self._check_connection_ok() | File "D:\code\iscp-app-aigc-ai\.venv\Lib\site-packages\psycopg\_connection_base.py", line 528, in _check_connection_ok | raise 
e.OperationalError("the connection is closed") | psycopg.OperationalError: the connection is closed +------------------------------------ 之前接口代码是没问题的
最新发布
09-25
评论
成就一亿技术人!
拼手气红包6.0元
还能输入1000个字符
 
红包 添加红包
表情包 插入表情
 条评论被折叠 查看
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值