plt_arr2.cpp

  name="google_ads_frame" marginwidth="0" marginheight="0" src="http://pagead2.googlesyndication.com/pagead/ads?client=ca-pub-5572165936844014&dt=1194442938015&lmt=1194190197&format=336x280_as&output=html&correlator=1194442937843&url=file%3A%2F%2F%2FC%3A%2FDocuments%2520and%2520Settings%2Flhh1%2F%E6%A1%8C%E9%9D%A2%2FCLanguage.htm&color_bg=FFFFFF&color_text=000000&color_link=000000&color_url=FFFFFF&color_border=FFFFFF&ad_type=text&ga_vid=583001034.1194442938&ga_sid=1194442938&ga_hid=1942779085&flash=9&u_h=768&u_w=1024&u_ah=740&u_aw=1024&u_cd=32&u_tz=480&u_java=true" frameborder="0" width="336" scrolling="no" height="280" allowtransparency="allowtransparency"> #include <iostream.h>
#include <strstrea.h>

const int size = 5;

class plot {
   int x, y;
 public:
   plot(void);
   plot(int i, int j)           // clamp the coordinates to the range [0, size]
    {
      if(i>size)
         i = size;
      if(i<0)
         i = 0;
      if(j>size)
         j = size;
      if(j<0)
         j = 0;
      x = i;
      y = j;
    }
   friend ostream &operator<<(ostream &stream, plot obj);
};

ostream &operator<<(ostream &stream, plot obj)
 {
   int i, j;

   // Print rows from the top (y == size) down to 0.
   for(j=size; j>=0; j--)
    {
      stream << j;
      if(j==obj.y)
       {
         // Indent to column x, then mark the point.
         for(i=0; i<obj.x; i++)
            stream << "  ";
         stream << '*';
       }
      stream << endl;
    }
   // Print the x-axis labels.
   for(i=0; i<=size; i++)
      stream << " " << i;
   stream << endl;
   return stream;
 }

plot::plot(void)
{
 // Default constructor: read the coordinates interactively.
 cout << "Enter x value: ";
 cin >> this->x;
 cout << "\nEnter y value: ";
 cin >> this->y;
}


int main(void)
 {
   plot a(2,3), b(1,1), c;              // c prompts for its coordinates
   char str[200];
   ostrstream outs(str, sizeof(str));   // formats output into str, in RAM

   cout << "Output using cout:" << endl;
   cout << a << endl << b << endl << c << endl << endl;
   outs << a << b << c << ends;         // ends appends the terminating null
   cout << "Output using in-RAM formatting:" << endl;
   cout << str;
   istrstream ins(str);                 // input stream attached to the same buffer
   return 0;
 }
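
The char-array streams used above (ostrstream / istrstream from <strstream>) have been deprecated since C++98. As a minimal sketch, assuming a standard-conforming compiler and that the plot class and its operator<< defined above are in scope, the same in-RAM formatting and read-back can be written with std::ostringstream and std::istringstream from <sstream>:

// sstream_sketch.cpp -- hypothetical modern equivalent of the in-RAM formatting above
#include <iostream>
#include <sstream>      // std::ostringstream / std::istringstream replace <strstream>
using namespace std;

int main()
 {
   // Assumes the plot class and operator<< shown above are visible here.
   plot a(2,3), b(1,1);

   ostringstream outs;              // buffer grows automatically; no char array or ends needed
   outs << a << b;

   cout << "Output using in-RAM formatting:" << endl;
   cout << outs.str();              // str() returns the accumulated std::string

   istringstream ins(outs.str());   // read the formatted text back, e.g. token by token
   string word;
   ins >> word;                     // first token is "5", the top row label
   return 0;
 }

Unlike ostrstream, ostringstream manages its own storage, so there is no fixed-size buffer to overflow and no need to terminate the text with ends.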
