Binary_interval_method

本文介绍了一种二分法(区间对分)迭代算法,通过不断缩小区间逼近方程 x³ + x − 4 = 0 在给定范围内的实根,适用于数学和计算机科学领域。
#include<stdio.h>
#define N 100
/* Evaluate f(p) = p^3 + p - 4, the function whose root the
 * bisection loop in main() is searching for. */
float cal(float p)
{
    return p * p * p + p - 4;
}
/*
 * Bisection search for a root of cal() on the interval [1, 3].
 *
 * Fixes vs. the original:
 *  - `float main()` is ill-formed; main must return int.
 *  - The early-exit paths returned the float root itself as the process
 *    exit status (truncated and meaningless); they now print the root
 *    and return 0 like the main loop does.
 *  - `while (n < N || n == N)` rewritten as the idiomatic `n <= N`.
 *  - The failure path now returns a nonzero exit code.
 */
int main(void)
{
    const float tol = 0.001f; /* stop when the half-interval is below this */
    int n = 0;                /* iteration counter, capped at N */
    float a = 1;              /* left endpoint:  cal(1) = -2 < 0 */
    float b = 3;              /* right endpoint: cal(3) = 26 > 0 */

    float fa = cal(a);
    if (fa == 0) {
        /* Left endpoint is already an exact root. */
        printf("x: %f  N:%d ok \n", a, n);
        return 0;
    }
    if (cal(b) == 0) {
        /* Right endpoint is already an exact root. */
        printf("x: %f  N:%d ok \n", b, n);
        return 0;
    }

    while (n <= N) {
        float p = (a + b) / 2.0f;  /* midpoint of the current bracket */
        float fp = cal(p);
        n++;

        /* Converged: exact zero, or bracket shrunk below tolerance. */
        if (fp == 0 || (b - a) / 2.0f < tol) {
            printf("x: %f  N:%d ok \n", p, n);
            return 0;
        }

        if (fa * fp > 0) {
            /* f(a) and f(p) share a sign: the root lies in [p, b]. */
            a = p;
            fa = fp;
        } else {
            /* Sign change between a and p: the root lies in [a, p]. */
            b = p;
        }
    }

    /* No convergence within N iterations. */
    printf("error\n");
    return 1;
}
这段代码有问题吗: /* * DaecBox.cpp * * Created on: Dec 2, 2023 * Author: careray */ #include "DaecBox.h" #include "CommonMethod.h" #include "Singleton.h" #include "CommonMethod.h" #include "Driver/det/dttapi.h" #include <zmq.h> #include <sys/timerfd.h> #include <fstream> #include <safeclib/safe_str_lib.h> using namespace std; #define CHECK_BOX_INTERVAL 6 // 6 seconds #define MAX_PARING_TIME 180 // 180 seconds == 3 minutes #define SAVE_PAIRED_BOX_FILE "/usr/local/bin/conf/paired_box.txt" #define MAC_SOURCE_ADDR 0x0002 #define MAC_TARGET_ADDR 0x0003 CDaecBox *GetDaecBoxInstance() { return CSingleton<CDaecBox>::GetInstance(); } CDaecBox::CDaecBox() { // TODO Auto-generated constructor stub m_nPairedBoxId = 0xFFFF; m_nCheckBoxTimer = -1; m_nNeedPairingCount = MAX_PARING_TIME / CHECK_BOX_INTERVAL; m_nNotifyFlag.store(0); ifstream oBoxFile; oBoxFile.open(SAVE_PAIRED_BOX_FILE, ios::binary); if(oBoxFile.good()) { oBoxFile.seekg(0, ios::end); int nFileSize = oBoxFile.tellg(); oBoxFile.seekg(0, ios::beg); char szBoxId[8] = {0}; oBoxFile.read(szBoxId, nFileSize); m_nPairingBoxId = static_cast<u16>(atoi(szBoxId)); } else { m_nPairingBoxId = 0; } oBoxFile.close(); printf("read pairing box id : %d\n", m_nPairingBoxId); Start(); } CDaecBox::~CDaecBox() { // TODO Auto-generated destructor stub Stop(); m_nPairedBoxId = 0xFFFF; m_nPairingBoxId = 0; m_nNeedPairingCount = 0; } int CDaecBox::SetNotifyFlag(int nNotifyFlag) { m_nNotifyFlag = nNotifyFlag; m_nNeedPairingCount = MAX_PARING_TIME / CHECK_BOX_INTERVAL; return 0; } //Make MachineID as detectorID, and write into DAEC action register. 
int CDaecBox::SetDetrId(const char *szMachineId, int nIdMaxLen) { // u16 nDetrId = MAC_SOURCE_ADDR; // if (0 != m_nPairingBoxId) // { // nDetrId = DoUsb16Crc(reinterpret_cast<const unsigned char *>(szMachineId), strnlen_s(szMachineId, nIdMaxLen)); // } u16 nDetrId = DoUsb16Crc(reinterpret_cast<const unsigned char *>(szMachineId), strnlen_s(szMachineId, nIdMaxLen)); printf("Convert machine id %s to detector id %04x\n", szMachineId, nDetrId); return set_detr_id_for_daec_box(nDetrId); } unsigned short CDaecBox::GetPairedBoxId() { return m_nPairedBoxId; } void CDaecBox::SetEventCallbackFun(CDetrRuntimeParams *pDetrRuntimeParams) { m_pDetrRuntimeParams = pDetrRuntimeParams; } void CDaecBox::EnableTimer() { DisableTimer(); if (-1 != m_nCheckBoxTimer) { struct itimerspec oSysStatInterval; oSysStatInterval.it_value.tv_sec = CHECK_BOX_INTERVAL; oSysStatInterval.it_value.tv_nsec = 0; oSysStatInterval.it_interval.tv_sec = oSysStatInterval.it_value.tv_sec; oSysStatInterval.it_interval.tv_nsec = oSysStatInterval.it_value.tv_nsec; timerfd_settime(m_nCheckBoxTimer, 0, &oSysStatInterval, NULL); printf("enable box timer\n"); } } void CDaecBox::DisableTimer() { if (-1 != m_nCheckBoxTimer) { struct itimerspec oSysStatInterval; oSysStatInterval.it_value.tv_sec = 0; oSysStatInterval.it_value.tv_nsec = 0; oSysStatInterval.it_interval.tv_sec = oSysStatInterval.it_value.tv_sec; oSysStatInterval.it_interval.tv_nsec = oSysStatInterval.it_value.tv_nsec; timerfd_settime(m_nCheckBoxTimer, 0, &oSysStatInterval, NULL); printf("disable box timer\n"); } m_nPairedBoxId = 0xFFFF; } int CDaecBox::TryPairingDaecBox() { int nRet = pairing_daec_box(&m_nPairingBoxId, 100); printf("pairing_daec_box result = %d, pairing_daec_box = %u\n", nRet, m_nPairingBoxId); if (0 == nRet) { m_nNeedPairingCount = 0; string strPairedId = to_string(m_nPairingBoxId); ofstream oBoxFile; oBoxFile.open(SAVE_PAIRED_BOX_FILE, ios::binary | ios::trunc); if(oBoxFile.good()) { oBoxFile.write(strPairedId.c_str(), 
strPairedId.size()); } oBoxFile.close(); __LOG_ERROR("save pairing box id : " + to_string(m_nPairingBoxId)); } else { --m_nNeedPairingCount; } return nRet; } int CDaecBox::StartPairingBox(unsigned short nDaecBoxId) { m_nNotifyFlag = 1; m_nNeedPairingCount = MAX_PARING_TIME / CHECK_BOX_INTERVAL; m_nPairingBoxId = nDaecBoxId; return 0; } int CDaecBox::StopPairingBox() { m_nNotifyFlag = 0; m_nNeedPairingCount = 0; return 0; } int CDaecBox::GetBoxParingProgress(int *pProgress) { // in pairing if (m_nNeedPairingCount > 0) { *pProgress = -1; } // paired successfully else if (0 != m_nPairedBoxId && 0xFFFF != m_nPairedBoxId) { *pProgress = 0; } // paired failed else { *pProgress = 1; } return 0; } int CDaecBox::Run() { const int RECV_BUFF_SIZE = 16; char szRecvBuffer[RECV_BUFF_SIZE] = {0}; m_nCheckBoxTimer = timerfd_create(CLOCK_MONOTONIC, TFD_NONBLOCK | TFD_CLOEXEC); DisableTimer(); CrEvent oEvent; memset(&oEvent, 0, sizeof(oEvent)); oEvent.detr_index = 1; zmq_pollitem_t m_oPollItems[1]; m_oPollItems[0].socket = NULL; m_oPollItems[0].fd = m_nCheckBoxTimer; m_oPollItems[0].events = ZMQ_POLLIN; m_oPollItems[0].revents = 0; printf("DAEC box thread is running...\n"); int nRecvLen = 0; while (m_isRunning) { if(zmq_poll(m_oPollItems, 1, -1) == -1) { perror("zmq_poll failed"); continue; } if (m_oPollItems[0].revents & ZMQ_POLLIN) { m_oPollItems[0].revents = 0; nRecvLen = read(m_nCheckBoxTimer, szRecvBuffer, RECV_BUFF_SIZE); if (-1 == nRecvLen) { printf("read box timer failed!\n"); continue; } //--------------- DAEC box status -------------// int nCheckRet = -1; if (m_nNeedPairingCount > 0) { nCheckRet = TryPairingDaecBox(); } else if (0 == m_nPairingBoxId) { nCheckRet = check_daec_box_connection(MAC_TARGET_ADDR, 100); printf("check_daec_box_connection result = %d, pairing_daec_box = %u\n", nCheckRet, MAC_TARGET_ADDR); } else { nCheckRet = check_daec_box_connection(m_nPairingBoxId, 100); printf("check_daec_box_connection result = %d, pairing_daec_box = %u\n", nCheckRet, 
m_nPairingBoxId); } if (0 == nCheckRet) { m_nPairedBoxId = m_nPairingBoxId; } else if (m_nNeedPairingCount > 0) { continue; } else if (0 == m_nPairedBoxId) { continue; } else { m_nPairedBoxId = 0; } if (0 != m_nNotifyFlag) { oEvent.event_id = CR_EVT_DAEC_BOX_INFO; oEvent.data = &m_nPairedBoxId; oEvent.header_len = 0; m_pDetrRuntimeParams->ProcessCallback(&oEvent); m_nNotifyFlag = 0; } } } DisableTimer(); if (m_nCheckBoxTimer != -1) { close(m_nCheckBoxTimer); m_nCheckBoxTimer = -1; } m_isRunning = false; return 0; }
06-25
npp_common dimensions: FrozenMappingWarningOnValuesAccess({'time': 648, 'bnds': 2, 'lat': 1, 'lon': 1}) npp_common['npp'].shape: (648, 1, 1) soil_common dimensions: FrozenMappingWarningOnValuesAccess({'time': 648, 'lat': 1, 'lon': 1, 'hist_interval': 2}) soil_common['fLitterSoil'].shape: (648, 1, 1) Traceback (most recent call last): File "C:/Users/ASUS/AppData/Local/Programs/Python/Python313/DC-run/8.27程序包/耦合运算.py", line 34, in <module> difference = npp_common['npp'] - soil_common['fLitterSoil'] File "C:\Users\ASUS\AppData\Local\Programs\Python\Python313\Lib\site-packages\xarray\core\_typed_ops.py", line 540, in __sub__ return self._binary_op(other, operator.sub) File "C:\Users\ASUS\AppData\Local\Programs\Python\Python313\Lib\site-packages\xarray\core\dataarray.py", line 4845, in _binary_op f(self.variable, other_variable_or_arraylike) File "C:\Users\ASUS\AppData\Local\Programs\Python\Python313\Lib\site-packages\xarray\core\_typed_ops.py", line 920, in __sub__ return self._binary_op(other, operator.sub) File "C:\Users\ASUS\AppData\Local\Programs\Python\Python313\Lib\site-packages\xarray\core\variable.py", line 2358, in _binary_op self_data, other_data, dims = _broadcast_compat_data(self, other) File "C:\Users\ASUS\AppData\Local\Programs\Python\Python313\Lib\site-packages\xarray\core\variable.py", line 2961, in _broadcast_compat_data if all(hasattr(other, attr) for attr in ["dims", "data", "shape", "encoding"]): File "C:\Users\ASUS\AppData\Local\Programs\Python\Python313\Lib\site-packages\xarray\core\variable.py", line 2961, in <genexpr> if all(hasattr(other, attr) for attr in ["dims", "data", "shape", "encoding"]): File "C:\Users\ASUS\AppData\Local\Programs\Python\Python313\Lib\site-packages\xarray\core\variable.py", line 436, in data duck_array = self._data.get_duck_array() File "C:\Users\ASUS\AppData\Local\Programs\Python\Python313\Lib\site-packages\xarray\core\indexing.py", line 843, in get_duck_array self._ensure_cached() File 
"C:\Users\ASUS\AppData\Local\Programs\Python\Python313\Lib\site-packages\xarray\core\indexing.py", line 840, in _ensure_cached self.array = as_indexable(self.array.get_duck_array()) File "C:\Users\ASUS\AppData\Local\Programs\Python\Python313\Lib\site-packages\xarray\core\indexing.py", line 797, in get_duck_array return self.array.get_duck_array() File "C:\Users\ASUS\AppData\Local\Programs\Python\Python313\Lib\site-packages\xarray\coding\common.py", line 80, in get_duck_array return self.func(self.array.get_duck_array()) File "C:\Users\ASUS\AppData\Local\Programs\Python\Python313\Lib\site-packages\xarray\core\indexing.py", line 652, in get_duck_array array = self.array[self.key] File "C:\Users\ASUS\AppData\Local\Programs\Python\Python313\Lib\site-packages\xarray\backends\netCDF4_.py", line 108, in __getitem__ return indexing.explicit_indexing_adapter( File "C:\Users\ASUS\AppData\Local\Programs\Python\Python313\Lib\site-packages\xarray\core\indexing.py", line 1021, in explicit_indexing_adapter result = raw_indexing_method(raw_key.tuple) File "C:\Users\ASUS\AppData\Local\Programs\Python\Python313\Lib\site-packages\xarray\backends\netCDF4_.py", line 121, in _getitem array = getitem(original_array, key) File "src\\netCDF4\\_netCDF4.pyx", line 5055, in netCDF4._netCDF4.Variable.__getitem__ File "C:\Users\ASUS\AppData\Local\Programs\Python\Python313\Lib\site-packages\netCDF4\utils.py", line 429, in _StartCountStride start[...,i] = np.apply_along_axis(lambda x: e*x, i, np.ones(sdim[:-1])) File "C:\Users\ASUS\AppData\Local\Programs\Python\Python313\Lib\site-packages\numpy\lib\_shape_base_impl.py", line 390, in apply_along_axis raise ValueError( ValueError: Cannot apply_along_axis when any iteration dimensions are 0
09-15
"""Compute evaluation metrics for a single experiment."""
__author__ = "Paul Bergmann, David Sattlegger"
__copyright__ = "2021, MVTec Software GmbH"

import json
from os import listdir, makedirs, path

import argparse
import numpy as np
from PIL import Image
from tqdm import tqdm

import generic_util as util
from pro_curve_util import compute_pro
from roc_curve_util import compute_classification_roc


def parse_user_arguments():
    """Build the CLI parser for evaluating a method on MVTec AD.

    Returns:
        Parsed user arguments.
    """
    parser = argparse.ArgumentParser(description="""Parse user arguments.""")

    parser.add_argument(
        '--anomaly_maps_dir', required=True,
        help="""Path to the directory that contains the anomaly maps of the
                evaluated method.""")
    parser.add_argument(
        '--dataset_base_dir', required=True,
        help="""Path to the directory that contains the dataset images of the
                MVTec AD dataset.""")
    parser.add_argument(
        '--output_dir',
        help="""Path to the directory to store evaluation results. If no
                output directory is specified, the results are not written to
                drive.""")
    parser.add_argument(
        '--pro_integration_limit', type=float, default=0.3,
        help="""Integration limit to compute the area under the PRO curve.
                Must lie within the interval of (0.0, 1.0].""")
    parser.add_argument(
        '--evaluated_objects', nargs='+',
        help="""List of objects to be evaluated. By default, all dataset
                objects will be evaluated.""",
        choices=util.OBJECT_NAMES, default=util.OBJECT_NAMES)

    args = parser.parse_args()

    # The PRO integration limit must lie in (0.0, 1.0].
    assert 0.0 < args.pro_integration_limit <= 1.0

    return args


def parse_dataset_files(object_name, dataset_base_dir, anomaly_maps_dir):
    """Collect ground-truth and anomaly-map filenames for one MVTec object.

    Args:
        object_name: Name of the dataset object.
        dataset_base_dir: Base directory of the MVTec AD dataset.
        anomaly_maps_dir: Base directory where anomaly maps are located.
    """
    assert object_name in util.OBJECT_NAMES

    # Ground-truth filenames and the matching anomaly-map filenames,
    # kept index-aligned.
    gt_filenames = []
    prediction_filenames = []

    test_dir = path.join(dataset_base_dir, object_name, 'test')
    gt_base_dir = path.join(dataset_base_dir, object_name, 'ground_truth')
    anomaly_maps_base_dir = path.join(anomaly_maps_dir, object_name, 'test')

    for subdir in listdir(str(test_dir)):
        # Skip entries that are not defect-category directories.
        if not subdir.replace('_', '').isalpha():
            continue

        # Stems of all test images (PNG) in this category.
        stems = [path.splitext(entry)[0]
                 for entry in listdir(path.join(test_dir, subdir))
                 if path.splitext(entry)[1] == '.png']

        if subdir != 'good':
            # Defective images have a corresponding "_mask" ground truth.
            gt_filenames += [path.join(gt_base_dir, subdir, stem + '_mask.png')
                             for stem in stems]
        else:
            # Anomaly-free images carry no ground-truth map.
            gt_filenames += [None] * len(stems)

        prediction_filenames += [path.join(anomaly_maps_base_dir, subdir, stem)
                                 for stem in stems]

    print(f"Parsed {len(gt_filenames)} ground truth image files.")

    return gt_filenames, prediction_filenames


def calculate_au_pro_au_roc(gt_filenames, prediction_filenames,
                            integration_limit):
    """Compute AU-PRO (localization) and AU-ROC (image classification).

    Args:
        gt_filenames: Ground-truth image filenames for a single object
            (None entries mark anomaly-free images).
        prediction_filenames: Matching anomaly-map filenames.
        integration_limit: FPR limit for the area under the PRO curve.

    Returns:
        au_pro: Area under the PRO curve up to the integration limit.
        au_roc: Area under the ROC curve.
        pro_curve: PRO curve values for localization (fpr, pro).
        roc_curve: ROC curve values for image-level classification (fpr, tpr).
    """
    ground_truth = []
    predictions = []

    print("Read ground truth files and corresponding predictions...")
    for gt_name, pred_name in tqdm(zip(gt_filenames, prediction_filenames),
                                   total=len(gt_filenames)):
        prediction = util.read_tiff(pred_name)
        predictions.append(prediction)
        if gt_name is None:
            # Anomaly-free image: use an all-zero ground-truth map.
            ground_truth.append(np.zeros(prediction.shape))
        else:
            ground_truth.append(np.asarray(Image.open(gt_name)))

    # PRO curve and its (normalized) area up to the integration limit.
    pro_curve = compute_pro(anomaly_maps=predictions,
                            ground_truth_maps=ground_truth)
    au_pro = util.trapezoid(pro_curve[0], pro_curve[1],
                            x_max=integration_limit)
    au_pro /= integration_limit
    print(f"AU-PRO (FPR limit: {integration_limit}): {au_pro}")

    # Binary image labels: 0 = anomaly free, 1 = anomalous.
    binary_labels = [int(np.any(gt_map > 0)) for gt_map in ground_truth]
    del ground_truth

    # Image-level classification ROC curve and its area.
    roc_curve = compute_classification_roc(anomaly_maps=predictions,
                                           scoring_function=np.max,
                                           ground_truth_labels=binary_labels)
    au_roc = util.trapezoid(roc_curve[0], roc_curve[1])
    print(f"Image-level classification AU-ROC: {au_roc}")

    return au_pro, au_roc, pro_curve, roc_curve


def main():
    """Evaluate a single experiment on the MVTec AD dataset."""
    args = parse_user_arguments()

    # Per-object metrics plus overall means, later dumped as JSON.
    evaluation_dict = dict()
    au_pros = []
    au_rocs = []

    for obj in args.evaluated_objects:
        print(f"=== Evaluate {obj} ===")
        evaluation_dict[obj] = dict()

        gt_filenames, prediction_filenames = parse_dataset_files(
            object_name=obj,
            dataset_base_dir=args.dataset_base_dir,
            anomaly_maps_dir=args.anomaly_maps_dir)

        au_pro, au_roc, pro_curve, roc_curve = calculate_au_pro_au_roc(
            gt_filenames, prediction_filenames, args.pro_integration_limit)

        evaluation_dict[obj]['au_pro'] = au_pro
        evaluation_dict[obj]['classification_au_roc'] = au_roc
        evaluation_dict[obj]['classification_roc_curve_fpr'] = roc_curve[0]
        evaluation_dict[obj]['classification_roc_curve_tpr'] = roc_curve[1]

        au_pros.append(au_pro)
        au_rocs.append(au_roc)
        print('\n')

    evaluation_dict['mean_au_pro'] = np.mean(au_pros).item()
    evaluation_dict['mean_classification_au_roc'] = np.mean(au_rocs).item()

    # Optionally persist the metrics to disk.
    if args.output_dir is not None:
        makedirs(args.output_dir, exist_ok=True)
        with open(path.join(args.output_dir, 'metrics.json'), 'w') as file:
            json.dump(evaluation_dict, file, indent=4)
        print(f"Wrote metrics to {path.join(args.output_dir, 'metrics.json')}")


if __name__ == "__main__":
    main()
05-29
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值