NSDictionary write to url, error

This post looks at a nil error that comes up when writing an NSDictionary to a URL from Swift, and how to resolve it. The values of an NSDictionary (in Objective-C or Swift) can never be nil, whereas a Swift Dictionary can hold nil values. Cleaning out the nil data, or substituting a placeholder for it, fixes the error.

Writing the dictionary out with NSDictionary's write(to:) fails with a nil error, and the message points at exactly the problem:

An NSDictionary, whether used from Swift or Objective-C, cannot contain nil values.

A Swift Dictionary can, as long as the value type is Optional.
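
A minimal sketch of the difference, with hypothetical keys and values. The Swift Dictionary compiles and runs fine; bridging it to NSDictionary boxes the nil as NSNull (SE-0140 bridging behavior), which is not a valid property-list type, so the throwing write(to:) (available iOS 11 / macOS 10.13 and later) fails:

```swift
import Foundation

// A Swift Dictionary holds nil happily when the value type is Optional.
let swiftDict: [String: String?] = ["name": "Tom", "nickname": nil]

// Bridging to NSDictionary turns the nil into NSNull, which property-list
// serialization rejects, so this write throws.
let url = FileManager.default.temporaryDirectory
    .appendingPathComponent("demo.plist")
do {
    try (swiftDict as NSDictionary).write(to: url)
} catch {
    print(error) // reports the invalid (nil/NSNull) value
}
```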


The fix is to clean the data: either remove the nil entries, or substitute a placeholder value for them.

Converting nil to "" works:
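
A sketch of both options (keys and values are hypothetical): mapValues substitutes an empty string for every nil, while compactMapValues drops the nil entries entirely. Either way, the bridged NSDictionary ends up containing only property-list types and the write succeeds.

```swift
import Foundation

let raw: [String: String?] = ["title": "demo", "subtitle": nil]

// Option 1: nil -> ""
let substituted: [String: String] = raw.mapValues { $0 ?? "" }

// Option 2: drop the nil entries altogether
let cleaned: [String: String] = raw.compactMapValues { $0 }

let url = FileManager.default.temporaryDirectory
    .appendingPathComponent("config.plist")
do {
    try (substituted as NSDictionary).write(to: url) // succeeds: no nil left
} catch {
    print(error)
}
```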
