Downloading an iOS Live Wallpaper (Live Photo) to the Local Device

DownLoadVideoService.h

#import <Foundation/Foundation.h>
#import <AFNetworking/AFNetworking.h>   // AFURLSessionManager; or #import "AFNetworking.h" depending on how AFNetworking is integrated

typedef void (^FinishBlock)(NSString *filePath);
typedef void (^Failed)(void);

NS_ASSUME_NONNULL_BEGIN

@interface DownLoadVideoService : NSObject

/*
 * url            URL of the video file to download
 * directoryPath  directory to save the file into
 * fileName       name to save the file as
 */
+ (void)downloadAudioWithUrl:(NSString *)url
           saveDirectoryPath:(NSString *)directoryPath
                    fileName:(NSString *)fileName
                      finish:(FinishBlock)finishBlock
                      failed:(Failed)failed;

@end

NS_ASSUME_NONNULL_END
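
A minimal call sketch of this API (the URL and file name here are hypothetical placeholders, not values from this project):

NSString *dir = [NSHomeDirectory() stringByAppendingPathComponent:@"Documents"];
[DownLoadVideoService downloadAudioWithUrl:@"https://example.com/wallpaper.mp4"   // hypothetical URL
                         saveDirectoryPath:dir
                                  fileName:@"demo"
                                    finish:^(NSString *filePath) {
                                        NSLog(@"saved to %@", filePath);
                                    } failed:^{
                                        NSLog(@"download failed");
                                    }];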

DownLoadVideoService.m

#import "DownLoadVideoService.h"

@implementation DownLoadVideoService

+ (void)downloadAudioWithUrl:(NSString *)url
           saveDirectoryPath:(NSString *)directoryPath
                    fileName:(NSString *)fileName
                      finish:(FinishBlock)finishBlock
                      failed:(Failed)failed
{
    NSString *file_path = [directoryPath stringByAppendingPathComponent:[NSString stringWithFormat:@"%@.mp4", fileName]];
    NSFileManager *fm = [NSFileManager defaultManager];

    // If the file already exists, return it immediately.
    if ([fm fileExistsAtPath:file_path]) {
        finishBlock(file_path);
    }
    // Otherwise download it.
    else {
        NSURL *URL = [NSURL URLWithString:url];
        NSURLSessionConfiguration *configuration = [NSURLSessionConfiguration defaultSessionConfiguration];
        // AFNetworking 3.x session manager wrapping NSURLSession
        AFURLSessionManager *manager = [[AFURLSessionManager alloc] initWithSessionConfiguration:configuration];
        NSURLRequest *request = [NSURLRequest requestWithURL:URL];

        // Download task
        NSURLSessionDownloadTask *downloadTask = [manager downloadTaskWithRequest:request progress:^(NSProgress * _Nonnull downloadProgress) {
            // progress updates, unused here
        } destination:^NSURL * _Nonnull(NSURL * _Nonnull targetPath, NSURLResponse * _Nonnull response) {
            // Save to the path the caller asked for, so the fileExistsAtPath: check above
            // finds the file next time (the original saved under response.suggestedFilename instead).
            return [NSURL fileURLWithPath:file_path];
        } completionHandler:^(NSURLResponse * _Nonnull response, NSURL * _Nullable filePath, NSError * _Nullable error) {
            if (error || filePath == nil) {
                if (failed) {
                    failed();
                }
                return;
            }
            // filePath is the local location of the downloaded file.
            finishBlock(filePath.path);
        }];
        [downloadTask resume];
    }
}

@end

ZYLivePhotoTool.h

#import <UIKit/UIKit.h>
#import <AVFoundation/AVFoundation.h>
#import <Photos/Photos.h>

@interface ZYLivePhotoTool : NSObject

+ (instancetype)shareTool;

// Cut the video at videoPath to the [start, end] range and return it as an AVAsset.
- (AVAsset *)cutVideoWithPath:(NSString *)videoPath startTime:(NSTimeInterval)start endTime:(NSTimeInterval)end;

// Build a PHLivePhoto from the asset: writes a tagged still image to imgPath,
// a tagged video to videoPath, and returns the in-memory live photo.
- (void)generatorLivePhotoWithAsset:(AVAsset *)asset
                      originImgPath:(NSString *)originImgPath
                   livePhotoImgPath:(NSString *)imgPath
                 livePhotoVideoPath:(NSString *)videoPath
                    handleLivePhoto:(void (^)(PHLivePhoto *livePhoto))handle;

// Grab a still frame from the asset at the given time and save it to Documents/<imgName>.jpg.
- (void)generatorOriginImgWithAsset:(AVAsset *)asset
                            seconds:(NSTimeInterval)seconds
                          imageName:(NSString *)imgName
                          handleImg:(void (^)(UIImage *originImage, NSString *imagePath, NSError *error))handle;

// Save the paired image/video to the photo library as a live photo.
- (void)saveLivePhotoWithVideoPath:(NSString *)videoPath
                         imagePath:(NSString *)imagePath
                            handle:(void (^)(BOOL success, NSError *error))saveHandle;

@end
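
Pieced together, the typical call order is: cut the video, grab a still frame, generate the live photo, then save it. A minimal sketch, assuming a local video already exists at a hypothetical path (the full flow, including the download and HUD handling, is in the "Core code" section below):

NSString *localVideoPath = [NSHomeDirectory() stringByAppendingPathComponent:@"Documents/demo.mp4"]; // hypothetical input
NSString *cachesDir = NSSearchPathForDirectoriesInDomains(NSCachesDirectory, NSUserDomainMask, YES).firstObject;
NSString *livePhotoImgPath = [cachesDir stringByAppendingPathComponent:@"IMG.JPG"];
NSString *livePhotoVideoPath = [cachesDir stringByAppendingPathComponent:@"IMG.MOV"];

AVAsset *asset = [[ZYLivePhotoTool shareTool] cutVideoWithPath:localVideoPath startTime:0.0 endTime:3.0];
[[ZYLivePhotoTool shareTool] generatorOriginImgWithAsset:asset seconds:1.0 imageName:@"cover" handleImg:^(UIImage *originImage, NSString *imagePath, NSError *error) {
    [[ZYLivePhotoTool shareTool] generatorLivePhotoWithAsset:asset originImgPath:imagePath livePhotoImgPath:livePhotoImgPath livePhotoVideoPath:livePhotoVideoPath handleLivePhoto:^(PHLivePhoto *livePhoto) {
        [[ZYLivePhotoTool shareTool] saveLivePhotoWithVideoPath:livePhotoVideoPath imagePath:livePhotoImgPath handle:^(BOOL success, NSError *saveError) {
            NSLog(@"live photo saved: %d", success);
        }];
    }];
}];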

ZYLivePhotoTool.m

#import "ZYLivePhotoTool.h"

#import

#import

#import

staticNSString*constkFigAppleMakerNote_AssetIdentifier =@"17";

static NSString * constkKeyContentIdentifier =  @"com.apple.quicktime.content.identifier";

static NSString * const kKeyStillImageTime = @"com.apple.quicktime.still-image-time";

staticNSString*constkKeySpaceQuickTimeMetadata =@"mdta";

static ZYLivePhotoTool *tool = nil;

@implementationZYLivePhotoTool

+ (instancetype)shareTool{

    staticdispatch_once_tonceToken;

    dispatch_once(&onceToken, ^{

        tool = [[ZYLivePhotoTool alloc] init];

    });

    return tool;

}

- (void)generatorLivePhotoWithAsset:(AVAsset *)asset
                      originImgPath:(NSString *)originImgPath
                   livePhotoImgPath:(NSString *)imgPath
                 livePhotoVideoPath:(NSString *)videoPath
                    handleLivePhoto:(void (^)(PHLivePhoto *))handle {

    // A Live Photo is a JPEG and a MOV that share the same asset identifier.
    NSString *assetID = [NSUUID UUID].UUIDString;
    dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
        // Write the identifier into the still image's Apple MakerNote...
        [self dealImageWithOriginPath:originImgPath filePath:imgPath assetIdentifier:assetID];
        // ...and into the video's QuickTime metadata.
        [self dealVideoWithWriteFilePath:videoPath AssetIdentifier:assetID asset:asset];
        dispatch_async(dispatch_get_main_queue(), ^{
            AVAssetTrack *track = [asset tracksWithMediaType:AVMediaTypeVideo].firstObject;
            [PHLivePhoto requestLivePhotoWithResourceFileURLs:@[[NSURL fileURLWithPath:videoPath], [NSURL fileURLWithPath:imgPath]]
                                             placeholderImage:nil
                                                   targetSize:track.naturalSize
                                                  contentMode:PHImageContentModeAspectFit
                                                resultHandler:^(PHLivePhoto * _Nullable livePhoto, NSDictionary * _Nonnull info) {
                handle(livePhoto);
            }];
        });
    });
}

- (void)generatorOriginImgWithAsset:(AVAsset *)asset
                            seconds:(NSTimeInterval)seconds
                          imageName:(NSString *)imgName
                          handleImg:(void (^)(UIImage *originImage, NSString *imagePath, NSError *error))handle {
    AVAssetTrack *track = [asset tracksWithMediaType:AVMediaTypeVideo].firstObject;
    AVAssetImageGenerator *generator = [[AVAssetImageGenerator alloc] initWithAsset:asset];
    generator.appliesPreferredTrackTransform = true;
    generator.maximumSize = track.naturalSize;
    NSError *error = nil;
    CGImageRef image = [generator copyCGImageAtTime:CMTimeMakeWithSeconds(seconds, asset.duration.timescale) actualTime:nil error:&error];
    if (image != NULL) {
        // The file gets a .jpg extension, so encode it as JPEG (the original wrote PNG data here).
        NSData *data = UIImageJPEGRepresentation([UIImage imageWithCGImage:image], 1.0);
        NSArray *urls = [[NSFileManager defaultManager] URLsForDirectory:NSDocumentDirectory inDomains:NSUserDomainMask];
        NSURL *url = urls[0];
        NSString *imageURL = [url.path stringByAppendingPathComponent:[NSString stringWithFormat:@"%@.jpg", imgName]];
        [data writeToFile:imageURL atomically:true];
        handle([UIImage imageWithCGImage:image], imageURL, nil);
        CGImageRelease(image);
    } else {
        handle(nil, nil, error);
    }
}

- (AVAsset *)cutVideoWithPath:(NSString *)videoPath startTime:(NSTimeInterval)start endTime:(NSTimeInterval)end {
    AVURLAsset *asset = [AVURLAsset assetWithURL:[NSURL fileURLWithPath:videoPath]];
    AVMutableComposition *composition = [[AVMutableComposition alloc] init];
    AVMutableCompositionTrack *muTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
    AVMutableCompositionTrack *audioTrack = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];

    AVAssetTrack *originTrack = [asset tracksWithMediaType:AVMediaTypeVideo].firstObject;
    AVAssetTrack *originAudioTrack = [asset tracksWithMediaType:AVMediaTypeAudio].firstObject;

    // CMTimeRangeMake takes (start, duration), so the second argument is end - start.
    CMTimeRange range = CMTimeRangeMake(CMTimeMakeWithSeconds(start, asset.duration.timescale),
                                        CMTimeMakeWithSeconds(end - start, asset.duration.timescale));
    [muTrack insertTimeRange:range ofTrack:originTrack atTime:kCMTimeZero error:nil];
    [audioTrack insertTimeRange:range ofTrack:originAudioTrack atTime:kCMTimeZero error:nil];
    muTrack.preferredTransform = originTrack.preferredTransform;

    return composition;
}

- (void)dealImageWithOriginPath:(NSString *)originPath
                       filePath:(NSString *)finalPath
                assetIdentifier:(NSString *)assetIdentifier {
    // Re-encode the still image and inject the asset identifier into its Apple MakerNote dictionary.
    CGImageDestinationRef dest = CGImageDestinationCreateWithURL((__bridge CFURLRef)[NSURL fileURLWithPath:finalPath], kUTTypeJPEG, 1, nil);
    CGImageSourceRef imageSourceRef = CGImageSourceCreateWithData((__bridge CFDataRef)[NSData dataWithContentsOfFile:originPath], nil);
    NSMutableDictionary *metaData = [(__bridge_transfer NSDictionary *)CGImageSourceCopyPropertiesAtIndex(imageSourceRef, 0, nil) mutableCopy];

    NSMutableDictionary *makerNote = [NSMutableDictionary dictionary];
    [makerNote setValue:assetIdentifier forKey:kFigAppleMakerNote_AssetIdentifier];
    [metaData setValue:makerNote forKey:(__bridge NSString *)kCGImagePropertyMakerAppleDictionary];
    CGImageDestinationAddImageFromSource(dest, imageSourceRef, 0, (__bridge CFDictionaryRef)metaData);
    CGImageDestinationFinalize(dest);
    CFRelease(imageSourceRef);
    CFRelease(dest);
}

- (void)dealVideoWithWriteFilePath:(NSString *)finalMovPath
                   AssetIdentifier:(NSString *)assetIdentifier
                             asset:(AVAsset *)asset {

    AVAssetTrack *videoTrack = [asset tracksWithMediaType:AVMediaTypeVideo].firstObject;
    AVAssetTrack *audioTrack = [asset tracksWithMediaType:AVMediaTypeAudio].firstObject;

    if (!videoTrack) {
        return;
    }

    // Reader outputs: decode video frames as BGRA and audio as linear PCM.
    AVAssetReaderOutput *videoOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:videoTrack outputSettings:@{(__bridge NSString *)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA)}];

    NSDictionary *audioDic = @{AVFormatIDKey : @(kAudioFormatLinearPCM),
                               AVLinearPCMIsBigEndianKey : @NO,
                               AVLinearPCMIsFloatKey : @NO,
                               AVLinearPCMBitDepthKey : @(16)};

    NSError *error;
    AVAssetReader *reader = [AVAssetReader assetReaderWithAsset:asset error:&error];
    if ([reader canAddOutput:videoOutput]) {
        [reader addOutput:videoOutput];
    } else {
        NSLog(@"Add video output error\n");
    }

    AVAssetReaderTrackOutput *audioOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:audioTrack outputSettings:audioDic];
    if ([reader canAddOutput:audioOutput]) {
        [reader addOutput:audioOutput];
    } else {
        NSLog(@"Add audio output error\n");
    }

    // Writer inputs: re-encode video as H.264 and audio as AAC.
    NSDictionary *outputSetting = @{AVVideoCodecKey : AVVideoCodecH264,
                                    AVVideoWidthKey : @(videoTrack.naturalSize.width),
                                    AVVideoHeightKey : @(videoTrack.naturalSize.height)};

    AVAssetWriterInput *videoInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:outputSetting];
    videoInput.expectsMediaDataInRealTime = true;
    videoInput.transform = videoTrack.preferredTransform;

    NSDictionary *audioSettings = @{AVFormatIDKey : @(kAudioFormatMPEG4AAC),
                                    AVNumberOfChannelsKey : @(1),
                                    AVSampleRateKey : @(44100),
                                    AVEncoderBitRateKey : @(128000)};

    AVAssetWriterInput *audioInput = [AVAssetWriterInput assetWriterInputWithMediaType:[audioTrack mediaType] outputSettings:audioSettings];
    audioInput.expectsMediaDataInRealTime = true;
    audioInput.transform = audioTrack.preferredTransform;

    NSError *error_two;
    AVAssetWriter *writer = [AVAssetWriter assetWriterWithURL:[NSURL fileURLWithPath:finalMovPath] fileType:AVFileTypeQuickTimeMovie error:&error_two];
    if (error_two) {
        NSLog(@"CreateWriterError: %@\n", error_two);
    }
    // Top-level metadata: the content identifier that pairs the MOV with the still image.
    writer.metadata = @[[self metaDataSet:assetIdentifier]];
    [writer addInput:videoInput];
    [writer addInput:audioInput];

    NSDictionary *sourcePixelBufferAttributesDictionary = @{(__bridge NSString *)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA)};
    AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:videoInput sourcePixelBufferAttributes:sourcePixelBufferAttributesDictionary];

    // Timed metadata track: the "still image time" tells the system which frame the still image corresponds to.
    AVAssetWriterInputMetadataAdaptor *adapter = [self metadataSetAdapter];
    [writer addInput:adapter.assetWriterInput];
    [writer startWriting];
    [reader startReading];
    [writer startSessionAtSourceTime:kCMTimeZero];

    CMTimeRange dummyTimeRange = CMTimeRangeMake(CMTimeMake(0, 1000), CMTimeMake(200, 3000));
    AVMutableMetadataItem *item = [AVMutableMetadataItem metadataItem];
    item.key = kKeyStillImageTime;
    item.keySpace = kKeySpaceQuickTimeMetadata;
    item.value = [NSNumber numberWithInt:0];
    item.dataType = @"com.apple.metadata.datatype.int8";
    [adapter appendTimedMetadataGroup:[[AVTimedMetadataGroup alloc] initWithItems:@[item] timeRange:dummyTimeRange]];

    // Pump samples from the reader to the writer on a serial queue.
    dispatch_queue_t createMovQueue = dispatch_queue_create("createMovQueue", DISPATCH_QUEUE_SERIAL);
    dispatch_async(createMovQueue, ^{
        while (reader.status == AVAssetReaderStatusReading) {
            CMSampleBufferRef videoBuffer = [videoOutput copyNextSampleBuffer];
            CMSampleBufferRef audioBuffer = [audioOutput copyNextSampleBuffer];

            if (videoBuffer) {
                while (!videoInput.isReadyForMoreMediaData || !audioInput.isReadyForMoreMediaData) {
                    usleep(1);
                }
                if (audioBuffer) {
                    [audioInput appendSampleBuffer:audioBuffer];
                    CFRelease(audioBuffer);
                }
                [adaptor.assetWriterInput appendSampleBuffer:videoBuffer];
                CMSampleBufferInvalidate(videoBuffer);
                CFRelease(videoBuffer);
                videoBuffer = NULL;
            } else {
                if (audioBuffer) {
                    CFRelease(audioBuffer);
                }
                continue;
            }
        }
        dispatch_sync(dispatch_get_main_queue(), ^{
            [writer finishWritingWithCompletionHandler:^{
                NSLog(@"Finish\n");
            }];
        });
    });

    // Block until the writer is done so the caller can use the file right after this method returns.
    while (writer.status == AVAssetWriterStatusWriting) {
        [[NSRunLoop currentRunLoop] runUntilDate:[NSDate dateWithTimeIntervalSinceNow:0.5]];
    }
}

- (AVAssetWriterInputMetadataAdaptor *)metadataSetAdapter {
    NSString *identifier = [kKeySpaceQuickTimeMetadata stringByAppendingFormat:@"/%@", kKeyStillImageTime];
    NSDictionary *spec = @{(__bridge NSString *)kCMMetadataFormatDescriptionMetadataSpecificationKey_Identifier : identifier,
                           (__bridge NSString *)kCMMetadataFormatDescriptionMetadataSpecificationKey_DataType : @"com.apple.metadata.datatype.int8"};
    CMFormatDescriptionRef desc;
    CMMetadataFormatDescriptionCreateWithMetadataSpecifications(kCFAllocatorDefault, kCMMetadataFormatType_Boxed, (__bridge CFArrayRef)@[spec], &desc);
    AVAssetWriterInput *input = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeMetadata outputSettings:nil sourceFormatHint:desc];
    CFRelease(desc);
    return [AVAssetWriterInputMetadataAdaptor assetWriterInputMetadataAdaptorWithAssetWriterInput:input];
}

- (AVMetadataItem *)metaDataSet:(NSString *)assetIdentifier {
    AVMutableMetadataItem *item = [AVMutableMetadataItem metadataItem];
    item.key = kKeyContentIdentifier;
    item.keySpace = kKeySpaceQuickTimeMetadata;
    item.value = assetIdentifier;
    item.dataType = @"com.apple.metadata.datatype.UTF-8";
    return item;
}

- (void)saveLivePhotoWithVideoPath:(NSString *)videoPath imagePath:(NSString *)imagePath handle:(void (^)(BOOL, NSError *))saveHandle {
    [[PHPhotoLibrary sharedPhotoLibrary] performChanges:^{
        PHAssetCreationRequest *creationRequest = [PHAssetCreationRequest creationRequestForAsset];
        PHAssetResourceCreationOptions *options = [[PHAssetResourceCreationOptions alloc] init];

        // The paired video and photo resources become a single live photo in the library.
        [creationRequest addResourceWithType:PHAssetResourceTypePairedVideo fileURL:[NSURL fileURLWithPath:videoPath] options:options];
        [creationRequest addResourceWithType:PHAssetResourceTypePhoto fileURL:[NSURL fileURLWithPath:imagePath] options:options];
    } completionHandler:^(BOOL success, NSError * _Nullable error) {
        saveHandle(success, error);
    }];
}

@end
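
One thing the tool does not handle is photo-library authorization, which saving requires. A minimal pre-flight sketch (the paths mirror the ones used in the "Core code" section below; the Info.plist keys are standard Photos requirements, not part of the original project):

NSString *cachesDir = NSSearchPathForDirectoriesInDomains(NSCachesDirectory, NSUserDomainMask, YES).firstObject;
NSString *newImgPath = [cachesDir stringByAppendingPathComponent:@"IMG.JPG"];
NSString *newVideoPath = [cachesDir stringByAppendingPathComponent:@"IMG.MOV"];

// NSPhotoLibraryAddUsageDescription (or NSPhotoLibraryUsageDescription) must be present in Info.plist.
[PHPhotoLibrary requestAuthorization:^(PHAuthorizationStatus status) {
    if (status == PHAuthorizationStatusAuthorized) {
        [[ZYLivePhotoTool shareTool] saveLivePhotoWithVideoPath:newVideoPath imagePath:newImgPath handle:^(BOOL success, NSError *error) {
            NSLog(@"live photo saved: %d, error: %@", success, error);
        }];
    }
}];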

Core code


dispatch_async(dispatch_get_main_queue(), ^{
    __weak typeof(self) weakself = self;
    [MBProgressHUD showMessag:@"Loading" toView:self.view];
});

flag = 1;
[DownLoadVideoService downloadAudioWithUrl:self.object[@"videoFileUrl"] saveDirectoryPath:[NSHomeDirectory() stringByAppendingPathComponent:@"Documents"] fileName:[NSString stringWithFormat:@"%@", self.object.objectId] finish:^(NSString *filePath2) {

    // Use the downloaded video's full duration as the cut range.
    AVURLAsset *audioAsset = [[AVURLAsset alloc] initWithURL:[NSURL fileURLWithPath:filePath2] options:nil];
    CMTime audioDuration = audioAsset.duration;
    float audioDurationSeconds = CMTimeGetSeconds(audioDuration);

    AVAsset *asset = [[ZYLivePhotoTool shareTool] cutVideoWithPath:filePath2 startTime:0.0 endTime:audioDurationSeconds];

    // Grab a still frame at 2 seconds as the live photo's cover image.
    [[ZYLivePhotoTool shareTool] generatorOriginImgWithAsset:asset seconds:2.0 imageName:@"image" handleImg:^(UIImage *originImage, NSString *imagePath, NSError *error) {

        NSString *outPut = NSSearchPathForDirectoriesInDomains(NSCachesDirectory, NSUserDomainMask, true).firstObject;
        NSString *newImgPath = [outPut stringByAppendingPathComponent:@"IMG.JPG"];
        NSString *newVideoPath = [outPut stringByAppendingPathComponent:@"IMG.MOV"];
        [[NSFileManager defaultManager] removeItemAtPath:newImgPath error:nil];
        [[NSFileManager defaultManager] removeItemAtPath:newVideoPath error:nil];

        [[ZYLivePhotoTool shareTool] generatorLivePhotoWithAsset:asset originImgPath:imagePath livePhotoImgPath:newImgPath livePhotoVideoPath:newVideoPath handleLivePhoto:^(PHLivePhoto *livePhoto) {
            // photoView.livePhoto = livePhoto;
            if (self->flag == 1) {
                self->flag = 3;
                [[ZYLivePhotoTool shareTool] saveLivePhotoWithVideoPath:newVideoPath imagePath:newImgPath handle:^(BOOL success, NSError *error) {
                    if (success) {
                        NSLog(@"aaa");
                        dispatch_sync(dispatch_get_main_queue(), ^{
                            [MBProgressHUD hideHUDForView:self.view animated:YES];
                            [MBProgressHUD showError:NSLocalizedString(@"baocunchenggong", nil) toView:self.view];  // "saved successfully"
                        });
                    } else {
                        dispatch_sync(dispatch_get_main_queue(), ^{
                            [MBProgressHUD hideHUDForView:self.view animated:YES];
                            [MBProgressHUD showError:NSLocalizedString(@"baocunshibai", nil) toView:self.view];  // "save failed"
                        });
                        NSLog(@"bbb");
                    }
                }];
            }
        }];
    }];
} failed:^{
    [MBProgressHUD hideHUDForView:self.view animated:YES];
}];