iOS Video Trimming, Format Conversion, and Compression
honey缘木鱼 · 金蝶云社区 (Kingdee Cloud Community)

A recent project needed a video editing feature, essentially a small video processing tool: trimming a video (duration and size), converting its format (mp4, mov, gif), and compressing it.

Screenshots of the implemented features


The main implementation steps are:

1. Pick a video from the photo library and play it

TZImagePickerController *imagePickerVc = [[TZImagePickerController alloc] initWithMaxImagesCount:1 delegate:nil];
imagePickerVc.allowPickingVideo = YES;
imagePickerVc.allowPickingImage = NO;
WS(ws);
[imagePickerVc setDidFinishPickingVideoHandle:^(UIImage *coverImage, id asset) {
    MBProgressHUD *hud = [MBProgressHUD showHUDAddedTo:ws.view animated:YES];
    hud.label.text = @"Processing...";
    PHVideoRequestOptions *options = [[PHVideoRequestOptions alloc] init];
    options.version = PHVideoRequestOptionsVersionOriginal;
    options.networkAccessAllowed = YES;
    options.deliveryMode = PHVideoRequestOptionsDeliveryModeAutomatic;
    PHImageManager *manager = [PHImageManager defaultManager];
    [manager requestAVAssetForVideo:asset options:options resultHandler:^(AVAsset * _Nullable avAsset, AVAudioMix * _Nullable audioMix, NSDictionary * _Nullable info) {
        AVURLAsset *urlAsset = (AVURLAsset *)avAsset;
        NSURL *url = urlAsset.URL;
        dispatch_async(dispatch_get_main_queue(), ^{
            [MBProgressHUD hideHUDForView:ws.view animated:YES];
            // Recover the file path from the sandbox extension token so the file size can be read
            NSString *sandboxExtensionTokenKey = info[@"PHImageFileSandboxExtensionTokenKey"];
            NSArray *arr = [sandboxExtensionTokenKey componentsSeparatedByString:@";"];
            NSString *filePath = [arr[arr.count - 1] substringFromIndex:9];
            CGFloat size = [MyUtility getFileSize:filePath];
            SNHVideoTrimmerController *vc = [[SNHVideoTrimmerController alloc] init];
            vc.videoUrl = url;          // video URL
            vc.fileSize = size;         // file size
            vc.changeAsset = avAsset;   // the AVAsset to edit
            vc.flag = tap.view.tag;     // tag of the tapped entry (set in the enclosing tap handler)
            [ws.navigationController pushViewController:vc animated:YES];
        });
    }];
}];
[self presentViewController:imagePickerVc animated:YES completion:nil];
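Because the picker reads from the photo library, the app also needs NSPhotoLibraryUsageDescription in Info.plist and, ideally, an authorization check before the presentViewController call above. A minimal sketch of such a check (not part of the original demo):

// Sketch: request photo-library access before presenting the picker.
[PHPhotoLibrary requestAuthorization:^(PHAuthorizationStatus status) {
    dispatch_async(dispatch_get_main_queue(), ^{
        if (status == PHAuthorizationStatusAuthorized) {
            [self presentViewController:imagePickerVc animated:YES completion:nil];
        } else {
            NSLog(@"Photo library access denied");
        }
    });
}];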

2. Play the video with AVPlayer (a simple custom player wrapper is enough)

self.playerItem = [AVPlayerItem playerItemWithAsset:self.asset];
self.player = [AVPlayer playerWithPlayerItem:self.playerItem];
self.playerLayer = [AVPlayerLayer playerLayerWithPlayer:self.player];
self.playerLayer.videoGravity = AVLayerVideoGravityResizeAspect;
self.player.actionAtItemEnd = AVPlayerActionAtItemEndNone;
self.playerLayer.frame = self.videoPlayerView.bounds;
[self.videoPlayerView.layer addSublayer:self.playerLayer];
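Because actionAtItemEnd is AVPlayerActionAtItemEndNone, the player does not rewind by itself when the item ends. One common approach (a sketch, not the demo's exact code; self.startTime is assumed to hold the trimmer's left-handle time) is to listen for the end notification and seek back to the selected start:

// Sketch: loop playback back to the trimmed start time when the item finishes.
[[NSNotificationCenter defaultCenter] addObserver:self
                                         selector:@selector(playerItemDidReachEnd:)
                                             name:AVPlayerItemDidPlayToEndTimeNotification
                                           object:self.playerItem];

- (void)playerItemDidReachEnd:(NSNotification *)notification {
    [self.player seekToTime:CMTimeMakeWithSeconds(self.startTime, 600)
            toleranceBefore:kCMTimeZero
             toleranceAfter:kCMTimeZero];
    [self.player play];
}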

3. Implementing the specific features


(1) Trimming the video: the main work is a sliding trimmer view (overlay masks plus draggable left/right handles).

// Pan gesture: changes the range of the video to keep
UIPanGestureRecognizer *panGestureRecognizer = [[UIPanGestureRecognizer alloc] initWithTarget:self action:@selector(moveOverlayView:)];
panGestureRecognizer.maximumNumberOfTouches = 1;
[self addGestureRecognizer:panGestureRecognizer];

The core code for trimming as the user drags the handles left and right:

#pragma mark - =================== Pan gesture ===================
- (void)moveOverlayView:(UIPanGestureRecognizer *)gesture
{
    switch (gesture.state) {
        case UIGestureRecognizerStateBegan:
        {
            CGFloat offsetWidth = 12;
            CGPoint po = [gesture locationInView:self];
            CGRect leftThumbViewRectInSelf = [self convertRect:CGRectMake(-offsetWidth, 0, _leftThumbView.frame.size.width + offsetWidth * 2, _leftThumbView.frame.size.height) fromView:_leftThumbView];
            BOOL isLeft = CGRectContainsPoint(leftThumbViewRectInSelf, po);
            _isDraggingLeftOverlayView = isLeft;
            CGRect rightThumbViewRectInSelf = [self convertRect:CGRectMake(-offsetWidth, 0, _rightThumbView.frame.size.width + offsetWidth * 2, _rightThumbView.frame.size.height) fromView:_rightThumbView];
            BOOL isRight = CGRectContainsPoint(rightThumbViewRectInSelf, po);
            _isDraggingRightOverlayView = isRight;
            if (!isLeft && !isRight) {
                return;
            }
            if (CMTimeGetSeconds([self.asset duration]) <= self.minLength) {
                [self showNoSmallerTrimMessage];
                return;
            }
            if (isRight) {
                self.rightStartPoint = [gesture locationInView:self];
                _isDraggingRightOverlayView = YES;
                _isDraggingLeftOverlayView = NO;
            } else if (isLeft) {
                self.leftStartPoint = [gesture locationInView:self];
                _isDraggingRightOverlayView = NO;
                _isDraggingLeftOverlayView = YES;
            }
        }
            break;
        case UIGestureRecognizerStateChanged:
        {
            if (CMTimeGetSeconds([self.asset duration]) <= self.minLength) {
                return;
            }
            CGPoint point = [gesture locationInView:self];
            // Right handle
            if (_isDraggingRightOverlayView) {
                CGFloat deltaX = point.x - self.rightStartPoint.x;
                CGPoint center = self.rightOverlayView.center;
                center.x += deltaX;
                CGFloat newRightViewMidX = center.x;
                CGFloat minX = CGRectGetMaxX(self.leftOverlayView.frame) + self.minLength * self.widthPerSecond;
                CGFloat maxX = CMTimeGetSeconds([self.asset duration]) <= self.maxLength + 0.5 ? CGRectGetMaxX(self.frameView.frame) : CGRectGetWidth(self.frame) - self.thumbWidth;
                if (newRightViewMidX - self.overlayWidth/2 < minX) {
                    newRightViewMidX = minX + self.overlayWidth/2;
                    [self showNoSmallerTrimMessage];
                } else if (newRightViewMidX - self.overlayWidth/2 > maxX) {
                    newRightViewMidX = maxX + self.overlayWidth/2;
                }
                self.rightOverlayView.center = CGPointMake(newRightViewMidX, self.rightOverlayView.center.y);
                self.rightStartPoint = point;
            } else if (_isDraggingLeftOverlayView) {
                // Left handle
                CGFloat deltaX = point.x - self.leftStartPoint.x;
                CGPoint center = self.leftOverlayView.center;
                center.x += deltaX;
                CGFloat newLeftViewMidX = center.x;
                CGFloat maxWidth = CGRectGetMinX(self.rightOverlayView.frame) - (self.minLength * self.widthPerSecond);
                CGFloat newLeftViewMinX = newLeftViewMidX - self.overlayWidth/2;
                if (newLeftViewMinX < self.thumbWidth - self.overlayWidth) {
                    newLeftViewMidX = self.thumbWidth - self.overlayWidth + self.overlayWidth/2;
                } else if (newLeftViewMinX + self.overlayWidth > maxWidth) {
                    newLeftViewMidX = maxWidth - self.overlayWidth/2;
                    [self showNoSmallerTrimMessage];
                }
                self.leftOverlayView.center = CGPointMake(newLeftViewMidX, self.leftOverlayView.center.y);
                self.leftStartPoint = point;
            }

            [self updateBorderFrames];
            [self notifyDelegateOfDidChange];
            break;
        }
        case UIGestureRecognizerStateEnded:
        {
            if (CMTimeGetSeconds([self.asset duration]) <= self.minLength) {
                return;
            }
            [self notifyDelegateOfEndEditing];
        }
            break;
        default:
            break;
    }
}

The trimmer view reports how far the left and right handles have moved; the times it reports are the start and end of the segment to keep.

- (void)trimmerView:(SNHVideoTrimmerView *)trimmerView didChangeLeftPosition:(CGFloat)startTime rightPosition:(CGFloat)endTime
{
    _restartOnPlay = YES;
    [self.player pause];
    self.isPlaying = NO;
    self.playBtn.hidden = NO;
    [self stopPlaybackTimeChecker];

    [self.trimmerView hideTracker:true];
    if (startTime != self.startTime) {
        // the left position moved, so seek to the new start
        [self seekVideoToPos:startTime];
    } else {
        // the right position changed
        [self seekVideoToPos:endTime];
    }
    self.startTime = startTime;
    self.stopTime = endTime;
}
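self.startTime and self.stopTime are then used to cut the asset. The export code in step 4 below does not show the time range explicitly, but a typical way to apply it (a sketch, assuming exporter is the AVAssetExportSession from step 4) is to set the session's timeRange:

// Sketch: restrict the export to the selected segment (startTime/stopTime in seconds, as set by the trimmer delegate).
CMTime start = CMTimeMakeWithSeconds(self.startTime, 600);
CMTime duration = CMTimeMakeWithSeconds(self.stopTime - self.startTime, 600);
exporter.timeRange = CMTimeRangeMake(start, duration);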

(2) Converting the video format (mp4, mov, gif)


Videos recorded on the phone are usually in mov format, so format conversion is often needed.
// Converting to mov or mp4

 exporter.outputFileType = [self transOutPutFileType:fileType];

// Map the requested extension to an export file type
- (NSString *)transOutPutFileType:(NSString *)fileType {
    if ([fileType isEqualToString:@"mov"]) {
        return AVFileTypeQuickTimeMovie;
    } else if ([fileType isEqualToString:@"mp4"]) {
        return AVFileTypeMPEG4;
    } else {
        return AVFileTypeQuickTimeMovie;
    }
}
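Not every asset/preset combination can be written to every container, so it is worth verifying the target type against the session's supportedFileTypes before exporting. A suggested guard (not in the original demo):

// Suggested guard: fall back to QuickTime if the requested type is not supported by this session.
AVFileType targetType = [self transOutPutFileType:fileType];
if (![exporter.supportedFileTypes containsObject:targetType]) {
    targetType = AVFileTypeQuickTimeMovie;
}
exporter.outputFileType = targetType;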

// Converting mov to GIF (extract frames from the video, then turn the frames into a GIF)

// Turn the video into an array of frames
- (void)creatAvasset:(AVAsset *)asset {
    AVAssetImageGenerator *imagegenerator = [AVAssetImageGenerator assetImageGeneratorWithAsset:asset];
    CMTime time = asset.duration;
    NSInteger totalTimer = (NSInteger)CMTimeGetSeconds(time);
    // Total number of frames to extract, capped at Maxsconds seconds
    NSInteger totalCount = 0;
    if (totalTimer > Maxsconds) {
        self.animationTimer = Maxsconds;
        totalCount = Maxsconds * FTPNumber;
    } else {
        self.animationTimer = totalTimer;
        totalCount = totalTimer * FTPNumber;
    }
    NSMutableArray *timeArray = [NSMutableArray array];
    for (NSInteger i = 0; i < totalCount; i++) {
        CMTime timeFrame = CMTimeMake(i, FTPNumber);
        NSValue *timeValue = [NSValue valueWithCMTime:timeFrame];
        [timeArray addObject:timeValue];
    }
    // Request exact frame times to avoid drift
    imagegenerator.requestedTimeToleranceBefore = kCMTimeZero;
    imagegenerator.requestedTimeToleranceAfter = kCMTimeZero;

    [self.totalImageArray removeAllObjects];
    // Generate the frames
    [imagegenerator generateCGImagesAsynchronouslyForTimes:timeArray completionHandler:^(CMTime requestedTime, CGImageRef _Nullable image, CMTime actualTime, AVAssetImageGeneratorResult result, NSError * _Nullable error) {
        switch (result) {
            case AVAssetImageGeneratorFailed:
            {
                NSLog(@"Frame generation failed");
            }
                break;
            case AVAssetImageGeneratorCancelled:
            {
                NSLog(@"Frame generation cancelled");
            }
                break;
            case AVAssetImageGeneratorSucceeded:
            {
                NSData *data = UIImageJPEGRepresentation([UIImage imageWithCGImage:image], 0.6);
                [self.totalImageArray addObject:data];
                if (requestedTime.value >= totalCount - 1) {
                    dispatch_async(dispatch_get_main_queue(), ^{
                        if (self.blockVideo) {
                            self.blockVideo(@"Processed successfully");
                            // Build the GIF from the collected frames
                            [self ImageArrayToGif];
                        }
                    });
                }
            }
                break;
            default:
                break;
        }
    }];
}

// Turn the frame array into a GIF

- (void)ImageArrayToGif {
    // Requires ImageIO and MobileCoreServices (for kUTTypeGIF)
    NSMutableArray *images = [NSMutableArray array];
    for (NSData *dataImage in self.totalImageArray) {
        [images addObject:[UIImage imageWithData:dataImage]];
    }
    // Create the destination GIF file in Documents
    NSString *path = [self creatPathGif];
    self.gifPath = path;
    // Per-frame properties: delay between frames
    NSDictionary *frameDic = @{(NSString *)kCGImagePropertyGIFDictionary:
                                   @{(NSString *)kCGImagePropertyGIFDelayTime: @0.3}};
    // GIF-level properties
    NSMutableDictionary *gifParmdict = [NSMutableDictionary dictionaryWithCapacity:2];
    // Global color map
    [gifParmdict setObject:[NSNumber numberWithBool:YES] forKey:(NSString *)kCGImagePropertyGIFHasGlobalColorMap];
    // Color model
    [gifParmdict setObject:(NSString *)kCGImagePropertyColorModelRGB forKey:(NSString *)kCGImagePropertyColorModel];
    // Color depth
    [gifParmdict setObject:[NSNumber numberWithInt:8] forKey:(NSString *)kCGImagePropertyDepth];
    // Loop count (0 = loop forever)
    [gifParmdict setObject:[NSNumber numberWithInt:0] forKey:(NSString *)kCGImagePropertyGIFLoopCount];
    NSDictionary *gifProperty = [NSDictionary dictionaryWithObject:gifParmdict forKey:(NSString *)kCGImagePropertyGIFDictionary];

    CFURLRef url = CFURLCreateWithFileSystemPath(kCFAllocatorDefault, (__bridge CFStringRef)path, kCFURLPOSIXPathStyle, false);
    CGImageDestinationRef destion = CGImageDestinationCreateWithURL(url, kUTTypeGIF, images.count, NULL);
    CFRelease(url);
    // Write each frame into the GIF
    for (UIImage *dimage in images) {
        // Frames could be downscaled here to reduce file size
        CGImageDestinationAddImage(destion, dimage.CGImage, (__bridge CFDictionaryRef)frameDic);
    }
    CGImageDestinationSetProperties(destion, (__bridge CFDictionaryRef)gifProperty);
    CGImageDestinationFinalize(destion);
    CFRelease(destion);
    if (self.getGifPath) {
        self.getGifPath(path);
    }
}

(3) Video compression


AVAssetExportPresetLowQuality      low quality, suitable for sharing over a cellular network
AVAssetExportPresetMediumQuality   medium quality, suitable for sharing over Wi-Fi
AVAssetExportPresetHighestQuality  highest quality

The key code is:

- (void)sliderChange:(id)sender {
    if ([sender isKindOfClass:[UISlider class]]) {
        UISlider *slider = sender;
        // Slider is assumed to range from 0 to 100 (percent)
        int value = slider.value;
        if (value <= 30) {
            _compressType = AVAssetExportPresetLowQuality;
        } else if (value > 30 && value <= 70) {
            _compressType = AVAssetExportPresetMediumQuality;
        } else {
            _compressType = AVAssetExportPresetHighestQuality;
        }
        // self.fileSize appears to be in KB; show MB above 1024 KB
        if (self.fileSize * value / 100 > 1024) {
            _targetTitle.text = [NSString stringWithFormat:@"Target size: %@(%d%%)",
                                 [NSString stringWithFormat:@"%d MB", (int)(self.fileSize * value / 100 / 1024)], value];
        } else {
            _targetTitle.text = [NSString stringWithFormat:@"Target size: %@(%d%%)",
                                 [NSString stringWithFormat:@"%d KB", (int)(self.fileSize * value / 100)], value];
        }
    }
}
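The _compressType preset chosen by the slider then drives the actual compression. A minimal sketch (assuming the source asset is available as self.changeAsset and an outputURL has already been prepared) is to create a new export session with that preset, after checking it is compatible with the asset:

// Sketch: compress with the preset chosen by the slider (self.changeAsset and outputURL are assumed to exist).
NSArray *presets = [AVAssetExportSession exportPresetsCompatibleWithAsset:self.changeAsset];
if ([presets containsObject:_compressType]) {
    AVAssetExportSession *session = [[AVAssetExportSession alloc] initWithAsset:self.changeAsset
                                                                     presetName:_compressType];
    session.outputURL = outputURL;
    session.outputFileType = AVFileTypeMPEG4;
    session.shouldOptimizeForNetworkUse = YES;
    [session exportAsynchronouslyWithCompletionHandler:^{
        NSLog(@"Compression finished with status %ld", (long)session.status);
    }];
}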

4. Exporting the video


After the steps above we need to save the newly produced video: first export it with an AVAssetExportSession, then save the file locally.

// Export the edited video
self.outputURL = [NSURL fileURLWithPath:self.outPutPath];
AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:self.mixComposition
                                                                  presetName:self.presetName];
exporter.outputURL = self.outputURL;
exporter.videoComposition = self.videoComposition;
exporter.outputFileType = [self transOutPutFileType:fileType];
exporter.shouldOptimizeForNetworkUse = self.shouldOptimizeForNetworkUse;
[exporter exportAsynchronouslyWithCompletionHandler:^{
    dispatch_async(dispatch_get_main_queue(), ^{
        if (exporter.status == AVAssetExportSessionStatusCompleted) {
            if (successBlock) {
                successBlock(self.outputURL);
            }
        } else {
            NSLog(@"exporter %@", exporter.error);
            if (failureBlock) {
                failureBlock(exporter.error);
            }
        }
    });
}];
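Once the export completes, the file at outputURL can be written back to the photo library. A sketch using PHPhotoLibrary (this part is not shown in the article's snippet):

// Sketch: save the exported file to the photo library (requires photo-library permission).
[[PHPhotoLibrary sharedPhotoLibrary] performChanges:^{
    [PHAssetChangeRequest creationRequestForAssetFromVideoAtFileURL:self.outputURL];
} completionHandler:^(BOOL success, NSError * _Nullable error) {
    if (success) {
        NSLog(@"Saved to photo library");
    } else {
        NSLog(@"Save failed: %@", error);
    }
}];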

For the full implementation, see the demo download link. If you spot any mistakes, please point them out in the comments so we can learn from each other!



This article is published exclusively on 金蝶云社区 (Kingdee Cloud Community).

