iOS视频倒放

视频的倒放就是视频从后往前播放,这只适用于视频图像;对声音来说,倒放出来的只是噪音,没有什么意义,所以倒放时声音都是去除的。


倒放实现

一般对H264编码的视频进行解码,都是从头至尾进行的,因为视频存在I帧、P帧、B帧,解码P帧的时候需要依赖前面最近的I帧或者前一个P帧,解码B帧的时候,不仅要依赖前面的缓存数据还要依赖后面的数据,这就导致了我们没法真正让解码器从后往前解码,只能把视频分成很多足够小的片段,对每一个片段单独进行处理。具体思路如下:我们需要先seek到倒数第n个GOP的第一帧(I帧),然后把当前这个点到视频最后的图像都解码出来,存储在一个数组里面。这个n是根据解码数据大小定的,因为如果解码出来的数据太大,内存占用过多,会导致程序被杀掉。我是把视频分成一秒一个小片段,对这些片段倒过来进行解码,然后把每一段解出来的图像倒过来编码。使用AVFoundation可以很方便地实现(完整代码见GitHub)。

//
//  SJReverseUtility.h
//  playback
//
//  Created by Lightning on 2018/7/12.
//

#import <Foundation/Foundation.h>
#import <AVFoundation/AVFoundation.h>

NS_ASSUME_NONNULL_BEGIN

/// Progress/completion callback.
/// @param status   Current status of the underlying AVAssetWriter.
/// @param progress Fraction completed in [0, 1]; -1 signals cancellation or failure.
/// @param error    Writer error, if any.
typedef void (^ReverseCallBack)(AVAssetWriterStatus status, float progress, NSError *_Nullable error);

/// Reverses the video track of an asset (audio is discarded, since reversed
/// audio is just noise) and writes the result as an MP4 file.
@interface SJReverseUtility : NSObject

/// Designated initializer.
/// @param asset The source asset whose video track will be reversed.
/// @param path  Destination file path for the reversed MP4.
- (instancetype)initWithAsset:(AVAsset *)asset outputPath:(NSString *)path;

/// Starts processing synchronously on the calling thread.
- (void)startProcessing;

/// Requests cancellation; any partially written output file is removed.
- (void)cancelProcessing;

/// Invoked repeatedly with writer status and progress.
@property (nonatomic, copy, nullable) ReverseCallBack callBack;

/// Sub-range of the asset to reverse; when invalid, the full duration is used.
@property (nonatomic, assign) CMTimeRange timeRange;

@end

NS_ASSUME_NONNULL_END
//
//
//  SJReverseUtility.m
//  playback
//
//  Created by Lightning on 2018/7/12.
//

#import "SJReverseUtility.h"

@interface SJReverseUtility ()

/// Decoded sample buffers of the current one-second segment (appended in reverse).
@property (nonatomic, strong) NSMutableArray *samples;
@property (nonatomic, strong) AVAsset *asset;
@property (nonatomic, strong) NSMutableArray *tracks;
@property (nonatomic, strong) AVMutableComposition *composition;
@property (nonatomic, strong) AVAssetWriter *writer;
@property (nonatomic, strong) AVAssetWriterInput *writerInput;
@property (nonatomic, strong) AVAssetWriterInputPixelBufferAdaptor *writerAdaptor;
// Total frames appended so far across all segments.
@property (nonatomic, assign) uint frame_count;
@property (nonatomic, strong) AVMutableCompositionTrack *compositionTrack;
// Presentation time of the last appended frame; next frame = offset + interval.
@property (nonatomic, assign) CMTime offsetTime;
// Synthetic per-frame interval for the current segment.
@property (nonatomic, assign) CMTime intervalTime;
@property (nonatomic, assign) CMTime segDuration;
@property (nonatomic, assign) BOOL shouldStop;
@property (nonatomic, copy) NSString *path;

@end

@implementation SJReverseUtility

- (instancetype)initWithAsset:(AVAsset *)asset outputPath:(NSString *)path
{
    self = [super init];
    if (self) {
        _asset = asset;
        // FIX: _path was never assigned, so cancellation could not delete the
        // partially written output file.
        _path = [path copy];
        _composition = [AVMutableComposition composition];
        _compositionTrack = [_composition addMutableTrackWithMediaType:AVMediaTypeVideo
                                                      preferredTrackID:kCMPersistentTrackID_Invalid];
        _timeRange = kCMTimeRangeInvalid;
        _frame_count = 0;
        _offsetTime = kCMTimeZero;
        _intervalTime = kCMTimeZero;
        [self setupWriterWithPath:path];
    }
    return self;
}

- (void)cancelProcessing
{
    self.shouldStop = YES;
}

- (void)startProcessing
{
    if (CMTIMERANGE_IS_INVALID(_timeRange)) {
        _timeRange = CMTimeRangeMake(kCMTimeZero, _asset.duration);
    }
    CMTime duration = _asset.duration;
    // Process one second at a time so the decoded frames of a segment fit in memory.
    CMTime segDuration = CMTimeMake(1, 1);
    self.segDuration = segDuration;

    NSArray *videoTracks = [_asset tracksWithMediaType:AVMediaTypeVideo];
    // FIX: guard against assets without a video track instead of crashing on [0].
    if (videoTracks.count == 0) {
        !self.callBack ?: self.callBack(AVAssetWriterStatusFailed, -1, self.writer.error);
        return;
    }
    AVAssetTrack *track = videoTracks[0];

    // Must be set before -startWriting; fixes the output video orientation.
    self.writerInput.transform = track.preferredTransform;
    [self.writer startWriting];
    [self.writer startSessionAtSourceTime:kCMTimeZero];

    // Divide the requested range into n segments of segDuration each.
    int n = (int)(CMTimeGetSeconds(duration) / CMTimeGetSeconds(segDuration)) + 1;
    if (CMTIMERANGE_IS_VALID(_timeRange)) {
        n = (int)(CMTimeGetSeconds(_timeRange.duration) / CMTimeGetSeconds(segDuration)) + 1;
        duration = CMTimeAdd(_timeRange.start, _timeRange.duration);
    }

    __weak typeof(self) weakSelf = self;
    // Walk segments from the END of the range toward the start; each segment is
    // decoded forward, then its frames are appended in reverse order.
    for (int i = 1; i < n; i++) {
        CMTime offset = CMTimeMultiply(segDuration, i);
        if (CMTimeCompare(offset, duration) > 0) {
            break;
        }
        CMTime start = CMTimeSubtract(duration, offset);
        if (CMTimeCompare(start, _timeRange.start) < 0) {
            // Final (leading) segment may be shorter than segDuration.
            start = kCMTimeZero;
            segDuration = CMTimeSubtract(duration, CMTimeMultiply(segDuration, i - 1));
        }

        // Insert the segment into a scratch composition so the reader only
        // decodes this slice of the source track.
        self.compositionTrack = [_composition addMutableTrackWithMediaType:AVMediaTypeVideo
                                                          preferredTrackID:kCMPersistentTrackID_Invalid];
        [self.compositionTrack insertTimeRange:CMTimeRangeMake(start, segDuration)
                                       ofTrack:track
                                        atTime:kCMTimeZero
                                         error:nil];
        [self generateSamplesWithTrack:_composition];
        [self encodeSampleBuffer];

        if (self.shouldStop) {
            [self.writer cancelWriting];
            if ([[NSFileManager defaultManager] fileExistsAtPath:_path]) {
                [[NSFileManager defaultManager] removeItemAtPath:_path error:nil];
            }
            !weakSelf.callBack ?: weakSelf.callBack(weakSelf.writer.status, -1, weakSelf.writer.error);
            return;
        }

        [self.compositionTrack removeTimeRange:CMTimeRangeMake(start, segDuration)];
        !weakSelf.callBack ?: weakSelf.callBack(weakSelf.writer.status, (float)i / n, weakSelf.writer.error);
    }

    [self.writer finishWritingWithCompletionHandler:^{
        !weakSelf.callBack ?: weakSelf.callBack(weakSelf.writer.status, 1.0f, weakSelf.writer.error);
    }];
}

/// Creates the H.264 MP4 writer, its input, and the pixel-buffer adaptor.
- (void)setupWriterWithPath:(NSString *)path
{
    NSURL *outputURL = [NSURL fileURLWithPath:path];
    AVAssetTrack *videoTrack = [[_asset tracksWithMediaType:AVMediaTypeVideo] lastObject];

    self.writer = [[AVAssetWriter alloc] initWithURL:outputURL
                                            fileType:AVFileTypeMPEG4
                                               error:nil];

    // Match the source bitrate so quality is roughly preserved.
    NSDictionary *videoCompressionProps = @{AVVideoAverageBitRateKey : @(videoTrack.estimatedDataRate)};
    NSDictionary *writerOutputSettings = @{
        AVVideoCodecKey : AVVideoCodecH264,
        AVVideoWidthKey : @((int)videoTrack.naturalSize.width),
        AVVideoHeightKey : @((int)videoTrack.naturalSize.height),
        AVVideoCompressionPropertiesKey : videoCompressionProps,
    };

    AVAssetWriterInput *writerInput =
        [[AVAssetWriterInput alloc] initWithMediaType:AVMediaTypeVideo
                                       outputSettings:writerOutputSettings
                                     sourceFormatHint:(__bridge CMFormatDescriptionRef)[videoTrack.formatDescriptions lastObject]];
    // Offline processing: the writer may pull data as fast as it likes.
    [writerInput setExpectsMediaDataInRealTime:NO];
    self.writerInput = writerInput;
    self.writerAdaptor = [[AVAssetWriterInputPixelBufferAdaptor alloc] initWithAssetWriterInput:writerInput
                                                                    sourcePixelBufferAttributes:nil];
    [self.writer addInput:self.writerInput];
}

/// Decodes every frame of the given (segment-sized) asset into self.samples.
- (void)generateSamplesWithTrack:(AVAsset *)asset
{
    AVAssetReader *reader = [[AVAssetReader alloc] initWithAsset:asset error:nil];
    AVAssetTrack *videoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] lastObject];
    NSDictionary *readerOutputSettings =
        @{(id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)};
    AVAssetReaderTrackOutput *readerOutput =
        [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:videoTrack
                                                   outputSettings:readerOutputSettings];
    [reader addOutput:readerOutput];
    [reader startReading];

    // Decode the whole segment into memory; segments are kept small on purpose.
    _samples = [[NSMutableArray alloc] init];
    CMSampleBufferRef sample;
    while ((sample = [readerOutput copyNextSampleBuffer])) {
        // The array retains the buffer; release the +1 from copyNextSampleBuffer.
        [_samples addObject:(__bridge id)sample];
        CFRelease(sample);
    }
    if (_samples.count > 0) {
        // Spread the segment duration evenly across its frames.
        self.intervalTime = CMTimeMakeWithSeconds(CMTimeGetSeconds(self.segDuration) / (float)_samples.count,
                                                  _asset.duration.timescale);
    }
}

/// Appends the current segment's frames to the writer in reverse order,
/// using synthetic evenly spaced presentation timestamps (the original
/// timestamps cannot be reused when the frame order is reversed).
- (void)encodeSampleBuffer
{
    for (NSInteger i = 0; i < (NSInteger)_samples.count; i++) {
        CMTime presentationTime = CMTimeAdd(_offsetTime, self.intervalTime);
        NSInteger index = (NSInteger)_samples.count - i - 1;
        if (0 == _frame_count) {
            presentationTime = kCMTimeZero;
            // The very first reversed frame is often black; skip it when possible.
            // FIX: the original computed count - i - 2 unconditionally, which
            // underflows for a single-frame segment.
            if (_samples.count >= 2) {
                index = (NSInteger)_samples.count - i - 2;
            }
        }

        CVPixelBufferRef imageBufferRef =
            CMSampleBufferGetImageBuffer((__bridge CMSampleBufferRef)_samples[index]);
        // Back-pressure: wait until the writer input can accept more data.
        while (!_writerInput.readyForMoreMediaData) {
            [NSThread sleepForTimeInterval:0.1];
        }
        _offsetTime = presentationTime;
        BOOL success = [self.writerAdaptor appendPixelBuffer:imageBufferRef
                                        withPresentationTime:presentationTime];
        _frame_count++;
        if (!success) {
            NSLog(@"append failed: status = %ld, error = %@",
                  (long)self.writer.status, self.writer.error);
        }
    }
}

@end

在iOS上,这段代码可以倒放任意时长的视频,但每一帧的时间戳处理还有改进空间(目前是按段内平均间隔合成时间戳,而不是复用原始帧的时间戳)。


最新文章

  1. Oracle 记录插入时“Invalid parameter binding ”错误
  2. spring cloud config 入门
  3. 【原】Storm 入门教程目录
  4. 使用轻量级Spring @Scheduled注解执行定时任务
  5. python--for循环
  6. java整体集合框架
  7. 基于visual Studio2013解决面试题之0208二叉搜索树后序遍历序列
  8. MC-设置 止盈
  9. div标签清除float浮动样式方法
  10. .NET作品集:linux下的博客程序
  11. hdu1022 Train Problem I---模拟栈
  12. ASP.NET and ADO.NET
  13. 『最大M子段和 线性DP』
  14. springboot + schedule
  15. Python-模块导入-63
  16. iPhone IOS10安装APP没提示连接网络(无法联网)的解决办法
  17. PSP(5.4——5.10)以及周记录
  18. 4-具体学习git--分支
  19. 开发组件:REST API
  20. jQuery中的AJAX的使用

热门文章

  1. react+javascript前端进阶
  2. python anaconda 安装 环境变量 升级 以及特殊库安装
  3. js中的this--执行上下文
  4. 关于 class 的命名
  5. Android 查看和管理sqlite数据库
  6. Ubuntu上如何搭建Android开发环境
  7. python 数据结构应用
  8. CentOS 附加软件包
  9. abstract(抽象)修饰符
  10. SQL Server 有序GUID,SequentialGuid,