Extracting Video Frames in Objective-C

Video frame extraction: read a local video with AVAssetReader, pull the decoded sample buffers one by one, convert every targetFrame-th frame to a UIImage, and write it to disk as a PNG.

+ (void)drawFramesFromVideoAtPath:(NSString *)videoPath toDirectory:(NSString *)outPath withTargetFrame:(NSUInteger)targetFrame {
    DLog(@"------------------------- Extracting video frames -------------------------");

    // Create the output directory if it does not exist yet.
    NSFileManager *fileManager = [NSFileManager defaultManager];
    NSError *error = nil;
    if (![fileManager fileExistsAtPath:outPath]) {
        [fileManager createDirectoryAtPath:outPath withIntermediateDirectories:YES attributes:nil error:&error];
        if (error) {
            DLog(@"Failed to create output directory: %@", error);
            return;
        }
    }

    NSURL *videoURL = [NSURL fileURLWithPath:videoPath];
    AVAsset *asset = [AVAsset assetWithURL:videoURL];
    AVAssetTrack *videoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] firstObject];
    if (videoTrack == nil) {
        DLog(@"No video track found at %@", videoPath);
        return;
    }
    // Estimate the total frame count from the duration and nominal frame rate.
    NSUInteger frames = (NSUInteger)(CMTimeGetSeconds(asset.duration) * videoTrack.nominalFrameRate);
    DLog(@"The video has %lu frames; extracting %lu of them......", (unsigned long)frames, (unsigned long)(frames / targetFrame + 1));

    AVAssetReader *assetReader = [AVAssetReader assetReaderWithAsset:asset error:&error];
    if (error) {
        DLog(@"Failed to create AVAssetReader: %@", error);
        return;
    }

    // Ask the reader to decode into BGRA pixel buffers so they can be
    // rendered with Core Graphics later on.
    AVAssetReaderTrackOutput *output = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:videoTrack outputSettings:@{
        (NSString *)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA)
    }];
    [assetReader addOutput:output];
    if (![assetReader startReading]) {
        DLog(@"Failed to start reading: %@", assetReader.error);
        return;
    }

    NSUInteger count = 0;
    NSUInteger imageIndex = 1000001; // 7-digit names keep the output files lexically sorted
    while (assetReader.status == AVAssetReaderStatusReading) {
        CMSampleBufferRef sampleBuffer = [output copyNextSampleBuffer];
        if (sampleBuffer == NULL) {
            break;
        }

        // Keep every targetFrame-th frame, plus the very last one.
        if (count % targetFrame == 0 || count == frames - 1) {
            DLog(@"Extracted %lu", (unsigned long)(imageIndex - 1000000));
            @autoreleasepool {
                UIImage *uiImage = [self convertSampleBufferRefToUIImage:sampleBuffer];
                NSString *savePath = [outPath stringByAppendingPathComponent:[NSString stringWithFormat:@"%lu.png", (unsigned long)imageIndex]];
                @autoreleasepool {
                    NSData *imageData = UIImagePNGRepresentation(uiImage);
                    [imageData writeToFile:savePath atomically:YES];
                }
            }
            imageIndex++;
        }
        count++;
        CFRelease(sampleBuffer); // copyNextSampleBuffer follows the Copy rule
    }

    if (assetReader.status == AVAssetReaderStatusCompleted) {
        DLog(@"All frames have been extracted......");
    } else if (assetReader.status == AVAssetReaderStatusFailed) {
        DLog(@"Frame extraction failed: %@", assetReader.error);
    } else if (assetReader.status == AVAssetReaderStatusCancelled) {
        DLog(@"Frame extraction was cancelled");
    }

    DLog(@"------------------------- Frame extraction finished -------------------------");
}
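A minimal usage sketch follows. The class name VideoFrameExtractor, the bundled file name sample.mp4, and the output folder name are assumptions for illustration only; decoding and PNG encoding are slow, so the call is dispatched off the main thread:

// Hypothetical call site: VideoFrameExtractor is an assumed name for the
// class that hosts the methods above; "sample.mp4" is a placeholder for a
// real local video file.
NSString *videoPath = [[NSBundle mainBundle] pathForResource:@"sample" ofType:@"mp4"];
NSString *outPath = [NSTemporaryDirectory() stringByAppendingPathComponent:@"frames"];
dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
    // Save every 30th frame (plus the last one) as a PNG.
    [VideoFrameExtractor drawFramesFromVideoAtPath:videoPath
                                       toDirectory:outPath
                                   withTargetFrame:30];
});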


+ (UIImage *)convertSampleBufferRefToUIImage:(CMSampleBufferRef)sampleBufferRef {
    @autoreleasepool {
        CGImageRef cgImage = [self convertSampleBufferRefToCGImage:sampleBufferRef];

        CGFloat width = CGImageGetWidth(cgImage);
        CGFloat height = CGImageGetHeight(cgImage);

        // To downscale the saved frames, divide width and height here
        // (e.g. by 5) before creating the context.

        // A scale of 1 keeps the pixel dimensions identical to the source frame.
        UIGraphicsBeginImageContextWithOptions(CGSizeMake(width, height), NO, 1);

        // Core Graphics uses a bottom-left origin while UIKit uses a top-left
        // one, so flip the context vertically before drawing the CGImage.
        CGContextRef context = UIGraphicsGetCurrentContext();
        CGContextTranslateCTM(context, 0, height);
        CGContextScaleCTM(context, 1.0, -1.0);
        CGContextDrawImage(context, CGRectMake(0, 0, width, height), cgImage);

        UIImage *image = UIGraphicsGetImageFromCurrentImageContext();
        UIGraphicsEndImageContext();

        CGImageRelease(cgImage);
        return image;
    }
}
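If the frame needs no resizing, the round trip through a second bitmap context can likely be skipped entirely; a minimal sketch under that assumption (the method name imageFromSampleBuffer: is hypothetical):

// Minimal sketch, assuming no resizing is needed: UIImage renders a CGImage
// with a top-left origin itself, so no manual context flip is required.
+ (UIImage *)imageFromSampleBuffer:(CMSampleBufferRef)sampleBufferRef {
    CGImageRef cgImage = [self convertSampleBufferRefToCGImage:sampleBufferRef];
    UIImage *image = [UIImage imageWithCGImage:cgImage];
    CGImageRelease(cgImage);
    return image;
}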


// Create a CGImage from sample buffer data.
// Based on Apple's Technical Q&A QA1702:
// https://developer.apple.com/library/ios/qa/qa1702/_index.html
// The returned CGImageRef follows the Create rule: the caller must release
// it with CGImageRelease().
+ (CGImageRef)convertSampleBufferRefToCGImage:(CMSampleBufferRef)sampleBufferRef {
    @autoreleasepool {
        // Get a CMSampleBuffer's Core Video image buffer for the media data
        CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBufferRef);
        // Lock the base address of the pixel buffer
        CVPixelBufferLockBaseAddress(imageBuffer, 0);

        // Get the base address of the pixel buffer
        void *baseAddress = CVPixelBufferGetBaseAddress(imageBuffer);
        // Get the number of bytes per row for the pixel buffer
        size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
        // Get the pixel buffer width and height
        size_t width = CVPixelBufferGetWidth(imageBuffer);
        size_t height = CVPixelBufferGetHeight(imageBuffer);

        // Create a device-dependent RGB color space
        CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();

        // Create a bitmap graphics context with the sample buffer data; the
        // byte order and alpha flags match the kCVPixelFormatType_32BGRA
        // output requested from the AVAssetReaderTrackOutput above.
        CGContextRef context = CGBitmapContextCreate(baseAddress, width, height, 8,
                                                     bytesPerRow, colorSpace,
                                                     kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
        // Create a Quartz image from the pixel data in the bitmap graphics context
        CGImageRef quartzImage = CGBitmapContextCreateImage(context);
        // Unlock the pixel buffer
        CVPixelBufferUnlockBaseAddress(imageBuffer, 0);

        // Free up the context and color space
        CGContextRelease(context);
        CGColorSpaceRelease(colorSpace);

        return quartzImage;
    }
}
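For comparison, Core Image can perform the same sample-buffer-to-CGImage conversion in a couple of calls. A sketch under the assumption that the CIContext is created once and reused across frames (building one per frame would be slow):

// Alternative sketch using Core Image instead of a manual CGBitmapContext.
// ciContext should be created once and cached; it is shown inline here only
// to keep the example self-contained.
CVImageBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBufferRef);
CIImage *ciImage = [CIImage imageWithCVPixelBuffer:pixelBuffer];
CIContext *ciContext = [CIContext contextWithOptions:nil];
CGImageRef cgImage = [ciContext createCGImage:ciImage fromRect:ciImage.extent]; // Create rule: release when done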