I have a video-processing app that uses AVFoundation and saves screenshots taken from a video. After the user picks one of those screenshots with UIImagePickerController, I want to play the video starting from that exact frame. To do this, I believe I need to compare each extracted frame of the video against the chosen screenshot.
How can I compare two images? Not just strict equality: if the images are nearly identical, that should also count as a match (one possible approach is sketched below).
How can I start playback of the video at that specific frame (see the seek sketch after the code)?
Please look through the code below and suggest modifications for comparing the video frames with the screenshot. I have successfully extracted the video frames into a UITableView, but I am unable to compare those images with the screenshot.
I have gone through most of the approaches available on the internet, but could not find anything that fits my need. Thanks in advance, and happy coding.
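For the comparison question, here is a minimal sketch of one possible approach (not taken from the code below): draw both images into a tiny fixed-size RGBA bitmap and measure the average per-channel difference, so that compression and scaling artifacts still count as a match. The 16x16 size and the threshold value are assumptions to tune against your own footage.

#import <UIKit/UIKit.h>

// A minimal similarity check: render both images into a tiny 16x16 RGBA
// bitmap and compare the average per-channel difference. Returns YES when
// the normalised difference is below `threshold` (0 = identical pixels).
static BOOL ImagesLookSimilar(UIImage *imageA, UIImage *imageB, double threshold) {
    if (!imageA.CGImage || !imageB.CGImage) return NO;
    enum { kSide = 16, kBytes = kSide * kSide * 4 };
    uint8_t pixelsA[kBytes] = {0};
    uint8_t pixelsB[kBytes] = {0};
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef ctxA = CGBitmapContextCreate(pixelsA, kSide, kSide, 8, kSide * 4,
                                              colorSpace, (CGBitmapInfo)kCGImageAlphaPremultipliedLast);
    CGContextRef ctxB = CGBitmapContextCreate(pixelsB, kSide, kSide, 8, kSide * 4,
                                              colorSpace, (CGBitmapInfo)kCGImageAlphaPremultipliedLast);
    CGColorSpaceRelease(colorSpace);
    if (!ctxA || !ctxB) {
        if (ctxA) CGContextRelease(ctxA);
        if (ctxB) CGContextRelease(ctxB);
        return NO;
    }
    // Downscaling to the same tiny size makes the comparison tolerant of
    // resolution differences between the screenshot and the video frame.
    CGRect rect = CGRectMake(0, 0, kSide, kSide);
    CGContextDrawImage(ctxA, rect, imageA.CGImage);
    CGContextDrawImage(ctxB, rect, imageB.CGImage);
    CGContextRelease(ctxA);
    CGContextRelease(ctxB);
    // Sum the absolute difference over the RGB channels (alpha is skipped).
    double total = 0;
    for (int i = 0; i < kBytes; i += 4) {
        total += abs(pixelsA[i]     - pixelsB[i]);
        total += abs(pixelsA[i + 1] - pixelsB[i + 1]);
        total += abs(pixelsA[i + 2] - pixelsB[i + 2]);
    }
    double meanDiff = total / (kSide * kSide * 3 * 255.0);
    return meanDiff < threshold;
}

A threshold of around 0.1 is a starting guess, nothing more. Compressed video frames almost never match a screenshot byte-for-byte, so comparing downscaled averages is far more forgiving than testing pixel equality on the full-size images.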
- (void)viewDidLoad {
    [super viewDidLoad];
    _imageArray = [[NSMutableArray alloc] init];
    _framesArray = [[NSMutableArray alloc] init];
    // Do any additional setup after loading the view.
    NSURL *url = [NSURL fileURLWithPath:@"/****Image path***/Desktop/ImageExtract/ImageExtract/The Avengers-Iron-man Vs Nuke scene.mp4"];
    NSDictionary *options = @{ AVURLAssetPreferPreciseDurationAndTimingKey : @YES };
    AVAsset *myAsset = [AVURLAsset URLAssetWithURL:url options:options];
    self.imageGenerator = [AVAssetImageGenerator assetImageGeneratorWithAsset:myAsset];
    Float64 durationSeconds = CMTimeGetSeconds([myAsset duration]);
    _times = [[NSMutableArray alloc] init];
    // Request one thumbnail per second of video (this does NOT extract
    // every frame at 25 fps; the step is one second per iteration).
    for (Float64 i = 0; i < durationSeconds; i++) {
        [_times addObject:[NSValue valueWithCMTime:CMTimeMakeWithSeconds(i, 60)]];
    }
    [_imageGenerator generateCGImagesAsynchronouslyForTimes:_times
                                          completionHandler:^(CMTime requestedTime, CGImageRef image, CMTime actualTime,
                                                              AVAssetImageGeneratorResult result, NSError *error) {
        // This handler runs on a background queue, once per requested time.
        NSString *requestedTimeString = (NSString *)CFBridgingRelease(CMTimeCopyDescription(NULL, requestedTime));
        NSString *actualTimeString = (NSString *)CFBridgingRelease(CMTimeCopyDescription(NULL, actualTime));
        NSLog(@"Requested: %@; actual %@", requestedTimeString, actualTimeString);
        if (result == AVAssetImageGeneratorSucceeded) {
            NSLog(@"image generated");
            UIImage *generatedImage = [UIImage imageWithCGImage:image];
            [_imageArray addObject:generatedImage];
            // Save each generated frame as a PNG in the Documents directory.
            NSString *filename = [NSString stringWithFormat:@"Documents/frame_%d.png", frameCount];
            NSString *pngPath = [NSHomeDirectory() stringByAppendingPathComponent:filename];
            [_framesArray addObject:filename];
            [UIImagePNGRepresentation(generatedImage) writeToFile:pngPath atomically:YES];
            NSLog(@"Frame name %@", filename);
            NSLog(@"Time of Frame %f", CMTimeGetSeconds(requestedTime));
            NSLog(@"Frames Array %@", _framesArray);
            frameCount++;
        }
        if (result == AVAssetImageGeneratorFailed) {
            NSLog(@"Failed with error: %@", [error localizedDescription]);
        }
        if (result == AVAssetImageGeneratorCancelled) {
            NSLog(@"Canceled");
        }
        // Build the table once the last requested time has been handled
        // (the original code did this inside the failure branch, so the
        // table only ever appeared when a frame failed). UIKit must be
        // used on the main thread, and this handler is on a background
        // queue, so hop to the main queue first.
        if (CMTimeCompare(requestedTime, [[_times lastObject] CMTimeValue]) == 0) {
            dispatch_async(dispatch_get_main_queue(), ^{
                NSLog(@"image Array %@", _imageArray);
                NSLog(@"Count %lu", (unsigned long)_imageArray.count);
                _imageTable = [[UITableView alloc] init];
                _imageTable.frame = CGRectMake(0, 100, 320, 400);
                _imageTable.dataSource = self;
                _imageTable.delegate = self;
                [self.view addSubview:_imageTable];
            });
        }
    }];
}
- (NSInteger)tableView:(UITableView *)tableView numberOfRowsInSection:(NSInteger)section {
    return _imageArray.count;
}
- (UITableViewCell *)tableView:(UITableView *)tableView cellForRowAtIndexPath:(NSIndexPath *)indexPath {
    static NSString *CellIdentifier = @"Cell";
    UITableViewCell *cell = [tableView dequeueReusableCellWithIdentifier:CellIdentifier];
    if (cell == nil) {
        cell = [[UITableViewCell alloc] initWithStyle:UITableViewCellStyleDefault reuseIdentifier:CellIdentifier];
        // Add the thumbnail view only when the cell is first created, so
        // reused cells don't stack a new image view on every scroll.
        UIImageView *thumbView = [[UIImageView alloc] initWithFrame:CGRectMake(0, 0, 100, 100)];
        thumbView.tag = 100;
        [cell.contentView addSubview:thumbView];
    }
    // Configure the cell...
    UIImageView *thumbView = (UIImageView *)[cell.contentView viewWithTag:100];
    thumbView.image = [_imageArray objectAtIndex:indexPath.row];
    return cell;
}
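For playing from the matched frame, here is a hypothetical glue method built on the code above (the method name, the `screenshot` parameter, the 0.1 threshold, and the use of AVPlayerViewController are my assumptions; `_imageArray` and `_times` are the arrays filled in viewDidLoad, and ImagesLookSimilar is the sketch near the top). It finds the first extracted frame that looks like the screenshot, maps its index back to the requested CMTime, and seeks an AVPlayer there with zero tolerance before starting playback:

#import <AVKit/AVKit.h> // for AVPlayerViewController

- (void)playVideo:(NSURL *)videoURL fromFrameMatching:(UIImage *)screenshot {
    // Find the first generated frame that is "close enough" to the screenshot.
    NSInteger matchIndex = NSNotFound;
    for (NSInteger i = 0; i < (NSInteger)_imageArray.count; i++) {
        if (ImagesLookSimilar(_imageArray[i], screenshot, 0.1)) {
            matchIndex = i;
            break;
        }
    }
    if (matchIndex == NSNotFound) {
        NSLog(@"No frame matched the screenshot");
        return;
    }
    // _times[i] is the CMTime that produced _imageArray[i].
    CMTime seekTime = [_times[matchIndex] CMTimeValue];
    AVPlayer *player = [AVPlayer playerWithURL:videoURL];
    AVPlayerViewController *playerVC = [[AVPlayerViewController alloc] init];
    playerVC.player = player;
    [self presentViewController:playerVC animated:YES completion:^{
        // Zero tolerance forces the seek to land on the exact requested
        // time instead of the nearest keyframe.
        [player seekToTime:seekTime
           toleranceBefore:kCMTimeZero
            toleranceAfter:kCMTimeZero
         completionHandler:^(BOOL finished) {
             if (finished) [player play];
         }];
    }];
}

Note that the index-to-time mapping above only holds if every requested time succeeded; if some frames can fail, a more robust variant stores each frame's actualTime alongside its image (e.g., in a pair or dictionary) instead of relying on array positions lining up.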