在将AVVideoComposition应用到我的AVPlayerItem之后,我应用的过滤器确实可以工作,但是视频会在AVPlayerLayer中旋转。
我知道的一个事实是,问题不在于过滤的帧,因为如果我在UIImageView中显示该帧,则该帧将被100%正确地呈现。
视频显示正确,直到我应用videoComposition。在AVPlayerLayer上设置videoGravity无助于此。
视频顺时针旋转90度,并在图层中拉伸。
本质上,视频在AVPlayerLayer中显示得很完美,然后AVPlayerItem才会通过AVMutableVideoComposition传送。一旦发生这种情况,视频将旋转-90°,然后缩放以适应与视频相同的尺寸,然后过滤。这向我暗示,它没有意识到它的转换已经是正确的,因此它正在将变换重新应用于自身。
为什么会发生这种事,我怎样才能解决呢?
以下是一些代码:
// Applies a Core Image filter to the player's current item by building an
// AVMutableComposition and then an AVMutableVideoComposition on top of it.
// NOTE(review): this is the problematic version from the question — after
// assigning the videoComposition, the video renders rotated 90 degrees in the
// AVPlayerLayer even though the filtered frames themselves are correct.
private func filterVideo(with filter: Filter?) {
// Silently does nothing when there is no player or no current item.
if let player = player, let playerItem = player.currentItem {
let composition = AVMutableComposition()
// First (assumed only) video track of the source asset.
let videoAssetTrack = playerItem.asset.tracks(withMediaType: .video).first
let videoCompositionTrack = composition.addMutableTrack(withMediaType: .video, preferredTrackID: kCMPersistentTrackID_Invalid)
// Copy the entire source track into the composition track; any insertion
// error is discarded by `try?`.
try? videoCompositionTrack?.insertTimeRange(CMTimeRange(start: kCMTimeZero, duration: playerItem.asset.duration), of: videoAssetTrack!, at: kCMTimeZero)
// The source track's preferredTransform is carried over, yet the rotation
// described above still occurs with this approach.
videoCompositionTrack?.preferredTransform = videoAssetTrack!.preferredTransform
// Per-frame filtering handler; `<...>` is the elided filter application.
let videoComposition = AVMutableVideoComposition(asset: composition, applyingCIFiltersWithHandler: { (request) in
let filteredImage = <...>
request.finish(with: filteredImage, context: nil)
})
playerItem.videoComposition = videoComposition
}
}发布于 2018-06-11 10:57:40
最后对我起作用的是:
// The version that finally worked for the author: the AVVideoComposition is
// built directly from the player item's asset, without an intermediate
// AVMutableComposition, which avoids the unwanted 90-degree rotation.
private func filterVideo(with filter: Filter?) {
guard let player = playerLayer?.player, let playerItem = player.currentItem else { return }
let videoComposition = AVVideoComposition(asset: playerItem.asset, applyingCIFiltersWithHandler: { (request) in
if let filter = filter {
if let filteredImage = filter.filterImage(request.sourceImage) {
// Crop back to the source extent so the filter cannot change the frame size.
let output = filteredImage.cropping(to: request.sourceImage.extent)
request.finish(with: output, context: nil)
} else {
// Filtering failed — report the error through the request.
printError("Image not filtered")
request.finish(with: RenderError.couldNotFilter)
}
} else {
// No filter selected: pass the frame through unchanged (cropping to its
// own extent is effectively an identity operation here).
let output = request.sourceImage.cropping(to: request.sourceImage.extent)
request.finish(with: output, context: nil)
}
})
playerItem.videoComposition = videoComposition
}这是filterImage函数的Filter,它只是一个很好的小包装CIFilter
/// Thin wrapper around the underlying CIFilter: feeds `ciImage` into the
/// filter and returns its output image. Returns nil when no filter is
/// configured (`ciFilter` is nil) or when the filter yields no output image.
func filterImage(_ ciImage: CIImage) -> CIImage? {
guard let activeFilter = ciFilter else { return nil }
activeFilter.setDefaults()
activeFilter.setValue(ciImage, forKey: kCIInputImageKey)
return activeFilter.value(forKey: kCIOutputImageKey) as? CIImage
}发布于 2017-07-12 12:43:09
您遇到的问题出在AVVideoComposition的renderSize上。您应该在AVMutableVideoCompositionInstruction上应用变换(即旋转和平移变换)。
我已经在Objective-C中这样做了,并且正在发布我的代码。您可以将语法转换为Swift。
Objective-C
//------------------------------------
// FIXING ORIENTATION
//------------------------------------
// Builds one AVMutableVideoCompositionInstruction spanning the combined
// duration of two assets, with a layer instruction per asset. Each layer
// instruction inspects the track's preferredTransform matrix to detect its
// orientation, scales the track to fit a 320pt-wide target, and — for
// landscape tracks — translates it down 160pt to centre it in the 320x480
// render size.
// NOTE(review): this fragment references firstAsset/secondAsset/firstTrack/
// secondTrack/mixComposition declared elsewhere. Also, FirstlayerInstruction
// is built with `secondTrack` and SecondlayerInstruction with `firstTrack`
// (names appear swapped), and only SecondlayerInstruction is ever added to
// layerInstructions — presumably an oversight in the original answer; verify
// before reusing.
AVMutableVideoCompositionInstruction * MainInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
MainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, CMTimeAdd(firstAsset.duration, secondAsset.duration));
AVMutableVideoCompositionLayerInstruction *FirstlayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:secondTrack]; // second
AVAssetTrack *FirstAssetTrack = [[firstAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
UIImageOrientation FirstAssetOrientation_ = UIImageOrientationUp;
BOOL isFirstAssetPortrait_ = NO;
// Orientation is detected by comparing the transform's a/b/c/d entries
// against the four exact rotation matrices (0, 90, 180, 270 degrees).
CGAffineTransform firstTransform = FirstAssetTrack.preferredTransform;
if(firstTransform.a == 0 && firstTransform.b == 1.0 && firstTransform.c == -1.0 && firstTransform.d == 0) {FirstAssetOrientation_= UIImageOrientationRight; isFirstAssetPortrait_ = YES;}
if(firstTransform.a == 0 && firstTransform.b == -1.0 && firstTransform.c == 1.0 && firstTransform.d == 0) {FirstAssetOrientation_ = UIImageOrientationLeft; isFirstAssetPortrait_ = YES;}
if(firstTransform.a == 1.0 && firstTransform.b == 0 && firstTransform.c == 0 && firstTransform.d == 1.0) {FirstAssetOrientation_ = UIImageOrientationUp;}
if(firstTransform.a == -1.0 && firstTransform.b == 0 && firstTransform.c == 0 && firstTransform.d == -1.0) {FirstAssetOrientation_ = UIImageOrientationDown;}
// Scale so the track's width fills 320pt; for portrait tracks the natural
// size is sideways, so the height is used instead.
CGFloat FirstAssetScaleToFitRatio = 320.0/FirstAssetTrack.naturalSize.width;
if(isFirstAssetPortrait_){
FirstAssetScaleToFitRatio = 320.0/FirstAssetTrack.naturalSize.height;
CGAffineTransform FirstAssetScaleFactor = CGAffineTransformMakeScale(FirstAssetScaleToFitRatio,FirstAssetScaleToFitRatio);
[FirstlayerInstruction setTransform:CGAffineTransformConcat(FirstAssetTrack.preferredTransform, FirstAssetScaleFactor) atTime:kCMTimeZero];
}else{
CGAffineTransform FirstAssetScaleFactor = CGAffineTransformMakeScale(FirstAssetScaleToFitRatio,FirstAssetScaleToFitRatio);
// Landscape: also shift down 160pt to vertically centre in 480pt.
[FirstlayerInstruction setTransform:CGAffineTransformConcat(CGAffineTransformConcat(FirstAssetTrack.preferredTransform, FirstAssetScaleFactor),CGAffineTransformMakeTranslation(0, 160)) atTime:kCMTimeZero];
}
// Hide the first layer once the first asset's duration elapses so the
// second asset becomes visible.
[FirstlayerInstruction setOpacity:0.0 atTime:firstAsset.duration];
// Same detection/scale/translate dance for the second asset.
AVMutableVideoCompositionLayerInstruction *SecondlayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:firstTrack];
AVAssetTrack *SecondAssetTrack = [[secondAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
UIImageOrientation SecondAssetOrientation_ = UIImageOrientationUp;
BOOL isSecondAssetPortrait_ = NO;
CGAffineTransform secondTransform = SecondAssetTrack.preferredTransform;
if(secondTransform.a == 0 && secondTransform.b == 1.0 && secondTransform.c == -1.0 && secondTransform.d == 0) {SecondAssetOrientation_= UIImageOrientationRight; isSecondAssetPortrait_ = YES;}
if(secondTransform.a == 0 && secondTransform.b == -1.0 && secondTransform.c == 1.0 && secondTransform.d == 0) {SecondAssetOrientation_ = UIImageOrientationLeft; isSecondAssetPortrait_ = YES;}
if(secondTransform.a == 1.0 && secondTransform.b == 0 && secondTransform.c == 0 && secondTransform.d == 1.0) {SecondAssetOrientation_ = UIImageOrientationUp;}
if(secondTransform.a == -1.0 && secondTransform.b == 0 && secondTransform.c == 0 && secondTransform.d == -1.0) {SecondAssetOrientation_ = UIImageOrientationDown;}
CGFloat SecondAssetScaleToFitRatio = 320.0/SecondAssetTrack.naturalSize.width;
if(isSecondAssetPortrait_){
SecondAssetScaleToFitRatio = 320.0/SecondAssetTrack.naturalSize.height;
CGAffineTransform SecondAssetScaleFactor = CGAffineTransformMakeScale(SecondAssetScaleToFitRatio,SecondAssetScaleToFitRatio);
[SecondlayerInstruction setTransform:CGAffineTransformConcat(SecondAssetTrack.preferredTransform, SecondAssetScaleFactor) atTime:firstAsset.duration];
}else{
;
CGAffineTransform SecondAssetScaleFactor = CGAffineTransformMakeScale(SecondAssetScaleToFitRatio,SecondAssetScaleToFitRatio);
[SecondlayerInstruction setTransform:CGAffineTransformConcat(CGAffineTransformConcat(SecondAssetTrack.preferredTransform, SecondAssetScaleFactor),CGAffineTransformMakeTranslation(0, 160)) atTime:secondAsset.duration];
}
// Only the second layer instruction is installed here (see review note above).
MainInstruction.layerInstructions = [NSArray arrayWithObjects:SecondlayerInstruction,nil];;
AVMutableVideoComposition *MainCompositionInst = [AVMutableVideoComposition videoComposition];
MainCompositionInst.instructions = [NSArray arrayWithObject:MainInstruction];
MainCompositionInst.frameDuration = CMTimeMake(1, 30);
MainCompositionInst.renderSize = CGSizeMake(320.0, 480.0);
// Now you have an orientation-fixed instruction layer.
// Add this composition to your video.
// If you want to export the video, you can do it like below.
// Randomized output filename under the app's Documents directory.
NSString *documentsDirectory = [NSHomeDirectory()
stringByAppendingPathComponent:@"Documents"];
NSString *myPathDocs = [documentsDirectory stringByAppendingPathComponent:[NSString stringWithFormat:@"final_merged_video-%d.mp4",arc4random() % 1000]];
NSURL *url = [NSURL fileURLWithPath:myPathDocs];
// 5 - Create exporter
AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition
presetName:AVAssetExportPreset640x480];
exporter.outputURL=url;
exporter.videoComposition=MainCompositionInst;
exporter.outputFileType = AVFileTypeQuickTimeMovie;
exporter.shouldOptimizeForNetworkUse = YES;
// Export runs asynchronously; completion work is bounced to the main queue
// before touching UI (hiding the indicator).
[exporter exportAsynchronouslyWithCompletionHandler:^{
dispatch_async(dispatch_get_main_queue(), ^
{
[[AppDelegate Getdelegate] hideIndicator];
[self exportDidFinish:exporter];
});
}];有关Swift,请参阅以下答案Click here
此外,您还可以尝试通过应用旋转变换来旋转您的视频图层。
// Alternative workaround: rotate the AVPlayerLayer itself instead of fixing
// the composition. Degrees-to-radians helper macro.
// FIX: parenthesize the macro argument so an expression argument expands
// correctly — degreeToRadian(a + b) must become (M_PI * (a + b) / 180.0),
// not (M_PI * a + b / 180.0).
#define degreeToRadian(x) (M_PI * (x) / 180.0)
[_playerLayer setAffineTransform:CGAffineTransformMakeRotation(degreeToRadian(degree))]发布于 2017-07-04 19:31:57
如果你想使用AVMutableComposition,你应该把AVAssetTrack的preferredTransform设置到AVMutableCompositionTrack的preferredTransform上。
// Accepted approach: when playing an AVMutableComposition, copy the source
// AVAssetTrack's preferredTransform onto the AVMutableCompositionTrack so the
// video composition renders with the correct orientation.
let asset = AVAsset(url: url!)
let composition = AVMutableComposition()
let compositionTrack = composition.addMutableTrack(withMediaType: AVMediaTypeVideo, preferredTrackID: kCMPersistentTrackID_Invalid)
// First (assumed only) video track of the source asset.
let videoTrack = asset.tracks(withMediaType: AVMediaTypeVideo).first
// Insert the full source video track; insertion errors are discarded by `try?`.
try? compositionTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, asset.duration), of: videoTrack!, at: kCMTimeZero)
// The key line: without this the composition track keeps the identity
// transform and rotated footage plays sideways.
compositionTrack.preferredTransform = (videoTrack?.preferredTransform)!
let playerItem = AVPlayerItem(asset: composition)
let filter = CIFilter(name: "CIColorInvert")
// Per-frame handler feeds the source image through the CIFilter.
playerItem.videoComposition = AVVideoComposition(asset: composition, applyingCIFiltersWithHandler: { (request: AVAsynchronousCIImageFilteringRequest) in
filter?.setValue(request.sourceImage, forKey: kCIInputImageKey)
request.finish(with: (filter?.outputImage)!, context: nil)
})
.... the rest of the code
原始回答链接:https://stackoverflow.com/questions/44911802
复制相似问题