I'm trying to build an app with photo-editing features similar to Snapchat or Instagram. I'm using GIFs so that users can add animated stickers/images to a photo, but I don't know how to export them. So far I'm able to export the static edit, just without any of the animated GIFs. I can convert the GIF to MP4, but I still don't know how to overlay that on the video. I'd really appreciate any help.
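For reference, the direction that seems most promising to me for the animated stickers is to keep using AVVideoCompositionCoreAnimationTool, but drive the sticker layer with a CAKeyframeAnimation built from the GIF's frames instead of a static snapshot. Below is only a rough sketch of that idea; gifAnimation(from:) is a hypothetical helper, not something already in my project, and I haven't verified it end to end:

import AVFoundation
import ImageIO
import UIKit

// Hypothetical helper (not from the code below): turns the frames of a GIF into a
// CAKeyframeAnimation on a layer's `contents`, so the sticker keeps animating in the export.
func gifAnimation(from gifData: Data) -> CAKeyframeAnimation? {
    guard let source = CGImageSourceCreateWithData(gifData as CFData, nil) else { return nil }

    var frames: [CGImage] = []
    var delays: [Double] = []
    for index in 0..<CGImageSourceGetCount(source) {
        guard let frame = CGImageSourceCreateImageAtIndex(source, index, nil) else { continue }
        frames.append(frame)
        // Per-frame delay from the GIF metadata, with a 0.1 s fallback.
        var delay = 0.1
        if let properties = CGImageSourceCopyPropertiesAtIndex(source, index, nil) as? [String: Any],
           let gifInfo = properties[kCGImagePropertyGIFDictionary as String] as? [String: Any],
           let frameDelay = gifInfo[kCGImagePropertyGIFDelayTime as String] as? Double {
            delay = frameDelay
        }
        delays.append(delay)
    }

    let totalDuration = delays.reduce(0, +)
    guard !frames.isEmpty, totalDuration > 0 else { return nil }

    // Cumulative key times normalized to 0...1 (one more entry than values, for .discrete).
    var keyTimes: [NSNumber] = [0]
    var elapsed = 0.0
    for delay in delays {
        elapsed += delay
        keyTimes.append(NSNumber(value: elapsed / totalDuration))
    }

    let animation = CAKeyframeAnimation(keyPath: "contents")
    animation.values = frames
    animation.keyTimes = keyTimes
    animation.duration = totalDuration
    animation.calculationMode = .discrete
    animation.repeatCount = .greatestFiniteMagnitude
    animation.isRemovedOnCompletion = false
    // Times the animation against the video's timeline instead of the wall clock.
    animation.beginTime = AVCoreAnimationBeginTimeAtZero
    return animation
}

// Rough usage inside the export function, before the animation tool is created:
// if let animation = gifAnimation(from: stickerGIFData) {
//     watermarkLayer.add(animation, forKey: "gifFrames")
// }

As far as I can tell, the important parts would be beginTime = AVCoreAnimationBeginTimeAtZero (so the animation is timed against the video rather than the wall clock) and isRemovedOnCompletion = false.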
Here is the function I use to export the static edit. I'm not very experienced with AVFoundation, and I've searched online for a solution without any luck.
// Called from inside a view controller; requires `import AVFoundation`, `import Photos` and `import UIKit`.
// `tempImageView`, `asImage()`, `deleteFile(filePath:)` and `ProgressHUD` are defined elsewhere in the project.
func convertVideoAndSaveTophotoLibrary(videoURL: URL) {
    let documentsDirectory = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)[0]
    let myDocumentPath = URL(fileURLWithPath: documentsDirectory).appendingPathComponent("temp.mp4").absoluteString
    _ = NSURL(fileURLWithPath: myDocumentPath)
    let documentsDirectory2 = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0] as URL
    let filePath = documentsDirectory2.appendingPathComponent("video.mp4")
    deleteFile(filePath: filePath as NSURL)

    // Check if the file already exists, then remove the previous file
    if FileManager.default.fileExists(atPath: myDocumentPath) {
        do {
            try FileManager.default.removeItem(atPath: myDocumentPath)
        } catch let error {
            print(error)
        }
    }

    // File to composite
    let asset = AVURLAsset(url: videoURL as URL)
    // Note: this composition is never used below -- the exporter is created from the original asset.
    let composition = AVMutableComposition()
    composition.addMutableTrack(withMediaType: AVMediaType.video, preferredTrackID: kCMPersistentTrackID_Invalid)
    let clipVideoTrack = asset.tracks(withMediaType: AVMediaType.video)[0]

    // Rotate to portrait
    let transformer = AVMutableVideoCompositionLayerInstruction(assetTrack: clipVideoTrack)
    let videoTransform: CGAffineTransform = clipVideoTrack.preferredTransform

    // Fix orientation by inspecting the preferredTransform coefficients
    var videoAssetOrientation_ = UIImage.Orientation.right
    var isVideoAssetPortrait_ = false
    var text = "none"
    if videoTransform.a == 0 && videoTransform.b == 1.0 && videoTransform.c == -1.0 && videoTransform.d == 0 {
        videoAssetOrientation_ = UIImage.Orientation.right
        isVideoAssetPortrait_ = true
        text = "right"
    }
    if videoTransform.a == 0 && videoTransform.b == 1.0 && videoTransform.c == 1.0 && videoTransform.d == 0 {
        videoAssetOrientation_ = UIImage.Orientation.right
        isVideoAssetPortrait_ = true
        text = "special"
        print("this bitch is special :)")
    }
    if videoTransform.a == 0 && videoTransform.b == -1.0 && videoTransform.c == 1.0 && videoTransform.d == 0 {
        videoAssetOrientation_ = UIImage.Orientation.left
        isVideoAssetPortrait_ = true
        text = "left"
    }
    if videoTransform.a == 1.0 && videoTransform.b == 0 && videoTransform.c == 0 && videoTransform.d == 1.0 {
        videoAssetOrientation_ = UIImage.Orientation.up
        text = "up"
    }
    if videoTransform.a == -1.0 && videoTransform.b == 0 && videoTransform.c == 0 && videoTransform.d == -1.0 {
        videoAssetOrientation_ = UIImage.Orientation.down
        text = "down"
    }
    print("width: \(clipVideoTrack.naturalSize.width), height: \(clipVideoTrack.naturalSize.height)")

    if text == "special" {
        print(clipVideoTrack.preferredTransform)
        let transform = CGAffineTransform(a: 0, b: 1, c: 1, d: 0, tx: 0, ty: 0)
        transformer.setTransform(transform.translatedBy(x: 0, y: 0), at: CMTime.zero)
    } else {
        print(clipVideoTrack.preferredTransform)
        transformer.setTransform(clipVideoTrack.preferredTransform, at: CMTime.zero)
    }

    print("width: \(clipVideoTrack.naturalSize.width), height: \(clipVideoTrack.naturalSize.height)")
    print("SIZES: \(videoTransform.a), \(videoTransform.b), \(videoTransform.c), \(videoTransform.tx), \(videoTransform.d), \(videoTransform.ty)")
    print("Video asset orientation: \(text)")
    transformer.setOpacity(0.0, at: asset.duration)

    // Adjust the render size if necessary
    var naturalSize: CGSize
    if isVideoAssetPortrait_ {
        naturalSize = CGSize(width: clipVideoTrack.naturalSize.height, height: clipVideoTrack.naturalSize.width)
    } else {
        naturalSize = clipVideoTrack.naturalSize
    }
    var renderWidth: CGFloat!
    var renderHeight: CGFloat!
    renderWidth = naturalSize.width
    renderHeight = naturalSize.height

    // Layer hierarchy: the video frames are rendered into videoLayer, the sticker overlay sits on top
    let parentlayer = CALayer()
    let videoLayer = CALayer()
    let watermarkLayer = CALayer()
    let videoComposition = AVMutableVideoComposition()
    videoComposition.renderSize = CGSize(width: renderWidth, height: renderHeight)
    videoComposition.frameDuration = CMTimeMake(value: 1, timescale: 30)
    videoComposition.renderScale = 1.0
    // `asImage()` presumably snapshots the sticker view into a single static UIImage,
    // which is why the exported overlay does not animate.
    watermarkLayer.contents = tempImageView.asImage().cgImage
    parentlayer.frame = CGRect(origin: CGPoint(x: 0, y: 0), size: naturalSize)
    videoLayer.frame = CGRect(origin: CGPoint(x: 0, y: 0), size: naturalSize)
    watermarkLayer.frame = CGRect(origin: CGPoint(x: 0, y: 0), size: naturalSize)
    parentlayer.addSublayer(videoLayer)
    parentlayer.addSublayer(watermarkLayer)

    // Add the watermark to the video
    videoComposition.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayers: [videoLayer], in: parentlayer)

    let instruction = AVMutableVideoCompositionInstruction()
    // Time range is currently hard-coded to 60 seconds
    instruction.timeRange = CMTimeRangeMake(start: CMTime.zero, duration: CMTimeMakeWithSeconds(60, preferredTimescale: 30))
    instruction.layerInstructions = [transformer]
    videoComposition.instructions = [instruction]

    // Export and save to the photo library
    let exporter = AVAssetExportSession(asset: asset, presetName: AVAssetExportPresetHighestQuality)
    exporter?.outputFileType = AVFileType.mov
    exporter?.outputURL = filePath
    exporter?.videoComposition = videoComposition
    exporter?.exportAsynchronously(completionHandler: { () -> Void in
        if exporter?.status == .completed {
            let outputURL: URL? = exporter?.outputURL
            PHPhotoLibrary.shared().performChanges({
                PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: outputURL!)
            }) { saved, error in
                if saved {
                    ProgressHUD.showSuccess("Video Saved")
                    let fetchOptions = PHFetchOptions()
                    fetchOptions.sortDescriptors = [NSSortDescriptor(key: "creationDate", ascending: true)]
                    let fetchResult = PHAsset.fetchAssets(with: .video, options: fetchOptions).lastObject
                    PHImageManager().requestAVAsset(forVideo: fetchResult!, options: nil, resultHandler: { (avurlAsset, audioMix, dict) in
                        let newObj = avurlAsset as! AVURLAsset
                        print(newObj.url)
                        DispatchQueue.main.async(execute: {
                            print(newObj.url.absoluteString)
                        })
                    })
                    print(fetchResult!)
                }
            }
        }
    })
}
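In case the GIF-converted-to-MP4 route is the better fit, my understanding is that the overlay clip would go in as a second video track of an AVMutableComposition, with its own layer instruction to scale and position it. The sketch below is only that idea (makeOverlayComposition(baseURL:overlayURL:) is a hypothetical function, not working code from my project), and one caveat is that a plain H.264 MP4 has no alpha channel, so the sticker's background would be opaque:

import AVFoundation
import UIKit

// Hypothetical sketch: composite a second video (e.g. a GIF already converted to MP4)
// on top of a base video using two composition tracks and two layer instructions.
func makeOverlayComposition(baseURL: URL, overlayURL: URL) throws -> (AVMutableComposition, AVMutableVideoComposition) {
    let baseAsset = AVURLAsset(url: baseURL)
    let overlayAsset = AVURLAsset(url: overlayURL)

    let composition = AVMutableComposition()
    guard
        let baseTrack = composition.addMutableTrack(withMediaType: .video,
                                                    preferredTrackID: kCMPersistentTrackID_Invalid),
        let overlayTrack = composition.addMutableTrack(withMediaType: .video,
                                                       preferredTrackID: kCMPersistentTrackID_Invalid),
        let baseSource = baseAsset.tracks(withMediaType: .video).first,
        let overlaySource = overlayAsset.tracks(withMediaType: .video).first
    else { throw NSError(domain: "OverlayDemo", code: -1) }

    let timeRange = CMTimeRange(start: .zero, duration: baseAsset.duration)
    try baseTrack.insertTimeRange(timeRange, of: baseSource, at: .zero)
    // A shorter overlay could be looped here; this sketch inserts it once.
    try overlayTrack.insertTimeRange(CMTimeRange(start: .zero, duration: min(overlayAsset.duration, baseAsset.duration)),
                                     of: overlaySource, at: .zero)

    // The base layer fills the frame; the overlay is scaled down and positioned.
    let baseInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: baseTrack)
    let overlayInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: overlayTrack)
    let scale = CGAffineTransform(scaleX: 0.3, y: 0.3)
    let move = CGAffineTransform(translationX: 40, y: 40)
    overlayInstruction.setTransform(scale.concatenating(move), at: .zero)

    let instruction = AVMutableVideoCompositionInstruction()
    instruction.timeRange = timeRange
    // Order matters: the first layer instruction in the array is rendered on top.
    instruction.layerInstructions = [overlayInstruction, baseInstruction]

    // Orientation handling omitted for brevity.
    let videoComposition = AVMutableVideoComposition()
    videoComposition.renderSize = baseSource.naturalSize
    videoComposition.frameDuration = CMTimeMake(value: 1, timescale: 30)
    videoComposition.instructions = [instruction]

    return (composition, videoComposition)
}

The returned composition would then be passed to AVAssetExportSession as its asset, with the returned video composition assigned to its videoComposition, much like in the function above.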
Thanks for your help; let me know if you have any questions or need clarification.