Last active
January 31, 2020 13:31
-
-
Save woxtu/a918c354a51911372c60d2e1be91e6e9 to your computer and use it in GitHub Desktop.
Revisions
-
woxtu revised this gist
Sep 22, 2016. 1 changed file with 2 additions and 2 deletions. There are no files selected for viewing
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters. Learn more about bidirectional Unicode charactersOriginal file line number Diff line number Diff line change @@ -69,7 +69,7 @@ do { if FileManager.default.fileExists(atPath: outputPath) { try FileManager.default.removeItem(at: outputUrl) } } catch let error { fatalError(error.localizedDescription) } @@ -78,7 +78,7 @@ let asset = AVAsset(url: URL(fileURLWithPath: inputPath)) let writer: AVAssetWriter do { writer = try AVAssetWriter(outputURL: outputUrl, fileType: AVFileTypeMPEG4) } catch let error { fatalError(error.localizedDescription) } -
woxtu revised this gist
Sep 22, 2016. 1 changed file with 35 additions and 35 deletions. There are no files selected for viewing
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters. Learn more about bidirectional Unicode charactersOriginal file line number Diff line number Diff line change @@ -3,25 +3,25 @@ import AVFoundation extension Array { func nth(_ index: Int) -> Array.Element? { return (self.indices ~= index) ? self[index] : nil } } extension CGImage { var frame: CGRect { return CGRect(x: 0, y: 0, width: self.width, height: self.height) } } extension AVAsset { var size: CGSize { return self.tracks(withMediaType: AVMediaTypeVideo).nth(0)?.naturalSize ?? CGSize.zero } } extension AVAssetWriterInputPixelBufferAdaptor { func append(image: CGImage, withPresentationTime presentationTime: CMTime) -> Bool { guard let pixelBufferPool = self.pixelBufferPool else { fatalError("Failed to allocate the PixelBufferPool") } @@ -33,51 +33,51 @@ extension AVAssetWriterInputPixelBufferAdaptor { fatalError("Failed to create the PixelBuffer") } CVPixelBufferLockBaseAddress(pixelBuffer, CVPixelBufferLockFlags(rawValue: CVOptionFlags(0))) let context = CGContext( data: CVPixelBufferGetBaseAddress(pixelBuffer), width: image.width, height: image.height, bitsPerComponent: image.bitsPerComponent, bytesPerRow: image.bytesPerRow, space: CGColorSpaceCreateDeviceRGB(), bitmapInfo: image.bitmapInfo.rawValue) context?.draw(image, in: image.frame) CVPixelBufferUnlockBaseAddress(pixelBuffer, CVPixelBufferLockFlags(rawValue: CVOptionFlags(0))) return self.append(pixelBuffer, withPresentationTime: presentationTime) } } // ₍₍ (ง╹◡╹)ว ⁾⁾ guard let inputPath = CommandLine.arguments.nth(1) else { print("USAGE: timelapse <input-path> [output-path] [sampling-interval] [frame-rate]") exit(0) } let outputPath = CommandLine.arguments.nth(2) ?? "output.mp4" let interval = CommandLine.arguments.nth(3).flatMap { Double($0) } ?? 
1 let frameRate = CommandLine.arguments.nth(4).flatMap { Int32($0) } ?? 15 let outputUrl = URL(fileURLWithPath: outputPath) let semaphore = DispatchSemaphore(value: 0) do { if FileManager.default.fileExists(atPath: outputPath) { try FileManager.default.removeItem(at: outputUrl) } } catch let error as NSError { fatalError(error.localizedDescription) } let asset = AVAsset(url: URL(fileURLWithPath: inputPath)) let writer: AVAssetWriter do { writer = try AVAssetWriter(outputURL: outputUrl, fileType: AVFileTypeMPEG4) } catch let error as NSError { fatalError(error.localizedDescription) } @@ -99,27 +99,27 @@ let adaptor = AVAssetWriterInputPixelBufferAdaptor( ]) var times = [kCMTimeZero] while let current = times.last, current < asset.duration { times.append(current + CMTimeMakeWithSeconds(interval, 100)) } writer.add(input) writer.startWriting() writer.startSession(atSourceTime: kCMTimeZero) AVAssetImageGenerator(asset: asset) .generateCGImagesAsynchronously(forTimes: times.map { NSValue(time: $0) }) { time, image, _, _, _ in if let image = image { let _ = adaptor.append(image: image, withPresentationTime: CMTimeMake(Int64(times.index(of: time)!), frameRate)) } if times.last == time { input.markAsFinished() writer.endSession(atSourceTime: CMTimeMake(Int64(times.count), frameRate)) writer.finishWriting { semaphore.signal() } } } let _ = semaphore.wait(timeout: DispatchTime.distantFuture) -
woxtu revised this gist
Apr 6, 2016. 1 changed file with 1 addition and 1 deletion. There are no files selected for viewing
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters. Learn more about bidirectional Unicode charactersOriginal file line number Diff line number Diff line change @@ -16,7 +16,7 @@ extension CGImage { extension AVAsset { var size: CGSize { return self.tracksWithMediaType(AVMediaTypeVideo).nth(0)?.naturalSize ?? CGSizeZero } } -
woxtu revised this gist
Apr 5, 2016. 1 changed file with 54 additions and 65 deletions. There are no files selected for viewing
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters. Learn more about bidirectional Unicode charactersOriginal file line number Diff line number Diff line change @@ -18,23 +18,6 @@ extension AVAsset { var size: CGSize { return self.tracksWithMediaType(AVMediaTypeVideo)[0].naturalSize } } extension AVAssetWriterInputPixelBufferAdaptor { @@ -70,67 +53,73 @@ extension AVAssetWriterInputPixelBufferAdaptor { // ₍₍ (ง╹◡╹)ว ⁾⁾ guard let inputPath = Process.arguments.nth(1) else { print("USAGE: timelapse <input-path> [output-path] [sampling-interval] [frame-rate]") exit(0) } let outputPath = Process.arguments.nth(2) ?? "output.mp4" let interval = Process.arguments.nth(3).flatMap { Double($0) } ?? 1 let frameRate = Process.arguments.nth(4).flatMap { Int32($0) } ?? 15 let outputUrl = NSURL(fileURLWithPath: outputPath) let semaphore = dispatch_semaphore_create(0) do { if NSFileManager.defaultManager().fileExistsAtPath(outputPath) { try NSFileManager.defaultManager().removeItemAtURL(outputUrl) } } catch let error as NSError { fatalError(error.localizedDescription) } let asset = AVAsset(URL: NSURL(fileURLWithPath: inputPath)) let writer: AVAssetWriter do { writer = try AVAssetWriter(URL: outputUrl, fileType: AVFileTypeMPEG4) } catch let error as NSError { fatalError(error.localizedDescription) } let input = AVAssetWriterInput( mediaType: AVMediaTypeVideo, outputSettings: [ AVVideoCodecKey: AVVideoCodecH264, AVVideoWidthKey: asset.size.width, AVVideoHeightKey: asset.size.height, ]) let adaptor = AVAssetWriterInputPixelBufferAdaptor( assetWriterInput: input, sourcePixelBufferAttributes: [ kCVPixelBufferPixelFormatTypeKey as String: Int(kCVPixelFormatType_32ARGB), kCVPixelBufferWidthKey as String: asset.size.width, kCVPixelBufferHeightKey as String: asset.size.height, ]) var times = [kCMTimeZero] while let current = times.last 
where current < asset.duration { times.append(current + CMTimeMakeWithSeconds(interval, 100)) } writer.addInput(input) writer.startWriting() writer.startSessionAtSourceTime(kCMTimeZero) AVAssetImageGenerator(asset: asset) .generateCGImagesAsynchronouslyForTimes(times.map { NSValue(CMTime: $0) }) { time, image, _, _, _ in if let image = image { adaptor.appendCGImage(image, withPresentationTime: CMTimeMake(Int64(times.indexOf(time)!), frameRate)) } if times.last == time { input.markAsFinished() writer.endSessionAtSourceTime(CMTimeMake(Int64(times.count), frameRate)) writer.finishWritingWithCompletionHandler { dispatch_semaphore_signal(semaphore) } } } dispatch_semaphore_wait(semaphore, DISPATCH_TIME_FOREVER) -
woxtu created this gist
Apr 2, 2016. There are no files selected for viewing
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters. Learn more about bidirectional Unicode charactersOriginal file line number Diff line number Diff line change @@ -0,0 +1,13 @@ DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE Version 2, December 2004 Copyright (C) 2004 Sam Hocevar <sam@hocevar.net> Everyone is permitted to copy and distribute verbatim or modified copies of this license document, and changing it is allowed as long as the name is changed. DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION 0. You just DO WHAT THE FUCK YOU WANT TO. This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters. Learn more about bidirectional Unicode charactersOriginal file line number Diff line number Diff line change @@ -0,0 +1,136 @@ #!/usr/bin/env xcrun swift import AVFoundation extension Array { func nth(index: Int) -> Array.Element? { return (self.indices ~= index) ? 
self[index] : nil } } extension CGImage { var frame: CGRect { return CGRectMake(0, 0, CGFloat(CGImageGetWidth(self)), CGFloat(CGImageGetHeight(self))) } } extension AVAsset { var size: CGSize { return self.tracksWithMediaType(AVMediaTypeVideo)[0].naturalSize } func generateCGImagesAsynchronously(interval: CMTime, completion: ([CGImage] -> ())) { var times = [kCMTimeZero] while let current = times.last where current < self.duration { times.append(current + interval) } var images = [CGImage?]() AVAssetImageGenerator(asset: self) .generateCGImagesAsynchronouslyForTimes(times.map { NSValue(CMTime: $0) }) { time, image, _, _, _ in images.append(image) if times.last == time { completion(images.flatMap { $0 }) } } } } extension AVAssetWriterInputPixelBufferAdaptor { func appendCGImage(image: CGImage, withPresentationTime presentationTime: CMTime) -> Bool { guard let pixelBufferPool = self.pixelBufferPool else { fatalError("Failed to allocate the PixelBufferPool") } var pixelBufferOut: CVPixelBuffer? = nil CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault, pixelBufferPool, &pixelBufferOut) guard let pixelBuffer = pixelBufferOut else { fatalError("Failed to create the PixelBuffer") } CVPixelBufferLockBaseAddress(pixelBuffer, 0) let context = CGBitmapContextCreate( CVPixelBufferGetBaseAddress(pixelBuffer), CGImageGetWidth(image), CGImageGetHeight(image), CGImageGetBitsPerComponent(image), CGImageGetBytesPerRow(image), CGColorSpaceCreateDeviceRGB(), CGImageGetBitmapInfo(image).rawValue) CGContextDrawImage(context, image.frame, image) CVPixelBufferUnlockBaseAddress(pixelBuffer, 0) return self.appendPixelBuffer(pixelBuffer, withPresentationTime: presentationTime) } } // ₍₍ (ง╹◡╹)ว ⁾⁾ guard let input = Process.arguments.nth(1) else { print("USAGE: timelapse <input-path> [output-path] [sampling-interval] [frame-rate]") exit(0) } let output = Process.arguments.nth(2) ?? "output.mp4" let interval = Process.arguments.nth(3).flatMap { Double($0) } ?? 
1 let rate = Process.arguments.nth(4).flatMap { Int32($0) } ?? 15 let semaphore = dispatch_semaphore_create(0) let asset = AVAsset(URL: NSURL(fileURLWithPath: input)) asset.generateCGImagesAsynchronously(CMTimeMakeWithSeconds(interval, 100)) { images in let outputUrl = NSURL(fileURLWithPath: output) do { if NSFileManager.defaultManager().fileExistsAtPath(output) { try NSFileManager.defaultManager().removeItemAtURL(outputUrl) } } catch let error as NSError { fatalError(error.localizedDescription) } let writer: AVAssetWriter do { writer = try AVAssetWriter(URL: outputUrl, fileType: AVFileTypeMPEG4) } catch let error as NSError { fatalError(error.localizedDescription) } let input = AVAssetWriterInput( mediaType: AVMediaTypeVideo, outputSettings: [ AVVideoCodecKey: AVVideoCodecH264, AVVideoWidthKey: asset.size.width, AVVideoHeightKey: asset.size.height, ]) let adaptor = AVAssetWriterInputPixelBufferAdaptor( assetWriterInput: input, sourcePixelBufferAttributes: [ kCVPixelBufferPixelFormatTypeKey as String: Int(kCVPixelFormatType_32ARGB), kCVPixelBufferWidthKey as String: asset.size.width, kCVPixelBufferHeightKey as String: asset.size.height, ]) writer.addInput(input) writer.startWriting() writer.startSessionAtSourceTime(kCMTimeZero) for (frame, image) in images.enumerate() { autoreleasepool { adaptor.appendCGImage(image, withPresentationTime: CMTimeMake(Int64(frame), rate)) } } input.markAsFinished() writer.endSessionAtSourceTime(CMTimeMake(Int64(images.count), rate)) writer.finishWritingWithCompletionHandler { dispatch_semaphore_signal(semaphore) } } dispatch_semaphore_wait(semaphore, DISPATCH_TIME_FOREVER)