Fixes #1 #2 #4
Omer Karisman committed Sep 8, 2017
1 parent 4c78ca0 commit 70fb306
Showing 3 changed files with 72 additions and 61 deletions.
6 changes: 2 additions & 4 deletions SceneKitVideoRecorder/Classes/Options.swift
@@ -10,25 +10,23 @@ import AVFoundation
extension SceneKitVideoRecorder {
public struct Options {
public var timeScale: Int32
public var renderSize: CGSize
public var videoSize: CGSize
public var fps: Int
public var outputUrl: URL
public var fileType: String
public var codec: String
public var deleteFileIfExists: Bool

public static var `default`: Options {
return Options(timeScale: 600,
renderSize: CGSize(width: 640, height: 640),
videoSize: CGSize(width: 1280, height: 720),
fps: 60,
outputUrl: URL(fileURLWithPath: NSTemporaryDirectory() + "output.mp4"),
fileType: AVFileTypeAppleM4V,
codec: AVVideoCodecH264,
deleteFileIfExists: true)
}

var assetWriterInputSettings: [String : Any] {
return [
AVVideoCodecKey: codec,
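
The Options change above renames renderSize to videoSize and raises the default size to 1280x720. Below is a minimal sketch of how a caller might override those defaults after this commit; the module import, the 1920x1080 size, and the capture.mp4 filename are illustrative assumptions, not part of the diff.

import UIKit
import AVFoundation
import SceneKitVideoRecorder // assumed module name for the pod

var options = SceneKitVideoRecorder.Options.default
// videoSize replaces the old renderSize field and now defaults to 1280x720.
options.videoSize = CGSize(width: 1920, height: 1080)
options.fps = 60
options.outputUrl = URL(fileURLWithPath: NSTemporaryDirectory() + "capture.mp4")
options.deleteFileIfExists = true
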
13 changes: 6 additions & 7 deletions SceneKitVideoRecorder/Classes/PixelBufferFactory.swift
@@ -15,21 +15,20 @@

static let context = CIContext(mtlDevice: MTLCreateSystemDefaultDevice()!)

static func make(with metalLayer: CAMetalLayer, usingBuffer pool: CVPixelBufferPool) -> (CVPixelBuffer?, UIImage) {
static func make(with currentDrawable: CAMetalDrawable, usingBuffer pool: CVPixelBufferPool) -> (CVPixelBuffer?, UIImage) {

let currentDrawable = metalLayer.nextDrawable()
let destinationTexture = currentDrawable?.texture
let destinationTexture = currentDrawable.texture

var pixelBuffer: CVPixelBuffer?
let status = CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault, pool, &pixelBuffer)
_ = CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault, pool, &pixelBuffer)
if let pixelBuffer = pixelBuffer {
CVPixelBufferLockBaseAddress(pixelBuffer, CVPixelBufferLockFlags.init(rawValue: 0))
let region = MTLRegionMake2D(0, 0, Int((currentDrawable?.layer.drawableSize.width)!), Int((currentDrawable?.layer.drawableSize.height)!))

let region = MTLRegionMake2D(0, 0, Int(currentDrawable.layer.drawableSize.width), Int(currentDrawable.layer.drawableSize.height))
print(currentDrawable.layer.drawableSize)
let bytesPerRow = CVPixelBufferGetBytesPerRow(pixelBuffer)

let tempBuffer = CVPixelBufferGetBaseAddress(pixelBuffer)
destinationTexture?.getBytes(tempBuffer!, bytesPerRow: Int(bytesPerRow), from: region, mipmapLevel: 0)
destinationTexture.getBytes(tempBuffer!, bytesPerRow: Int(bytesPerRow), from: region, mipmapLevel: 0)

let image = imageFromCVPixelBuffer(buffer: pixelBuffer)
CVPixelBufferUnlockBaseAddress(pixelBuffer, CVPixelBufferLockFlags.init(rawValue: 0))
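
With this change, make(with:usingBuffer:) takes the CAMetalDrawable itself rather than the CAMetalLayer, so the caller acquires the drawable before handing it to the factory. Below is a condensed sketch of that flow as the recorder's render path now drives it; the captureFrame helper and its parameters are illustrative and not part of the library.

import UIKit
import Metal
import AVFoundation

// Illustrative helper: acquire the drawable ourselves, then pass it to the factory.
func captureFrame(from metalLayer: CAMetalLayer,
                  into adaptor: AVAssetWriterInputPixelBufferAdaptor,
                  at time: CMTime) {
  guard let drawable = metalLayer.nextDrawable(),
        let pool = adaptor.pixelBufferPool else { return }

  // make(with:usingBuffer:) now expects a CAMetalDrawable, not a CAMetalLayer.
  let (pixelBuffer, _) = PixelBufferFactory.make(with: drawable, usingBuffer: pool)

  if let pixelBuffer = pixelBuffer, adaptor.assetWriterInput.isReadyForMoreMediaData {
    adaptor.append(pixelBuffer, withPresentationTime: time)
  }
}
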
114 changes: 64 additions & 50 deletions SceneKitVideoRecorder/Classes/SceneKitVideoRecorder.swift
@@ -13,134 +13,148 @@
}
//Metal does not work in simulator :(
#else

import UIKit
import SceneKit
import ARKit
import AVFoundation
import Metal
import CoreImage

public class SceneKitVideoRecorder {
private let writer: AVAssetWriter
private let input: AVAssetWriterInput
private let pixelBufferAdaptor: AVAssetWriterInputPixelBufferAdaptor
private var writer: AVAssetWriter!
private var input: AVAssetWriterInput!
private var pixelBufferAdaptor: AVAssetWriterInputPixelBufferAdaptor!
private var options: Options

private var currentDrawable: CAMetalDrawable?

private let frameQueue = DispatchQueue(label: "com.svtek.SceneKitVideoRecorder.frameQueue")
private static let renderQueue = DispatchQueue(label: "com.svtek.SceneKitVideoRecorder.renderQueue", attributes: .concurrent)
private let bufferQueue = DispatchQueue(label: "com.svtek.SceneKitVideoRecorder.bufferQueue", attributes: .concurrent)

private static let renderSemaphore = DispatchSemaphore(value: 3)

private static let frameRenderSemaphore = DispatchSemaphore(value: 1)
private static let bufferAppendSemaphore = DispatchSemaphore(value: 1)

private var displayLink: CADisplayLink? = nil
private var initialTime: CFTimeInterval = 0.0
private var currentTime: CFTimeInterval = 0.0

private var sceneView: SCNView
private var metalLayer: CAMetalLayer

public var updateFrameHandler: ((_ image: UIImage, _ time: CMTime) -> Void)? = nil
private var finishedCompletionHandler: ((_ url: URL) -> Void)? = nil
private let context:CIContext

@available(iOS 11.0, *)
public convenience init?(withARSCNView view: ARSCNView, options: Options = .default) throws {
var options = options
options.renderSize = CGSize(width: view.bounds.width * view.contentScaleFactor, height: view.bounds.height * view.contentScaleFactor)
options.videoSize = CGSize(width: view.bounds.width * view.contentScaleFactor, height: view.bounds.height * view.contentScaleFactor)
try self.init(scene: view, options: options)
}

public init?(scene: SCNView, options: Options = .default) throws {

self.sceneView = scene

self.context = CIContext.init(mtlDevice: MTLCreateSystemDefaultDevice()!)

self.metalLayer = (sceneView.layer as? CAMetalLayer)!
self.metalLayer.framebufferOnly = false

self.options = options

let currentDrawable = metalLayer.nextDrawable()

self.options.videoSize = (currentDrawable?.layer.drawableSize)!

self.writer = try AVAssetWriter(outputURL: self.options.outputUrl,
fileType: self.options.fileType)

prepare(with: self.options)
}

private func prepare(with options: Options) {
self.writer = try! AVAssetWriter(outputURL: self.options.outputUrl,
fileType: self.options.fileType)
self.input = AVAssetWriterInput(mediaType: AVMediaTypeVideo,
outputSettings: self.options.assetWriterInputSettings)

self.input.mediaTimeScale = self.options.timeScale



self.pixelBufferAdaptor = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: input,sourcePixelBufferAttributes: self.options.sourcePixelBufferAttributes)
prepare(with: self.options)

writer.add(input)
}

private func prepare(with options: Options) {
public func cleanUp() {
if options.deleteFileIfExists {
FileController.delete(file: options.outputUrl)
}
writer.add(input)
}

public func startWriting() {
cleanUp()
SceneKitVideoRecorder.renderQueue.async { [weak self] in
SceneKitVideoRecorder.renderSemaphore.wait()
self?.startDisplayLink()
self?.startInputPipeline()
}
}

public func finishWriting(completionHandler: (@escaping (_ url: URL) -> Void)) {
let outputUrl = options.outputUrl
input.markAsFinished()
self.stopDisplayLink()
writer.finishWriting(completionHandler: { [weak self] in
writer.finishWriting(completionHandler: { _ in
completionHandler(outputUrl)
self.prepare(with: self.options)
SceneKitVideoRecorder.renderSemaphore.signal()
})
}

private func startDisplayLink() {

currentTime = 0.0
initialTime = CFAbsoluteTimeGetCurrent()
displayLink = CADisplayLink(target: self, selector: #selector(updateDisplayLink))
displayLink?.preferredFramesPerSecond = options.fps
displayLink?.add(to: .main, forMode: .commonModes)

}

@objc private func updateDisplayLink() {
frameQueue.async { [weak self] in
guard let input = self?.input, input.isReadyForMoreMediaData else { return }
self?.renderSnapshot()
}
}

private func startInputPipeline() {
writer.startWriting()
writer.startSession(atSourceTime: kCMTimeZero)
input.requestMediaDataWhenReady(on: frameQueue, using: {})
}

private func renderSnapshot() {
SceneKitVideoRecorder.frameRenderSemaphore.wait()
if !input.isReadyForMoreMediaData { return }

autoreleasepool {


while (currentDrawable == nil) {
currentDrawable = metalLayer.nextDrawable()
}


SceneKitVideoRecorder.frameRenderSemaphore.wait()


guard let pool = self.pixelBufferAdaptor.pixelBufferPool else { return }

let (pixelBufferTemp, image) = PixelBufferFactory.make(with: metalLayer, usingBuffer: pool)

let (pixelBufferTemp, image) = PixelBufferFactory.make(with: currentDrawable!, usingBuffer: pool)
currentDrawable = nil
guard let pixelBuffer = pixelBufferTemp else { return }
currentTime = CFAbsoluteTimeGetCurrent() - initialTime

let value: Int64 = Int64(currentTime * CFTimeInterval(options.timeScale))
let presentationTime = CMTimeMake(value, options.timeScale)
SceneKitVideoRecorder.bufferAppendSemaphore.wait()
@@ -152,16 +166,16 @@
}
SceneKitVideoRecorder.frameRenderSemaphore.signal()
}




private func stopDisplayLink() {
displayLink?.invalidate()
displayLink = nil
}
}

#endif
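
For reference, a hedged end-to-end sketch of the public API touched by this commit (init(withARSCNView:options:), startWriting(), and finishWriting(completionHandler:)); the view controller scaffolding and error handling are illustrative, not part of the diff.

import UIKit
import ARKit
import SceneKit

@available(iOS 11.0, *)
final class RecordingViewController: UIViewController {
  @IBOutlet var sceneView: ARSCNView!
  private var recorder: SceneKitVideoRecorder?

  override func viewDidLoad() {
    super.viewDidLoad()
    do {
      // The convenience initializer derives options.videoSize from the view's bounds.
      recorder = try SceneKitVideoRecorder(withARSCNView: sceneView)
    } catch {
      print("Recorder setup failed: \(error)")
    }
  }

  func startRecording() {
    recorder?.startWriting()
  }

  func stopRecording() {
    recorder?.finishWriting { url in
      print("Video written to \(url)")
    }
  }
}
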

