1
Fork 0

Make time-lapse export cancellable

This commit is contained in:
Joshua Goins 2021-09-29 17:54:04 -04:00
parent 7e5028efa7
commit c9f7923ed1
2 changed files with 38 additions and 59 deletions

View file

@@ -60,7 +60,11 @@ class AppDelegate: NSObject, NSApplicationDelegate, NSUserInterfaceValidations {
}
@IBAction func exportTimelapseAction(_ sender: Any) {
let document = NSApplication.shared.keyWindow?.windowController?.document as? Document;
guard let originalWindow = NSApplication.shared.keyWindow else {
return
}
let document = originalWindow.windowController?.document as? Document;
let savePanel = NSSavePanel()
savePanel.title = "Save Timelapse"
@@ -74,10 +78,8 @@ class AppDelegate: NSObject, NSApplicationDelegate, NSUserInterfaceValidations {
let directory = NSTemporaryDirectory()
let mixComposition = AVMutableComposition()
var duration = CMTime.zero
var instructions: [AVMutableVideoCompositionLayerInstruction] = []
var duration = CMTime.zero
for entry in archive.makeIterator() {
if entry.path.contains(VideoPath) {
@@ -90,60 +92,52 @@ class AppDelegate: NSObject, NSApplicationDelegate, NSUserInterfaceValidations {
let asset = AVAsset(url: fullURL)
guard
let firstTrack = mixComposition.addMutableTrack(
withMediaType: .video,
preferredTrackID: Int32(kCMPersistentTrackID_Invalid))
else { return }
// 3
do {
try firstTrack.insertTimeRange(
CMTimeRangeMake(start: .zero, duration: asset.duration),
of: asset.tracks(withMediaType: .video)[0],
at: duration)
} catch {
print("Failed to load first track")
guard let track = mixComposition.addMutableTrack(withMediaType: .video, preferredTrackID: Int32(kCMPersistentTrackID_Invalid)) else {
return
}
try? track.insertTimeRange(CMTimeRangeMake(start: .zero, duration: asset.duration),
of: asset.tracks(withMediaType: .video)[0],
at: duration)
duration = CMTimeAdd(duration, asset.duration)
let firstInstruction = AVMutableVideoCompositionLayerInstruction(
assetTrack: firstTrack)
firstInstruction.setOpacity(0.0, at: duration + asset.duration)
let opacityInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: track)
opacityInstruction.setOpacity(0.0, at: duration + asset.duration)
instructions.append(firstInstruction)
instructions.append(opacityInstruction)
}
}
let mainInstruction = AVMutableVideoCompositionInstruction()
mainInstruction.timeRange = CMTimeRangeMake(
start: .zero,
duration: duration)
mainInstruction.timeRange = CMTimeRangeMake(start: .zero, duration: duration)
mainInstruction.layerInstructions = instructions
let mainComposition = AVMutableVideoComposition()
mainComposition.instructions = [mainInstruction]
mainComposition.frameDuration = CMTimeMake(value: 1, timescale: 30)
mainComposition.renderSize = CGSize(
width: document!.info.videoFrameWidth,
height: document!.info.videoFrameHeight)
self.exporter = AVAssetExportSession(
asset: mixComposition,
presetName: AVAssetExportPresetHighestQuality)
mainComposition.renderSize = CGSize(width: document!.info.videoFrame.0, height: document!.info.videoFrame.1)
self.exporter = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality)
self.exporter?.outputURL = savePanel.url!
self.exporter?.outputFileType = AVFileType.mp4
self.exporter?.shouldOptimizeForNetworkUse = true
self.exporter?.videoComposition = mainComposition
let alert = NSAlert()
alert.messageText = "Exporting timelapse..."
alert.addButton(withTitle: "Cancel")
alert.beginSheetModal(for: originalWindow) { (resonse) in
self.exporter?.cancelExport()
alert.window.close()
}
self.exporter?.exportAsynchronously {
dump(self.exporter?.error?.localizedDescription);
if self.exporter?.status != .cancelled {
DispatchQueue.main.sync {
alert.window.close()
}
}
}
}
}
}

View file

@@ -42,8 +42,7 @@ struct SilicaDocument {
var layers: [SilicaLayer] = []
var videoFrameWidth: Int = 0
var videoFrameHeight: Int = 0
var videoFrame: (Int, Int) = (0, 0)
lazy var nsSize = {
return NSSize(width: width, height: height)
@@ -376,18 +375,7 @@ class Document: NSDocument {
let frameSizeClassID = getClassID(id: frameSizeClassKey)
let frameSize = objectsArray[frameSizeClassID] as! String
// frameSize
//SilicaDocumentVideoSegmentInfoKey
// videoQualityKey
dump(frameSize, indent: 5)
guard let (frameWidth, frameHeight) = parsePairString(frameSize) else {
return
}
info.videoFrameWidth = frameWidth
info.videoFrameHeight = frameHeight
info.videoFrame = parsePairString(frameSize)!
let colorProfileClassKey = dict["colorProfile"]
let colorProfileClassID = getClassID(id: colorProfileClassKey)
@@ -437,12 +425,9 @@ class Document: NSDocument {
let sizeClassID = getClassID(id: sizeClassKey)
let sizeString = objectsArray[sizeClassID] as! String
let sizeComponents = sizeString.replacingOccurrences(of: "{", with: "").replacingOccurrences(of: "}", with: "").components(separatedBy: ", ")
let width = Int(sizeComponents[0])
let height = Int(sizeComponents[1])
info.width = width!
info.height = height!
let (width, height) = parsePairString(sizeString)!
info.width = width
info.height = height
columns = Int(ceil(Float(info.width) / Float(info.tileSize)))
rows = Int(ceil(Float(info.height) / Float(info.tileSize))) + 1 // TODO: lol why