diff --git a/SilicaViewer/AppDelegate.swift b/SilicaViewer/AppDelegate.swift index 65d742c..3f83f15 100644 --- a/SilicaViewer/AppDelegate.swift +++ b/SilicaViewer/AppDelegate.swift @@ -60,7 +60,11 @@ class AppDelegate: NSObject, NSApplicationDelegate, NSUserInterfaceValidations { } @IBAction func exportTimelapseAction(_ sender: Any) { - let document = NSApplication.shared.keyWindow?.windowController?.document as? Document; + guard let originalWindow = NSApplication.shared.keyWindow else { + return + } + + let document = originalWindow.windowController?.document as? Document; let savePanel = NSSavePanel() savePanel.title = "Save Timelapse" @@ -74,10 +78,8 @@ class AppDelegate: NSObject, NSApplicationDelegate, NSUserInterfaceValidations { let directory = NSTemporaryDirectory() let mixComposition = AVMutableComposition() - - var duration = CMTime.zero - var instructions: [AVMutableVideoCompositionLayerInstruction] = [] + var duration = CMTime.zero for entry in archive.makeIterator() { if entry.path.contains(VideoPath) { @@ -90,60 +92,52 @@ class AppDelegate: NSObject, NSApplicationDelegate, NSUserInterfaceValidations { let asset = AVAsset(url: fullURL) - guard - let firstTrack = mixComposition.addMutableTrack( - withMediaType: .video, - preferredTrackID: Int32(kCMPersistentTrackID_Invalid)) - else { return } - - // 3 - do { - try firstTrack.insertTimeRange( - CMTimeRangeMake(start: .zero, duration: asset.duration), - of: asset.tracks(withMediaType: .video)[0], - at: duration) - } catch { - print("Failed to load first track") - return + guard let track = mixComposition.addMutableTrack(withMediaType: .video, preferredTrackID: Int32(kCMPersistentTrackID_Invalid)) else { + return } + + try? 
track.insertTimeRange(CMTimeRangeMake(start: .zero, duration: asset.duration), + of: asset.tracks(withMediaType: .video)[0], + at: duration) duration = CMTimeAdd(duration, asset.duration) - let firstInstruction = AVMutableVideoCompositionLayerInstruction( - assetTrack: firstTrack) - firstInstruction.setOpacity(0.0, at: duration + asset.duration) + let opacityInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: track) + opacityInstruction.setOpacity(0.0, at: duration + asset.duration) - instructions.append(firstInstruction) + instructions.append(opacityInstruction) } } let mainInstruction = AVMutableVideoCompositionInstruction() - mainInstruction.timeRange = CMTimeRangeMake( - start: .zero, - duration: duration) + mainInstruction.timeRange = CMTimeRangeMake(start: .zero, duration: duration) mainInstruction.layerInstructions = instructions let mainComposition = AVMutableVideoComposition() mainComposition.instructions = [mainInstruction] mainComposition.frameDuration = CMTimeMake(value: 1, timescale: 30) - mainComposition.renderSize = CGSize( - width: document!.info.videoFrameWidth, - height: document!.info.videoFrameHeight) + mainComposition.renderSize = CGSize(width: document!.info.videoFrame.0, height: document!.info.videoFrame.1) - self.exporter = AVAssetExportSession( - asset: mixComposition, - presetName: AVAssetExportPresetHighestQuality) - + self.exporter = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality) self.exporter?.outputURL = savePanel.url! self.exporter?.outputFileType = AVFileType.mp4 - self.exporter?.shouldOptimizeForNetworkUse = true self.exporter?.videoComposition = mainComposition - self.exporter?.exportAsynchronously { - - dump(self.exporter?.error?.localizedDescription); + let alert = NSAlert() + alert.messageText = "Exporting timelapse..." 
+ alert.addButton(withTitle: "Cancel") + alert.beginSheetModal(for: originalWindow) { (response) in + self.exporter?.cancelExport() + alert.window.close() + } + + self.exporter?.exportAsynchronously { + if self.exporter?.status != .cancelled { + DispatchQueue.main.sync { + alert.window.close() + } + } } - } } } diff --git a/SilicaViewer/Document.swift b/SilicaViewer/Document.swift index a2d2c04..6bb4778 100644 --- a/SilicaViewer/Document.swift +++ b/SilicaViewer/Document.swift @@ -42,8 +42,7 @@ struct SilicaDocument { var layers: [SilicaLayer] = [] - var videoFrameWidth: Int = 0 - var videoFrameHeight: Int = 0 + var videoFrame: (Int, Int) = (0, 0) lazy var nsSize = { return NSSize(width: width, height: height) @@ -375,19 +374,8 @@ class Document: NSDocument { let frameSizeClassKey = videoResolution["frameSize"] let frameSizeClassID = getClassID(id: frameSizeClassKey) let frameSize = objectsArray[frameSizeClassID] as! String - - // frameSize - //SilicaDocumentVideoSegmentInfoKey - // videoQualityKey - - dump(frameSize, indent: 5) - - guard let (frameWidth, frameHeight) = parsePairString(frameSize) else { - return - } - - info.videoFrameWidth = frameWidth - info.videoFrameHeight = frameHeight + + info.videoFrame = parsePairString(frameSize)! let colorProfileClassKey = dict["colorProfile"] let colorProfileClassID = getClassID(id: colorProfileClassKey) @@ -437,12 +425,9 @@ class Document: NSDocument { let sizeClassID = getClassID(id: sizeClassKey) let sizeString = objectsArray[sizeClassID] as! String - let sizeComponents = sizeString.replacingOccurrences(of: "{", with: "").replacingOccurrences(of: "}", with: "").components(separatedBy: ", ") - let width = Int(sizeComponents[0]) - let height = Int(sizeComponents[1]) - - info.width = width! - info.height = height! + let (width, height) = parsePairString(sizeString)! 
+ info.width = width + info.height = height columns = Int(ceil(Float(info.width) / Float(info.tileSize))) rows = Int(ceil(Float(info.height) / Float(info.tileSize))) + 1 // TODO: lol why