How to zoom a .DAE 3D model using ARKit on iOS - arkit

I have a 3D model with a .scn extension. How can I zoom it with a pinch gesture without using the VirtualObject file from the iOS "Placing Objects" sample application?
The pinch gesture works fine with .scn files converted from .obj, but it does not work with a .dae model.
func addPinchGestureToSceneView() {
pinchGesture = UIPinchGestureRecognizer(target: self, action: #selector(scale))
pinchGesture.scale = 1.0;
pinchGesture.delegate = self
self.sceneView.addGestureRecognizer(pinchGesture)
}
private func node(at position: CGPoint) -> SCNNode? {
var hitTestOptions = [SCNHitTestOption: Any]()
hitTestOptions[SCNHitTestOption.boundingBoxOnly] = true
return sceneView.hitTest(position, options: hitTestOptions)
.first(where: { self.getOnlyModelName(name: $0.node.name ?? "") == self.currentmodel.modelname})?
.node
}
@objc func scale(_ gesture: UIPinchGestureRecognizer) {
if self.currentmodel.isZoomEnabled == false{
return
}
let location = gesture.location(in: sceneView)
guard let node = node(at: location) else { return }
switch gesture.state {
case .began:
originalScale = node.scale
gesture.scale = CGFloat(node.scale.x)
print("Begin:: \(originalScale)")
case .changed:
guard var originalScale = originalScale else { return }
if gesture.scale > 2.0{
return
}
originalScale.x = Float(gesture.scale)
originalScale.y = Float(gesture.scale)
originalScale.z = Float(gesture.scale)
node.scale = originalScale
case .ended:
guard var originalScale = originalScale else { return }
if gesture.scale > 2.0{
return
}
originalScale.x = Float(gesture.scale)
originalScale.y = Float(gesture.scale)
originalScale.z = Float(gesture.scale)
node.scale = originalScale
gesture.scale = CGFloat(node.scale.x)
default:
gesture.scale = 1.0
originalScale = nil
}
}

When it's a .dae, you might need to grab the parent of the node returned by the hit test. I had a similar issue with a .dae model that was solved by getting the parent of the hit node, or sometimes even the grandparent.
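As a concrete illustration of that, one option is to walk up the node hierarchy from the hit-test result until you reach the top-level model node (the direct child of the scene's root) and scale that instead. A minimal sketch, assuming the imported .dae wraps its geometry in one or more container nodes and was added directly to the root node; topLevelNode(for:) is a hypothetical helper:
// Climbs from the node returned by the hit test to the top-level model node.
private func topLevelNode(for hitNode: SCNNode) -> SCNNode {
    var node = hitNode
    while let parent = node.parent, parent != sceneView.scene.rootNode {
        node = parent
    }
    return node
}
You could then run the node returned by node(at:) through topLevelNode(for:) before reading and writing its scale in scale(_:).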

Related

Remove complete .scn instead of individual nodes Swift ARKit

I am attempting to delete the .scn objects I placed down. However, with my current code, it only deletes individual nodes. Here is how I handle the tap-to-delete:
@objc func Erase(sender: UITapGestureRecognizer) {
print("rendering")
//sharedVM.count = sharedVM.count + 1
guard let pointOfView = sceneView.pointOfView else {return}
guard let cameraPosition = getCameraPosition(in: sceneView) else {
return
}
let location = sender.location(in: view)
let currentPositionOfCamera = cameraPosition + getRay(for: location, in: sceneView)
DispatchQueue.main.async{
//guard let location = touches.first?.location(in: sceneView) else { return }
let results = self.sceneView.hitTest(location, options: [SCNHitTestOption.searchMode : 1])
for result in results { /// See if the beam hit the cube
let Node = result.node
Node.enumerateChildNodes { (node, stop) in
node.removeFromParentNode() }
Node.removeFromParentNode()
}
}
}
Here is how I place the object:
var objecttest = VirtualObject(url: referenceURL)!
//var objecttest = VirtualObject(url: URL(string: "Models.scnassets/cup/cup.scn")!)
objecttest.load()
self.sceneView.scene.rootNode.addChildNode(objecttest)
class VirtualObject: SCNReferenceNode {
...
}
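Since VirtualObject is an SCNReferenceNode subclass added directly to the root node, one approach (a sketch under that assumption, with a hypothetical eraseObject handler) is to climb from the hit-test node up to the enclosing VirtualObject and remove that whole subtree at once instead of enumerating child nodes:
@objc func eraseObject(sender: UITapGestureRecognizer) {
    let location = sender.location(in: sceneView)
    let results = sceneView.hitTest(location, options: [SCNHitTestOption.searchMode: SCNHitTestSearchMode.all.rawValue])
    for result in results {
        // Climb the hierarchy until we reach the VirtualObject container (or run out of parents).
        var candidate: SCNNode? = result.node
        while let node = candidate, !(node is VirtualObject) {
            candidate = node.parent
        }
        if let virtualObject = candidate as? VirtualObject {
            virtualObject.removeFromParentNode()
            break
        }
    }
}
Removing the VirtualObject node takes its entire loaded subtree with it, so there is no need to enumerate and remove the children individually.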

Data not received with GameKit in Swift

I'm currently making a multiplayer game with GameKit. I want to show a waiting view controller while each player receives the array of players and what they selected for their character.
Here is my extension
extension LoadingViewController: GKMatchDelegate {
func sendData() {
guard let match = match else { return }
do {
guard let data = gameModel.encode() else { return }
try match.sendData(toAllPlayers: data, with: .reliable)
} catch {
print("Send data failed")
}
}
func match(_ match: GKMatch, didReceive data: Data, fromRemotePlayer player: GKPlayer) {
guard let model = GameModel.decode(data: data) else { return }
gameModel = model
}
}
My override, which waits until 2 players have filled the gameModel:
override func viewDidLoad() {
super.viewDidLoad()
Timer.scheduledTimer(withTimeInterval: 2, repeats: true) { timer in
self.setupPlayers()
if self.gameModel.players.count == 2 {
if let view = self.view as! SKView? {
// Load the SKScene from 'GameScene.sks'
if let scene = SKScene(fileNamed: "GameScene") as? GameScene {
scene.match = self.match
scene.gameModel = self.gameModel
scene.localPlayer = self.localPlayer
scene.size = view.bounds.size
scene.scaleMode = .resizeFill
// Present the scene
view.presentScene(scene)
timer.invalidate()
view.ignoresSiblingOrder = true
view.showsFPS = true
view.showsNodeCount = true
}
}
}
}
}
And setupPlayers, which is called each time to try to add a player and set their preferences:
private func setupPlayers() {
guard let player2Name = match?.players.first?.displayName else { return }
let player1 = Player(displayName: GKLocalPlayer.local.displayName)
let player2 = Player(displayName: player2Name)
var players = [player1,player2]
players.sort { (player1, player2) -> Bool in
player1.displayName < player2.displayName
}
if players.first?.displayName == GKLocalPlayer.local.displayName {
if gameModel.players.count == 0 {
players[0].index = .one
players[0].race = .orc
gameModel.players.append(players[0])
localPlayer = players[0]
sendData()
}
} else {
if gameModel.players.count == 1 {
players[1].index = .two
players[1].race = .human
gameModel.players.append(players[1])
localPlayer = players[1]
sendData()
}
}
}
However, the scene does not appear when I run simulations. I tried to track down the bug: when the first player goes through setupPlayers it works and gameModel.players.count becomes 1, but the second player never receives the data and their own gameModel stays at one player.
Does anyone know why?
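One thing worth checking, since it isn't shown in the posted code, is whether the GKMatch delegate is actually assigned on both devices: match(_:didReceive:fromRemotePlayer:) is only delivered to the match's delegate, so sendData can succeed on one side while the other side never gets the callback. A minimal sketch, assuming match is the GKMatch stored after matchmaking:
override func viewDidLoad() {
    super.viewDidLoad()
    // Without this assignment, the didReceive callback in the GKMatchDelegate
    // extension never fires on this device.
    match?.delegate = self
    // ... existing Timer-based polling from the question ...
}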

How to make an annotation move along a polyline

I have 3 annotations, and I draw a polyline between the first and second annotation. I need the third one to move along that polyline, but it always moves in a straight line to the destination.
My code:
func moveDelivery(_ destinationCoordinate: CLLocationCoordinate2D) {
self.deliveryAnnotation.coordinate = CLLocationCoordinate2DMake(29.959640, 31.270421)
let sourcePlaceMark = MKPlacemark(coordinate: self.userAnnotation.coordinate)
//sourcePlaceMark.title
let destPlaceMkark = MKPlacemark(coordinate: self.deliveryAnnotation.coordinate)
let sourceItem = MKMapItem(placemark: sourcePlaceMark)
let destItem = MKMapItem(placemark: destPlaceMkark)
let directionRequest = MKDirections.Request()
directionRequest.source = sourceItem
directionRequest.destination = destItem
directionRequest.transportType = .any
let direction = MKDirections(request: directionRequest)
direction.calculate(completionHandler: {
response, error in
guard let response = response else {
if let error = error {
print(error.localizedDescription)
} else {
self.deliveryAnnotation.courseDegrees = self.getHeadingForDirectionFromCoordinate(self.kitchenAnnotation.coordinate, toLoc: self.userAnnotation.coordinate)
self.view.transform = CGAffineTransform(rotationAngle:CGFloat(self.deliveryAnnotation.courseDegrees))
}
return
}
guard let primaryRoute = response.routes.first else { return }
let route = response.routes[0]
self.mapView.addOverlay(route.polyline, level: .aboveRoads)
let rekt = route.polyline.boundingMapRect
self.mapView.setRegion(MKCoordinateRegion(rekt), animated: true)
})
//
UIView.animate(withDuration: Double(60), animations: {
self.deliveryAnnotation.coordinate = destinationCoordinate
}, completion: { success in
if success {
}
})
}
Your third annotation isn't following the route because you're animating it in a straight line between the start and destination coordinates. Try getting the coordinates from the MKRoute's polyline and animating between each one. (According to Apple's docs, MKRoutes are made up of coordinates, but you might be able to use points as well.)
If you'd like it to animate over the span of 60 seconds:
func moveDelivery(_ destinationCoordinate: CLLocationCoordinate2D) {
// I don't know why you have the delivery annotation start here, is this for testing?
deliveryAnnotation.coordinate = CLLocationCoordinate2DMake(29.959640, 31.270421)
let sourcePlaceMark = MKPlacemark(coordinate: destinationCoordinate)
let destPlaceMkark = MKPlacemark(coordinate: userAnnotation.coordinate)
let directionRequest = MKDirections.Request()
directionRequest.source = MKMapItem(placemark: sourcePlaceMark)
directionRequest.destination = MKMapItem(placemark: destPlaceMkark)
directionRequest.transportType = .any
let direction = MKDirections(request: directionRequest)
direction.calculate(completionHandler: {
response, error in
guard let response = response else {
print("MKRequest gave no response")
if let error = error {
print(error.localizedDescription)
} else {
self.deliveryAnnotation.courseDegrees = self.getHeadingForDirectionFromCoordinate(self.kitchenAnnotation.coordinate, toLoc: self.userAnnotation.coordinate)
self.view.transform = CGAffineTransform(rotationAngle:CGFloat(self.deliveryAnnotation.courseDegrees))
}
return
}
guard let primaryRoute = response.routes.first else {
print("response has no routes")
return
}
self.mapView.addOverlay(primaryRoute.polyline, level: .aboveRoads)
let rekt = primaryRoute.polyline.boundingMapRect
self.mapView.setRegion(MKCoordinateRegion(rekt), animated: true)
let coordinateArray = primaryRoute.polyline.coordinates
assert(coordinateArray.count > 0, "coordinate array is empty")
self.routeCoordinates = coordinateArray
// initiate recursive animations
self.coordinateIndex = 0
})
}
var routeCoordinates = [CLLocationCoordinate2D]()
var avgAnimationTime: Double {
return 60 / Double(routeCoordinates.count)
}
var coordinateIndex: Int! {
didSet {
guard coordinateIndex != routeCoordinates.count else {
print("animated through all coordinates, stopping function")
return
}
animateToNextCoordinate()
}
}
func animateToNextCoordinate() {
let coordinate = routeCoordinates[coordinateIndex]
UIView.animate(withDuration: avgAnimationTime, animations: {
self.deliveryAnnotation.coordinate = coordinate
}, completion: { _ in
self.coordinateIndex += 1
print("moved between coordinates")
})
}
EDIT
Make sure to include this extension, otherwise you won't be able to get the coordinates of the MKRoute (source: https://gist.github.com/freak4pc/98c813d8adb8feb8aee3a11d2da1373f):
public extension MKMultiPoint {
var coordinates: [CLLocationCoordinate2D] {
var coords = [CLLocationCoordinate2D](repeating: kCLLocationCoordinate2DInvalid,
count: pointCount)
getCoordinates(&coords, range: NSRange(location: 0, length: pointCount))
return coords
}
}
EDIT #2
See above; I edited the original answer to animate through each coordinate after the previous one finishes animating. It's rough, but it should work.
EDIT #3
Added your code to get the destination variable as well as some assert and debug printing calls. If things aren't working this time, please tell me which debug messages you get.
EDIT #4
I just demo'd my code and it works. Here is the MapViewController class I used along with necessary extensions:
private let reuseId = "deliveryReuseId"
private let userTitle = "user"
private let startingPointTitle = "store"
private let deliveryTitle = "delivery truck"
class MapViewController: UIViewController {
var mapView: MKMapView!
// annotations for this demo, replace with your own annotations
var deliveryAnnotation: MKPointAnnotation = {
let annotation = MKPointAnnotation()
annotation.title = deliveryTitle
return annotation
}()
let userAnnotation: MKPointAnnotation = {
let annotation = MKPointAnnotation()
annotation.title = userTitle
annotation.coordinate = CLLocationCoordinate2DMake(29.956694, 31.276854)
return annotation
}()
let startingPointAnnotation: MKPointAnnotation = {
let annotation = MKPointAnnotation()
annotation.title = startingPointTitle
annotation.coordinate = CLLocationCoordinate2DMake(29.959622, 31.270363)
return annotation
}()
override func viewDidLoad() {
super.viewDidLoad()
loadMapView()
navigate()
}
func loadMapView() {
// set map
mapView = MKMapView()
view = mapView
mapView.delegate = self
mapView.register(MKAnnotationView.self, forAnnotationViewWithReuseIdentifier: reuseId)
// add annotations
mapView.addAnnotation(userAnnotation)
mapView.addAnnotation(startingPointAnnotation)
mapView.addAnnotation(deliveryAnnotation)
}
func navigate() {
let sourcePlaceMark = MKPlacemark(coordinate: startingPointAnnotation.coordinate)
let destPlaceMkark = MKPlacemark(coordinate: userAnnotation.coordinate)
let directionRequest = MKDirections.Request()
directionRequest.source = MKMapItem(placemark: sourcePlaceMark)
directionRequest.destination = MKMapItem(placemark: destPlaceMkark)
directionRequest.transportType = .any
let direction = MKDirections(request: directionRequest)
direction.calculate(completionHandler: { response, error in
if let error = error {
print(error.localizedDescription)
return
}
guard let primaryRoute = response!.routes.first else {
print("response has no routes")
return
}
self.mapView.addOverlay(primaryRoute.polyline, level: .aboveRoads)
self.mapView.setRegion(MKCoordinateRegion(primaryRoute.polyline.boundingMapRect), animated: true)
// initiate recursive animation
self.routeCoordinates = primaryRoute.polyline.coordinates
self.coordinateIndex = 0
})
}
var routeCoordinates = [CLLocationCoordinate2D]()
var avgAnimationTime: Double {
// to show delivery in 60 second, replace 60 with amount of seconds you'd like to show
return 60 / Double(routeCoordinates.count)
}
var coordinateIndex: Int! {
didSet {
guard coordinateIndex != routeCoordinates.count else {
print("animated through all coordinates, stopping function")
return
}
animateToNextCoordinate()
}
}
func animateToNextCoordinate() {
let coordinate = routeCoordinates[coordinateIndex]
UIView.animate(withDuration: avgAnimationTime, animations: {
self.deliveryAnnotation.coordinate = coordinate
}, completion: { _ in
self.coordinateIndex += 1
})
}
}
extension MapViewController: MKMapViewDelegate {
func mapView(_ mapView: MKMapView, viewFor annotation: MKAnnotation) -> MKAnnotationView? {
let annotationView = MKAnnotationView(annotation: annotation, reuseIdentifier: reuseId)
// replace these images with your own
switch annotation.title {
case userTitle:
annotationView.image = UIImage(named: "user")
case startingPointTitle:
annotationView.image = UIImage(named: "store")
case deliveryTitle:
annotationView.image = UIImage(named: "deliveryTruck")
default: break
}
return annotationView
}
func mapView(_ mapView: MKMapView, rendererFor overlay: MKOverlay) -> MKOverlayRenderer {
guard overlay is MKPolyline else {
return MKOverlayRenderer()
}
let renderer = MKPolylineRenderer(overlay: overlay)
renderer.strokeColor = .black
renderer.lineWidth = 5
renderer.lineJoin = .round
return renderer
}
}
public extension MKMultiPoint {
var coordinates: [CLLocationCoordinate2D] {
var coords = [CLLocationCoordinate2D](repeating: kCLLocationCoordinate2DInvalid,
count: pointCount)
getCoordinates(&coords, range: NSRange(location: 0, length: pointCount))
return coords
}
}

Interactive view controller transition using a pan gesture

I've been tearing my hair out trying to find a solution for an interactive view controller transition where you use a downward pan gesture to bring a full-screen view controller in from the top. Has anyone run across or created any code like this? Below is my code. I already have the dismiss gesture working, but I can't figure out how to present the view controller by swiping down on the screen.
import UIKit
class ViewController: UIViewController {
let interactor = Interactor()
var interactors:Interactor? = nil
let Mview = ModalViewController()
let mViewT: ModalViewController? = nil
var presentedViewControllers: UIViewController?
override func viewDidLoad() {
Mview.transitioningDelegate = self
Mview.modalPresentationStyle = .FullScreen
}
@IBAction func cameraSlide(sender: UIPanGestureRecognizer) {
let percentThreshold:CGFloat = 0.3
// convert y-position to downward pull progress (percentage)
let translation = sender.translationInView(Mview.view)
let verticalMovement = translation.y / UIScreen.mainScreen().bounds.height
let downwardMovement = fmaxf(Float(verticalMovement), 0.0)
let downwardMovementPercent = fminf(downwardMovement, 1.0)
let progress = CGFloat(downwardMovementPercent)
guard let interactor = interactors else { return }
switch sender.state {
case .Began:
interactor.hasStarted = true
self.presentViewController(Mview, animated: true, completion: nil)
case .Changed:
interactor.shouldFinish = progress > percentThreshold
interactor.updateInteractiveTransition(progress)
case .Cancelled:
interactor.hasStarted = false
interactor.cancelInteractiveTransition()
case .Ended:
interactor.hasStarted = false
if !interactor.shouldFinish {
interactor.cancelInteractiveTransition()
} else {
interactor.finishInteractiveTransition()
}
default:
break
}
}
}
extension ViewController: UIViewControllerTransitioningDelegate {
func animationControllerForDismissedController(dismissed: UIViewController) -> UIViewControllerAnimatedTransitioning? {
return DismissAnimator()
}
func interactionControllerForDismissal(animator: UIViewControllerAnimatedTransitioning) -> UIViewControllerInteractiveTransitioning? {
return interactor.hasStarted ? interactor : nil
}
func animationControllerForPresentedController(presented: UIViewController, presentingController presenting: UIViewController, sourceController source: UIViewController) -> UIViewControllerAnimatedTransitioning? {
return PresentAnimator()
}
func interactionControllerForPresentation(animator: UIViewControllerAnimatedTransitioning) -> UIViewControllerInteractiveTransitioning? {
return interactor.hasStarted ? interactor : nil
}
}
class PresentAnimator: NSObject {
}
extension PresentAnimator: UIViewControllerAnimatedTransitioning
{
func transitionDuration(transitionContext: UIViewControllerContextTransitioning?) -> NSTimeInterval {
return 1.0
}
func animateTransition(transitionContext: UIViewControllerContextTransitioning) {
guard
let fromVC2 = transitionContext.viewControllerForKey(UITransitionContextFromViewControllerKey),
let toVC2 = transitionContext.viewControllerForKey(UITransitionContextToViewControllerKey),
let containerView2 = transitionContext.containerView() else {return}
let initialFrame = transitionContext.initialFrameForViewController(fromVC2)
toVC2.view.frame = initialFrame
toVC2.view.frame.origin.y = -initialFrame.height * 2
containerView2.addSubview(fromVC2.view)
containerView2.addSubview(toVC2.view)
let screenbounds = UIScreen.mainScreen().bounds
let Stage = CGPoint(x: 0, y: 0)
let finalFrame = CGRect(origin: Stage, size: screenbounds.size)
UIView.animateWithDuration(transitionDuration(transitionContext), animations: {
toVC2.view.frame = finalFrame
}, completion: { _ in transitionContext.completeTransition(!transitionContext.transitionWasCancelled())
}
)
}
}
class ModalViewController: UIViewController {
let interactors = Interactor()
var interactor:Interactor? = nil
@IBAction func close(sender: UIButton) {
dismissViewControllerAnimated(true, completion: nil)
}
@IBAction func handleGesture(sender: UIPanGestureRecognizer) {
let percentThreshold:CGFloat = 0.3
// convert y-position to downward pull progress (percentage)
let translation = sender.translationInView(self.view)
let verticalMovement = translation.y / -view.bounds.height * 2
let downwardMovement = fmaxf(Float(verticalMovement), 0.0)
let downwardMovementPercent = fminf(downwardMovement, 1.0)
let progress = CGFloat(downwardMovementPercent)
guard let interactor = interactor else { return }
switch sender.state {
case .Began:
interactor.hasStarted = true
dismissViewControllerAnimated(true, completion: nil)
case .Changed:
interactor.shouldFinish = progress > percentThreshold
interactor.updateInteractiveTransition(progress)
case .Cancelled:
interactor.hasStarted = false
interactor.cancelInteractiveTransition()
case .Ended:
interactor.hasStarted = false
if !interactor.shouldFinish {
interactor.cancelInteractiveTransition()
} else {
interactor.finishInteractiveTransition()
}
default:
break
}
}
}
import UIKit
class DismissAnimator: NSObject {
}
extension DismissAnimator : UIViewControllerAnimatedTransitioning {
func transitionDuration(transitionContext: UIViewControllerContextTransitioning?) -> NSTimeInterval {
return 1.0
}
func animateTransition(transitionContext: UIViewControllerContextTransitioning) {
guard
let fromVC = transitionContext.viewControllerForKey(UITransitionContextFromViewControllerKey),
let toVC = transitionContext.viewControllerForKey(UITransitionContextToViewControllerKey),
let containerView = transitionContext.containerView()
else {
return
}
containerView.insertSubview(toVC.view, belowSubview: fromVC.view)
let screenBounds = UIScreen.mainScreen().bounds
let topLeftCorner = CGPoint(x: 0, y: -screenBounds.height * 2)
let finalFrame = CGRect(origin: topLeftCorner, size: screenBounds.size)
UIView.animateWithDuration(
transitionDuration(transitionContext),animations: {fromVC.view.frame = finalFrame},
completion: { _ in transitionContext.completeTransition(!transitionContext.transitionWasCancelled())
}
)
}
}
If you want a simple Pan Gesture to switch between UIViewControllers, you can check out this:
http://www.appcoda.com/custom-segue-animations/
If you want it to be interactive, as in you can go back and forth between VCs without having to complete the whole transition, I suggest you check out this:
https://www.youtube.com/watch?v=3jAlg5BnYUU
If you want to go even further and have a custom dismissing animation, then look no further than this:
https://www.raywenderlich.com/110536/custom-uiviewcontroller-transitions
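For the presenting side specifically, the usual pattern (the one the Ray Wenderlich tutorial above builds on) is to drive a UIPercentDrivenInteractiveTransition subclass from the pan gesture on the presenting view controller: trigger the presentation once in .began, then feed progress updates in .changed. A minimal sketch in current Swift syntax, assuming interactor and modalViewController are stored properties of the presenting controller:
class Interactor: UIPercentDrivenInteractiveTransition {
    var hasStarted = false
    var shouldFinish = false
}

// In the presenting view controller:
@objc func handlePresentPan(_ sender: UIPanGestureRecognizer) {
    let threshold: CGFloat = 0.3
    let translation = sender.translation(in: view)
    let progress = min(max(translation.y / view.bounds.height, 0), 1)
    switch sender.state {
    case .began:
        interactor.hasStarted = true
        // The transitioning delegate returns `interactor` as the interaction
        // controller, so the presentation waits for update()/finish()/cancel().
        present(modalViewController, animated: true)
    case .changed:
        interactor.shouldFinish = progress > threshold
        interactor.update(progress)
    case .cancelled:
        interactor.hasStarted = false
        interactor.cancel()
    case .ended:
        interactor.hasStarted = false
        if interactor.shouldFinish {
            interactor.finish()
        } else {
            interactor.cancel()
        }
    default:
        break
    }
}
Note also that in the posted code the guard is on the optional interactors property, which is never assigned, so cameraSlide(sender:) returns before it ever reaches the .Began branch; guarding on the non-optional interactor (or assigning interactors) is needed for the presentation to start.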

How to change the orientation for AVCaptureMovieFileOutput in Swift

I tried several different methods, but they didn't help. I want to change the video orientation in AVFoundation. How can I do that?
override func viewDidLoad() {
super.viewDidLoad()
self.definesPresentationContext = true
// device capture for audio and video
let captureVideo = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeVideo)
let captureAudio = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeAudio)
// input
let audioInput = try! AVCaptureDeviceInput(device: captureAudio)
let videoInput = try! AVCaptureDeviceInput(device: captureVideo)
let capturePreview = AVCaptureVideoPreviewLayer(session: captureSession)
capturePreview.frame = self.view.frame
capturePreview.videoGravity = AVLayerVideoGravityResizeAspect
self.view.layer.addSublayer(capturePreview)
// setting of session
captureSession.beginConfiguration()
if captureSession.canAddInput(audioInput) {
captureSession.addInput(audioInput)
}
if captureSession.canAddInput(videoInput) {
captureSession.addInput(videoInput)
}
// output
movieOutput.movieFragmentInterval = kCMTimeInvalid
if captureSession.canAddOutput(movieOutput) {
captureSession.addOutput(movieOutput)
print("added moview")
}
captureSession.sessionPreset = AVCaptureSessionPresetHigh
captureSession.commitConfiguration()
captureSession.startRunning()
}
@IBAction func startStopSession(sender: UIBarButtonItem) {
if movieOutput.recording {
movieOutput.stopRecording()
} else {
print("start recording")
captureSession.beginConfiguration()
for connection in movieOutput.connections as! [AVCaptureConnection] {
for port in connection.inputPorts as! [AVCaptureInputPort] {
print(port)
if port.mediaType == AVMediaTypeVideo {
print(port)
self.captureConnection = AVCaptureConnection(inputPorts: [port], output: movieOutput)
if self.captureConnection.supportsVideoOrientation {
self.captureConnection.videoOrientation = AVCaptureVideoOrientation.LandscapeRight
print("video orientation right")
}
}
}
}
if self.captureConnection.supportsVideoStabilization {
captureConnection.preferredVideoStabilizationMode = .Cinematic
print("true video stabilization")
}
let digit = returnFileDigit()
let path = fileManager.URLsForDirectory(.DocumentDirectory, inDomains: .UserDomainMask).last!.path!.stringByAppendingString("/movie-\(digit).mp4")
captureSession.commitConfiguration()
let url = NSURL(fileURLWithPath: path)
print(movieOutput.connections)
movieOutput.startRecordingToOutputFileURL(url, recordingDelegate: self)
}
}
I tried this to find the outputs, but I don't know how to change the orientation:
for output in captureSession.outputs as! [AVCaptureOutput] {
output.connections.first?.mediaType
for var connection in output.connections {
if connection.mediaType == AVMediaTypeVideo {
print(connection.mediaType)
connection.videoOrientation = .LandscapeRight
}
}
}
I changed my code, and now it works for me:
@IBAction func startStopSession(sender: UIBarButtonItem) {
if movieOutput.recording {
movieOutput.stopRecording()
} else {
print("start recording")
movieOutput.connectionWithMediaType(AVMediaTypeVideo).videoOrientation = returnedOrientation()
if movieOutput.connectionWithMediaType(AVMediaTypeVideo).supportsVideoStabilization {
movieOutput.connectionWithMediaType(AVMediaTypeVideo).preferredVideoStabilizationMode = .Cinematic
}
let digit = returnFileDigit()
let path = fileManager.URLsForDirectory(.DocumentDirectory, inDomains: .UserDomainMask).last!.path!.stringByAppendingString("/movie-\(digit).mp4")
let url = NSURL(fileURLWithPath: path)
movieOutput.startRecordingToOutputFileURL(url, recordingDelegate: self)
}
}
func returnedOrientation() -> AVCaptureVideoOrientation {
var videoOrientation: AVCaptureVideoOrientation!
let orientation = UIDevice.currentDevice().orientation
switch orientation {
case .Portrait:
videoOrientation = .Portrait
userDefault.setInteger(0, forKey: "CaptureVideoOrientation")
case .PortraitUpsideDown:
videoOrientation = .PortraitUpsideDown
userDefault.setInteger(1, forKey: "CaptureVideoOrientation")
case .LandscapeLeft:
videoOrientation = .LandscapeRight
userDefault.setInteger(2, forKey: "CaptureVideoOrientation")
case .LandscapeRight:
videoOrientation = .LandscapeLeft
userDefault.setInteger(3, forKey: "CaptureVideoOrientation")
case .FaceDown, .FaceUp, .Unknown:
let digit = userDefault.integerForKey("CaptureVideoOrientation")
videoOrientation = AVCaptureVideoOrientation.init(rawValue: digit)
}
return videoOrientation
}