Issue with adding a node to the scene view - Swift

I am having an issue with the way the model is placed in the scene view.
The model downloads successfully, but for some reason it appears on top of the scene view instead of sitting on the detected plane.
Here is a video of the problem:
https://www.youtube.com/watch?v=-pTPEZTF9zo
viewDidLoad:
override func viewDidLoad() {
    super.viewDidLoad()
    // Kick off the model download as soon as the view loads.
    self.downloadModel()
    // Debug visuals: world origin axes and detected feature points.
    self.sceneView.debugOptions = [ARSCNDebugOptions.showWorldOrigin, ARSCNDebugOptions.showFeaturePoints]
    // Detect horizontal planes (floors, table tops).
    self.configuration.planeDetection = .horizontal
    self.sceneView.session.run(configuration)
    self.registerGestureRecogniser()
}
Gesture recogniser:
func registerGestureRecogniser() {
    // Place the model when the user taps the scene view.
    let tapGestureRecogniser = UITapGestureRecognizer(target: self, action: #selector(tapped))
    self.sceneView.addGestureRecognizer(tapGestureRecogniser)
}
Tapped function:
@objc func tapped(sender: UITapGestureRecognizer) {
    let sceneView = sender.view as! ARSCNView
    let tapLocation = sender.location(in: sceneView)
    // Hit-test against planes that have already been detected.
    let hitTest = sceneView.hitTest(tapLocation, types: .existingPlaneUsingExtent)
    if !hitTest.isEmpty {
        print("touched a horizontal surface")
        self.addItem2(hitTestResult: hitTest.first!)
    } else {
        print("no match")
    }
}
Download model from AWS:
private func downloadModel() {
    let url = URL(string: "https://ENTER URL HERE")!
    URLSession.shared.dataTask(with: url) { data, response, error in
        if let error = error {
            print(error.localizedDescription)
            return
        }
        if let data = data {
            print(data)
            // Save the downloaded .scn file into the app's Documents directory.
            let documentDirectories = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)
            if let documentDirectory = documentDirectories.first {
                let fileURL = documentDirectory.appendingPathComponent("Food.scn")
                do {
                    try data.write(to: fileURL, options: .atomic)
                    print("Saved!")
                } catch {
                    print(error)
                }
            }
        }
    }.resume()
}
Add Item 2 function:
func addItem2(hitTestResult: ARHitTestResult) {
    let documentDirectories = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)
    if let documentDirectory = documentDirectories.first {
        let fileURL = documentDirectory.appendingPathComponent("Food.scn")
        do {
            // Load the scene that was downloaded and saved earlier.
            let scene = try SCNScene(url: fileURL, options: nil)
            let node = scene.rootNode.childNode(withName: "Burger", recursively: true)!
            // Note: this material is created but never applied to the node.
            let material = SCNMaterial()
            material.diffuse.contents = UIImage(named: "Hamburger_BaseColor")
            material.diffuse.wrapT = SCNWrapMode.repeat
            material.diffuse.wrapS = SCNWrapMode.repeat
            material.isDoubleSided = true
            // Position the node at the hit-test location on the detected plane.
            let transform = hitTestResult.worldTransform
            let thirdColumn = transform.columns.3
            node.position = SCNVector3(thirdColumn.x, thirdColumn.y, thirdColumn.z)
            self.sceneView.scene.rootNode.addChildNode(node)
        } catch {
            print(error)
        }
    }
}

This issue depends entirely on the position of your model's pivot point and on the model's scale.
The pivot point works together with the ARAnchor and lets you control the model's offset, orientation and scale on a floor, a table top, or a wall.
node.scale = SCNVector3(x: 0.2, y: 0.2, z: 0.2)
node.pivot = SCNMatrix4MakeTranslation(0, -0.5, 0)
...or, to position the pivot, use this approach:
node.simdPivot.columns.3.y = -0.5
So if you want your model to stand precisely on an invisible detected plane, either move the pivot point in SceneKit to the desired place or set its position in a 3D authoring tool (such as 3ds Max or Blender).
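For example, here is a minimal sketch of my own (using the node and thirdColumn from addItem2 above) that scales the node and derives the pivot from the node's bounding box so the model's base rests on the detected plane; the 0.2 scale is just a placeholder you would tune for your model:
// Sketch only: assumes `node` and `thirdColumn` from addItem2 above.
let (minBound, _) = node.boundingBox
node.scale = SCNVector3(0.2, 0.2, 0.2)
// Shift the pivot to the bottom of the bounding box so the base of the
// model sits exactly on the plane at the hit-test position.
node.pivot = SCNMatrix4MakeTranslation(0, minBound.y, 0)
node.position = SCNVector3(thirdColumn.x, thirdColumn.y, thirdColumn.z)
self.sceneView.scene.rootNode.addChildNode(node)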

Related

How to create ARQuickLookPreviewItem with a ModelEntity with RealityKit?

I'm trying to load a model from a usdz file, change its material colour, and then display the updated model with AR QuickLook. I have got my code to the point where it loads the original model in the QuickLook preview. How do I use the updated ModelEntity?
guard let path = Bundle.main.path(forResource: "atm", ofType: "usdz") else {
    fatalError("Couldn't find the supported asset file.")
}
let url = URL(fileURLWithPath: path)
guard let modelEntity = try? Entity.loadModel(contentsOf: url) else {
    fatalError("Entity not found")
}
// Replace every material on the model with a plain cyan material.
var newMaterial = SimpleMaterial()
newMaterial.color.tint = UIColor.cyan
for i in 0..<(modelEntity.model?.materials.count ?? 0) {
    modelEntity.model?.materials[i] = newMaterial
}
//let temporaryDirectory = FileManager.default.temporaryDirectory
//let temporaryFileURL = temporaryDirectory.appendingPathComponent("model.usdz")
let previewItem = ARQuickLookPreviewItem(fileAt: url) // how do I use modelEntity here?
previewItem.allowsContentScaling = allowsContentScaling
previewItem.canonicalWebPageURL = nil
return previewItem
}
I just need to change the colour of the model at runtime and show it. TIA

How to load a usdz model and textures from a remote server using ARKit?

How to load a usdz model and textures from a remote server using ARKit?
As in the example below:
let myURL = URL(string: "https://mywebsite.com/vase.scn")!
guard let scene = try? SCNScene(url: myURL, options: nil) else {
    return
}
let node = scene.rootNode.childNode(withName: "vase", recursively: true)
let transform = queryResult.worldTransform
let thirdColumn = transform.columns.3
node!.position = SCNVector3(thirdColumn.x, thirdColumn.y, thirdColumn.z)
self.sceneView.scene.rootNode.addChildNode(node!)
I found a solution, shown below.
Download model and save
func downloadModel() {
    // originalURL initially holds the remote URL of the .usdz file.
    guard let url = originalURL else { return }
    let urlSession = URLSession(configuration: .default, delegate: self, delegateQueue: OperationQueue())
    let downloadTask = urlSession.downloadTask(with: url)
    downloadTask.resume()
}
extension ARViewController: URLSessionDownloadDelegate {
    public func urlSession(_ session: URLSession, downloadTask: URLSessionDownloadTask, didFinishDownloadingTo location: URL) {
        print("locationUrl:", location.path)
        // Create a destination URL in the Caches directory with the original file name.
        guard let url = downloadTask.originalRequest?.url else { return }
        let documentsPath = FileManager.default.urls(for: .cachesDirectory, in: .userDomainMask)[0]
        let destinationURL = documentsPath.appendingPathComponent(url.lastPathComponent)
        // Delete any existing copy.
        try? FileManager.default.removeItem(at: destinationURL)
        // Copy the file from its temporary location to the Caches directory.
        do {
            try FileManager.default.copyItem(at: location, to: destinationURL)
            self.originalURL = destinationURL
            print("originalURL:", originalURL!.path)
        } catch let error {
            print("Copy Error: \(error.localizedDescription)")
        }
    }
}
Load usdz model and textures
func addItem(queryResult: ARRaycastResult) {
    // originalURL now points at the local file saved by the download delegate.
    guard let url = originalURL else { return }
    // Load the asset with Model I/O so its textures get resolved and loaded.
    let mdlAsset = MDLAsset(url: url)
    mdlAsset.loadTextures()
    let scene = SCNScene(mdlAsset: mdlAsset)
    let node = scene.rootNode.childNode(withName: "model", recursively: true)
    // Place the node at the raycast hit location.
    let transform = queryResult.worldTransform
    let thirdColumn = transform.columns.3
    node!.position = SCNVector3(thirdColumn.x, thirdColumn.y, thirdColumn.z)
    self.sceneView.scene.rootNode.addChildNode(node!)
}
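For completeness, a small sketch of my own (not part of the answer above) showing one way the ARRaycastResult could be obtained from a tap and passed to addItem, assuming sceneView is an ARSCNView:
@objc func handleTap(_ sender: UITapGestureRecognizer) {
    let tapLocation = sender.location(in: sceneView)
    // Raycast against the geometry of already-detected horizontal planes.
    guard let query = sceneView.raycastQuery(from: tapLocation,
                                             allowing: .existingPlaneGeometry,
                                             alignment: .horizontal),
          let result = sceneView.session.raycast(query).first else { return }
    addItem(queryResult: result)
}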

How to create a thumbnail from local files

I want to create a thumbnail image for files (Word, Excel, video, ...).
This is what I did:
import QuickLook

class ThumbsCreator: NSObject {
    private var file: File?

    init(file: File?) {
        super.init()
        self.file = file
    }

    func createThumb() {
        let url = URL(string: (self.file?.path())!)
    }
}
After a lot of searching, I found this solution:
import PDFKit
import AVKit
import WebKit
func createThumb() {
    guard let type = self.file?.type,
          let path = self.file?.path() else { return }
    let url = URL(fileURLWithPath: path)
    switch type {
    case FileType.image.rawValue:
        // Plain image: load it and scale it down.
        let image = UIImage(contentsOfFile: url.path)
        _finalImage = self.createScaledImage(image: image!)
    case FileType.office.rawValue:
        //Loading.......
        break
    case FileType.Pdf.rawValue:
        // PDF: render a thumbnail of the first page.
        guard let doc = PDFDocument(url: url) else { return }
        guard let page = doc.page(at: 0) else { return }
        _finalImage = page.thumbnail(of: CGSize(width: 768, height: 1024), for: .cropBox)
    case FileType.video.rawValue:
        // Video: grab a frame at the 2-second mark.
        let asset = AVAsset(url: url)
        let imageGenerator = AVAssetImageGenerator(asset: asset)
        imageGenerator.appliesPreferredTrackTransform = true
        let time = CMTime(seconds: 2, preferredTimescale: 1)
        do {
            let imageRef = try imageGenerator.copyCGImage(at: time, actualTime: nil)
            _finalImage = UIImage(cgImage: imageRef)
        } catch let error {
            print("Error: \(error)")
        }
    default:
        break
    }
}
func createScaledImage(image: UIImage) -> UIImage {
    let THUMB_WIDTH = 150.0 - 40.0
    let THUMB_HEIGHT = THUMB_WIDTH - 23.0
    // resizeImage(image:constraintSize:) is assumed to be defined elsewhere in this class.
    var itemThumb = resizeImage(image: image, constraintSize: CGSize(width: THUMB_WIDTH, height: THUMB_HEIGHT))
    let thumbRect = CGRect(x: 0, y: 0, width: 10, height: 10)
    UIGraphicsBeginImageContextWithOptions(thumbRect.size, true, 0.0)
    let context = UIGraphicsGetCurrentContext()
    // Fill a white rect
    context?.setFillColor(gray: 1.0, alpha: 1.0)
    context?.fill(thumbRect)
    // Stroke a gray rect
    let comps: [CGFloat] = [0.8, 0.8, 0.8, 1]
    let colorSpace = CGColorSpaceCreateDeviceRGB()
    let strokeColor = CGColor(colorSpace: colorSpace, components: comps)
    context?.setStrokeColor(strokeColor!)
    UIRectFrame(thumbRect)
    itemThumb.draw(in: thumbRect.insetBy(dx: 1, dy: 1))
    itemThumb = UIGraphicsGetImageFromCurrentImageContext()!
    UIGraphicsEndImageContext()
    self.finishThumCreation(image: image)
    return itemThumb
}
}
Starting from iOS 13 and macOS 10.15, there is the QuickLook Thumbnailing API. It supports any file format for which the OS can provide a preview: either because the OS knows this format or because the owner of the third-party format provided a QuickLook plugin.
Here is an example based on Apple's tutorial:
import QuickLookThumbnailing

func thumbnail(for fileURL: URL, size: CGSize, scale: CGFloat) {
    let request = QLThumbnailGenerator.Request(fileAt: fileURL,
                                               size: size,
                                               scale: scale,
                                               representationTypes: .lowQualityThumbnail)
    QLThumbnailGenerator.shared.generateRepresentations(for: request) { (thumbnail, type, error) in
        DispatchQueue.main.async {
            if thumbnail == nil || error != nil {
                // Handle the error case gracefully.
            } else {
                // Display the thumbnail that you created.
            }
        }
    }
}
On macOS before 10.15, my app falls back to NSWorkspace.shared.icon(forFile:), which provides a document icon based on the file type (but not a thumbnail).
You can use https://developer.apple.com/documentation/uikit/uidocumentinteractioncontroller/1616801-icons
var icons: [UIImage] { get }
let controller = UIDocumentInteractionController(url: someUrl)
print(controller.icons.first)
Only for a video
extension UIViewController {
    func thumbnail(_ sourceURL: URL) -> UIImage {
        let asset = AVAsset(url: sourceURL)
        let imageGenerator = AVAssetImageGenerator(asset: asset)
        imageGenerator.appliesPreferredTrackTransform = true
        // Grab a frame at the 1-second mark.
        let time = CMTime(seconds: 1, preferredTimescale: 1)
        do {
            let imageRef = try imageGenerator.copyCGImage(at: time, actualTime: nil)
            return UIImage(cgImage: imageRef)
        } catch {
            print(error)
            return UIImage(named: "NoVideos")!
        }
    }
}
There's no good API for this yet. There is NSURLThumbnailDictionaryKey, but YMMV. You can indeed get icons via UIDocumentInteractionController.
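For reference, a small sketch of my own for reading that key through URLResourceValues; whether a thumbnail actually comes back depends on the file type and the OS, hence the YMMV:
// Hypothetical helper: try to read a system-provided thumbnail for a file URL.
func systemThumbnail(for fileURL: URL) -> UIImage? {
    guard let values = try? fileURL.resourceValues(forKeys: [.thumbnailDictionaryKey]),
          let thumbnails = values.allValues[.thumbnailDictionaryKey] as? [AnyHashable: UIImage] else {
        return nil
    }
    // The dictionary is keyed by thumbnail size; take whichever representation is available.
    return thumbnails.values.first
}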

How to set the Google Maps zoom level according to the path and markers in Swift

I am trying to show a path in Google Maps from one place to another. I am getting something like this, but I need to show it like this: the whole path needs to be visible, and the zoom level should adjust to fit the path on the map.
Here is the code I use to draw the path from the Directions API. In settingCam I am setting the camera to one of the locations:
func showingPathFromPickupLocToDropLoc(dropLat: Double, dropLong: Double) {
    let origin = "\(dropLat),\(dropLong)"
    let destination = "\(dropLatitude),\(dropLongitude)"
    let settingCam: CLLocationCoordinate2D = CLLocationCoordinate2D(latitude: CLLocationDegrees(dropLat), longitude: CLLocationDegrees(dropLong))
    // Request the route from the Google Directions API.
    let url = URL(string: "https://maps.googleapis.com/maps/api/directions/json?origin=\(origin)&destination=\(destination)&key=\(NEWAPI.GOOGLE_APIKEY)")
    URLSession.shared.dataTask(with: url!, completionHandler: { (data, response, error) in
        if error != nil {
            print("error")
        } else {
            do {
                let json = try JSONSerialization.jsonObject(with: data!, options: .allowFragments) as! [String: AnyObject]
                if json["status"] as! String == "OK" {
                    let routes = json["routes"] as! [[String: AnyObject]]
                    OperationQueue.main.addOperation({
                        for route in routes {
                            // Decode the overview polyline and draw it on the map.
                            let routeOverviewPolyline = route["overview_polyline"] as! [String: String]
                            let points = routeOverviewPolyline["points"]
                            let path = GMSPath(fromEncodedPath: points!)
                            self.PathFromPickupLocToDropLoc = GMSPolyline(path: path)
                            self.PathFromPickupLocToDropLoc.strokeColor = .gray
                            self.PathFromPickupLocToDropLoc.strokeWidth = 3.0
                            self.PathFromPickupLocToDropLoc.map = self.mapView
                            // The camera is currently fixed to one location at zoom 16.
                            let camera = GMSCameraPosition.camera(withTarget: settingCam, zoom: 16.0)
                            self.mapView.animate(toLocation: settingCam)
                            self.mapView.animate(to: camera)
                            self.insertingMarkersFromPickupLocToDropLoc(dropLat: dropLat, dropLong: dropLong)
                        }
                    })
                }
            } catch let error as NSError {
                print(error)
            }
        }
    }).resume()
}
You need to build a GMSCoordinateBounds from the path and ask the map to fit those bounds, like this:
DispatchQueue.main.async {
    if self.googleMap != nil {
        // Fit the camera to the whole path, with 50pt of padding on each side.
        let bounds = GMSCoordinateBounds(path: path!)
        self.googleMap!.animate(with: GMSCameraUpdate.fit(bounds, withPadding: 50.0))
    }
}
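Applied to the code in the question, this would replace the fixed-zoom camera calls inside the route loop (a sketch using the mapView and path names from the question):
// Inside the for-route loop, right after assigning the polyline to the map:
if let path = path {
    let bounds = GMSCoordinateBounds(path: path)
    // Replaces GMSCameraPosition.camera(withTarget:zoom:) and the two animate
    // calls with a camera update that fits the whole route plus 50pt of padding.
    self.mapView.animate(with: GMSCameraUpdate.fit(bounds, withPadding: 50.0))
}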

How to refresh the markers on Google Maps without refreshing the map, using Swift

I am showing some vehicles on Google Maps (I get the latitudes and longitudes from an API).
I get the latitude and longitude from the storingDataOftripLocDetails() method, which I call every 2 seconds.
After getting the coordinates I show one marker, but because of the timer my Google Map also refreshes. My requirement is to refresh only the marker.
How should I achieve this?
var driverLatitude: Double?
var driverLongitude: Double?
In the following method I start the timer:
override func viewDidLoad() {
    super.viewDidLoad()
    // Poll the trip location details every 2 seconds.
    self.tripLocDetailsTimer = Timer.scheduledTimer(timeInterval: 2, target: self, selector: #selector(tripLocDetailCustomrWebserviceCall), userInfo: nil, repeats: true)
}
Here I get the data from the API and call showingThePath():
func storingDataOftripLocDetails(_ data: TripLocationDetailsResponse) {
    self.driverLatitude = data.locationDetails?.driverDetails?.pickupLatitude
    self.driverLongitude = data.locationDetails?.driverDetails?.pickupLongitude
    self.showingThePath()
}
In the following method I show the marker and draw the path:
func showingThePath() {
    let vehicleLocation = CLLocationCoordinate2D(latitude: CLLocationDegrees(self.driverLatitude!), longitude: CLLocationDegrees(self.driverLongitude!))
    // Note: a brand-new marker is created on every call, which is why the map appears to refresh.
    let cabIM = UIImageView(frame: CGRect(x: 0, y: 0, width: 40, height: 15))
    cabIM.image = UIImage(named: "bike_color")
    let vehicleImage = GMSMarker()
    vehicleImage.iconView = cabIM
    vehicleImage.infoWindowAnchor = CGPoint(x: 0.44, y: 0.40)
    vehicleImage.tracksViewChanges = false
    vehicleImage.position = vehicleLocation
    vehicleImage.map = self.mapView
    // Build the Directions API request. String(describing:) would embed "Optional(...)"
    // in the URL, so unwrap the coordinates instead.
    let origin = "\(driverLatitude!),\(driverLongitude!)"
    let destination = "\(pickupLatitude!),\(pickupLongitude!)"
    let url = URL(string: "https://maps.googleapis.com/maps/api/directions/json?origin=\(origin)&destination=\(destination)")
    URLSession.shared.dataTask(with: url!, completionHandler: { (data, response, error) in
        if error != nil {
            // The request error is ignored here.
        } else {
            do {
                let json = try JSONSerialization.jsonObject(with: data!, options: .allowFragments) as! [String: AnyObject]
                print(json)
                if json["status"] as! String == "OK" {
                    let routes = json["routes"] as! [[String: AnyObject]]
                    OperationQueue.main.addOperation({
                        for route in routes {
                            let routeOverviewPolyline = route["overview_polyline"] as! [String: String]
                            let points = routeOverviewPolyline["points"]
                            let path = GMSPath(fromEncodedPath: points!)
                            self.path = GMSPolyline(path: path)
                            self.path.strokeColor = .gray
                            self.path.strokeWidth = 3.0
                            self.path.map = self.mapView
                        }
                    })
                }
            } catch let error as NSError {
                print(error)
            }
        }
    }).resume()
}
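A possible approach (my own sketch, not from the post): create the GMSMarker once and only update its position on each tick, so the map itself is never reloaded. The names below mirror the question's code; the animation duration is just an example:
private var vehicleMarker: GMSMarker?

func updateVehicleMarker(latitude: Double, longitude: Double) {
    let coordinate = CLLocationCoordinate2D(latitude: latitude, longitude: longitude)
    if let marker = vehicleMarker {
        // Reuse the existing marker: animate it to the new position.
        CATransaction.begin()
        CATransaction.setAnimationDuration(2.0)
        marker.position = coordinate
        CATransaction.commit()
    } else {
        // First tick: create the marker once and keep a reference to it.
        let iconView = UIImageView(frame: CGRect(x: 0, y: 0, width: 40, height: 15))
        iconView.image = UIImage(named: "bike_color")
        let marker = GMSMarker(position: coordinate)
        marker.iconView = iconView
        marker.tracksViewChanges = false
        marker.map = self.mapView
        vehicleMarker = marker
    }
}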