I have a simple setup here representative of what I do in my MacOS app:
// AppDelegate.swift
import Cocoa
#main
class AppDelegate: NSObject, NSApplicationDelegate {
func applicationDidFinishLaunching(_ aNotification: Notification) {
TestWindow()
}
func applicationWillTerminate(_ aNotification: Notification) {}
func applicationSupportsSecureRestorableState(_ app: NSApplication) -> Bool {
return true
}
}
// BareView.swift
import SwiftUI
struct BareView: View {
#ObservedObject var testState: TestState
var body: some View {
Color.white.frame(width: testState.width, height: testState.height)
}
}
// TestWindow.swift
import Foundation
import AppKit
import SwiftUI
class TestState: ObservableObject {
#Published var width: CGFloat = 100
#Published var height: CGFloat = 100
}
class TestWindow: NSWindowController {
let testState = TestState()
required init?(coder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
init() {
super.init(window: NSWindow(
contentRect: .zero,
styleMask: [ .borderless ],
backing: .buffered,
defer: false))
self.window!.contentView = NSHostingView(rootView: BareView(testState: testState))
self.window!.setFrameOrigin(.init(x: NSScreen.main!.frame.width / 2, y: NSScreen.main!.frame.height / 2))
self.window!.makeKeyAndOrderFront(nil)
NSEvent.addLocalMonitorForEvents(matching: [.mouseMoved]) { event in
self.testState.width += 10
self.testState.height += 10
self.window!.makeKeyAndOrderFront(nil)
// behaviour only as expected if this line is added:
// self.window!.setContentSize(.init(width: self.testState.width, height: self.testState.height))
print("contentSize: \(self.testState.width)")
print("viewFrame: \(self.window!.contentView!.frame)")
return nil
}
}
}
The size of the SwiftUI View inside the NSHostingView is updated based on an EventHandler. The SwiftUI View is actually resized and increases its size, but the contentViews frame size is not directly notified about this change. This happens one iteration later (after the window has moved?).
So the console prints the following:
contentSize: 110.0
viewFrame: (0.0, 0.0, 100.0, 100.0)
contentSize: 120.0
viewFrame: (0.0, 0.0, 110.0, 110.0)
Once I manually update the contentViews size with setContentSize(_:) function the console output behaves as expected:
contentSize: 110.0
viewFrame: (0.0, 0.0, 110.0, 110.0)
contentSize: 120.0
viewFrame: (0.0, 0.0, 120.0, 120.0)
So it seems like Swift is recalculating the window only after the code has returned. Since the SwiftUI view has increased in size, but the Window does not know about this, the origin is not valid. This creates a bug in my app.
I can solve this problem by calling setContentSize(_:), but the information about its actual size is contained in a leave view all the way down the tree. I would need to use an ObservableObject to communicate the calculated size back to the NSWindowController.
Can someone explain to me if this is expected behaviour and if so, what happens here? There must be some kind of "window redraw cycle" that I don't know about.
If you're looking to capture window resizing as it happens, you could try:
.onReceive(NotificationCenter.default.publisher(for: NSWindow.didResizeNotification)) { newValue in
if let size = window?.frame.size {
windowWidth = size.width
windowHeight = size.height
}
}
Related
I have a project in SwiftUI on macOS where I draw to a canvas twice per second.
This is my ContentView:
struct ContentView: view {
#State var score: Int = 0
var body: some View {
VStack {
Text("Score: \(self.score)")
.fixedSize(horizontal: true, vertical: true)
Canvas(renderer: { gc, size in
start(
gc: &gc,
size: size
onPoint: { newScore in
self.score = newScore
}
)
)
}
}
}
The start function:
var renderer: Renderer
func start(
gc: inout GraphicsContext,
size: size,
onPoint: #escaping (Int) -> ()
) {
if renderer != nil {
renderer!.set(gc: &gc)
} else {
renderer = Renderer(
context: &gc,
canvasSize: size,
onPoint: onPoint
)
startGameLoop(renderer: renderer!)
}
renderer!.drawFrame()
}
var timer: Timer
func startGameLoop(renderer: Renderer) {
timer = Timer.scheduledTimer(withTimeInterval: 0.5, repeats: true, block: {
renderer!.handleNextFrame()
}
}
And the renderer roughly looks like this:
class Renderer {
var gc: GraphicsContext
var size: CGSize
var cellSize: CGFloat
let pointCallback: (Int) -> ()
var player: (Int, Int) = (0,0)
init(
context: inout GraphicsContext,
canvasSize: CGSize,
onPoint: (Int) -> ()
) {
self.gc = gc
self.size = canvasSize
self.pointCallback = onPoint
self.cellSize = min(self.size.width, self.size.height)
}
}
extension Renderer {
func handleNextFrame() {
self.player = (self.player.0 + 1, self.player.1 + 1)
self.drawFrame
}
func drawFrame() {
self.gc.fill(
Path(
x: CGFloat(self.player.0) * self.cellSize,
y: CGFloat(self.player.1) * self.cellSize,
width: self.cellSize,
height: self.cellSize
)
)
}
}
So the handleNextFrame method is called twice per second, which calls the drawFrame method, drawing the position of the player to the canvas.
However, there is nothing being drawn to the canvas.
Only the first frame is drawn, which comes from the renderer!.drawFrame() in start. When a point is scored, the canvas is also redrawn, because the start function gets called again.
The problem is that there is nothing being drawn to the Canvas when the drawFrame is called from handleNextFrame.
Where lies my problem, and how can I fix this issue?
Thanks in advance,
Jonas
I had the same issue.
Go to your project settings, select 'Build Settings' tab, and make sure 'Strict Concurrency Checking' is ON.
It will give you hints about GraphicsContext not being thread-safe, because it does not conform to Sendable protocol.
I am no expert on the topic, but I believe this means that GraphicsContext is not meant to be used in asynchronous context, you are not meant to save its reference for future use. You are given a GraphicsContext instance and you are meant to use it so long as the renderer closure's execution lasts (and this execution, of course, ends before your asynchronous callbacks could use the GraphicsContext instance).
What you really want is a TimelineView:
import SwiftUI
private func nanosValue(for date: Date) -> Int { return Calendar.current.component(.nanosecond, from: date)
}
struct ContentView: View {
var body: some View {
TimelineView(.periodic(from: Date(), by: 0.000001)) { timeContext in
Canvas { context, size in
let value = nanosValue(for: timeContext.date) % 2
let rect = CGRect(origin: .zero, size: size).insetBy(dx: 25, dy: 25)
// Path
let path = Path(roundedRect: rect, cornerRadius: 35.0)
// Gradient
let gradient = Gradient(colors: [.green, value == 1 ? .blue : .red])
let from = rect.origin
let to = CGPoint(x: rect.width + from.x, y: rect.height + from.y)
// Stroke path
context.stroke(path, with: .color(.blue), lineWidth: 25)
// Fill path
context.fill(path, with: .linearGradient(gradient,
startPoint: from,
endPoint: to))
}
}
}
}
struct ContentView_Previews: PreviewProvider {
static var previews: some View {
ContentView()
}
}
This will change the gradient colour of the drawn rectangle every nanosecond.
Funny thing, I have heard on some blog, you need a completely different mindset to use SwiftUI, haha. It just does not work the traditional way.
I'm trying to use SpriteKit+SwiftUI in my work running in Swift Playground. Here are some of my codes
struct SwiftUI: View {
var body: some View {
Test()
}
}
struct Test: UIViewRepresentable {
func makeUIView(context: Context) -> SKView {
let sceneView = SKView()
let gameScene = GameScene()
gameScene.size = CGSize(width: 500, height: 600)
gameScene.scaleMode = .aspectFit
sceneView.presentScene(gameScene)
return sceneView
}
func updateUIView(_ uiView: SKView, context: Context) {
}
}
It runs well, but always have this awful Black area beyond my SKView like the image below.
Black area in the View.
I have tried to change the sceneView.backgroundcolor,or change the gameScene.size and sceneView.size but those just didn't work. 🤨
Thanks so much if you can give me some advice!
The issue is that the call to Test() also needs to have an explicit frame size set. Otherwise, the Test view takes up the entire available screen space, while the sceneView only takes up a portion of this space.
The example below is the same one that you posted, with an explicit frame set on Test() (and a dummy GameScene). This works in the playground, where the result is just a purple square with no black area beyond the view:
import SwiftUI
import SpriteKit
import PlaygroundSupport
let width: CGFloat = 500
let height: CGFloat = 500
struct ContentView: View {
var body: some View {
Test()
.frame(width: width, height: height)
}
}
struct Test: UIViewRepresentable {
func makeUIView(context: Context) -> SKView {
let sceneView = SKView()
let gameScene = GameScene()
gameScene.size = CGSize(width: width, height: height)
gameScene.scaleMode = .aspectFit
sceneView.presentScene(gameScene)
return sceneView
}
func updateUIView(_ uiView: SKView, context: Context) {
}
}
class GameScene: SKScene {
override func didMove(to view: SKView) {
let node = SKShapeNode(rect: CGRect(x: 0, y: 0, width: size.width, height: size.height))
node.fillColor = .purple
addChild(node)
}
}
PlaygroundPage.current.setLiveView(ContentView())
I need some help with the SpriteView, which I wanted to use for the first time but it broke my old code since the frame is only zero. But see by your self:
I have the following Code:
struct ContentView: View {
var body: some View {
SpriteView(scene: SKScene(fileNamed: "MyScene")!)
.frame(width: 300, height: 400).edgesIgnoringSafeArea(.all)
}
}
And im my scene I have:
And then my custom class:
class MyScene: SKScene {
override func didMove(to view: SKView) {
print(frame)
}
}
And the result is:
(-0.0, -0.0, 0.0, 0.0)
And not as expected:
(-375.0, -667.0, 750.0, 1334.0)
Whats is wrong here? If I use the same Scene via the 'old' way in a UIViewController, it works fine.
I like the question and probably are related to transformations made by SwiftUI to configure the screen size, this is not the final solution, but maybe you can use:
import SpriteKit
class MyScene: SKScene {
override func didMove(to view: SKView) {
print(view.frame)
}
override func didChangeSize(_ oldSize: CGSize) {
print(#function)
print(oldSize)
print(self.size)
}
}
and get this outout:
didChangeSize(_:)
(750.0, 1334.0)
(750.0, 1334.0)
...
didChangeSize(_:)
(750.0, 1334.0)
(0.0, 0.0)
didChangeSize(_:)
(750.0, 1334.0)
(0.0, 0.0)
(0.0, 0.0, 0.0, 0.0)
didChangeSize(_:)
(0.0, 0.0)
(300.0, 400.0)
didChangeSize(_:)
(0.0, 0.0)
(300.0, 400.0)
Complete example here
I wonder why it always fails whenever I try to set the autoresizingMask to an NSView object programmatically? In the following lines of code, I have a tiny, blue NSVIew object (topLeftView) set at the top-left corner of its parent container.
class ViewController: NSViewController {
// MARK: - IBOutlet
#IBOutlet weak var panView: PanView!
override func viewDidLoad() {
super.viewDidLoad()
makeImageContainer(point: CGPoint.zero)
}
func makeViewContainer(point: CGPoint) {
let myView = NSView(frame: CGRect(origin: CGPoint.zero, size: CGSize(width: 150.0, height: 150.0))
myView.wantsLayer = true
if let myLayer = myView.layer {
myLayer.backgroundColor = NSColor.orange.cgColor
}
panView.addSubview(myView) // panView is an IBOutlet-connected `NSView` object.
/* tiny, blue `NSView` object */
let topLeftView = NSView(frame: CGRect(origin: CGPoint(x: 0.0, y: 150.0 - 6.0), size: CGSize(width: 6.0, height: 6.0)))
topLeftView.wantsLayer = true
topLeftView.autoresizingMask = [.minXMargin, .maxYMargin]
if let layer = topLeftView.layer {
layer.backgroundColor = NSColor.blue.cgColor
}
myView.addSubview(topLeftView)
}
}
So it has minXMargin and maxYMargin autoresizingMasks.
Now, I want to click on a push button to resize this orange NSView container object to see if the tiny, blue NSView object will stick at the top-left corner.
#IBAction func testClicked(_ sender: NSButton) {
for i in 0..<panView.subviews.count {
let subView = panView.subviews[i]
if i == 4 {
subView.setFrameSize(CGSize(width: 200.0, height: 200.0))
}
}
}
And the tiny guy has moved by 50 points to the right and 50 points to the bottom. What am I doing wrong? Thanks.
I'm trying to learn swift. So far, my code runs, but it doesn't show a CAlayer I create:
class AppDelegate: NSObject, NSApplicationDelegate {
var window: NSWindow!
var mainview: NSView!
var redLayer: CATextLayer!
func applicationDidFinishLaunching(aNotification: NSNotification) {
self.mainview = NSView(frame: NSRect(x:0,y:0,width:300,height:300))
self.window = NSWindow(contentRect: NSMakeRect(0, 0, 300, 300),
styleMask: NSTitledWindowMask | NSClosableWindowMask |
NSMiniaturizableWindowMask,
backing: .Buffered, `defer`: false)
self.window.contentView!.addSubview(self.mainview)
self.window.makeKeyAndOrderFront(self.window)
self.window.makeMainWindow()
self.redLayer = CATextLayer()
self.redLayer.string = "test"
self.redLayer.frame = self.mainview.bounds
self.redLayer.bounds = CGRect(x:50,y:50,width:300,height:300)
self.redLayer.position = CGPoint(x: 50, y: 50)
self.redLayer.backgroundColor = CGColorCreateGenericRGB(1.0, 0.0, 0.0, 1.0)
self.redLayer.foregroundColor = CGColorCreateGenericRGB(0.0, 0.0, 0.0, 1.0)
self.redLayer.hidden = false
self.mainview.layer = self.redLayer
}
func applicationWillTerminate(aNotification: NSNotification) {
// Insert code here to tear down your application
}}
the code gives a gray empty window. but I want it to show a colored window with a line of text saying "test", but I can't see these stuff.
I tried multiple ways, I can't fix this issue.
In OS X, Core Animation support (CALayer) is not enabled automatically. You have to manually enable it. For a programmatically created view, use the wantsLayer property.
self.mainview.wantsLayer = true