![](/img/trans.png)
[英]Handle keyboard inputs in NSTextView embedded via NSViewRepresentable? (SwiftUI/MacOS)
[英]How to use CAEmitterLayer on macOS in a SwiftUI app using NSViewRepresentable
我想在基於 SwiftUI
的 macOS 應用程序中使用 CAEmitterLayer。
問題:圖層本身是完全可見的,但它不發射任何粒子。
我基本上為自定義類EmitterNSView: NSView
構建了一個NSViewRepresentable
來處理發射器層本身。
/// A layer-backed NSView that hosts a `CAEmitterLayer` and keeps it
/// configured/sized to the view. Intended to be wrapped in an
/// `NSViewRepresentable` for use from SwiftUI on macOS.
final class EmitterNSView: NSView {

    /// The particle-emitting layer. The translucent green background is a
    /// debug aid so the layer's extent is visible even when no particles draw.
    private let emitterLayer: CAEmitterLayer = {
        let layer = CAEmitterLayer()
        layer.backgroundColor = NSColor.green.withAlphaComponent(0.33).cgColor
        return layer
    }()

    /// The single particle prototype emitted by `emitterLayer`.
    /// See: https://developer.apple.com/documentation/quartzcore/caemitterlayer
    private let emitterCells: [CAEmitterCell] = {
        let cell = CAEmitterCell()
        cell.name = "someParticle"
        cell.birthRate = 10
        cell.lifetime = 5.0
        cell.velocity = 100
        cell.velocityRange = 50
        cell.emissionLongitude = 0.0
        cell.emissionRange = CGFloat.pi * 2.0
        cell.spinRange = 5
        cell.scale = 1.0
        cell.scaleRange = 0.25
        cell.alphaSpeed = 0.25
        // FIX: unlike UIImage on iOS, NSImage has no `cgImage` property, so the
        // old `NSImage(named:)!.cgImage` left `contents` effectively empty and
        // no particles were ever rendered. Convert explicitly via
        // cgImage(forProposedRect:context:hints:); flatMap also removes the
        // force-unwrap so a missing asset degrades to nil instead of crashing.
        cell.contents = NSImage(named: "whiteParticle.png").flatMap {
            $0.cgImage(forProposedRect: nil, context: nil, hints: nil)
        }
        cell.color = NSColor.systemPink.cgColor
        cell.xAcceleration = 4
        cell.yAcceleration = 3
        cell.zAcceleration = 2
        return [cell]
    }()

    override init(frame frameRect: NSRect) {
        super.init(frame: frameRect)
        // Opt into layer backing and install our own root layer so we can
        // attach the emitter as a sublayer.
        self.wantsLayer = true
        self.layer = CALayer()
        self.layer?.autoresizingMask = [.layerWidthSizable, .layerHeightSizable]
        self.configureEmitterLayer()
        self.layer?.addSublayer(self.emitterLayer)
    }

    required init?(coder: NSCoder) {
        fatalError("init(coder:) has not been implemented")
    }

    override func layout() {
        super.layout()
        // Re-sync the emitter layer whenever AppKit lays the view out.
        self.configureEmitterLayer()
    }

    override func updateLayer() {
        super.updateLayer()
        self.configureEmitterLayer()
    }

    /// (Re)applies geometry and emission parameters to `emitterLayer`.
    /// Called from `init`, `layout()` and `updateLayer()`.
    func configureEmitterLayer() {
        // FIX: a sublayer's frame is expressed in the superlayer's coordinate
        // space, which here matches the view's bounds. Using `self.frame`
        // (superview coordinates) misplaces the layer whenever the view's
        // origin is nonzero.
        self.emitterLayer.frame = self.bounds
        self.emitterLayer.autoresizingMask = [.layerHeightSizable, .layerWidthSizable]
        self.emitterLayer.masksToBounds = false
        self.emitterLayer.drawsAsynchronously = true
        self.emitterLayer.emitterMode = .points
        // Layer-level birthRate is a multiplier on each cell's birthRate.
        self.emitterLayer.birthRate = 2
        self.emitterLayer.emitterShape = .line
        self.emitterLayer.emitterSize = CGSize(width: frame.width * 0.5, height: frame.height * 0.5)
        self.emitterLayer.emitterPosition = .zero
        self.emitterLayer.renderMode = .additive
        self.emitterLayer.zPosition = 10
        // NOTE(review): resetting beginTime on every layout restarts the
        // emission timeline; acceptable here, but worth confirming if a
        // continuous stream across relayouts is desired.
        self.emitterLayer.beginTime = CACurrentMediaTime()
        self.emitterLayer.speed = 1.5
        // Assign the cells once (the original set this property twice).
        self.emitterLayer.emitterCells = self.emitterCells
    }
}
我在應用程序中可以清楚地看到圖層的綠色背景——這一事實是否說明問題出在發射器單元格(cell)上?目前對此感到很迷茫。
在 UIKit 中,
一個非常相似的實現工作得很好。
如何在基於 SwiftUI 的應用程序中在 macOS 上使用CAEmitterLayer
?
將 NSImage 轉換為 CGImage 時,請使用 cgImage(forProposedRect:context:hints:)。
cell.contents = NSImage(named:"whiteParticle.png").flatMap {
return $0.cgImage(forProposedRect: nil, context: nil, hints: nil)
}
聲明:本站的技術帖子網頁,遵循CC BY-SA 4.0協議,如果您需要轉載,請注明本站網址或者原文地址。任何問題請咨詢:yoyou2525@163.com.