I am trying to achieve an animated gradient effect that changes over time, driven by the current composition time in seconds. I am using AVPlayer and AVMutableVideoComposition along with a custom compositor class and instruction to generate the effect. I didn't want to load any video file, but rather generate a custom video from my own set of instructions. I used a Metal compute shader to generate the effect, and I want the video to be 20 seconds long.
However, when I run the code, I get a frozen player with the gradient applied, but when I try to play the video, I get this warning in the console: Visual isTranslatable: NO; reason: observation failure: noObservations
Here is the screenshot:
My entire code:
import AVFoundation
import Metal

class GradientVideoCompositorTest: NSObject, AVVideoCompositing {
    var sourcePixelBufferAttributes: [String: Any]? = [
        kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32BGRA
    ]

    var requiredPixelBufferAttributesForRenderContext: [String: Any] = [
        kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32BGRA
    ]

    private var renderContext: AVVideoCompositionRenderContext?
    private var metalDevice: MTLDevice!
    private var metalCommandQueue: MTLCommandQueue!
    private var metalLibrary: MTLLibrary!
    private var metalPipeline: MTLComputePipelineState!

    override init() {
        super.init()
        setupMetal()
    }

    func setupMetal() {
        // makeDefaultLibrary() is non-throwing and returns an optional, so it
        // belongs in the guard without `try?`; the pipeline is created here too
        // so a failure surfaces immediately instead of as a nil unwrap later.
        guard let device = MTLCreateSystemDefaultDevice(),
              let queue = device.makeCommandQueue(),
              let library = device.makeDefaultLibrary(),
              let function = library.makeFunction(name: "gradientShader"),
              let pipeline = try? device.makeComputePipelineState(function: function) else {
            fatalError("Metal setup failed")
        }
        self.metalDevice = device
        self.metalCommandQueue = queue
        self.metalLibrary = library
        self.metalPipeline = pipeline
    }
    func renderContextChanged(_ newRenderContext: AVVideoCompositionRenderContext) {
        renderContext = newRenderContext
    }

    func startRequest(_ request: AVAsynchronousVideoCompositionRequest) {
        guard let outputPixelBuffer = renderContext?.newPixelBuffer(),
              let metalTexture = createMetalTexture(from: outputPixelBuffer) else {
            request.finish(with: NSError(domain: "com.example.gradient", code: -1, userInfo: nil))
            return
        }
        // compositionTime is the presentation time of the frame being composed.
        let time = Float(request.compositionTime.seconds)
        renderGradient(to: metalTexture, time: time)
        request.finish(withComposedVideoFrame: outputPixelBuffer)
    }
    private func createMetalTexture(from pixelBuffer: CVPixelBuffer) -> MTLTexture? {
        var texture: MTLTexture?
        CVPixelBufferLockBaseAddress(pixelBuffer, .readOnly)
        // The texture is created through a CVMetalTextureCache so the Metal
        // texture shares storage with the pixel buffer; no separate
        // MTLTextureDescriptor is needed for this path.
        if let textureCache = createTextureCache(),
           let cvTexture = createCVMetalTexture(from: pixelBuffer, cache: textureCache) {
            texture = CVMetalTextureGetTexture(cvTexture)
        }
        CVPixelBufferUnlockBaseAddress(pixelBuffer, .readOnly)
        return texture
    }
    private func renderGradient(to texture: MTLTexture, time: Float) {
        guard let commandBuffer = metalCommandQueue.makeCommandBuffer(),
              let commandEncoder = commandBuffer.makeComputeCommandEncoder() else { return }
        commandEncoder.setComputePipelineState(metalPipeline)
        commandEncoder.setTexture(texture, index: 0)
        var mutableTime = time
        commandEncoder.setBytes(&mutableTime, length: MemoryLayout<Float>.size, index: 0)
        // One 16x16 threadgroup per tile, rounded up to cover the full texture.
        let threadsPerGroup = MTLSize(width: 16, height: 16, depth: 1)
        let threadGroups = MTLSize(
            width: (texture.width + 15) / 16,
            height: (texture.height + 15) / 16,
            depth: 1
        )
        commandEncoder.dispatchThreadgroups(threadGroups, threadsPerThreadgroup: threadsPerGroup)
        commandEncoder.endEncoding()
        commandBuffer.commit()
        // Block until the GPU has written the frame; finishing the request with a
        // buffer the GPU is still writing to is a race.
        commandBuffer.waitUntilCompleted()
    }
    private func createTextureCache() -> CVMetalTextureCache? {
        // Note: creating a fresh cache per frame works, but caching a single
        // CVMetalTextureCache on the compositor would be cheaper.
        var cache: CVMetalTextureCache?
        CVMetalTextureCacheCreate(kCFAllocatorDefault, nil, metalDevice, nil, &cache)
        return cache
    }

    private func createCVMetalTexture(from pixelBuffer: CVPixelBuffer, cache: CVMetalTextureCache) -> CVMetalTexture? {
        var cvTexture: CVMetalTexture?
        let width = CVPixelBufferGetWidth(pixelBuffer)
        let height = CVPixelBufferGetHeight(pixelBuffer)
        CVMetalTextureCacheCreateTextureFromImage(
            kCFAllocatorDefault,
            cache,
            pixelBuffer,
            nil,
            .bgra8Unorm,
            width,
            height,
            0,
            &cvTexture
        )
        return cvTexture
    }
}
class GradientCompositionInstructionTest: NSObject, AVVideoCompositionInstructionProtocol {
    var timeRange: CMTimeRange
    var enablePostProcessing: Bool = true
    var containsTweening: Bool = true
    var requiredSourceTrackIDs: [NSValue]? = nil
    var passthroughTrackID: CMPersistentTrackID = kCMPersistentTrackID_Invalid

    init(timeRange: CMTimeRange) {
        self.timeRange = timeRange
    }
}
func createGradientVideoComposition(duration: CMTime, size: CGSize) -> AVMutableVideoComposition {
    // Note: this AVMutableComposition is currently unused; the player item still
    // needs an asset with a real duration for anything to play.
    let composition = AVMutableComposition()
    let instruction = GradientCompositionInstructionTest(timeRange: CMTimeRange(start: .zero, duration: duration))
    let videoComposition = AVMutableVideoComposition()
    videoComposition.customVideoCompositorClass = GradientVideoCompositorTest.self
    videoComposition.renderSize = size
    videoComposition.frameDuration = CMTime(value: 1, timescale: 30) // 30 fps
    videoComposition.instructions = [instruction]
    return videoComposition
}
#include <metal_stdlib>
using namespace metal;

kernel void gradientShader(texture2d<float, access::write> output [[texture(0)]],
                           constant float &time [[buffer(0)]],
                           uint2 id [[thread_position_in_grid]]) {
    // The dispatch is rounded up to whole threadgroups, so guard against
    // out-of-bounds threads.
    if (id.x >= output.get_width() || id.y >= output.get_height()) {
        return;
    }
    float2 uv = float2(id) / float2(output.get_width(), output.get_height());

    // Animated colors based on time
    float3 color1 = float3(sin(time) * 0.8 + 0.1, 0.6, 1.0);
    float3 color2 = float3(0.12, 0.99, cos(time) * 0.9 + 0.3);

    // Linear interpolation for gradient
    float3 gradientColor = mix(color1, color2, uv.y);
    output.write(float4(gradientColor, 1.0), id);
}
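For reference, here is a minimal sketch of how such a composition can be attached to an AVPlayer. The placeholder track is an assumption on my part: AVPlayerItem takes its duration from the asset, not from the video composition, so an empty composition gives the player nothing to play.

func makeGradientPlayer(duration: CMTime, size: CGSize) -> AVPlayer {
    let composition = AVMutableComposition()
    // Assumption: a blank track spanning the full range gives the item a duration.
    let track = composition.addMutableTrack(withMediaType: .video,
                                            preferredTrackID: kCMPersistentTrackID_Invalid)
    track?.insertEmptyTimeRange(CMTimeRange(start: .zero, duration: duration))

    let item = AVPlayerItem(asset: composition)
    item.videoComposition = createGradientVideoComposition(duration: duration, size: size)
    return AVPlayer(playerItem: item)
}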
We are experiencing an issue where our app gets stuck during launch. The splash screen appears for some time, and then the app either becomes unresponsive or closes unexpectedly. However, there are no crash logs captured in Xcode or Firebase Crashlytics, indicating that the app is not crashing but rather being terminated. This issue is preventing affected users from properly launching the app.
Additionally, some users have reported occasional lag and slow performance when using the app. The issue occurs only for a specific subset of users and appears to be related to other Electronic Logging Device (ELD) apps running in the background. When these apps are active, our app struggles to launch and sometimes becomes unresponsive.
We suspect that this behavior could be related to system resource allocation, such as high memory consumption by background apps, which might be affecting our app's ability to launch correctly. However, we have been unable to reproduce the issue on our end despite multiple attempts.
Actions Performed During App Launch:
Firebase configuration
API requests, including:
Fetching account details
Registering the FCM token with the server
Asynchronous background requests to fetch POI details
Creating a local database and storing POI data in local storage
We would like guidance from Apple regarding potential causes and debugging strategies, especially in scenarios where the app does not produce crash logs but still fails to launch properly. Any insights into memory management, conflicts with background applications, or system resource constraints would be highly appreciated.
Steps to Reproduce:
Install and launch the app on an affected device.
Observe that the app gets stuck on the launch screen.
After some time, the app terminates unexpectedly.
The issue is inconsistent and occurs only for certain users.
The presence of other ELD apps running in the background appears to influence the issue.
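For reference, a minimal sketch of the kind of signpost instrumentation we are considering wrapping around the launch actions listed above, to localize the hang in Instruments (all function names are placeholders, not our real code):

import os

let launchLog = OSSignposter(subsystem: "com.example.app", category: "launch")

func runLaunchSequence() async {
    let firebase = launchLog.beginInterval("configureFirebase")
    configureFirebase() // placeholder for our Firebase setup
    launchLog.endInterval("configureFirebase", firebase)

    let account = launchLog.beginInterval("fetchAccountDetails")
    await fetchAccountDetails() // placeholder for the account API request
    launchLog.endInterval("fetchAccountDetails", account)

    let poi = launchLog.beginInterval("loadPOIs")
    await fetchAndStorePOIs() // placeholder for the POI fetch + local DB write
    launchLog.endInterval("loadPOIs", poi)
}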
I’m running into an issue while implementing JournalingSuggestions with Xcode 16.2 and the iOS 18 simulator. My code builds and runs fine on a physical device, but it consistently crashes on the simulator and in Xcode previews. I’ve tried several workarounds, including weak linking and solutions from this post (746843), but I’m still hitting a wall.
Here’s what’s happening:
I get a warning: JournalingSuggestions is not available when building for iOS Simulator.
This is followed by a build error: Linker command failed with exit code 1 (use -v to see invocation).
What I’ve Tried:
Weak linking the JournalingSuggestions framework.
Applying fixes from this post (746843), such as cleaning the build folder and using a weak link.
Deleting Derived Data and rebuilding.
Testing on a physical device (works fine).
My Setup:
Xcode Version: 16.2
Simulator: iOS 18
Build Success: Works on physical device, fails on simulator/preview
Code:
#if canImport(JournalingSuggestions)
import JournalingSuggestions
#endif
...
var suggestionButton: some View {
    #if canImport(JournalingSuggestions)
    #if !targetEnvironment(simulator)
    if #available(iOS 17.2, *) {
        return AnyView(journalingSuggestionsPickerButton)
    }
    #endif
    return AnyView(disabledSuggestionButton)
    #else
    return AnyView(disabledSuggestionButton)
    #endif
}
Hey Everyone,
I can't seem to import ActiveLabel, as it says there is no such module. Please help me.
Thanks,
Ben
import UIKit
import ActiveLabel
protocol TweetCellDelegate: AnyObject {
    func handleProfileImageTapped(_ cell: TweetCell)
    func handleReplyTapped(_ cell: TweetCell)
    func handleLikeTapped(_ cell: TweetCell)
}
class TweetCell: UICollectionViewCell {
Hello! I'm currently porting a video game console emulator to iOS and I'm trying to make the renderer (tested on macOS) work on iOS as well.
The emulator core is written in C++ and uses metal-cpp for rendering, whereas the iOS frontend is written in Swift with SwiftUI. I have an Objective-C++ bridging header for bridging the Swift and C++ sides.
On the Swift side, I create an MTKView. Inside the MTKView delegate, I run the emulator for 1 video frame and pass it the view's backing layer for it to render the final output image with. The emulator runs and returns, but when it returns I get a crash in Swift land (callstack attached below), inside objc_release, which indicates I'm doing something wrong with memory management.
My bridging interface (ios_driver.h):
#pragma once
#include <Foundation/Foundation.h>
#include <QuartzCore/QuartzCore.h>
void iosCreateEmulator();
void iosRunFrame(CAMetalLayer* layer);
Bridge implementation (ios_driver.mm):
#import <Foundation/Foundation.h>
extern "C" {
#include "ios_driver.h"
}
<...>
#define IOS_EXPORT extern "C" __attribute__((visibility("default")))
std::unique_ptr<Emulator> emulator = nullptr;
IOS_EXPORT void iosCreateEmulator() { ... }
// Runs 1 video frame of the emulator and renders the output image
IOS_EXPORT void iosRunFrame(CAMetalLayer* layer) {
    void* layerBridged = (__bridge void*)layer;
    // Pass the CAMetalLayer to the emulator
    emulator->getRenderer()->setMTKLayer(layerBridged);
    // Runs the emulator for 1 frame and renders the output image using our layer
    emulator->runFrame();
}
My MTKView delegate:
class Renderer: NSObject, MTKViewDelegate {
    var parent: ContentView
    var device: MTLDevice!

    init(_ parent: ContentView) {
        self.parent = parent
        if let device = MTLCreateSystemDefaultDevice() {
            self.device = device
        }
        super.init()
    }

    func mtkView(_ view: MTKView, drawableSizeWillChange size: CGSize) {}

    func draw(in view: MTKView) {
        let metalLayer = view.layer as! CAMetalLayer
        // Run the emulator for 1 frame & display the output image
        iosRunFrame(metalLayer)
    }
}
Finally, the emulator's render function that interacts with the layer:
void RendererMTL::setMTKLayer(void* layer) {
    metalLayer = (CA::MetalLayer*)layer;
}

void RendererMTL::display() {
    CA::MetalDrawable* drawable = metalLayer->nextDrawable();
    if (!drawable) {
        return;
    }
    MTL::Texture* texture = drawable->texture();
    <rest of rendering follows here using the drawable & its texture>
}
This is the Swift callstack at the time of the crash:
To my understanding, I shouldn't be violating ARC rules, as my bridging header uses CAMetalLayer* instead of void*, and Swift automatically accounts for ARC when passing Core Foundation objects to Objective-C. However, I don't have any other ideas as to what might be causing this. I've been trying to debug this code for a couple of days without much success.
If you need more info, the emulator code is also on GitHub:
Metal renderer: https://github.com/wheremyfoodat/Panda3DS/blob/ios/src/core/renderer_mtl/renderer_mtl.cpp#L58-L68
Bridge implementation: https://github.com/wheremyfoodat/Panda3DS/blob/ios/src/ios_driver.mm
Bridging header: https://github.com/wheremyfoodat/Panda3DS/blob/ios/include/ios_driver.h
Any help is more than appreciated. Thank you for your time in advance.
Hello,
I am developing a visionOS application and am interested in obtaining detailed data about the user's hands through ARKit, including but not limited to the transform and rotation angle. I have reviewed Happy Beam, but it appears to only cover recognizing specific gestures.
Could you please advise on how to obtain the Transform and rotation angle of the user’s hand?
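For context, here is a rough sketch of the API surface I believe is involved (HandTrackingProvider from ARKit on visionOS; the exact usage here is my assumption):

import ARKit

func trackHands() async throws {
    let session = ARKitSession()
    let provider = HandTrackingProvider()
    try await session.run([provider])

    for await update in provider.anchorUpdates {
        let anchor = update.anchor
        // 4x4 pose of the hand anchor (wrist) in world space; the rotation is
        // the upper-left 3x3 of this matrix.
        let wristTransform = anchor.originFromAnchorTransform

        // Per-joint pose relative to the anchor, e.g. the index fingertip:
        if let skeleton = anchor.handSkeleton {
            let joint = skeleton.joint(.indexFingerTip)
            let worldPose = wristTransform * joint.anchorFromJointTransform
            _ = worldPose
        }
    }
}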
Thank you.
I am following the Apple sample code and trying to add a manual focus lens position slider:
@available(iOS 18.0, *)
private func addCameraControls() {
    if !self.session.controls.isEmpty {
        for control in self.session.controls {
            self.session.removeControl(control)
        }
    }
    self.cameraControlFocusSlider = nil

    // Focus slider
    if self.videoDevice!.isLockingFocusWithCustomLensPositionSupported {
        self.cameraControlFocusSlider = AVCaptureSlider("Focus", symbolName: "dot.square", in: 0.0...1.0)
        self.cameraControlFocusSlider!.setActionQueue(self.sessionQueue) { focusValue in
            // Do manual focus
        }
        if self.session.canAddControl(self.cameraControlFocusSlider!) {
            self.session.addControl(self.cameraControlFocusSlider!)
        }
    }
}
So there are these AVCaptureSessionControlsDelegate methods:
final func sessionControlsDidBecomeActive(_ session: AVCaptureSession) {
    print("sessionControlsDidBecomeActive")
}

final func sessionControlsWillEnterFullscreenAppearance(_ session: AVCaptureSession) {
    print("sessionControlsWillEnterFullscreenAppearance")
}

final func sessionControlsWillExitFullscreenAppearance(_ session: AVCaptureSession) {
    print("sessionControlsWillExitFullscreenAppearance")
}

final func sessionControlsDidBecomeInactive(_ session: AVCaptureSession) {
    print("sessionControlsDidBecomeInactive")
}
So when self.cameraControlFocusSlider is presented, I have to show the current value of the lens position. The lens position can change from autofocus and also from manual focus by the user via the app UI. Is there a way to tell whether self.cameraControlFocusSlider is active or being used?
Please note that I will have more than one AVCaptureSlider in the final code.
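In case it helps frame the question, this is the kind of syncing I have in mind; whether AVCaptureSlider's value can be driven externally like this is an assumption on my part:

// Mirror autofocus-driven lens position changes back into the slider via KVO.
// Assumptions: `value` on AVCaptureSlider is settable, and `focusObservation`
// is a property retained by self.
focusObservation = self.videoDevice!.observe(\.lensPosition, options: [.new]) { [weak self] _, change in
    guard let self, let position = change.newValue else { return }
    self.sessionQueue.async {
        self.cameraControlFocusSlider?.value = position
    }
}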
Hello,
I am getting an error message "Cannot convert value of type 'URLSessionDataTask' to expected argument type 'Data'" for the last line of this code. Please can you tell me what the problem is? Thank you
struct Item: Codable {
    var id: String
    var name: String
    var country: String
    var type: String
    var overallrecsit: String
    var dlastupd: String
    var doverallrecsit: String
}
let url = URL(string:"https://www.TEST_URL.com/api_ios.php")
let json = try? JSONDecoder().decode(Item.self, from: URLSession.shared.dataTask(with: url!))
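For comparison, a sketch of the usual structure: dataTask(with:) returns a URLSessionDataTask handle rather than the response Data, which is what the error is pointing at. This assumes an async context and that the endpoint returns a single Item; if it returns an array, decode [Item].self instead.

func loadItem() async throws -> Item {
    let url = URL(string: "https://www.TEST_URL.com/api_ios.php")!
    // data(from:) actually performs the request and yields the response bytes.
    let (data, _) = try await URLSession.shared.data(from: url)
    return try JSONDecoder().decode(Item.self, from: data)
}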
Hi everyone,
I’m working on implementing Live Activities in my app, and I’ve encountered an issue where the Live Activity updates work intermittently when the app is in the background. Sometimes they update correctly, but at other times, they don’t update at all, even though they should be running in the background.
However, when the app is brought to the foreground, the updates happen correctly.
A few things I’ve checked:
The app is using ActivityKit to update the Live Activity with Activity.update().
I’ve enabled the necessary background modes in the Capabilities section.
Is there a possibility that I’m hitting the system budget limit while experiencing this issue? If this is a limitation, how can I avoid it or manage this situation?
Has anyone else faced this issue? Any advice or potential solutions would be greatly appreciated!
Thank you!
This is on macOS, not iOS. Not sure if that should make a difference?
I have a GUI app and a command line tool (that will run a daemon) that I need to share credentials between. The keys/certs will be stored using the GUI app. But, both tools need to utilize them.
guard let accessControl = SecAccessControlCreateWithFlags(
    nil,
    kSecAttrAccessibleWhenPasscodeSetThisDeviceOnly,
    [.privateKeyUsage],
    nil
) else {
    throw KeychainCertError.keychainError(errSecAuthFailed, "Failed to create access control for private key")
}

// Define key pair attributes
let privateKeyAttributes: [String: Any] = [
    kSecAttrIsPermanent as String: true,
    kSecAttrApplicationTag as String: privateLabel.data(using: .utf8)!,
    kSecAttrLabel as String: privateLabel,
    // kSecAttrAccessControl as String: accessControl,
    kSecAttrAccessGroup as String: keychainAccessGroup
]
With the kSecAttrAccessControl line commented out, I am able to generate a private key and a self-signed certificate that is stored in the user's login keychain. If I uncomment that line, I get an error to the effect of: "Keychain error (-26275): Failed to generate key pair: A required entitlement isn't present."
Also, to share the credentials, don't they need to be somewhere other than the user's login keychain for the daemon to access them?
Any ideas what I am doing wrong? I think I'm a bit over my head here with the security, CryptoKit, and OpenSSL. 😁
Hi,
I'll start by saying that I'm new to developing apps for Apple devices, especially Apple Watch, so please have mercy.
I'm trying to create an app for Apple Watch (watchOS 10+), and I have a problem: my interface is made up of three buttons, one at the bottom of the screen and two smaller ones positioned at the top, each in its own corner (one at the top right and the other at the top left).
With the buttons positioned at the top of the screen, the top-right button is covered by the default system time, and I would like to move the time to the center, creating two side buttons with the time between them.
I'm also asking if there's a way to remove the time entirely, since it's not useful to me, but from reading some forums it seems that apps without it don't pass App Review, so I'm waiting for your advice.
The only app I've seen in circulation on the App Store that has a centered clock is Petey.
I am experiencing a crash when performing a batch delete and merging changes on a Core Data store that uses NSPersistentCloudKitContainer. The crash appears to be triggered when positive fractional Decimal values are stored in a TransactionSplit entity (those values are aggregated via a derived attribute in the AccountTransaction entity). If I store whole numbers or negative fractional decimals, deletion seems to work correctly. I suspect that the issue is related to the internal representation of positive fractional decimals in conjunction with a derived attribute.
Data Model Setup:
Account (1:N relationship → AccountTransaction)
AccountTransaction (1:N relationship → TransactionSplit), which contains a derived attribute (e.g., “splits.amount.@sum”) that computes the sum over the “amount” attribute on its related TransactionSplit objects.
TransactionSplit, which contains a stored Decimal attribute named “amount” (of type Decimal/NSDecimalNumber).
Steps to Reproduce:
Insert sample data where each TransactionSplit’s “amount” is set to a positive fractional value (e.g., 1000.01), by using code similar to:
func createSampleData() {
    // Execute all creation on the context's queue.
    let checkingAccount = Account(context: context)
    checkingAccount.id = UUID()
    checkingAccount.name = "Main Checking"

    let randomTransactionCount = 1000
    for _ in 0..<randomTransactionCount {
        let transaction = AccountTransaction(context: context)
        transaction.id = UUID()
        transaction.account = checkingAccount

        let randomValue = Double.random(in: 5...5000)
        let decimalValue = NSDecimalNumber(value: randomValue)

        let split1 = TransactionSplit(context: context)
        split1.id = UUID()
        split1.amount = decimalValue
        split1.transaction = transaction

        let split2 = TransactionSplit(context: context)
        split2.id = UUID()
        split2.amount = decimalValue
        split2.transaction = transaction
    }
    save()
}
The AccountTransaction’s derived attribute automatically aggregates the sum of its related TransactionSplit amounts.
Perform a batch deletion using NSBatchDeleteRequest (with resultType set to .resultTypeObjectIDs) on your entities and merge the changes back into your main context:
private func delete(_ fetchRequest: NSFetchRequest<NSFetchRequestResult>) {
    let batchDeleteRequest = NSBatchDeleteRequest(fetchRequest: fetchRequest)
    batchDeleteRequest.resultType = .resultTypeObjectIDs

    // ⚠️ When performing a batch delete we need to make sure we read the result back,
    // then merge all the changes from that result back into our live view context
    // so that the two stay in sync.
    if let delete = try? context.execute(batchDeleteRequest) as? NSBatchDeleteResult {
        let changes = [NSDeletedObjectsKey: delete.result as? [NSManagedObjectID] ?? []]
        NSManagedObjectContext.mergeChanges(fromRemoteContextSave: changes, into: [context])
    }
}
Save the context after deletion.
I have an App Clip that uses SKOverlay.AppClipConfiguration to install the full app. Before I added a Live Activity call (Activity.request), the user could see “Install,” then “Open.” Now, once “Get” is tapped, the Clip immediately closes—no “Open” button appears. If I remove the Live Activity code, it works again.
I’ve confirmed that parent/child entitlements match, and tested via TestFlight. Is there a known issue or recommended workaround for combining SKOverlay + Live Activities in an App Clip so it doesn’t dismiss prematurely? Any insights are appreciated!
Note: the Live Activity is used in the App Clip only.
I have a very simple CoreData model that has 1 entity and 2 attributes.
This code works fine:
.onChange(of: searchText) { _, text in
    evnts.nsPredicate = text.isEmpty ? nil : NSPredicate(format: "eventName CONTAINS %@", text)
}
but I'd like to also search with the same text string for my second attribute (which is a Date). I believe an OR is appropriate for two conditions (find either one). See attempted code below:
evnts.nsPredicate = text.isEmpty ? nil : NSPredicate(format: "(eventName CONTAINS %@) OR (dueDate CONTAINS %i) " , text )
This crashes immediately; using %@ in place of %i does the same. Is there a way to accomplish this?
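One direction I have been considering, as a sketch (assuming the search text can be parsed with a fixed date format, which is an assumption on my part): CONTAINS is a string operator, so the Date attribute needs a range comparison instead.

let formatter = DateFormatter()
formatter.dateFormat = "MM/dd/yyyy" // assumed input format

var subpredicates = [NSPredicate(format: "eventName CONTAINS[cd] %@", text)]
if let day = formatter.date(from: text) {
    // Match any dueDate falling on the parsed day.
    let start = Calendar.current.startOfDay(for: day)
    let end = Calendar.current.date(byAdding: .day, value: 1, to: start)!
    subpredicates.append(NSPredicate(format: "dueDate >= %@ AND dueDate < %@",
                                     start as NSDate, end as NSDate))
}
evnts.nsPredicate = text.isEmpty ? nil : NSCompoundPredicate(orPredicateWithSubpredicates: subpredicates)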
How is SwiftUI not an option below?
In my app I need access to the user's Desktop, and I would like to present the standard permission dialog at launch for the user to grant that access. I do not want to use NSOpenPanel() for the user to select the Desktop, as I don't think that is an elegant solution.
However, I am having trouble implementing this.
I use the following code to be granted access to the Desktop URL:
let accessGranted = desktopURL.startAccessingSecurityScopedResource()
However, no dialog box appears and the call returns false.
I have also included "Desktop Usage Description" in my plist.
Here is my code
@State var message: String = "Good Luck!"

var body: some View {
    VStack {
        Button("Get Desktop files") {
            accessDesktopWithPermission()
        }
        Text(message)
    }
    .padding()
}
//: –—–—–—–—–—–—–—–—–—–—–—–—–—–— ://
func accessDesktopWithPermission() {
    guard let desktopURL = getDesktopURL() else {
        return
    }
    let accessGranted = desktopURL.startAccessingSecurityScopedResource()
    if accessGranted {
        if let content = try? FileManager.default.contentsOfDirectory(at: desktopURL, includingPropertiesForKeys: nil) {
            message = "Found \(content.count) on Desktop"
        } else {
            message = "issue loading file from desktop"
        }
    } else {
        message = "Access denied to: \(desktopURL)"
    }
}
Obviously I have set something up incorrectly, so I have also attached my code if anyone is interested in taking a look.
https://www.openscreen.co/DesktopAccess.zip
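For completeness, this is the more direct variant I was expecting to trigger the prompt. My assumption: the system dialog only appears on an actual read, only when App Sandbox is off, and the Info.plist key involved is NSDesktopFolderUsageDescription; startAccessingSecurityScopedResource() applies only to security-scoped URLs from NSOpenPanel or bookmarks.

func readDesktopDirectly() {
    // Assumption: with App Sandbox off, the first real read of ~/Desktop shows
    // the permission dialog, using the NSDesktopFolderUsageDescription string.
    let desktop = FileManager.default.urls(for: .desktopDirectory, in: .userDomainMask)[0]
    do {
        let content = try FileManager.default.contentsOfDirectory(at: desktop, includingPropertiesForKeys: nil)
        message = "Found \(content.count) on Desktop"
    } catch {
        message = "Desktop access failed: \(error.localizedDescription)"
    }
}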
Hello everyone,
I've run into a peculiar behavior with UINavigationController's setViewControllers on iOS 18.2 (I guess it might be reproducible on older versions) when reordering view controllers, and I wonder if anyone can shed some light on this issue.
Initial State: The navigation stack is [A - B - C].
Without Animation: Setting [A - C - B] updates the stack to: A - C - B as expected.
With Animation: Using the same command with animation changes the stack to [A - B], oddly omitting C.
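A minimal repro of the observation above (assuming vcA/vcB/vcC are the controllers already on the stack):

let stack = navigationController.viewControllers   // [A, B, C]
let reordered = [stack[0], stack[2], stack[1]]     // [A, C, B]
navigationController.setViewControllers(reordered, animated: true)
// After the transition completes, viewControllers is [A, B]; C has been dropped.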
Has anyone else noticed similar behavior or knows why animations might disrupt the stack's update this way? I'd appreciate any insights or suggestions.
Thanks, Dmytro
I've been trying to use AVMIDIControlChangeEvent with a bankSelect message type to change the instrument the sequencer uses on an AVMusicTrack, with no luck.
I started with the Apple AVAEMixerSample, converting the initial setup/loading and portions dealing with the sequencer to Swift. I got that working and playing the "bluesyRiff" and then modified it to play individual notes. So my createAndSetupSequencer looked like
func createAndSetupSequencer() {
    sequencer = AVAudioSequencer(audioEngine: engine)
    // guard let midiFileURL = Bundle.main.url(forResource: "bluesyRiff", withExtension: "mid") else {
    //     print("failed guard trying to get URL for bluesyRiff")
    //     return
    // }
    let track = sequencer.createAndAppendTrack()
    var currTime = 1.0
    for i: UInt32 in 0...8 {
        let newNoteEvent = AVMIDINoteEvent(channel: 0, key: 60 + i, velocity: 64, duration: 2.0)
        track.addEvent(newNoteEvent, at: AVMusicTimeStamp(currTime))
        currTime += 2.0
    }
}
The notes played, so I then also replaced the gs_instruments sound bank with GeneralUser GS MuseScore v1.442, first by trying:
guard let soundBankURL = Bundle.main.url(forResource: "GeneralUser GS MuseScore v1.442", withExtension: "sf2") else {
    return
}
do {
    try sampler.loadSoundBankInstrument(at: soundBankURL, program: 0x001C, bankMSB: 0x79, bankLSB: 0x08)
} catch {
    ...
}
This appears to work: the instrument (8, "Funk Guitar") plays. If I change to bankLSB: 0x00 I get "Palm Muted Guitar". So I know that the soundfont has these instruments.
Stuff goes off the rails when I try to change the instruments in createAndSetupSequencer. Putting
let programChange = AVMIDIProgramChangeEvent(channel: 0, programNumber: 0x001C)
let bankChange = AVMIDIControlChangeEvent(channel: 0, messageType: AVMIDIControlChangeEvent.MessageType.bankSelect, value: 0x00)
track.addEvent(programChange, at: AVMusicTimeStamp(1.0))
track.addEvent(bankChange, at: AVMusicTimeStamp(1.0))
just before my add-note loop doesn't produce any change. Loading bankLSB 8 (Funk) in sampler.loadSoundBankInstrument and trying to change with bankSelect 0 (Palm Muted) in createAndSetupSequencer results in instrument 8 (Funk) playing, not Palm Muted.
Loading bankLSB 0 (Palm Muted) and trying to change with bankSelect 8 (Funk) doesn't work either; 0 (Palm Muted) plays.
I also tried sampler.loadInstrument(at: soundBankURL), and then I always get the first instrument in the sound font file (piano) no matter what values I put in my programChange/bankChange.
I've also changed the time in the track.addEvent calls to 0, 1.0, 3.0, etc., with no success.
The sampler.loadSoundBankInstrument call specifies two UInt8 parameters, bankMSB and bankLSB, while the AVMIDIControlChangeEvent bankSelect value is UInt32, suggesting it might be some combination of bankMSB and bankLSB. But the documentation makes no mention of what this should look like. I tried various combinations of 0x7908, 0x0879, etc. to no avail.
I will also point out that I am able to successfully execute other control change events.
For example, adding
if i == 1 {
    let portamentoOnEvent = AVMIDIControlChangeEvent(channel: 0, messageType: AVMIDIControlChangeEvent.MessageType.portamento, value: 0xFF)
    track.addEvent(portamentoOnEvent, at: AVMusicTimeStamp(currTime))
    let portamentoRateEvent = AVMIDIControlChangeEvent(channel: 0, messageType: AVMIDIControlChangeEvent.MessageType.portamentoTime, value: 64)
    track.addEvent(portamentoRateEvent, at: AVMusicTimeStamp(currTime))
}
does produce a change in the sound. (As an aside, a definition of what portamento time is, other than "the rate of portamento," would be welcome. Is it notes/second? Hz/minute? beats/hour?)
I was able to get the instrument to change in a different program using MusicPlayer and a series of MusicTrackNewMIDIChannelEvent calls on a track, but these operate on a MusicTrack, not the AVMusicTrack that the sequencer uses.
Has anyone been successful in switching instruments through an AVMIDIControlChangeEvent or have any feedback on how to do this?
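For comparison, here is the shape of the MusicTrack path that did work for me, which also shows the standard MIDI ordering (CC 0 = bank select MSB, CC 32 = bank select LSB, then the program change); the specific values are illustrative, matching the ones from my tests above:

import AudioToolbox

var bankMSB = MIDIChannelMessage(status: 0xB0, data1: 0, data2: 0x79, reserved: 0)  // CC 0: bank select MSB
var bankLSB = MIDIChannelMessage(status: 0xB0, data1: 32, data2: 0x08, reserved: 0) // CC 32: bank select LSB
var program = MIDIChannelMessage(status: 0xC0, data1: 0x1C, data2: 0, reserved: 0)  // program change
MusicTrackNewMIDIChannelEvent(musicTrack, 0.0, &bankMSB)
MusicTrackNewMIDIChannelEvent(musicTrack, 0.0, &bankLSB)
MusicTrackNewMIDIChannelEvent(musicTrack, 0.0, &program)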
Platform Specs:
Xcode 16.2
Swift 6.0.3
iOS 18.2 + iOS Simulator 18.3.1
Issue:
Refer to the following code:
struct CustomView: View {
    @Binding var prop: CustomStruct

    init(prop p: Binding<CustomStruct>) {
        _prop = p
    }

    init(isPreview: Bool) {
        let p = CustomStruct()
        _prop = .constant(p)
    }

    var body: some View {
        VStack {
            Text("hi")
        }
    }
}
#Preview {
    CustomView(isPreview: true)
        .preferredColorScheme(.dark)
}
The first constructor is for normal app functionality (and it previews and functions correctly when used with the rest of the app in the ContentView preview tab). The second constructor is for previewing only CustomView in its own preview tab. This constructor does not work when previewing in the same file, as shown above. It triggers an ambiguous crash, stating that the diagnostic log (which obviously provides no clear information) should be checked.
I have isolated the issue to be in the Binding reassignment in the second constructor. Replacing CustomStruct with anything but another struct, like an enum or primitive, fixes the issue.
Note: This bug only occurs when previewing (either through the #Preview macro or PreviewProvider struct).
Apple Pay processed a transaction, but the account had insufficient funds, and the transaction was later declined. Is this expected behavior from Apple Pay? Does Apple Pay throw an error if the account has insufficient funds (iOS, Swift)?
Does anyone know how to handle this scenario in Swift on iOS?
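For what it's worth, a sketch of where such a decline usually surfaces in code (the processor call is hypothetical): Apple Pay authorizes the card and returns a payment token; the balance check happens when the issuer processes the charge, so the app reports the failure in the authorization callback rather than receiving an Apple Pay error.

import PassKit

func paymentAuthorizationController(_ controller: PKPaymentAuthorizationController,
                                    didAuthorizePayment payment: PKPayment,
                                    handler completion: @escaping (PKPaymentAuthorizationResult) -> Void) {
    paymentProcessor.charge(token: payment.token) { outcome in // hypothetical backend call
        switch outcome {
        case .approved:
            completion(PKPaymentAuthorizationResult(status: .success, errors: nil))
        case .declined(let message): // e.g. insufficient funds reported by the issuer
            let error = NSError(domain: "com.example.payments", code: 1,
                                userInfo: [NSLocalizedDescriptionKey: message])
            completion(PKPaymentAuthorizationResult(status: .failure, errors: [error]))
        }
    }
}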
Hi everyone,
I am experiencing an issue where I am unable to sign in to my Apple ID within Xcode. Even after updating my password and ensuring that all my credentials are correct, I continue to receive an "Incorrect username or password" error. However, I can successfully log in to Apple Developer Portal, iCloud, and Apple ID settings using the same credentials.
Steps I've Taken to Resolve the Issue:
Updated Software
I have updated macOS Sequoia and Xcode to the latest versions.
I have also verified that my Apple Developer certificates are up to date.
Tried Resetting Authentication in macOS
I removed all related entries for Xcode, Apple ID, and Developer from Keychain Access.
Logged out of my Apple ID from System Settings and restarted my Mac.
Logged back in and retried signing in to Xcode.
Checked Authentication & Security Settings
I verified that two-factor authentication (2FA) is enabled.
Tried appending the verification code directly to the password when logging into Xcode.
Checked Xcode Developer Directory
Ran sudo xcode-select --switch /Applications/Xcode.app/Contents/Developer
Verified using xcode-select -p, which correctly pointed to the Xcode Developer directory.
Tried Resetting Developer Tools
Removed and reinstalled Command Line Tools (xcode-select --install).
Accepted the Xcode license agreement (sudo xcodebuild -license).
Reinstalled Xcode
Completely uninstalled Xcode using sudo rm -rf /Applications/Xcode.app and reinstalled it from the Mac App Store.
Problem Summary:
Xcode does not recognize my Apple ID credentials, despite them being correct.
I can successfully log in to Apple’s web services, but not to Xcode.
I have already attempted multiple fixes, including resetting keychain entries, reinstalling Xcode, and verifying system configurations.
I would appreciate your guidance on resolving this issue, as I need access to my Apple Developer account within Xcode to continue working on my app.
Thank you for your support.