Environment: Xcode 16.2
WidgetKit: Image(uiImage: UIImage(named: "jp_jump")!).resizable().scaledToFit().frame(width: 58, height: 16).padding(EdgeInsets(top: 0, leading: 16, bottom: 0, trailing: 0))
"jp_jump" is a local color image; loading it crashes the widget.
Crash info:
Thread 4: EXC_RESOURCE (RESOURCE_TYPE_MEMORY: high watermark memory limit exceeded) (limit=30 MB)
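A note on what is likely happening here: widget extensions run under a much tighter memory cap than apps, and UIImage(named:) decodes the full-size bitmap no matter how small the SwiftUI frame is. A minimal sketch of downsampling with ImageIO before display; the "png" extension and the usage line are assumptions:

import ImageIO
import UIKit

// Decode the asset near its displayed size (58x16 points) rather than at
// full pixel size, so the widget stays under its memory limit.
func downsampledImage(named name: String, pointSize: CGSize, scale: CGFloat) -> UIImage? {
    guard let url = Bundle.main.url(forResource: name, withExtension: "png"),
          let source = CGImageSourceCreateWithURL(url as CFURL, nil) else { return nil }
    let options: [CFString: Any] = [
        kCGImageSourceCreateThumbnailFromImageAlways: true,
        kCGImageSourceCreateThumbnailWithTransform: true,
        kCGImageSourceShouldCacheImmediately: true,
        kCGImageSourceThumbnailMaxPixelSize: max(pointSize.width, pointSize.height) * scale
    ]
    guard let cgImage = CGImageSourceCreateThumbnailAtIndex(source, 0, options as CFDictionary) else { return nil }
    return UIImage(cgImage: cgImage)
}

// Usage (hypothetical): Image(uiImage: downsampledImage(named: "jp_jump", pointSize: CGSize(width: 58, height: 16), scale: 3) ?? UIImage())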
                    
                  
                      I have a UIImageView as the background of a custom UIView subclass. The image itself does not contain any text. On top of this image view, I have added two UILabels.
To improve accessibility, I converted the entire view into a single accessibility element and set a proper accessibilityLabel. Additionally, I disabled accessibility for the UIImageView and the labels by setting isAccessibilityElement = false.
However, when VoiceOver Recognition's Text Recognition feature is enabled, VoiceOver still detects the text inside the UILabels and announces it at the end, after reading my custom accessibility label. This text should not be announced.
It seems that VoiceOver treats the UILabel content as part of the UIImageView. Additionally, when using the Explore Image rotor action, the entire subview is recognized as a single image.
Is this the expected behavior? If so, is there a way to disable VoiceOver’s text recognition for this view while keeping custom accessibility intact?
class BackgroundLabelView: UIView {
    private let backgroundImageView = UIImageView()
    private let backgroundImageView2 = UIImageView()
    private let titleLabel = UILabel()
    private let subtitleLabel = UILabel()
    override init(frame: CGRect) {
        super.init(frame: frame)
        setupView()
        configureAccessibility()
    }
    
    required init?(coder: NSCoder) {
        super.init(coder: coder)
        setupView()
        configureAccessibility()
    }
    
    private func configureAccessibility() {
        backgroundImageView.isAccessibilityElement = false
        backgroundImageView2.isAccessibilityElement = false
        titleLabel.isAccessibilityElement = false
        subtitleLabel.isAccessibilityElement = false
        isAccessibilityElement = true
        accessibilityTraits = .button
    }
    func configure(backgroundImage: UIImage?, title: String, subtitle: String) {
        backgroundImageView.image = backgroundImage
        titleLabel.text = title
        subtitleLabel.text = subtitle
        accessibilityLabel = "Holiday Offer ," + title + "," + subtitle
    }
    
    private func setupView() {
        backgroundImageView2.contentMode = .scaleAspectFill
        backgroundImageView2.clipsToBounds = true
        backgroundImageView2.translatesAutoresizingMaskIntoConstraints = false
        backgroundImageView2.image = UIImage(resource: .bannerfestival)
        addSubview(backgroundImageView2)
        backgroundImageView.contentMode = .scaleAspectFit
        backgroundImageView.clipsToBounds = true
        backgroundImageView.translatesAutoresizingMaskIntoConstraints = false
        addSubview(backgroundImageView)
        
        titleLabel.font = UIFont.systemFont(ofSize: 18, weight: .bold)
        titleLabel.textColor = .white
        titleLabel.translatesAutoresizingMaskIntoConstraints = false
        titleLabel.numberOfLines = 0
        addSubview(titleLabel)
        
        subtitleLabel.font = UIFont.systemFont(ofSize: 14, weight: .regular)
        subtitleLabel.textColor = .white.withAlphaComponent(0.8)
        subtitleLabel.translatesAutoresizingMaskIntoConstraints = false
        subtitleLabel.numberOfLines = 0
        addSubview(subtitleLabel)
        NSLayoutConstraint.activate([
            backgroundImageView2.leadingAnchor.constraint(equalTo: leadingAnchor),
            backgroundImageView2.trailingAnchor.constraint(equalTo: trailingAnchor),
            backgroundImageView2.heightAnchor.constraint(equalToConstant: 200),
            backgroundImageView.centerYAnchor.constraint(equalTo: centerYAnchor),
            backgroundImageView.topAnchor.constraint(equalTo: topAnchor),
            backgroundImageView.leadingAnchor.constraint(greaterThanOrEqualTo: leadingAnchor),
            backgroundImageView.trailingAnchor.constraint(equalTo: trailingAnchor),
            backgroundImageView.bottomAnchor.constraint(equalTo: bottomAnchor),
            
            titleLabel.leadingAnchor.constraint(equalTo: leadingAnchor, constant: 16),
            titleLabel.trailingAnchor.constraint(lessThanOrEqualTo: centerXAnchor),
            titleLabel.bottomAnchor.constraint(equalTo: centerYAnchor, constant: -4),
            
            subtitleLabel.leadingAnchor.constraint(equalTo: leadingAnchor, constant: 16),
            subtitleLabel.trailingAnchor.constraint(lessThanOrEqualTo: centerXAnchor),
            subtitleLabel.topAnchor.constraint(equalTo: centerYAnchor, constant: 4)
        ])
    }
    
    override func layoutSubviews() {
        super.layoutSubviews()
        backgroundImageView.layer.cornerRadius = layer.cornerRadius
    }
}
                    
                  
                
                    
Could you make Visual Intelligence available for the Action button on the iPhone 16e? It could be offered on any iPhone with an A18 or later generation Apple chip.
                    
                  
                
              
                
              
              
                
Topic: Accessibility & Inclusion
SubTopic: General
                    
I’m trying to set the accessibilityActivationPoint directly on a UITableViewCell so that VoiceOver activates a specific control inside the cell. However, this approach doesn’t seem to work.
Instead, when I override the accessibilityActivationPoint property inside the UITableViewCell subclass and return the desired point, it works as expected.
Why doesn’t setting accessibilityActivationPoint directly on the cell work, but overriding it inside the cell does? Is there a recommended approach for handling this scenario?
The following approach works:
override var accessibilityActivationPoint: CGPoint {
    get {
        return convert(toggleSwitch.center, to: nil)
    }
    set {
        super.accessibilityActivationPoint = newValue
    }
}
but setting the activation point directly does not work:
private func configureAccessibility() {
    isAccessibilityElement = true
    accessibilityLabel = titleLabel.text
    accessibilityTraits = .toggleButton
    accessibilityActivationPoint = self.convert(toggleSwitch.center, to: self)
    accessibilityValue = toggleSwitch.accessibilityValue
}
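One observation when comparing the two snippets (an assumption about the cause, not a confirmed answer): accessibilityActivationPoint is expected in screen coordinates, and the working override converts with to: nil on every read, while the non-working setter converts to the cell's own space (to: self) once, possibly before layout has finished. A sketch of keeping the stored property in screen coordinates and refreshing it after layout:

override func layoutSubviews() {
    super.layoutSubviews()
    // Screen coordinates (to: nil), recomputed on every layout pass.
    accessibilityActivationPoint = convert(toggleSwitch.center, to: nil)
}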
                    
                  
                
                    
                      After enabling Developer Mode on my iPhone and restarting it, the device asks me to press the Home button to confirm. Unfortunately, my Home button is broken, so I can’t access Developer Mode. The iPhone itself still works, but I can’t enable the mode. Is there any way to bypass this without the Home button?
                    
                  
                
              
                
              
              
                
Topic: Accessibility & Inclusion
SubTopic: General
                    
                      I have a parent view containing 10 subviews. To control the VoiceOver navigation order, I set only a few elements in accessibilityElements. However, the remaining elements are not being focused or are completely inaccessible.
Is this the expected behavior? If I only specify a subset of elements in accessibilityElements, does it exclude the rest? What’s the best way to ensure all elements remain accessible while customising the order?
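For what it's worth, this matches the documented contract: once accessibilityElements is non-nil, it replaces automatic discovery, so any element left out becomes invisible to assistive technologies. A sketch of ordering everything explicitly (the container and subview names are hypothetical):

import UIKit

final class OrderedContainerView: UIView {
    let heading = UILabel()
    let detail = UILabel()
    let action = UIButton(type: .system)

    override init(frame: CGRect) {
        super.init(frame: frame)
        [heading, detail, action].forEach(addSubview)
        // List *every* element, in the order VoiceOver should visit them;
        // anything omitted here is skipped entirely.
        accessibilityElements = [heading, action, detail]
    }

    required init?(coder: NSCoder) { fatalError("init(coder:) has not been implemented") }
}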
                    
                  
                
                    
                      Hello,
AVSpeechSynthesisVoice has an audioFileSettings attribute:
let utterance = AVSpeechUtterance(string: text)
utterance.voice = AVSpeechSynthesisVoice(identifier: voiceSelected!)
print("- voice \(utterance.voice!.audioFileSettings)")
["AVLinearPCMIsBigEndianKey": 0, "AVLinearPCMIsFloatKey": 1, "AVLinearPCMIsNonInterleaved": 1, "AVNumberOfChannelsKey": 1, "AVSampleRateKey": 22050, "AVFormatIDKey": 1819304813, "AVLinearPCMBitDepthKey": 32]
This is declared in:
open class AVSpeechSynthesisVoice {
    ...
    @available(iOS 13.0, *)
    open var audioFileSettings: [String : Any] { get }

    @available(iOS 17.0, *)
    open var voiceTraits: AVSpeechSynthesisVoice.Traits { get }
}
How can we specify the audioFileSettings attributes on an AVSpeechSynthesisProviderVoice?
There is no such field in AVSpeechSynthesisProviderVoice:
open class AVSpeechSynthesisProviderVoice {
    open var name: String { get }
    open var identifier: String { get }
    open var primaryLanguages: [String] { get }
    open var supportedLanguages: [String] { get }
    open var voiceSize: Int64
    open var version: String
    open var gender: AVSpeechSynthesisVoiceGender
    open var age: Int
}
Regards
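Not an authoritative answer, but a possible direction (an assumption based on how provider extensions render audio): the output format is described by the extension's audio unit rather than by the provider voice, so the equivalent of audioFileSettings would live on the unit's output bus. A sketch:

import AVFAudio

final class MyProviderAudioUnit: AVSpeechSynthesisProviderAudioUnit {
    private var _outputBusses: AUAudioUnitBusArray!

    override init(componentDescription: AudioComponentDescription,
                  options: AudioComponentInstantiationOptions = []) throws {
        try super.init(componentDescription: componentDescription, options: options)
        // Mono, 22,050 Hz, non-interleaved Float32 PCM, matching the
        // audioFileSettings dictionary printed above.
        let format = AVAudioFormat(commonFormat: .pcmFormatFloat32,
                                   sampleRate: 22_050,
                                   channels: 1,
                                   interleaved: false)!
        _outputBusses = AUAudioUnitBusArray(audioUnit: self,
                                            busType: .output,
                                            busses: [try AUAudioUnitBus(format: format)])
    }

    override var outputBusses: AUAudioUnitBusArray { _outputBusses }
}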
                    
                  
                
              
                
              
              
                
Topic: Accessibility & Inclusion
SubTopic: General
                    
                      SwiftUI provides the accessibilityCustomContent(_:_:) modifier to add additional accessibility information for an element. However, I couldn’t find a similar approach in UIKit.
Is there a way to achieve this in UIKit?
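For reference, UIKit gained an equivalent in iOS 14: conform to the Accessibility framework's AXCustomContentProvider protocol. A minimal sketch (the cell and its content are hypothetical):

import UIKit
import Accessibility

final class RecipeCell: UITableViewCell, AXCustomContentProvider {
    var accessibilityCustomContent: [AXCustomContent]! {
        get {
            let servings = AXCustomContent(label: "Servings", value: "4")
            servings.importance = .high   // spoken immediately, not only via the rotor
            return [servings]
        }
        set { }
    }
}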
                    
                  
                
                    
My team is designing an app for retail associates who need to share managed iPads. We keep the app in Guided Access mode on our login app until an auth token is obtained; then the iPad is opened for general use. Upon signout we need to re-enter Guided Access mode, and we can do this easily for a manual signout. But for idle signout, i.e., after 60 minutes of inactivity, we need to be able to make a call from the background (even in a locked state), sign out the user, clear the passcode, and enter Single App Mode before restarting, so that once the device restarts the app is locked again until the next user provides credentials that can obtain a new auth token.
We are struggling to see if this is even possible. Our bosses will be displeased if we tell them it isn't. So anybody with any tips would be very appreciated.
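One building block that may help, with no claim about the locked-screen/background part, which is the hard bit: on a supervised device whose MDM profile allows this app to use autonomous single app mode, the app can re-enter and exit Single App Mode itself. A sketch:

import UIKit

func lockIntoSingleAppMode(_ enabled: Bool) {
    // Requires a supervised device and an MDM configuration that permits
    // autonomous single app mode for this app; otherwise success is false.
    UIAccessibility.requestGuidedAccessSession(enabled: enabled) { success in
        print("Single App Mode \(enabled ? "entered" : "exited"): \(success)")
    }
}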
                    
                  
                
                    
This issue keeps coming back. I restarted my laptop and I have enough storage; I don't know why it keeps happening!
                    
                  
                
              
                
              
              
                
Topic: Accessibility & Inclusion
SubTopic: General
                    
Hi! I have noticed a few glitches, as well as some overall unfortunate cons, with Assistive Access mode.
Alarms, timers, stopwatch, etc. do not sound or alert. However, an infant monitor app I use does play its sound alert, so I know it is possible. Do I need to download a separate alarm app for alerts to work?
Cannot make FaceTime calls to favorite contacts.
Find My iPhone cannot jump to the Maps app.
Camera cannot zoom in or out.
Photos cannot be deleted, edited, or shared in a shared album in the Photos app.
Photos/videos cannot be sent in Messages.
Spotify cannot be accessed from the lock screen.
Apps do not stay open if you lock the phone screen or leave it untouched for too long (auto-lock).
There is no flashlight option. I downloaded an app for this, but the screen auto-locks when untouched, which shuts off the flashlight until I unlock the phone again.
                    
                  
                
                    
I watched videos and blog posts and downloaded their sample projects, and there Core Spotlight works as expected.
I copied the code into an empty project and did the same as they did, but it still is not working.
OS: macOS and iOS
On the Core Data object I set an attribute to be indexed for Spotlight, and on the object itself I put the attribute name into "Display Name" for Spotlight.
struct PersistenceController {
    static let shared = PersistenceController()
    
    var spotlightDelegate: NSCoreDataCoreSpotlightDelegate?
    @MainActor
    static let preview: PersistenceController = {
        let result = PersistenceController(inMemory: true)
        let viewContext = result.container.viewContext
        for _ in 0..<10 {
            let newItem = Item(context: viewContext)
            newItem.timestamp = Date()
        }
        do {
            try viewContext.save()
        } catch {
            let nsError = error as NSError
            fatalError("Unresolved error \(nsError), \(nsError.userInfo)")
        }
        return result
    }()
    let container: NSPersistentContainer
    init(inMemory: Bool = false) {
        container = NSPersistentContainer(name: "SpotLightSearchTest")
        if inMemory {
            container.persistentStoreDescriptions.first!.url = URL(fileURLWithPath: "/dev/null")
        }
        container.loadPersistentStores(completionHandler: { [weak self] (storeDescription, error) in
            if let error = error as NSError? {
                fatalError("Unresolved error \(error), \(error.userInfo)")
            }
            if let description = self?.container.persistentStoreDescriptions.first {
                description.setOption(true as NSNumber, forKey: NSPersistentHistoryTrackingKey)
                description.type = NSSQLiteStoreType
                if let coordinator = self?.container.persistentStoreCoordinator {
                    self?.spotlightDelegate = NSCoreDataCoreSpotlightDelegate(
                        forStoreWith: description,
                        coordinator: coordinator
                    )
                    self?.spotlightDelegate?.startSpotlightIndexing()
                }
            }
        })
        container.viewContext.automaticallyMergesChangesFromParent = true
    }
}
in my @main view
struct SpotLightSearchTestApp: App {
    let persistenceController = PersistenceController.shared
    var body: some Scene {
        WindowGroup {
            ContentView()
                .environment(\.managedObjectContext, persistenceController.container.viewContext)
                .onContinueUserActivity(CSSearchableItemActionType) {_ in 
                    print("")
                }
        }
    }
}
onContinueUserActivity(CSSearchableItemActionType) {_ in
print("")
}
never gets triggered. So what am I missing that they don't explain in the blog post or videos?
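One thing that stands out in the snippet above (an observation, not a confirmed diagnosis): the description options and the Spotlight delegate are set up inside the loadPersistentStores completion handler, i.e. after the store has already been loaded, whereas the sample projects configure the description first. A sketch of that ordering:

import CoreData

let container = NSPersistentContainer(name: "SpotLightSearchTest")
let description = container.persistentStoreDescriptions.first!
// History tracking must be in place *before* the store loads.
description.setOption(true as NSNumber, forKey: NSPersistentHistoryTrackingKey)
let spotlightDelegate = NSCoreDataCoreSpotlightDelegate(
    forStoreWith: description,
    coordinator: container.persistentStoreCoordinator
)
container.loadPersistentStores { _, error in
    if let error = error { fatalError("Unresolved error \(error)") }
}
spotlightDelegate.startSpotlightIndexing()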
                    
                  
                
                    
I use AttributedString to create a string containing a link, and I set the AttributedString on a UILabel. How should I set up accessibility so that:
I can move keyboard focus to the linked substring and open the link from the keyboard, and
VoiceOver can read the whole string and also activate the linked substring to open the link?
Thanks a lot.
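Not a definitive pattern, but one common workaround, since UILabel does not make embedded links keyboard-focusable or individually activatable: render the string in a non-editable, non-scrolling UITextView, which exposes links to Full Keyboard Access and to the VoiceOver Links rotor. A sketch (the substring and URL are hypothetical):

import UIKit

let textView = UITextView()
textView.isEditable = false
textView.isScrollEnabled = false
textView.isSelectable = true   // link interaction requires selectability

var string = AttributedString("Read the accessibility guide for details.")
if let range = string.range(of: "accessibility guide") {
    string[range].link = URL(string: "https://developer.apple.com/accessibility/")
}
textView.attributedText = NSAttributedString(string)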
                    
                  
                
              
                
              
              
                
Topic: Accessibility & Inclusion
SubTopic: General
                    
In SwiftUI, the date picker component fails colour-contrast accessibility checks. The code below was used to create the date picker:
struct ContentView: View {
    @State private var date = Date()
    @State private var selectedDate: Date = .init()

    var body: some View {
        let min = Calendar.current.date(byAdding: .day, value: 14, to: Date()) ?? Date()
        let max = Calendar.current.date(byAdding: .year, value: 4, to: Date()) ?? Date()

        DatePicker(
            "Start Date",
            selection: $date,
            in: min ... max,
            displayedComponents: [.date]
        )
        .datePickerStyle(.graphical)
        .frame(alignment: .topLeading)
        .onAppear {
            selectedDate = Calendar.current.date(byAdding: .day, value: 14, to: Date()) ?? Date()
        }
    }
}

#Preview {
    ContentView()
}
Attaching a screenshot of the accessibility failure.
                    
                  
                
                    
Hope it's okay to post here - I haven't gotten resolution anywhere else. Apple's iOS Live Captions is supposed to turn speech into written text, either speech on the phone itself (works like a charm!) or speech picked up by the microphone (think meeting in a conference room). The microphone mode doesn't work anywhere, anytime on a new iPhone 14 purchased in November 2024. Anyone out there want to fix this and help a lot of people who have trouble hearing? I'm part of an entire generation that didn't know we were supposed to protect our hearing at concerts and clubs and, worse, thought it was cool to snag a spot by the speakers...
                    
                  
                
                    
                      I’m trying to add the .header accessibility trait to a UISegmentedControl so that VoiceOver recognizes it accordingly. However, setting the trait using the following code doesn’t seem to have any effect:
segmentControl.accessibilityTraits = segmentControl.accessibilityTraits.union(.header)
Even after applying this, VoiceOver doesn’t announce it as a header. Is there any workaround or recommended approach to achieve this?
                    
                  
                
                    
Hello,
When I listen to a title in my app with VoiceOver, it makes a strange sound.
The title mixes Korean characters, numbers, and alphabet letters.
Does this combination make VoiceOver produce the strange sound?
I would like to ask if Apple can fix this issue.
Thank you.
                    
                  
                
              
                
              
              
                
Topic: Accessibility & Inclusion
SubTopic: General
                    
                      I have implemented a SwiftUI view containing a grid of TextField elements, where focus moves automatically to the next field upon input. This behavior works well on iOS 16 and 17, maintaining proper focus highlighting when keyboard full access is enabled.
However, in iOS 18 and above, the Full Keyboard Access focus behaves differently: it always lags behind the actual focus state, causing a mismatch between the visually highlighted field and the active text input. This leads to usability issues, especially for users navigating with an external keyboard.
Below is the SwiftUI code for reference:
struct AutoFocusGridTextFieldsView: View {
    private let fieldCount: Int
    private let columns: Int
    @State private var textFields: [String]
    @FocusState private var focusedField: Int?
    init(fieldCount: Int = 17, columns: Int = 5) {
        self.fieldCount = fieldCount
        self.columns = columns
        _textFields = State(initialValue: Array(repeating: "", count: fieldCount))
    }
    var body: some View {
        let rows = (fieldCount / columns) + (fieldCount % columns == 0 ? 0 : 1)
        VStack(spacing: 10) {
            ForEach(0..<rows, id: \.self) { row in
                HStack(spacing: 10) {
                    ForEach(0..<columns, id: \.self) { col in
                        let index = row * columns + col
                        if index < fieldCount {
                            TextField("", text: $textFields[index])
                                .frame(width: 40, height: 40)
                                .multilineTextAlignment(.center)
                                .textFieldStyle(RoundedBorderTextFieldStyle())
                                .focused($focusedField, equals: index)
                                .onChange(of: textFields[index]) { newValue in
                                    if newValue.count > 1 {
                                        textFields[index] = String(newValue.prefix(1))
                                    }
                                    if !textFields[index].isEmpty {
                                        moveToNextField(from: index)
                                    }
                                }
                        }
                    }
                }
            }
        }
        .padding()
        .onAppear {
            focusedField = 0
        }
    }
    private func moveToNextField(from index: Int) {
        if index + 1 < fieldCount {
            focusedField = index + 1
        }
    }
}
struct AutoFocusGridTextFieldsView_Previews: PreviewProvider {
    static var previews: some View {
        AutoFocusGridTextFieldsView(fieldCount: 10, columns: 5)
    }
}
Has anyone else encountered this issue with FocusState on iOS 18?
I really do believe this is a bug strictly connected to keyboard navigation, since I experienced a similar problem with the UIKit equivalent of this view.
Any insights or suggestions would be greatly appreciated!
                    
                  
                
                    
                      In VoiceOver, when using Group Navigation style, the cursor first focuses on the semantic group. To navigate inside the group, a two-finger swipe (left or right) can be used. This behavior works for default containers like the Navigation Bar, Tab Bar, and Tool Bar.
How can I achieve the same behavior for a custom view?
I tried setting accessibilityContainerType = .semanticGroup, but it only works for Mac Catalyst. Is there an equivalent approach for iOS?
                    
                  
                
                    
                      VoiceOver reads out all visible content on the screen, which is essential for visually challenged users. However, this raises a privacy concern—what if a user accidentally focuses on sensitive information, like a bank account password, and it gets read aloud?
How can developers prevent VoiceOver from exposing confidential data while still maintaining accessibility? Are there best practices or recommended approaches to handle such scenarios effectively?
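Two practices cover most of this (general guidance, not tied to any one app): never mirror secret values into accessibilityLabel or accessibilityValue, and use secure text entry for password fields so VoiceOver announces bullets rather than the characters. A sketch:

import UIKit

let passwordField = UITextField()
passwordField.isSecureTextEntry = true            // VoiceOver speaks bullets, not the text
passwordField.textContentType = .password
passwordField.accessibilityLabel = "Account password"

// For sensitive display-only values, mask on screen and in accessibility alike.
let balanceLabel = UILabel()
balanceLabel.text = "••••••"
balanceLabel.accessibilityLabel = "Balance hidden"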