How to live stream a UDP broadcast with ffmpeg

First of all, I tried MobileVLCKit, but there is too much delay.

Then I wrote a UDPManager class; my code is below. I would be very happy if anyone has information and can point me in the right direction.

Broadcast code

ffmpeg -f avfoundation -video_size 1280x720 -framerate 30 -i "0" -c:v libx264 -preset medium -tune zerolatency -f mpegts "udp://127.0.0.1:6000?pkt_size=1316"

Live view code (almost 0 delay)

ffplay -fflags nobuffer -flags low_delay -probesize 32 -analyzeduration 1 -strict experimental -framedrop -f mpegts -vf setpts=0 udp://127.0.0.1:6000

OR

mpv udp://127.0.0.1:6000 --no-cache --untimed --no-demuxer-thread --vd-lavc-threads=1

UDPManager

import Foundation
import AVFoundation
import CoreMedia
import VideoDecoder
import SwiftUI
import Network
import Combine
import CocoaAsyncSocket
import VideoToolbox

class UDPManager: NSObject, ObservableObject, GCDAsyncUdpSocketDelegate {
    private let host: String
    private let port: UInt16
    private var socket: GCDAsyncUdpSocket?
    @Published var videoOutput: CMSampleBuffer?
    
    init(host: String, port: UInt16) {
        self.host = host
        self.port = port
    }
    
    func connectUDP() {
        do {
            socket = GCDAsyncUdpSocket(delegate: self, delegateQueue: .global())
            //try socket?.connect(toHost: host, onPort: port)
            try socket?.bind(toPort: port)
            try socket?.enableBroadcast(true)
            try socket?.enableReusePort(true)
            try socket?.beginReceiving()
            
        } catch {
            print("UDP soketi oluşturma hatası: \(error)")
        }
    }
    
    func closeUDP() {
        socket?.close()
    }
    
    func udpSocket(_ sock: GCDAsyncUdpSocket, didConnectToAddress address: Data) {
        print("UDP Bağlandı.")
    }
    
    func udpSocket(_ sock: GCDAsyncUdpSocket, didNotConnect error: Error?) {
        print("UDP soketi bağlantı hatası: \(error?.localizedDescription ?? "Bilinmeyen hata")")
    }
    
    func udpSocket(_ sock: GCDAsyncUdpSocket, didReceive data: Data, fromAddress address: Data, withFilterContext filterContext: Any?) {
        if !data.isEmpty {
            DispatchQueue.main.async {
                self.videoOutput = self.createSampleBuffer(from: data)
            }
        }
    }

    func createSampleBuffer(from data: Data) -> CMSampleBuffer? {
        // Copy the datagram into a CMBlockBuffer that owns its own storage.
        // (Wrapping the NSData pointer with kCFAllocatorNull would leave the
        // block buffer pointing at memory that may be freed underneath it
        // once the delegate's data is released.)
        var blockBuffer: CMBlockBuffer?
        var status = CMBlockBufferCreateWithMemoryBlock(
            allocator: kCFAllocatorDefault,
            memoryBlock: nil,
            blockLength: data.count,
            blockAllocator: kCFAllocatorDefault,
            customBlockSource: nil,
            offsetToData: 0,
            dataLength: data.count,
            flags: kCMBlockBufferAssureMemoryNowFlag,
            blockBufferOut: &blockBuffer)

        guard status == noErr, let buffer = blockBuffer else {
            return nil
        }

        status = data.withUnsafeBytes { bytes in
            CMBlockBufferReplaceDataBytes(
                with: bytes.baseAddress!,
                blockBuffer: buffer,
                offsetIntoDestination: 0,
                dataLength: data.count)
        }

        if status != noErr {
            return nil
        }

        // Note: this sample buffer carries no format description and no
        // timing info, so a display layer has no way to decode or schedule
        // it — the payload here is a raw MPEG-TS packet, not a video frame.
        var sampleBuffer: CMSampleBuffer?
        let sampleSizeArray = [data.count]
        status = CMSampleBufferCreateReady(
            allocator: kCFAllocatorDefault,
            dataBuffer: buffer,
            formatDescription: nil,
            sampleCount: 1,
            sampleTimingEntryCount: 0,
            sampleTimingArray: nil,
            sampleSizeEntryCount: 1,
            sampleSizeArray: sampleSizeArray,
            sampleBufferOut: &sampleBuffer)

        if status != noErr {
            return nil
        }

        return sampleBuffer
    }
}

I didn't know how to convert the Data object to video, so I searched, found the createSampleBuffer(from:) code above, and wanted to try it.

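For what it's worth, AVSampleBufferDisplayLayer can only render a sample buffer that carries a video format description and decodable elementary-stream data, and the buffer built above has neither: the UDP payload at this point is still raw MPEG-TS packets, not frames. As a rough sketch of the missing piece, this is roughly what creating a format description might look like once the H.264 SPS/PPS parameter sets have been demuxed out of the stream (the sps/pps inputs and makeFormatDescription are hypothetical, not part of the project above):

import CoreMedia

// Hypothetical helper: builds a CMVideoFormatDescription from H.264
// SPS/PPS NAL units. The UDP datagrams above are raw MPEG-TS packets,
// so the parameter sets would first have to be demuxed out of the stream.
func makeFormatDescription(sps: Data, pps: Data) -> CMVideoFormatDescription? {
    var formatDescription: CMVideoFormatDescription?
    let status: OSStatus = sps.withUnsafeBytes { spsRaw in
        pps.withUnsafeBytes { ppsRaw in
            let parameterSets: [UnsafePointer<UInt8>] = [
                spsRaw.bindMemory(to: UInt8.self).baseAddress!,
                ppsRaw.bindMemory(to: UInt8.self).baseAddress!
            ]
            let parameterSetSizes = [sps.count, pps.count]
            return CMVideoFormatDescriptionCreateFromH264ParameterSets(
                allocator: kCFAllocatorDefault,
                parameterSetCount: parameterSets.count,
                parameterSetPointers: parameterSets,
                parameterSetSizes: parameterSetSizes,
                nalUnitHeaderLength: 4, // assumes 4-byte AVCC length prefixes
                formatDescriptionOut: &formatDescription)
        }
    }
    return status == noErr ? formatDescription : nil
}

Without a format description and timing information attached to each sample buffer, the display layer has nothing it can decode, which is consistent with the white screen described below.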
Then I tried to display the CMSampleBuffer in a player view, but it just shows a white screen and doesn't work.

struct SampleBufferPlayerView: UIViewRepresentable {
    typealias UIViewType = UIView
    var sampleBuffer: CMSampleBuffer

    func makeUIView(context: Context) -> UIView {
        let view = UIView(frame: .zero)
        let displayLayer = AVSampleBufferDisplayLayer()
        displayLayer.videoGravity = .resizeAspectFill
        view.layer.addSublayer(displayLayer)
        context.coordinator.displayLayer = displayLayer
        return view
    }

    func updateUIView(_ uiView: UIView, context: Context) {
        // Size the layer to the view; a sublayer left at zero size
        // renders nothing.
        context.coordinator.displayLayer?.frame = uiView.bounds
        context.coordinator.sampleBuffer = sampleBuffer
        context.coordinator.updateSampleBuffer()
    }

    func makeCoordinator() -> Coordinator {
        Coordinator()
    }

    class Coordinator {
        var displayLayer: AVSampleBufferDisplayLayer?
        var sampleBuffer: CMSampleBuffer?

        func updateSampleBuffer() {
            guard let displayLayer = displayLayer, let sampleBuffer = sampleBuffer else { return }
            if displayLayer.isReadyForMoreMediaData {
                displayLayer.enqueue(sampleBuffer)
            } else {
                displayLayer.requestMediaDataWhenReady(on: .main) { [weak displayLayer] in
                    guard let displayLayer = displayLayer, displayLayer.isReadyForMoreMediaData else { return }
                    displayLayer.enqueue(sampleBuffer)
                    // Stop asking once the pending buffer has been enqueued.
                    displayLayer.stopRequestingMediaData()
                }
            }
        }
    }
}

Then I tried to use it in a view, but I couldn't get it working. Can anyone help me?

struct ContentView: View {
    // udp://@127.0.0.1:6000
    // @StateObject (rather than @ObservedObject) so the manager isn't
    // recreated every time the view updates.
    @StateObject var udpManager = UDPManager(host: "127.0.0.1", port: 6000)

    var body: some View {
        VStack {
            if let buffer = udpManager.videoOutput {
                SampleBufferPlayerView(sampleBuffer: buffer)
                    .frame(width: 300, height: 200)
            }
        }
        .onAppear {
            udpManager.connectUDP()
        }
    }
}

When tackling problems like this I recommend that you separate your networking code from your video code. In this case that means:

  • Isolate your networking code from your video code.

  • Then test that the networking code is able to receive the UDP flow being sent by your tool.

  • And test your video code by simulating the incoming datagrams by reading them from a file (see the sketch after this list).

That’ll tell you whether you have a networking problem or a video problem. You can then post a focused question with appropriate tags. Right now your question covers both networking and video, and very few folks are experts in both.
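For that last step, a small replay harness is enough. Here is a minimal sketch, assuming each captured datagram was written to a file as a 4-byte big-endian length prefix followed by its payload (the capture format and the handleDatagram callback are assumptions, not an existing API):

import Foundation

// Hypothetical replay harness: reads length-prefixed datagram records
// from a capture file and feeds them to the same handler the UDP socket
// delegate would normally call — no network involved.
func replayDatagrams(from url: URL, to handleDatagram: (Data) -> Void) throws {
    let blob = try Data(contentsOf: url)
    var offset = 0
    while offset + 4 <= blob.count {
        // 4-byte big-endian length prefix, then the datagram payload.
        let length = blob[offset..<offset + 4].reduce(0) { ($0 << 8) | Int($1) }
        offset += 4
        guard length > 0, offset + length <= blob.count else { break }
        handleDatagram(blob.subdata(in: offset..<offset + length))
        offset += length
    }
}

Driving createSampleBuffer(from:) through this function exercises the video path with no network involved.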

PS: If you're dealing with UDP flows, it's best to use Network framework rather than a third-party wrapper around BSD Sockets. TN3151 Choosing the right networking API has more details about this.
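For this use case, a minimal Network framework receiver might look like the following sketch (error and state handling omitted; handleDatagram is a hypothetical callback):

import Network

// Minimal UDP receiver built on Network framework: listens on a port
// and hands each incoming datagram to a callback.
final class UDPReceiver {
    private let listener: NWListener
    var handleDatagram: ((Data) -> Void)?

    init(port: NWEndpoint.Port) throws {
        listener = try NWListener(using: .udp, on: port)
        listener.newConnectionHandler = { [weak self] connection in
            connection.start(queue: .main)
            self?.receive(on: connection)
        }
    }

    func start() {
        listener.start(queue: .main)
    }

    private func receive(on connection: NWConnection) {
        connection.receiveMessage { [weak self] data, _, _, error in
            if let data = data, !data.isEmpty {
                self?.handleDatagram?(data)
            }
            // Keep reading datagrams until the connection fails.
            if error == nil {
                self?.receive(on: connection)
            }
        }
    }
}

// Usage: let receiver = try UDPReceiver(port: 6000)
//        receiver.handleDatagram = { data in /* feed the video path */ }
//        receiver.start()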

Share and Enjoy

Quinn “The Eskimo!” @ Developer Technical Support @ Apple
let myEmail = "eskimo" + "1" + "@" + "apple.com"

@OVRIDOO Did you find a solution? I have the same issue.

Same issue here with the delay using MobileVLCKit. Have you found a solution @OVRIDOO? Thank you :)
