diff --git a/Examples/iOS/InfoGuideView.swift b/Examples/iOS/InfoGuideView.swift index cd7b5db77..d44146c03 100644 --- a/Examples/iOS/InfoGuideView.swift +++ b/Examples/iOS/InfoGuideView.swift @@ -53,6 +53,7 @@ private struct PreferenceGuideList: View { Section("Video Settings") { GuideRow(title: "HDR Video", description: "Captures wider color/brightness range. Requires HDR-capable camera.") GuideRow(title: "Low Latency", description: "Reduces stream delay to ~2-3 seconds. May slightly reduce quality.") + GuideRow(title: "Resolution", description: "Choose 720p for performance or 1080p for higher detail.") GuideRow(title: "BitRate Mode", description: "Average: Consistent file size\nConstant: Stable quality\nVariable: Best quality") } Section("Capture Settings") { @@ -75,8 +76,8 @@ private struct PublishGuideList: View { description: "Frames per second. 15 saves battery, 30 is standard, 60 is ultra-smooth.") GuideRowWithIcon(icon: "slider.horizontal.3", title: "Bitrate (kbps)", description: "Video quality. Higher = better but more data. 1500-2500 recommended.") - GuideRowWithIcon(icon: "rectangle.badge.checkmark", title: "720p", - description: "Video resolution (1280×720). Good balance of quality and performance.") + GuideRowWithIcon(icon: "rectangle.badge.checkmark", title: "720p / 1080p", + description: "Video resolution. 
720p = lighter load, 1080p = sharper image but needs more CPU/network.") } Section("Controls") { GuideRowWithIcon(icon: "record.circle", title: "Record", diff --git a/Examples/iOS/PreferenceView.swift b/Examples/iOS/PreferenceView.swift index 377495a25..f804345b6 100644 --- a/Examples/iOS/PreferenceView.swift +++ b/Examples/iOS/PreferenceView.swift @@ -64,6 +64,11 @@ struct PreferenceView: View { Toggle(isOn: $model.isLowLatencyRateControlEnabled) { Text("Low Latency Mode") } + Picker("Resolution", selection: $model.videoResolution) { + ForEach(VideoResolution.allCases) { resolution in + Text(resolution.displayName).tag(resolution) + } + } Picker("BitRate Mode", selection: $model.bitRateMode) { ForEach(model.bitRateModes, id: \.description) { index in Text(index.description).tag(index) diff --git a/Examples/iOS/PreferenceViewModel.swift b/Examples/iOS/PreferenceViewModel.swift index d8d1fde96..6c929385f 100644 --- a/Examples/iOS/PreferenceViewModel.swift +++ b/Examples/iOS/PreferenceViewModel.swift @@ -22,6 +22,31 @@ enum AudioSourceServiceMode: String, CaseIterable, Sendable { case audioEngine } +enum VideoResolution: String, CaseIterable, Identifiable, Sendable { + case p720 + case p1080 + + var id: Self { self } + + var displayName: String { + switch self { + case .p720: + return "720p" + case .p1080: + return "1080p" + } + } + + var landscapeSize: CGSize { + switch self { + case .p720: + return .init(width: 1280, height: 720) + case .p1080: + return .init(width: 1920, height: 1080) + } + } +} + @MainActor final class PreferenceViewModel: ObservableObject { private enum Keys { @@ -29,6 +54,7 @@ final class PreferenceViewModel: ObservableObject { static let streamName = "pref_stream_name" static let audioFormat = "pref_audio_format" static let bitRateMode = "pref_bitrate_mode" + static let videoResolution = "pref_video_resolution" static let isLowLatencyEnabled = "pref_low_latency" static let viewType = "pref_view_type" static let audioCaptureMode = 
"pref_audio_capture_mode" @@ -64,6 +90,11 @@ final class PreferenceViewModel: ObservableObject { UserDefaults.standard.set(bitRateMode.description, forKey: Keys.bitRateMode) } } + @Published var videoResolution: VideoResolution = .p720 { + didSet { + UserDefaults.standard.set(videoResolution.rawValue, forKey: Keys.videoResolution) + } + } @Published var isLowLatencyRateControlEnabled: Bool = false { didSet { UserDefaults.standard.set(isLowLatencyRateControlEnabled, forKey: Keys.isLowLatencyEnabled) @@ -111,6 +142,11 @@ final class PreferenceViewModel: ObservableObject { } } + if let savedResolution = defaults.string(forKey: Keys.videoResolution), + let resolution = VideoResolution(rawValue: savedResolution) { + self.videoResolution = resolution + } + if defaults.object(forKey: Keys.isLowLatencyEnabled) != nil { self.isLowLatencyRateControlEnabled = defaults.bool(forKey: Keys.isLowLatencyEnabled) } diff --git a/Examples/iOS/PublishView.swift b/Examples/iOS/PublishView.swift index 64ffa2b25..c210f874b 100644 --- a/Examples/iOS/PublishView.swift +++ b/Examples/iOS/PublishView.swift @@ -370,7 +370,7 @@ struct PublishView: View { } if !model.isLoading { - Text("720p") + Text(preference.videoResolution.displayName) .font(.system(size: 10, weight: .medium)) .foregroundColor(.white) .padding(.horizontal, 8) @@ -576,7 +576,10 @@ struct PublishView: View { model.stopRunning() } .onChange(of: horizontalSizeClass) { _ in - model.orientationDidChange() + model.orientationDidChange(preference) + } + .onChange(of: preference.videoResolution) { _ in + model.orientationDidChange(preference) }.alert(isPresented: $model.isShowError) { Alert( title: Text("Error"), diff --git a/Examples/iOS/PublishViewModel.swift b/Examples/iOS/PublishViewModel.swift index 34abd6343..337546ea1 100644 --- a/Examples/iOS/PublishViewModel.swift +++ b/Examples/iOS/PublishViewModel.swift @@ -5,6 +5,7 @@ import MediaPlayer import Photos import RTCHaishinKit import SwiftUI +import VideoToolbox @MainActor 
final class PublishViewModel: ObservableObject { @@ -334,7 +335,7 @@ final class PublishViewModel: ObservableObject { videoMixerSettings.mode = .offscreen await mixer.setVideoMixerSettings(videoMixerSettings) - await configureScreen(isGPURendererEnabled: true) + await configureScreen(preference: preference, isGPURendererEnabled: true) let backCamera = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .back) let frontCamera = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .front) @@ -370,7 +371,7 @@ final class PublishViewModel: ObservableObject { } await makeSession(preference) let isLandscape = UIDevice.current.orientation.isLandscape - await updateVideoEncoderSize(isLandscape: isLandscape) + await updateVideoEncoderSize(isLandscape: isLandscape, resolution: preference.videoResolution) let screenSize = await mixer.screen.size if let session = self.session { let videoSettings = await session.stream.videoSettings @@ -378,7 +379,7 @@ final class PublishViewModel: ObservableObject { } isLoading = false } - orientationDidChange() + orientationDidChange(preference) tasks.append(Task { for await buffer in await audioSourceService.buffer { await mixer.append(buffer.0, when: buffer.1) @@ -396,8 +397,9 @@ final class PublishViewModel: ObservableObject { } @ScreenActor - private func configureScreen(isGPURendererEnabled: Bool) async { - await mixer.screen.size = .init(width: 720, height: 1280) + private func configureScreen(preference: PreferenceViewModel, isGPURendererEnabled: Bool) async { + let size = await makeTargetSize(isLandscape: false, resolution: preference.videoResolution) + await mixer.screen.size = size await mixer.screen.backgroundColor = UIColor.black.cgColor } @@ -535,13 +537,14 @@ final class PublishViewModel: ObservableObject { } } - func orientationDidChange() { + func orientationDidChange(_ preference: PreferenceViewModel) { Task { @ScreenActor in await mixer.setVideoOrientation(.portrait) - await 
mixer.screen.size = .init(width: 720, height: 1280) + let size = await makeTargetSize(isLandscape: false, resolution: preference.videoResolution) + await mixer.screen.size = size let screenSize = await mixer.screen.size Task { @MainActor in - await self.updateVideoEncoderSize(isLandscape: false) + await self.updateVideoEncoderSize(isLandscape: false, resolution: preference.videoResolution) if let session = self.session { let videoSettings = await session.stream.videoSettings self.videoDimensions = "Screen: \(Int(screenSize.width))x\(Int(screenSize.height)) | Video: \(videoSettings.videoSize.width)x\(videoSettings.videoSize.height)" @@ -552,18 +555,32 @@ final class PublishViewModel: ObservableObject { } } - private func updateVideoEncoderSize(isLandscape: Bool) async { + private func updateVideoEncoderSize(isLandscape: Bool, resolution: VideoResolution) async { guard let session else { return } var videoSettings = await session.stream.videoSettings - let targetSize: CGSize = isLandscape - ? CGSize(width: 1280, height: 720) - : CGSize(width: 720, height: 1280) + var shouldApplySettings = false + let targetSize = makeTargetSize(isLandscape: isLandscape, resolution: resolution) if videoSettings.videoSize != targetSize { videoSettings.videoSize = targetSize + shouldApplySettings = true + } + if resolution == .p1080, + videoSettings.profileLevel == kVTProfileLevel_H264_Baseline_3_1 as String { + videoSettings.profileLevel = kVTProfileLevel_H264_High_AutoLevel as String + shouldApplySettings = true + } + if shouldApplySettings { try? await session.stream.setVideoSettings(videoSettings) } } + private func makeTargetSize(isLandscape: Bool, resolution: VideoResolution) -> CGSize { + let landscape = resolution.landscapeSize + return isLandscape + ? 
landscape + : .init(width: landscape.height, height: landscape.width) + } + private func startBatteryTracking() { UIDevice.current.isBatteryMonitoringEnabled = true streamStartBattery = UIDevice.current.batteryLevel diff --git a/HaishinKit/Sources/Screen/Screen.swift b/HaishinKit/Sources/Screen/Screen.swift index af642c29f..1444eb8b5 100644 --- a/HaishinKit/Sources/Screen/Screen.swift +++ b/HaishinKit/Sources/Screen/Screen.swift @@ -15,6 +15,42 @@ public protocol ScreenDelegate: AnyObject { func screen(_ screen: Screen, willLayout time: CMTime) } +public struct ScreenTimingSettings: Codable, Sendable { + public static let `default` = ScreenTimingSettings() + + public var minimumFrameDuration: TimeInterval + public var maximumFrameDuration: TimeInterval + public var minimumMonotonicStep: TimeInterval + public var maximumVideoCaptureLatency: TimeInterval + public var videoCaptureLatencySmoothingFactor: Double + + public init( + minimumFrameDuration: TimeInterval = 1.0 / 30.0, + maximumFrameDuration: TimeInterval = 1.0 / 15.0, + minimumMonotonicStep: TimeInterval = 1.0 / 600.0, + maximumVideoCaptureLatency: TimeInterval = 0.25, + videoCaptureLatencySmoothingFactor: Double = 0.2 + ) { + self.minimumFrameDuration = minimumFrameDuration + self.maximumFrameDuration = maximumFrameDuration + self.minimumMonotonicStep = minimumMonotonicStep + self.maximumVideoCaptureLatency = maximumVideoCaptureLatency + self.videoCaptureLatencySmoothingFactor = videoCaptureLatencySmoothingFactor + } +} + +private extension ScreenTimingSettings { + func normalized() -> ScreenTimingSettings { + var settings = self + settings.minimumFrameDuration = max(settings.minimumFrameDuration, .ulpOfOne) + settings.maximumFrameDuration = max(settings.maximumFrameDuration, settings.minimumFrameDuration) + settings.minimumMonotonicStep = max(settings.minimumMonotonicStep, .ulpOfOne) + settings.maximumVideoCaptureLatency = max(settings.maximumVideoCaptureLatency, 0) + 
settings.videoCaptureLatencySmoothingFactor = min(max(settings.videoCaptureLatencySmoothingFactor, 0), 1) + return settings + } +} + /// An object that manages offscreen rendering a foundation. public final class Screen: ScreenObjectContainerConvertible { /// The default screen size. @@ -99,6 +135,8 @@ public final class Screen: ScreenObjectContainerConvertible { } } private var presentationTimeStamp: CMTime = .zero + private var lastFrameDuration: TimeInterval = 1.0 / 30.0 + private var timingSettings: ScreenTimingSettings = .default /// Creates a screen object. public init() { @@ -126,6 +164,14 @@ public final class Screen: ScreenObjectContainerConvertible { return videoTrackScreenObject.unregisterVideoEffect(effect) } + public func setTimingSettings(_ settings: ScreenTimingSettings) { + timingSettings = settings.normalized() + } + + public func getTimingSettings() -> ScreenTimingSettings { + timingSettings + } + public func findById(_ id: String) -> ScreenObject? { return root.findById(id) } @@ -159,13 +205,17 @@ public final class Screen: ScreenObjectContainerConvertible { if let dictionary = CVBufferCopyAttachments(pixelBuffer, .shouldNotPropagate) { CVBufferSetAttachments(pixelBuffer, dictionary, .shouldPropagate) } - let presentationTimeStamp = CMTime(seconds: updateFrame.timestamp - videoCaptureLatency, preferredTimescale: Self.preferredTimescale) - guard self.presentationTimeStamp <= presentationTimeStamp else { - return nil + let rawFrameDuration = updateFrame.targetTimestamp - updateFrame.timestamp + let settings = timingSettings + let frameDuration = min(max(rawFrameDuration > 0 ? 
rawFrameDuration : lastFrameDuration, settings.minimumFrameDuration), settings.maximumFrameDuration) + lastFrameDuration = frameDuration + var presentationTimeStamp = CMTime(seconds: updateFrame.timestamp - videoCaptureLatency, preferredTimescale: Self.preferredTimescale) + if presentationTimeStamp <= self.presentationTimeStamp { + presentationTimeStamp = self.presentationTimeStamp + CMTime(seconds: settings.minimumMonotonicStep, preferredTimescale: Self.preferredTimescale) } self.presentationTimeStamp = presentationTimeStamp var timingInfo = CMSampleTimingInfo( - duration: CMTime(seconds: updateFrame.targetTimestamp - updateFrame.timestamp, preferredTimescale: Self.preferredTimescale), + duration: CMTime(seconds: frameDuration, preferredTimescale: Self.preferredTimescale), presentationTimeStamp: presentationTimeStamp, decodeTimeStamp: .invalid ) @@ -208,8 +258,14 @@ public final class Screen: ScreenObjectContainerConvertible { return } let hostPresentationTimeStamp = presentationTimeStamp.convertTime(from: synchronizationClock) - let diff = ceil((targetTimestamp - hostPresentationTimeStamp.seconds) * 10000) / 10000 - videoCaptureLatency = diff + let settings = timingSettings + let diff = targetTimestamp - hostPresentationTimeStamp.seconds + let clamped = min(max(diff, 0), settings.maximumVideoCaptureLatency) + if videoCaptureLatency == 0 { + videoCaptureLatency = clamped + } else { + videoCaptureLatency += (clamped - videoCaptureLatency) * settings.videoCaptureLatencySmoothingFactor + } } func reset() { diff --git a/RTMPHaishinKit/Sources/RTMP/RTMPConnection.swift b/RTMPHaishinKit/Sources/RTMP/RTMPConnection.swift index 98aa71700..b28a29365 100644 --- a/RTMPHaishinKit/Sources/RTMP/RTMPConnection.swift +++ b/RTMPHaishinKit/Sources/RTMP/RTMPConnection.swift @@ -435,9 +435,9 @@ public actor RTMPConnection: HaishinKit.NetworkConnection { if logger.isEnabledFor(level: .trace) { logger.trace("<<", message) } - let iterator = outputBuffer.putMessage(type, chunkStreamId: 
chunkStreamId.rawValue, message: message) + let chunks = Array(outputBuffer.putMessage(type, chunkStreamId: chunkStreamId.rawValue, message: message)) Task { - await socket?.send(iterator) + await socket?.send(chunks) } return message.payload.count } diff --git a/RTMPHaishinKit/Sources/RTMP/RTMPSocket.swift b/RTMPHaishinKit/Sources/RTMP/RTMPSocket.swift index fe8360030..ddf1fc493 100644 --- a/RTMPHaishinKit/Sources/RTMP/RTMPSocket.swift +++ b/RTMPHaishinKit/Sources/RTMP/RTMPSocket.swift @@ -101,6 +101,16 @@ final actor RTMPSocket { } } + func send(_ chunks: [Data]) { + guard connected else { + return + } + for data in chunks { + queueBytesOut += data.count + outputs?.yield(data) + } + } + func recv() -> AsyncStream { AsyncStream { continuation in Task { diff --git a/SRTHaishinKit/Sources/Extension/sockaddr_in+Extension.swift b/SRTHaishinKit/Sources/Extension/sockaddr_in+Extension.swift deleted file mode 100644 index 07f56a833..000000000 --- a/SRTHaishinKit/Sources/Extension/sockaddr_in+Extension.swift +++ /dev/null @@ -1,30 +0,0 @@ -import Foundation - -extension sockaddr_in { - var size: Int { - return MemoryLayout.size(ofValue: self) - } - - init?(_ host: String, port: Int) { - self.init() - self.sin_family = sa_family_t(AF_INET) - self.sin_port = CFSwapInt16BigToHost(UInt16(port)) - if inet_pton(AF_INET, host, &sin_addr) == 1 { - return - } - guard let hostent = gethostbyname(host), hostent.pointee.h_addrtype == AF_INET else { - return nil - } - if let h_addr_list = hostent.pointee.h_addr_list[0] { - self.sin_addr = UnsafeRawPointer(h_addr_list).assumingMemoryBound(to: in_addr.self).pointee - } else { - return nil - } - } - - mutating func makeSockaddr() -> sockaddr { - var address = sockaddr() - memcpy(&address, &self, size) - return address - } -} diff --git a/SRTHaishinKit/Sources/SRT/SRTSocket.swift b/SRTHaishinKit/Sources/SRT/SRTSocket.swift index 83d9d8223..064577c24 100644 --- a/SRTHaishinKit/Sources/SRT/SRTSocket.swift +++ 
b/SRTHaishinKit/Sources/SRT/SRTSocket.swift @@ -8,6 +8,7 @@ final actor SRTSocket { enum Error: Swift.Error { case notConnected + case invalidArgument(_ message: String) case rejected(_ reason: SRTRejectReason) case illegalState(_ message: String) } @@ -128,28 +129,42 @@ final actor SRTSocket { let status: Int32 = try { switch url.mode { case .caller: - guard var remote = url.remote else { - return SRT_ERROR + guard let remote = url.remote else { + throw Error.invalidArgument("missing remote url") + } + return try remote.resolve(AI_ADDRCONFIG) { name, length in + srt_connect(socket, name, length) } - var remoteaddr = remote.makeSockaddr() - return srt_connect(socket, &remoteaddr, Int32(remote.size)) case .listener: - guard var local = url.local else { - return SRT_ERROR + guard let local = url.local else { + throw Error.invalidArgument("missing local url") } - var localaddr = local.makeSockaddr() - let status = srt_bind(socket, &localaddr, Int32(local.size)) - guard status != SRT_ERROR else { - throw makeSocketError() + let _: Int32 = try local.resolve(AI_PASSIVE) { name, length in + let status = srt_bind(socket, name, length) + if status == SRT_ERROR { + return nil + } else { + return status + } } return srt_listen(socket, 1) case .rendezvous: - guard var remote = url.remote, var local = url.local else { - return SRT_ERROR + guard let remote = url.remote else { + throw Error.invalidArgument("missing remote url") + } + guard let local = url.local else { + throw Error.invalidArgument("missing local url") + } + return try remote.resolve(AI_PASSIVE | AI_ADDRCONFIG) { remotename, remotelen in + return try local.resolve(AI_PASSIVE | AI_ADDRCONFIG) { localname, locallen in + let status = srt_rendezvous(socket, localname, locallen, remotename, remotelen) + if status == SRT_ERROR { + return nil + } else { + return status + } + } } - var remoteaddr = remote.makeSockaddr() - var localaddr = local.makeSockaddr() - return srt_rendezvous(socket, &remoteaddr, Int32(remote.size), 
&localaddr, Int32(local.size)) } }() guard status != SRT_ERROR else { diff --git a/SRTHaishinKit/Sources/SRT/SRTSocketURL.swift b/SRTHaishinKit/Sources/SRT/SRTSocketURL.swift index 2426b7d12..80d024eeb 100644 --- a/SRTHaishinKit/Sources/SRT/SRTSocketURL.swift +++ b/SRTHaishinKit/Sources/SRT/SRTSocketURL.swift @@ -27,20 +27,20 @@ struct SRTSocketURL { let mode: SRTMode let options: [SRTSocketOption] - var remote: sockaddr_in? { + var remote: AddrInfo? { guard let host = url.host else { return nil } - return .init(host, port: url.port ?? Self.defaultPort) + return AddrInfo(host: host, port: url.port ?? Self.defaultPort) } - var local: sockaddr_in? { + var local: AddrInfo? { let queryItems = Self.getQueryItems(url) let adapter = queryItems["adapter"] ?? "0.0.0.0" if let port = queryItems["port"] { - return .init(adapter, port: Int(port) ?? url.port ?? Self.defaultPort) + return AddrInfo(host: adapter, port: Int(port) ?? url.port ?? Self.defaultPort) } - return .init(adapter, port: url.port ?? Self.defaultPort) + return AddrInfo(host: adapter, port: url.port ?? Self.defaultPort) } init?(_ url: URL?) { diff --git a/SRTHaishinKit/Sources/Util/AddrInfo.swift b/SRTHaishinKit/Sources/Util/AddrInfo.swift new file mode 100644 index 000000000..db2e4c642 --- /dev/null +++ b/SRTHaishinKit/Sources/Util/AddrInfo.swift @@ -0,0 +1,56 @@ +import Foundation +import libsrt + +struct AddrInfo { + enum Error: Swift.Error { + case failedToGetaddrinfo(_ code: Int) + case failedToResolve + } + + let host: String + let port: Int + + @discardableResult + func resolve<R>(_ flags: Int32, lambda: (UnsafePointer<sockaddr>, Int32) throws -> R?) throws -> R { + var hints = addrinfo( + ai_flags: flags, + ai_family: AF_UNSPEC, + ai_socktype: SOCK_DGRAM, + ai_protocol: 0, + ai_addrlen: 0, + ai_canonname: nil, + ai_addr: nil, + ai_next: nil + ) + var result: UnsafeMutablePointer<addrinfo>? 
+ let rv = getaddrinfo(host, String(port), &hints, &result) + guard rv == 0 else { + throw Error.failedToGetaddrinfo(Int(rv)) + } + defer { + freeaddrinfo(result) + } + var addr = sockaddr_storage() + var rp = result + while rp != nil { + if let ai = rp?.pointee { + memcpy(&addr, ai.ai_addr, Int(ai.ai_addrlen)) + let result = withUnsafePointer(to: &addr) { + $0.withMemoryRebound(to: sockaddr.self, capacity: 1) { + do { + return try lambda($0, Int32(ai.ai_addrlen)) + } catch { + print("AddrInfo.resolve: lambda threw error for address \(host):\(port): \(error)") + return nil + } + } + } + if let result { + return result + } + } + rp = rp?.pointee.ai_next + } + throw Error.failedToResolve + } +}