5 changes: 3 additions & 2 deletions Examples/iOS/InfoGuideView.swift
@@ -53,6 +53,7 @@ private struct PreferenceGuideList: View {
Section("Video Settings") {
GuideRow(title: "HDR Video", description: "Captures wider color/brightness range. Requires HDR-capable camera.")
GuideRow(title: "Low Latency", description: "Reduces stream delay to ~2-3 seconds. May slightly reduce quality.")
GuideRow(title: "Resolution", description: "Choose 720p for performance or 1080p for higher detail.")
GuideRow(title: "BitRate Mode", description: "Average: Consistent file size\nConstant: Stable quality\nVariable: Best quality")
}
Section("Capture Settings") {
@@ -75,8 +76,8 @@ private struct PublishGuideList: View {
description: "Frames per second. 15 saves battery, 30 is standard, 60 is ultra-smooth.")
GuideRowWithIcon(icon: "slider.horizontal.3", title: "Bitrate (kbps)",
description: "Video quality. Higher = better but more data. 1500-2500 recommended.")
GuideRowWithIcon(icon: "rectangle.badge.checkmark", title: "720p",
description: "Video resolution (1280×720). Good balance of quality and performance.")
GuideRowWithIcon(icon: "rectangle.badge.checkmark", title: "720p / 1080p",
description: "Video resolution. 720p = lighter load, 1080p = sharper image but needs more CPU/network.")
}
Section("Controls") {
GuideRowWithIcon(icon: "record.circle", title: "Record",
5 changes: 5 additions & 0 deletions Examples/iOS/PreferenceView.swift
@@ -64,6 +64,11 @@ struct PreferenceView: View {
Toggle(isOn: $model.isLowLatencyRateControlEnabled) {
Text("Low Latency Mode")
}
Picker("Resolution", selection: $model.videoResolution) {
ForEach(VideoResolution.allCases) { resolution in
Text(resolution.displayName).tag(resolution)
}
}
Picker("BitRate Mode", selection: $model.bitRateMode) {
ForEach(model.bitRateModes, id: \.description) { index in
Text(index.description).tag(index)
36 changes: 36 additions & 0 deletions Examples/iOS/PreferenceViewModel.swift
@@ -22,13 +22,39 @@ enum AudioSourceServiceMode: String, CaseIterable, Sendable {
case audioEngine
}

enum VideoResolution: String, CaseIterable, Identifiable, Sendable {
case p720
case p1080

var id: Self { self }

var displayName: String {
switch self {
case .p720:
return "720p"
case .p1080:
return "1080p"
}
}

var landscapeSize: CGSize {
switch self {
case .p720:
return .init(width: 1280, height: 720)
case .p1080:
return .init(width: 1920, height: 1080)
}
}
}

@MainActor
final class PreferenceViewModel: ObservableObject {
private enum Keys {
static let uri = "pref_stream_uri"
static let streamName = "pref_stream_name"
static let audioFormat = "pref_audio_format"
static let bitRateMode = "pref_bitrate_mode"
static let videoResolution = "pref_video_resolution"
static let isLowLatencyEnabled = "pref_low_latency"
static let viewType = "pref_view_type"
static let audioCaptureMode = "pref_audio_capture_mode"
@@ -64,6 +90,11 @@ final class PreferenceViewModel: ObservableObject {
UserDefaults.standard.set(bitRateMode.description, forKey: Keys.bitRateMode)
}
}
@Published var videoResolution: VideoResolution = .p720 {
didSet {
UserDefaults.standard.set(videoResolution.rawValue, forKey: Keys.videoResolution)
}
}
@Published var isLowLatencyRateControlEnabled: Bool = false {
didSet {
UserDefaults.standard.set(isLowLatencyRateControlEnabled, forKey: Keys.isLowLatencyEnabled)
@@ -111,6 +142,11 @@ final class PreferenceViewModel: ObservableObject {
}
}

if let savedResolution = defaults.string(forKey: Keys.videoResolution),
let resolution = VideoResolution(rawValue: savedResolution) {
self.videoResolution = resolution
}

if defaults.object(forKey: Keys.isLowLatencyEnabled) != nil {
self.isLowLatencyRateControlEnabled = defaults.bool(forKey: Keys.isLowLatencyEnabled)
}
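As a quick sanity check on the persistence above, a minimal sketch (not part of the diff) of how the saved value round-trips through UserDefaults, assuming the VideoResolution enum and the "pref_video_resolution" key defined in this file are in scope:

import Foundation

// Hypothetical round-trip, mirroring the didSet writer and the loader above.
let defaults = UserDefaults.standard
defaults.set(VideoResolution.p1080.rawValue, forKey: "pref_video_resolution")
if let raw = defaults.string(forKey: "pref_video_resolution"),
   let restored = VideoResolution(rawValue: raw) {
    print(restored.displayName)   // "1080p"
    print(restored.landscapeSize) // (1920.0, 1080.0)
}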
7 changes: 5 additions & 2 deletions Examples/iOS/PublishView.swift
@@ -370,7 +370,7 @@ struct PublishView: View {
}

if !model.isLoading {
Text("720p")
Text(preference.videoResolution.displayName)
.font(.system(size: 10, weight: .medium))
.foregroundColor(.white)
.padding(.horizontal, 8)
@@ -576,7 +576,10 @@ struct PublishView: View {
model.stopRunning()
}
.onChange(of: horizontalSizeClass) { _ in
model.orientationDidChange()
model.orientationDidChange(preference)
}
.onChange(of: preference.videoResolution) { _ in
model.orientationDidChange(preference)
}.alert(isPresented: $model.isShowError) {
Alert(
title: Text("Error"),
41 changes: 29 additions & 12 deletions Examples/iOS/PublishViewModel.swift
@@ -5,6 +5,7 @@ import MediaPlayer
import Photos
import RTCHaishinKit
import SwiftUI
import VideoToolbox

@MainActor
final class PublishViewModel: ObservableObject {
@@ -334,7 +335,7 @@ final class PublishViewModel: ObservableObject {
videoMixerSettings.mode = .offscreen
await mixer.setVideoMixerSettings(videoMixerSettings)

await configureScreen(isGPURendererEnabled: true)
await configureScreen(preference: preference, isGPURendererEnabled: true)

let backCamera = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .back)
let frontCamera = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .front)
@@ -370,15 +371,15 @@ final class PublishViewModel: ObservableObject {
}
await makeSession(preference)
let isLandscape = UIDevice.current.orientation.isLandscape
await updateVideoEncoderSize(isLandscape: isLandscape)
await updateVideoEncoderSize(isLandscape: isLandscape, resolution: preference.videoResolution)
let screenSize = await mixer.screen.size
if let session = self.session {
let videoSettings = await session.stream.videoSettings
self.videoDimensions = "Screen: \(Int(screenSize.width))x\(Int(screenSize.height)) | Video: \(videoSettings.videoSize.width)x\(videoSettings.videoSize.height)"
}
isLoading = false
}
orientationDidChange()
orientationDidChange(preference)
tasks.append(Task {
for await buffer in await audioSourceService.buffer {
await mixer.append(buffer.0, when: buffer.1)
@@ -396,8 +397,9 @@
}

@ScreenActor
private func configureScreen(isGPURendererEnabled: Bool) async {
await mixer.screen.size = .init(width: 720, height: 1280)
private func configureScreen(preference: PreferenceViewModel, isGPURendererEnabled: Bool) async {
let size = await makeTargetSize(isLandscape: false, resolution: preference.videoResolution)
await mixer.screen.size = size
await mixer.screen.backgroundColor = UIColor.black.cgColor
}

@@ -535,13 +537,14 @@
}
}

func orientationDidChange() {
func orientationDidChange(_ preference: PreferenceViewModel) {
Task { @ScreenActor in
await mixer.setVideoOrientation(.portrait)
await mixer.screen.size = .init(width: 720, height: 1280)
let size = await makeTargetSize(isLandscape: false, resolution: preference.videoResolution)
await mixer.screen.size = size
let screenSize = await mixer.screen.size
Task { @MainActor in
await self.updateVideoEncoderSize(isLandscape: false)
await self.updateVideoEncoderSize(isLandscape: false, resolution: preference.videoResolution)
if let session = self.session {
let videoSettings = await session.stream.videoSettings
self.videoDimensions = "Screen: \(Int(screenSize.width))x\(Int(screenSize.height)) | Video: \(videoSettings.videoSize.width)x\(videoSettings.videoSize.height)"
@@ -552,18 +555,32 @@
}
}

private func updateVideoEncoderSize(isLandscape: Bool) async {
private func updateVideoEncoderSize(isLandscape: Bool, resolution: VideoResolution) async {
guard let session else { return }
var videoSettings = await session.stream.videoSettings
let targetSize: CGSize = isLandscape
? CGSize(width: 1280, height: 720)
: CGSize(width: 720, height: 1280)
var shouldApplySettings = false
let targetSize = makeTargetSize(isLandscape: isLandscape, resolution: resolution)
if videoSettings.videoSize != targetSize {
videoSettings.videoSize = targetSize
shouldApplySettings = true
}
if resolution == .p1080,
videoSettings.profileLevel == kVTProfileLevel_H264_Baseline_3_1 as String {
videoSettings.profileLevel = kVTProfileLevel_H264_High_AutoLevel as String
shouldApplySettings = true
}
if shouldApplySettings {
try? await session.stream.setVideoSettings(videoSettings)
}
}

private func makeTargetSize(isLandscape: Bool, resolution: VideoResolution) -> CGSize {
let landscape = resolution.landscapeSize
return isLandscape
? landscape
: .init(width: landscape.height, height: landscape.width)
}

private func startBatteryTracking() {
UIDevice.current.isBatteryMonitoringEnabled = true
streamStartBattery = UIDevice.current.batteryLevel
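For reference, a hedged sketch (not part of the diff) of what the new size helpers produce, using the values defined on VideoResolution; the comment about the profile level restates the condition in updateVideoEncoderSize above:

import CoreGraphics

// Hypothetical illustration of the portrait flip performed by makeTargetSize.
let resolution = VideoResolution.p1080
let landscape = resolution.landscapeSize                                 // 1920 x 1080
let portrait = CGSize(width: landscape.height, height: landscape.width) // 1080 x 1920
// When the resolution is .p1080 and the current profile is H.264 Baseline 3.1,
// updateVideoEncoderSize switches it to kVTProfileLevel_H264_High_AutoLevel
// before applying the settings via setVideoSettings.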
68 changes: 62 additions & 6 deletions HaishinKit/Sources/Screen/Screen.swift
@@ -15,6 +15,42 @@ public protocol ScreenDelegate: AnyObject {
func screen(_ screen: Screen, willLayout time: CMTime)
}

public struct ScreenTimingSettings: Codable, Sendable {
public static let `default` = ScreenTimingSettings()

public var minimumFrameDuration: TimeInterval
public var maximumFrameDuration: TimeInterval
public var minimumMonotonicStep: TimeInterval
public var maximumVideoCaptureLatency: TimeInterval
public var videoCaptureLatencySmoothingFactor: Double

public init(
minimumFrameDuration: TimeInterval = 1.0 / 30.0,
maximumFrameDuration: TimeInterval = 1.0 / 15.0,
minimumMonotonicStep: TimeInterval = 1.0 / 600.0,
maximumVideoCaptureLatency: TimeInterval = 0.25,
videoCaptureLatencySmoothingFactor: Double = 0.2
) {
self.minimumFrameDuration = minimumFrameDuration
self.maximumFrameDuration = maximumFrameDuration
self.minimumMonotonicStep = minimumMonotonicStep
self.maximumVideoCaptureLatency = maximumVideoCaptureLatency
self.videoCaptureLatencySmoothingFactor = videoCaptureLatencySmoothingFactor
}
}

private extension ScreenTimingSettings {
func normalized() -> ScreenTimingSettings {
var settings = self
settings.minimumFrameDuration = max(settings.minimumFrameDuration, .ulpOfOne)
settings.maximumFrameDuration = max(settings.maximumFrameDuration, settings.minimumFrameDuration)
settings.minimumMonotonicStep = max(settings.minimumMonotonicStep, .ulpOfOne)
settings.maximumVideoCaptureLatency = max(settings.maximumVideoCaptureLatency, 0)
settings.videoCaptureLatencySmoothingFactor = min(max(settings.videoCaptureLatencySmoothingFactor, 0), 1)
return settings
}
}

/// An object that manages offscreen rendering a foundation.
public final class Screen: ScreenObjectContainerConvertible {
/// The default screen size.
Expand Down Expand Up @@ -99,6 +135,8 @@ public final class Screen: ScreenObjectContainerConvertible {
}
}
private var presentationTimeStamp: CMTime = .zero
private var lastFrameDuration: TimeInterval = 1.0 / 30.0
private var timingSettings: ScreenTimingSettings = .default

/// Creates a screen object.
public init() {
Expand Down Expand Up @@ -126,6 +164,14 @@ public final class Screen: ScreenObjectContainerConvertible {
return videoTrackScreenObject.unregisterVideoEffect(effect)
}

public func setTimingSettings(_ settings: ScreenTimingSettings) {
timingSettings = settings.normalized()
}

public func getTimingSettings() -> ScreenTimingSettings {
timingSettings
}

public func findById(_ id: String) -> ScreenObject? {
return root.findById(id)
}
Expand Down Expand Up @@ -159,13 +205,17 @@ public final class Screen: ScreenObjectContainerConvertible {
if let dictionary = CVBufferCopyAttachments(pixelBuffer, .shouldNotPropagate) {
CVBufferSetAttachments(pixelBuffer, dictionary, .shouldPropagate)
}
let presentationTimeStamp = CMTime(seconds: updateFrame.timestamp - videoCaptureLatency, preferredTimescale: Self.preferredTimescale)
guard self.presentationTimeStamp <= presentationTimeStamp else {
return nil
let rawFrameDuration = updateFrame.targetTimestamp - updateFrame.timestamp
let settings = timingSettings
let frameDuration = min(max(rawFrameDuration > 0 ? rawFrameDuration : lastFrameDuration, settings.minimumFrameDuration), settings.maximumFrameDuration)
lastFrameDuration = frameDuration
var presentationTimeStamp = CMTime(seconds: updateFrame.timestamp - videoCaptureLatency, preferredTimescale: Self.preferredTimescale)
if presentationTimeStamp <= self.presentationTimeStamp {
presentationTimeStamp = self.presentationTimeStamp + CMTime(seconds: settings.minimumMonotonicStep, preferredTimescale: Self.preferredTimescale)
}
self.presentationTimeStamp = presentationTimeStamp
var timingInfo = CMSampleTimingInfo(
duration: CMTime(seconds: updateFrame.targetTimestamp - updateFrame.timestamp, preferredTimescale: Self.preferredTimescale),
duration: CMTime(seconds: frameDuration, preferredTimescale: Self.preferredTimescale),
presentationTimeStamp: presentationTimeStamp,
decodeTimeStamp: .invalid
)
Expand Down Expand Up @@ -208,8 +258,14 @@ public final class Screen: ScreenObjectContainerConvertible {
return
}
let hostPresentationTimeStamp = presentationTimeStamp.convertTime(from: synchronizationClock)
let diff = ceil((targetTimestamp - hostPresentationTimeStamp.seconds) * 10000) / 10000
videoCaptureLatency = diff
let settings = timingSettings
let diff = targetTimestamp - hostPresentationTimeStamp.seconds
let clamped = min(max(diff, 0), settings.maximumVideoCaptureLatency)
if videoCaptureLatency == 0 {
videoCaptureLatency = clamped
} else {
videoCaptureLatency += (clamped - videoCaptureLatency) * settings.videoCaptureLatencySmoothingFactor
}
}

func reset() {
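A minimal usage sketch of the new timing API, assuming a Screen instance is in scope (the `screen` name is illustrative); the values shown are the defaults from the initializer above:

// Hypothetical caller-side configuration; setTimingSettings normalizes the values
// (maximum >= minimum frame duration, smoothing factor clamped to 0...1) before storing them.
let timing = ScreenTimingSettings(
    minimumFrameDuration: 1.0 / 30.0,
    maximumFrameDuration: 1.0 / 15.0,
    minimumMonotonicStep: 1.0 / 600.0,
    maximumVideoCaptureLatency: 0.25,
    videoCaptureLatencySmoothingFactor: 0.2   // weight for the capture-latency moving average
)
screen.setTimingSettings(timing)
// getTimingSettings() returns the normalized settings currently in effect.

The smoothing factor controls how quickly videoCaptureLatency tracks new measurements: each update applies videoCaptureLatency += (clamped - videoCaptureLatency) * factor, as in the diff above.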
4 changes: 2 additions & 2 deletions RTMPHaishinKit/Sources/RTMP/RTMPConnection.swift
@@ -435,9 +435,9 @@ public actor RTMPConnection: HaishinKit.NetworkConnection {
if logger.isEnabledFor(level: .trace) {
logger.trace("<<", message)
}
let iterator = outputBuffer.putMessage(type, chunkStreamId: chunkStreamId.rawValue, message: message)
let chunks = Array(outputBuffer.putMessage(type, chunkStreamId: chunkStreamId.rawValue, message: message))
Task {
await socket?.send(iterator)
await socket?.send(chunks)
}
return message.payload.count
}
10 changes: 10 additions & 0 deletions RTMPHaishinKit/Sources/RTMP/RTMPSocket.swift
@@ -101,6 +101,16 @@ final actor RTMPSocket {
}
}

func send(_ chunks: [Data]) {
guard connected else {
return
}
for data in chunks {
queueBytesOut += data.count
outputs?.yield(data)
}
}

func recv() -> AsyncStream<Data> {
AsyncStream<Data> { continuation in
Task {
30 changes: 0 additions & 30 deletions SRTHaishinKit/Sources/Extension/sockaddr_in+Extension.swift

This file was deleted.
