// Create (or retrieve) the call object for the given call type and id.
let call = streamVideo.call(callType: "default", callId: "my-call-id")
// Opt out of the Low Bandwidth optimization by disabling the
// `.subscriberVideoPause` client capability for this call.
await call.disableClientCapabilities([.subscriberVideoPause])
Low Bandwidth
Our servers can detect when a subscriber is on a low-bandwidth connection and may automatically disable selected incoming video streams. This helps prioritize audio quality and reduce total bandwidth usage, improving overall call stability.
When this feature is active, incoming video streams may be paused selectively, resulting in an audio-only experience for some participants. The UI can reflect this state with an icon or message overlay.
Low Bandwidth Optimization
The Low Bandwidth optimization is enabled by default at the SDK level. An SDK integrator may opt out by updating the Call's clientCapabilities, passing an array of ClientCapability values containing `.subscriberVideoPause` (as shown in the snippet above).
This signals to the backend that the client supports dynamic video pausing, allowing the system to optimize media delivery under limited network conditions.
Observing Paused Tracks
When the feature is active, the backend may mark some remote video tracks as paused. These will appear in the participant.pausedTracks property:
import StreamVideo
import Combine
// Observe the call's participant list and log every participant whose
// incoming video track is currently paused by the backend.
let cancellable = call.state.$participants
    .map { participants in
        participants.filter { $0.pausedTracks.contains(.video) }
    }
    .sink { pausedVideoParticipants in
        print("Participants with paused video tracks: \(pausedVideoParticipants)")
    }

// Cancel when no longer needed:
cancellable.cancel()
This can be used to update the UI, e.g. by displaying an indicator that a participant’s video is disabled due to low bandwidth.
UI Example
For this example we are going to deep dive into the SDK’s implementation that is being used to indicate to the user that a video track has been paused temporarily.
The indicator
In order to present or hide the indicator we can use the information encapsulated in the CallParticipant object. Specifically, the pausedTracks property contains information about any track type — related to this user — that has been remotely paused. Based on that we can implement the following:
// Show a warning badge whenever this participant's video track has been
// remotely paused (e.g. due to low bandwidth).
if participant.pausedTracks.contains(.video) {
    Image(systemName: "video.slash.fill")
        .foregroundColor(.yellow)
        .padding(4)
}
Include the indicator in the ParticipantInfo SwiftUI view
With the indicator implemented above, we need to decide where we want to present it. By default the SDK presents this indicator, next to Participant’s name, in the view that overlays their VideoRenderer. That view is called ParticipantInfoView and below is its implementation.
/// Overlays a participant's video tile with their display name and status
/// icons: a pin indicator, a paused-video indicator, and a sound indicator.
public struct ParticipantInfoView: View {

    @Injected(\.images) var images
    @Injected(\.fonts) var fonts
    @Injected(\.colors) var colors

    var participant: CallParticipant
    var isPinned: Bool
    var maxHeight: CGFloat

    /// - Parameters:
    ///   - participant: The participant whose info is rendered.
    ///   - isPinned: Whether the pin icon should be shown.
    ///   - maxHeight: Maximum height of the icons, in points (default 14).
    public init(
        participant: CallParticipant,
        isPinned: Bool,
        maxHeight: Float = 14
    ) {
        self.participant = participant
        self.isPinned = isPinned
        self.maxHeight = CGFloat(maxHeight)
    }

    public var body: some View {
        HStack(spacing: 4) {
            if isPinned {
                pinIndicator
            }

            nameLabel

            if participant.pausedTracks.contains(.video) {
                pausedVideoIndicator
            }

            SoundIndicator(participant: participant)
                .frame(maxHeight: maxHeight)
        }
        .padding(.all, 2)
        .padding(.horizontal, 4)
        .frame(height: 28)
        .cornerRadius(
            8,
            corners: [.topRight],
            backgroundColor: colors.participantInfoBackgroundColor
        )
    }

    // MARK: - Subviews

    // Shown while the participant is pinned on screen.
    private var pinIndicator: some View {
        Image(systemName: "pin.fill")
            .resizable()
            .aspectRatio(contentMode: .fit)
            .frame(maxHeight: maxHeight)
            .foregroundColor(.white)
            .padding(.trailing, 4)
    }

    // Participant's name; falls back to the id when the name is empty.
    private var nameLabel: some View {
        Text(participant.name.isEmpty ? participant.id : participant.name)
            .foregroundColor(.white)
            .multilineTextAlignment(.leading)
            .lineLimit(1)
            .font(fonts.caption1)
            .minimumScaleFactor(0.7)
            .accessibility(identifier: "participantName")
    }

    // Shown while the participant's video track is remotely paused.
    private var pausedVideoIndicator: some View {
        Image(systemName: "wifi.slash")
            .resizable()
            .aspectRatio(contentMode: .fit)
            .frame(maxHeight: maxHeight)
            .foregroundColor(.white)
            .padding(.trailing, 4)
    }
}
Integration with the VideoCallParticipantModifier
We use the ParticipantInfoView in the provided out-of-the-box VideoCallParticipantModifier, which looks like this:
/// Decorates a participant's video tile: sizes the video frame, overlays the
/// participant info row, applies optional decorations (options menu, speaking
/// border) and clips the result to a rounded rectangle.
public struct VideoCallParticipantModifier: ViewModifier {

    var participant: CallParticipant
    var call: Call?
    var availableFrame: CGRect
    var ratio: CGFloat
    var showAllInfo: Bool
    var decorations: Set<VideoCallParticipantDecoration>

    /// - Parameters:
    ///   - participant: The participant rendered in this tile.
    ///   - call: The active call, if any.
    ///   - availableFrame: The frame available for the tile.
    ///   - ratio: The aspect ratio applied to the video frame.
    ///   - showAllInfo: Whether the connection-quality indicator is shown.
    ///   - decorations: The decorations that are allowed to be applied.
    public init(
        participant: CallParticipant,
        call: Call?,
        availableFrame: CGRect,
        ratio: CGFloat,
        showAllInfo: Bool,
        decorations: [VideoCallParticipantDecoration] = VideoCallParticipantDecoration.allCases
    ) {
        self.participant = participant
        self.call = call
        self.availableFrame = availableFrame
        self.ratio = ratio
        self.showAllInfo = showAllInfo
        self.decorations = .init(decorations)
    }

    public func body(content: Content) -> some View {
        content
            .adjustVideoFrame(to: availableFrame.size.width, ratio: ratio)
            .overlay(participantInfoOverlay)
            .applyDecorationModifierIfRequired(
                VideoCallParticipantOptionsModifier(participant: participant, call: call),
                decoration: .options,
                availableDecorations: decorations
            )
            .applyDecorationModifierIfRequired(
                VideoCallParticipantSpeakingModifier(participant: participant, participantCount: participantCount),
                decoration: .speaking,
                availableDecorations: decorations
            )
            .clipShape(RoundedRectangle(cornerRadius: 16))
            .clipped()
    }

    // Bottom-anchored row with the participant info and, optionally, the
    // connection-quality indicator.
    private var participantInfoOverlay: some View {
        ZStack {
            BottomView(content: {
                HStack {
                    ParticipantInfoView(
                        participant: participant,
                        isPinned: participant.isPinned
                    )

                    Spacer()

                    if showAllInfo {
                        ConnectionQualityIndicator(
                            connectionQuality: participant.connectionQuality
                        )
                    }
                }
            })
        }
    }

    // Number of participants currently in the call (0 when there is no call).
    @MainActor
    private var participantCount: Int {
        call?.state.participants.count ?? 0
    }
}
You can find more info about the ConnectionQuality and the ConnectionQualityIndicator here.
The VideoCallParticipantModifier is then returned by the default implementation of the ViewFactory when the following method is called:
/// ViewFactory requirement that supplies the modifier applied to each
/// participant tile; per the docs above, the default implementation returns
/// a `VideoCallParticipantModifier` configured with these arguments.
func makeVideoCallParticipantModifier(
    participant: CallParticipant,
    call: Call?,
    availableFrame: CGRect,
    ratio: CGFloat,
    showAllInfo: Bool
) -> some ViewModifier