Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 4 additions & 0 deletions media/engine/fake_webrtc_call.cc
Original file line number Diff line number Diff line change
Expand Up @@ -99,6 +99,10 @@ void FakeAudioSendStream::SetMuted(bool muted) {
muted_ = muted;
}

// Test-only accessor mirroring the AudioSendStream interface: returns the
// muted flag most recently stored via SetMuted().
bool FakeAudioSendStream::GetMuted() {
  return muted_;
}

// Returns the canned stats snapshot held by this fake stream (presumably
// injected by the test via a setter elsewhere in this class — confirm).
AudioSendStream::Stats FakeAudioSendStream::GetStats() const {
  return stats_;
}
Expand Down
3 changes: 2 additions & 1 deletion media/engine/fake_webrtc_call.h
Original file line number Diff line number Diff line change
Expand Up @@ -100,7 +100,8 @@ class FakeAudioSendStream final : public AudioSendStream {
int payload_frequency,
int event,
int duration_ms) override;
// Keep fake stream API aligned with AudioSendStream interface changes.
// GetMuted() is defined out-of-line in fake_webrtc_call.cc; the scraped diff
// had also retained the removed inline definition, which would redeclare the
// member. Only the declaration belongs here.
bool GetMuted() override;
void SetMuted(bool muted) override;
AudioSendStream::Stats GetStats() const override;
AudioSendStream::Stats GetStats(bool has_remote_tracks) const override;
Expand Down
32 changes: 32 additions & 0 deletions media/engine/simulcast_encoder_adapter.cc
Original file line number Diff line number Diff line change
Expand Up @@ -951,13 +951,24 @@ VideoEncoder::EncoderInfo SimulcastEncoderAdapter::GetEncoderInfo() const {

encoder_info.scaling_settings = VideoEncoder::ScalingSettings::kOff;
std::vector<std::string> encoder_names;
// SEA can keep multiple stream contexts alive even when runtime bitrate
// allocation has paused all but one spatial layer. Track that state
// explicitly so we can preserve simulcast-specific aggregation while still
// forwarding the active encoder's single-layer adaptation hints.
size_t active_stream_count = 0;
std::optional<VideoEncoder::EncoderInfo> active_stream_info;

for (size_t i = 0; i < stream_contexts_.size(); ++i) {
VideoEncoder::EncoderInfo encoder_impl_info =
stream_contexts_[i].encoder().GetEncoderInfo();

// Encoder name indicates names of all active sub-encoders.
if (!stream_contexts_[i].is_paused()) {
// If exactly one layer stays unpaused after SetRates(), this is the
// encoder whose runtime adaptation fields should be exposed to the rest
// of WebRTC.
++active_stream_count;
active_stream_info = encoder_impl_info;
encoder_names.push_back(encoder_impl_info.implementation_name);
}
if (i == 0) {
Expand Down Expand Up @@ -1011,6 +1022,27 @@ VideoEncoder::EncoderInfo SimulcastEncoderAdapter::GetEncoderInfo() const {
encoder_info.implementation_name += implementation_name_builder.Release();
}

if (active_stream_count == 1) {
RTC_DCHECK(active_stream_info.has_value());
// Keep the aggregated SEA view for simulcast-specific fields such as
// implementation_name, fps_allocation and alignment, but make the adapter
// behave like single-layer publishing for the runtime-sensitive fields
// consumed by adaptation and frame handling.
encoder_info.scaling_settings = active_stream_info->scaling_settings;
encoder_info.supports_native_handle =
active_stream_info->supports_native_handle;
encoder_info.has_trusted_rate_controller =
active_stream_info->has_trusted_rate_controller;
encoder_info.is_hardware_accelerated =
active_stream_info->is_hardware_accelerated;
encoder_info.is_qp_trusted = active_stream_info->is_qp_trusted;
encoder_info.resolution_bitrate_limits =
active_stream_info->resolution_bitrate_limits;
encoder_info.min_qp = active_stream_info->min_qp;
encoder_info.preferred_pixel_formats =
active_stream_info->preferred_pixel_formats;
}

OverrideFromFieldTrial(&encoder_info);

return encoder_info;
Expand Down
139 changes: 139 additions & 0 deletions media/engine/simulcast_encoder_adapter_unittest.cc
Original file line number Diff line number Diff line change
Expand Up @@ -286,6 +286,7 @@ class MockVideoEncoder : public VideoEncoder {
info.supports_simulcast = supports_simulcast_;
info.is_qp_trusted = is_qp_trusted_;
info.resolution_bitrate_limits = resolution_bitrate_limits;
info.min_qp = min_qp_;
return info;
}

Expand Down Expand Up @@ -365,6 +366,8 @@ class MockVideoEncoder : public VideoEncoder {
resolution_bitrate_limits = limits;
}

void set_min_qp(std::optional<int> min_qp) { min_qp_ = min_qp; }

bool supports_simulcast() const { return supports_simulcast_; }

SdpVideoFormat video_format() const { return video_format_; }
Expand All @@ -384,6 +387,7 @@ class MockVideoEncoder : public VideoEncoder {
FramerateFractions fps_allocation_;
bool supports_simulcast_ = false;
std::optional<bool> is_qp_trusted_;
std::optional<int> min_qp_;
SdpVideoFormat video_format_;
std::vector<VideoEncoder::ResolutionBitrateLimits> resolution_bitrate_limits;

Expand Down Expand Up @@ -1692,6 +1696,141 @@ TEST_F(TestSimulcastEncoderAdapterFake, ReportsFpsAllocation) {
::testing::ElementsAreArray(expected_fps_allocation));
}

// Verifies that when runtime bitrate allocation leaves exactly one simulcast
// layer unpaused, the adapter's GetEncoderInfo() forwards that single
// encoder's runtime-sensitive fields (scaling settings, native-handle
// support, QP trust, bitrate limits, min QP) while keeping the aggregated
// per-spatial-slot fps_allocation layout.
TEST_F(TestSimulcastEncoderAdapterFake,
       ForwardsRuntimeSensitiveEncoderInfoForSingleUnpausedLayer) {
  SimulcastTestFixtureImpl::DefaultSettings(
      &codec_, static_cast<const int*>(kTestTemporalLayerProfile),
      kVideoCodecVP8);
  codec_.numberOfSimulcastStreams = 3;
  EXPECT_EQ(0, adapter_->InitEncode(&codec_, kSettings));
  adapter_->RegisterEncodeCompleteCallback(this);
  ASSERT_EQ(3u, helper_->factory()->encoders().size());

  auto* low_encoder = helper_->factory()->encoders()[0];
  auto* mid_encoder = helper_->factory()->encoders()[1];
  auto* high_encoder = helper_->factory()->encoders()[2];

  // Give each mock encoder distinct values so forwarding the wrong layer's
  // info would be detectable in the assertions below.
  low_encoder->set_scaling_settings(VideoEncoder::ScalingSettings(10, 20, 111));
  low_encoder->set_supports_native_handle(false);
  low_encoder->set_is_qp_trusted(true);
  low_encoder->set_resolution_bitrate_limits(
      {VideoEncoder::ResolutionBitrateLimits(111, 1111, 2222, 3333)});
  low_encoder->set_min_qp(10);
  low_encoder->set_fps_allocation(
      FramerateFractions{EncoderInfo::kMaxFramerateFraction / 2});

  mid_encoder->set_scaling_settings(VideoEncoder::ScalingSettings(30, 40, 222));
  mid_encoder->set_supports_native_handle(true);
  mid_encoder->set_is_qp_trusted(false);
  mid_encoder->set_resolution_bitrate_limits(
      {VideoEncoder::ResolutionBitrateLimits(222, 4444, 5555, 6666)});
  mid_encoder->set_min_qp(20);
  mid_encoder->set_fps_allocation(
      FramerateFractions{EncoderInfo::kMaxFramerateFraction / 3,
                         EncoderInfo::kMaxFramerateFraction});

  high_encoder->set_scaling_settings(
      VideoEncoder::ScalingSettings(50, 60, 333));
  high_encoder->set_supports_native_handle(false);
  high_encoder->set_is_qp_trusted(true);
  high_encoder->set_resolution_bitrate_limits(
      {VideoEncoder::ResolutionBitrateLimits(333, 7777, 8888, 9999)});
  high_encoder->set_min_qp(30);
  high_encoder->set_fps_allocation(
      FramerateFractions{EncoderInfo::kMaxFramerateFraction});

  // Only keep the middle spatial layer active. SEA still has three stream
  // contexts, so this exercises the runtime state that used to incorrectly
  // report aggregated simulcast encoder info with scaling disabled.
  VideoBitrateAllocation allocation;
  ASSERT_TRUE(allocation.SetBitrate(1, 0, 500000));
  adapter_->SetRates(VideoEncoder::RateControlParameters(allocation, 30.0));

  const auto info = adapter_->GetEncoderInfo();
  // Runtime-sensitive fields should come from the only unpaused encoder.
  ASSERT_TRUE(info.scaling_settings.thresholds.has_value());
  EXPECT_EQ(30, info.scaling_settings.thresholds->low);
  EXPECT_EQ(40, info.scaling_settings.thresholds->high);
  EXPECT_EQ(222, info.scaling_settings.min_pixels_per_frame);
  EXPECT_TRUE(info.supports_native_handle);
  EXPECT_EQ(std::optional<bool>(false), info.is_qp_trusted);
  EXPECT_EQ(std::optional<int>(20), info.min_qp);
  EXPECT_EQ(info.resolution_bitrate_limits,
            std::vector<VideoEncoder::ResolutionBitrateLimits>(
                {VideoEncoder::ResolutionBitrateLimits(222, 4444, 5555, 6666)}));
  // Simulcast-specific fields must remain in SEA's aggregated spatial-slot
  // layout even when runtime-sensitive fields are forwarded from one encoder.
  EXPECT_THAT(info.fps_allocation[0], ::testing::IsEmpty());
  EXPECT_THAT(info.fps_allocation[1],
              ::testing::ElementsAre(EncoderInfo::kMaxFramerateFraction / 3,
                                     EncoderInfo::kMaxFramerateFraction));
  EXPECT_THAT(info.fps_allocation[2],
              ::testing::ElementsAre(EncoderInfo::kMaxFramerateFraction));
}

// Verifies the transition back: after collapsing to one active layer (which
// forwards that encoder's runtime-sensitive info), re-activating a second
// layer via SetRates() must restore the aggregated simulcast EncoderInfo
// view without requiring a re-init of the adapter.
TEST_F(TestSimulcastEncoderAdapterFake,
       RestoresAggregatedEncoderInfoWhenMultipleLayersUnpause) {
  SimulcastTestFixtureImpl::DefaultSettings(
      &codec_, static_cast<const int*>(kTestTemporalLayerProfile),
      kVideoCodecVP8);
  codec_.numberOfSimulcastStreams = 3;
  EXPECT_EQ(0, adapter_->InitEncode(&codec_, kSettings));
  adapter_->RegisterEncodeCompleteCallback(this);
  ASSERT_EQ(3u, helper_->factory()->encoders().size());

  auto* low_encoder = helper_->factory()->encoders()[0];
  auto* mid_encoder = helper_->factory()->encoders()[1];
  auto* high_encoder = helper_->factory()->encoders()[2];

  // Distinct per-encoder values so both the forwarded single-layer view and
  // the restored aggregated view can be told apart.
  low_encoder->set_scaling_settings(VideoEncoder::ScalingSettings(10, 20, 111));
  low_encoder->set_supports_native_handle(false);
  low_encoder->set_fps_allocation(
      FramerateFractions{EncoderInfo::kMaxFramerateFraction / 2});

  mid_encoder->set_scaling_settings(VideoEncoder::ScalingSettings(30, 40, 222));
  mid_encoder->set_supports_native_handle(true);
  mid_encoder->set_fps_allocation(
      FramerateFractions{EncoderInfo::kMaxFramerateFraction / 3,
                         EncoderInfo::kMaxFramerateFraction});

  high_encoder->set_scaling_settings(
      VideoEncoder::ScalingSettings(50, 60, 333));
  high_encoder->set_supports_native_handle(false);
  high_encoder->set_fps_allocation(
      FramerateFractions{EncoderInfo::kMaxFramerateFraction});

  // First collapse to a single active spatial layer and verify the forwarded
  // encoder info.
  VideoBitrateAllocation one_layer_allocation;
  ASSERT_TRUE(one_layer_allocation.SetBitrate(1, 0, 500000));
  adapter_->SetRates(
      VideoEncoder::RateControlParameters(one_layer_allocation, 30.0));

  auto info = adapter_->GetEncoderInfo();
  ASSERT_TRUE(info.scaling_settings.thresholds.has_value());
  EXPECT_EQ(30, info.scaling_settings.thresholds->low);
  EXPECT_EQ(40, info.scaling_settings.thresholds->high);
  EXPECT_TRUE(info.supports_native_handle);

  // Then enable another layer. SEA should immediately return to its normal
  // aggregated simulcast view without requiring a re-init.
  VideoBitrateAllocation two_layer_allocation;
  ASSERT_TRUE(two_layer_allocation.SetBitrate(1, 0, 500000));
  ASSERT_TRUE(two_layer_allocation.SetBitrate(2, 0, 700000));
  adapter_->SetRates(
      VideoEncoder::RateControlParameters(two_layer_allocation, 30.0));

  info = adapter_->GetEncoderInfo();
  // Aggregated view: scaling disabled again, fps_allocation per spatial slot.
  EXPECT_FALSE(info.scaling_settings.thresholds.has_value());
  EXPECT_TRUE(info.supports_native_handle);
  EXPECT_THAT(info.fps_allocation[0], ::testing::IsEmpty());
  EXPECT_THAT(info.fps_allocation[1],
              ::testing::ElementsAre(EncoderInfo::kMaxFramerateFraction / 3,
                                     EncoderInfo::kMaxFramerateFraction));
  EXPECT_THAT(info.fps_allocation[2],
              ::testing::ElementsAre(EncoderInfo::kMaxFramerateFraction));
}

TEST_F(TestSimulcastEncoderAdapterFake, SetRateDistributesBandwithAllocation) {
SimulcastTestFixtureImpl::DefaultSettings(
&codec_, static_cast<const int*>(kTestTemporalLayerProfile),
Expand Down
61 changes: 61 additions & 0 deletions sdk/objc/components/video_codec/RTCVideoEncoderH264.mm
Original file line number Diff line number Diff line change
Expand Up @@ -61,6 +61,11 @@ - (void)frameWasEncoded : (OSStatus)status flags
const int kLowH264QpThreshold = 28;
const int kHighH264QpThreshold = 39;
const int kBitsPerByte = 8;
// If no encoded frame is observed for this long while bitrate is non-zero,
// treat the encoder as stalled and force a session reset.
const int64_t kEncoderStallResetThresholdMs = 2500;
// Minimum gap between forced resets to avoid rapid reset loops.
const int64_t kEncoderStallMinResetIntervalMs = 1000;

const OSType kNV12PixelFormat = kCVPixelFormatType_420YpCbCr8BiPlanarFullRange;

Expand Down Expand Up @@ -384,6 +389,12 @@ @implementation RTC_OBJC_TYPE (RTCVideoEncoderH264) {
std::vector<uint8_t> _frameScaleBuffer;

CMTime _previousPresentationTimeStamp;
// Timestamp (ms) of the last frame successfully produced by VideoToolbox.
int64_t _lastSuccessfulEncodeMs;
// Timestamp (ms) of the last hard reset we triggered for stall recovery.
int64_t _lastHardResetMs;
// True after a forced reset until we observe the first successful frame.
BOOL _isInForcedRecovery;
}

// .5 is set as a minimum to prevent overcompensating for large temporary
Expand All @@ -402,6 +413,12 @@ - (instancetype)initWithCodecInfo:
_profile_level_id =
webrtc::ParseSdpForH264ProfileLevelId([codecInfo nativeSdpVideoFormat].parameters);
_previousPresentationTimeStamp = kCMTimeZero;
// Zero means "no successful encode yet" for this encoder instance.
_lastSuccessfulEncodeMs = 0;
// Zero means "no forced reset has happened yet".
_lastHardResetMs = 0;
// Start in normal mode; recovery mode is enabled only after forced reset.
_isInForcedRecovery = NO;
RTC_DCHECK(_profile_level_id);
RTC_LOG(LS_INFO) << "Using profile "
<< CFStringToString(ExtractProfile(
Expand Down Expand Up @@ -445,6 +462,12 @@ - (NSInteger)startEncodeWithSettings:
_targetFrameRate = MIN(settings.maxFramerate, _maxAllowedFrameRate);
_encoderBitrateBps = 0;
_encoderFrameRate = 0;
// Reset stall tracking when a fresh encoding session starts.
_lastSuccessfulEncodeMs = 0;
// Reset last-reset timestamp for the new session lifecycle.
_lastHardResetMs = 0;
// New session starts outside recovery mode.
_isInForcedRecovery = NO;
if (settings.maxFramerate > _maxAllowedFrameRate && _maxAllowedFrameRate > 0) {
RTC_LOG(LS_WARNING) << "Initial encoder frame rate setting " << settings.maxFramerate
<< " is larger than the "
Expand Down Expand Up @@ -472,6 +495,31 @@ - (NSInteger)encode:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame
}
_previousPresentationTimeStamp = presentationTimeStamp;

#if defined(WEBRTC_IOS)
// Evaluate stall heuristics only on iOS where this recovery path is required.
const int64_t nowMs = rtc::TimeMillis();
// Trigger forced reset only when:
// 1) encoder should be producing data (_targetBitrateBps > 0),
// 2) session exists,
// 3) we've seen at least one successful encode before,
// 4) stall duration exceeds threshold,
// 5) minimum interval since previous reset has elapsed.
if (_targetBitrateBps > 0 && _compressionSession && _lastSuccessfulEncodeMs > 0 &&
nowMs - _lastSuccessfulEncodeMs >= kEncoderStallResetThresholdMs &&
nowMs - _lastHardResetMs >= kEncoderStallMinResetIntervalMs) {
// Emit explicit stall telemetry for field diagnosis.
RTC_LOG(LS_WARNING) << "iOS H264 encoder appears stalled. Forcing hard reset."
<< " stalled_for_ms=" << (nowMs - _lastSuccessfulEncodeMs)
<< " target_bps=" << _targetBitrateBps;
// Record reset time before reset call so recovery delay can be measured.
_lastHardResetMs = nowMs;
// Mark that we are awaiting first successful frame after forced reset.
_isInForcedRecovery = YES;
// Recreate compression session to recover from internal VideoToolbox stall.
[self resetCompressionSessionWithPixelFormat:[self pixelFormatOfFrame:frame]];
}
#endif

BOOL isKeyframeRequired = NO;
// Get a pixel buffer from the pool and copy frame data over.
if ([self resetCompressionSessionIfNeededWithFrame:frame]) {
Expand Down Expand Up @@ -949,6 +997,19 @@ - (void)frameWasEncoded:(OSStatus)status
return;
}

const int64_t nowMs = rtc::TimeMillis();
// Successful output proves encoder is alive; refresh liveness timestamp.
_lastSuccessfulEncodeMs = nowMs;
// Log one-time recovery event after a forced reset once first frame arrives.
if (_isInForcedRecovery) {
const int64_t recoveryDelayMs =
_lastHardResetMs > 0 ? nowMs - _lastHardResetMs : -1;
RTC_LOG(LS_INFO) << "iOS H264 encoder recovered after forced hard reset."
<< " recovery_delay_ms=" << recoveryDelayMs;
// Exit recovery mode after the first successful frame post-reset.
_isInForcedRecovery = NO;
}

BOOL isKeyframe = NO;
CFArrayRef attachments =
CMSampleBufferGetSampleAttachmentsArray(sampleBuffer, 0);
Expand Down
17 changes: 17 additions & 0 deletions video/adaptation/video_stream_encoder_resource_manager.cc
Original file line number Diff line number Diff line change
Expand Up @@ -559,6 +559,23 @@ void VideoStreamEncoderResourceManager::UpdateBandwidthQualityScalerSettings(
}
}

// Stops both quality-driven scaler resources and removes them from the
// adaptation machinery when the set of active simulcast layers changes, so
// that restrictions accumulated while publishing a single layer do not limit
// the new multi-layer configuration. Must run on the encoder queue.
// NOTE(review): assumes RemoveResource() also clears the adaptation
// restrictions attributed to the removed resource — confirm in the
// ResourceAdaptationProcessor implementation.
void VideoStreamEncoderResourceManager::ResetAdaptationsForSimulcastChange() {
  RTC_DCHECK_RUN_ON(encoder_queue_);
  if (quality_scaler_resource_->is_started()) {
    RTC_LOG(LS_INFO) << "Clearing quality scaler restrictions for simulcast "
                        "active-layer transition.";
    quality_scaler_resource_->StopCheckForOveruse();
    RemoveResource(quality_scaler_resource_);
    // Let the initial frame dropper react to the scaler being torn down.
    initial_frame_dropper_->OnQualityScalerSettingsUpdated();
  }
  if (bandwidth_quality_scaler_resource_->is_started()) {
    RTC_LOG(LS_INFO) << "Clearing bandwidth quality scaler restrictions for "
                        "simulcast active-layer transition.";
    bandwidth_quality_scaler_resource_->StopCheckForOveruse();
    RemoveResource(bandwidth_quality_scaler_resource_);
  }
}

void VideoStreamEncoderResourceManager::ConfigureQualityScaler(
const VideoEncoder::EncoderInfo& encoder_info) {
RTC_DCHECK_RUN_ON(encoder_queue_);
Expand Down
6 changes: 6 additions & 0 deletions video/adaptation/video_stream_encoder_resource_manager.h
Original file line number Diff line number Diff line change
Expand Up @@ -109,6 +109,12 @@ class VideoStreamEncoderResourceManager
// TODO(https://crbug.com/webrtc/11338): This can be made private if we
// configure on SetDegradationPreference and SetEncoderSettings.
void ConfigureQualityScaler(const VideoEncoder::EncoderInfo& encoder_info);

// Stops the quality scaler and clears its accumulated adaptation
// restrictions. Called when the number of active simulcast layers increases
// from <=1 to >1, so that the source provides full-resolution frames for
// the new multi-layer configuration.
void ResetAdaptationsForSimulcastChange();
void ConfigureBandwidthQualityScaler(
const VideoEncoder::EncoderInfo& encoder_info);

Expand Down
8 changes: 5 additions & 3 deletions video/config/encoder_stream_factory.cc
Original file line number Diff line number Diff line change
Expand Up @@ -69,13 +69,15 @@ bool IsTemporalLayersSupported(VideoCodecType codec_type) {
}

// Returns the number of simulcast layers that must be configured so that
// every active layer is included, i.e. the position just past the highest
// active layer. Returns 0 when no layer is active.
//
// The scraped diff had merged the pre-change lines (`return i + 1;` on the
// first active layer and a trailing `return 0;`) with the post-change
// `highest` logic, leaving the new logic dead; this is the intended
// post-change version per the diff header (2 deletions, 2 additions).
size_t FindRequiredActiveLayers(const VideoEncoderConfig& encoder_config) {
  size_t highest = 0;
  for (size_t i = 0; i < encoder_config.number_of_streams; ++i) {
    if (encoder_config.simulcast_layers[i].active) {
      // Keep scanning: a later active layer raises the requirement.
      highest = i + 1;
    }
  }
  return highest;
}

// The selected thresholds for QVGA and VGA corresponded to a QP around 10.
Expand Down
Loading