diff --git a/NOTICE b/NOTICE new file mode 100644 index 0000000000..3972578ec4 --- /dev/null +++ b/NOTICE @@ -0,0 +1,26 @@ +################################################################################### + +The following modifications follow Apache License 2.0 from shiguredo. + +https://github.com/webrtc-sdk/webrtc/commit/dfec53e93a0a1cb93f444caf50f844ec0068c7b7 +https://github.com/webrtc-sdk/webrtc/commit/403b4678543c5d4ac77bd1ea5753c02637b3bb89 +https://github.com/webrtc-sdk/webrtc/commit/77d5d685a90fb4bded17835ae72ec6671b26d696 + +Apache License 2.0 + +Copyright 2019-2021, Wandbox LLC (Original Author) +Copyright 2019-2021, Shiguredo Inc. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. + +##################################################################################### \ No newline at end of file diff --git a/README.md b/README.md index 332efcc58b..8677a0d072 100644 --- a/README.md +++ b/README.md @@ -1,3 +1,23 @@ +# WebRTC-SDK + +This is a **Community-Maintained** project, **Not the official Google WebRTC repository**. It is maintained by some contributors with WebRTC development experience, and the goal is to create a flexible release version to provide pre-compiled Mobile/Desktop/Embedded SDKs. + +## Additional notes from [webrtc-sdk](https://github.com/webrtc-sdk) community + +### Open Source License Compliance + +* The original code was cloned from [google webrtc](https://chromium.googlesource.com/external/webrtc.git) and released with [BSD license](/LICENSE). + +* Contains patches from the [shiguredo-webrtc-build](https://github.com/shiguredo-webrtc-build) community and complies with the [Apache 2.0](/NOTICE) license. + +### Who is using this project + +* [flutter-webrtc](https://github.com/flutter-webrtc/flutter-webrtc) + +* [LiveKit](https://github.com/livekit) + +## The Google WebRTC original README + **WebRTC is a free, open software project** that provides browsers and mobile applications with Real-Time Communications (RTC) capabilities via simple APIs. The WebRTC components have been optimized to best serve this purpose. diff --git a/sdk/BUILD.gn b/sdk/BUILD.gn index 90e245a18d..672b4f5b4a 100644 --- a/sdk/BUILD.gn +++ b/sdk/BUILD.gn @@ -657,6 +657,7 @@ if (is_ios || is_mac) { deps = [ ":av1", + ":simulcast", ":base_objc", ":native_video", ":videocodec_objc", @@ -734,6 +735,22 @@ if (is_ios || is_mac) { ] } + rtc_library("simulcast") { + sources = [ + "objc/components/video_codec/RTCVideoEncoderFactorySimulcast.h", + "objc/components/video_codec/RTCVideoEncoderFactorySimulcast.mm", + "objc/api/video_codec/RTCVideoEncoderSimulcast.h", + "objc/api/video_codec/RTCVideoEncoderSimulcast.mm", + ] + + deps = [ + ":base_objc", + ":wrapped_native_codec_objc", + "../media:rtc_media_base", + "../media:rtc_simulcast_encoder_adapter", + ] + } + # Build the PeerConnectionFactory without audio/video support. 
# This target depends on the objc_peeerconnectionfactory_base which still # includes some audio/video related objects such as RTCAudioSource because @@ -1300,6 +1317,9 @@ if (is_ios || is_mac) { "objc/api/video_codec/RTCVideoEncoderAV1.h", "objc/api/video_frame_buffer/RTCNativeI420Buffer.h", "objc/api/video_frame_buffer/RTCNativeMutableI420Buffer.h", + # Added for Simulcast support + "objc/components/video_codec/RTCVideoEncoderFactorySimulcast.h", + "objc/api/video_codec/RTCVideoEncoderSimulcast.h", ] if (!build_with_chromium) { @@ -1444,6 +1464,9 @@ if (is_ios || is_mac) { "objc/components/video_codec/RTCVideoEncoderH264.h", "objc/components/video_frame_buffer/RTCCVPixelBuffer.h", "objc/helpers/RTCDispatcher.h", + # Added for Simulcast support + "objc/components/video_codec/RTCVideoEncoderFactorySimulcast.h", + "objc/api/video_codec/RTCVideoEncoderSimulcast.h", ] if (!build_with_chromium) { sources += [ diff --git a/sdk/android/BUILD.gn b/sdk/android/BUILD.gn index 20bee2b832..3020670f87 100644 --- a/sdk/android/BUILD.gn +++ b/sdk/android/BUILD.gn @@ -517,6 +517,8 @@ if (is_android) { sources = [ "api/org/webrtc/SoftwareVideoDecoderFactory.java", "api/org/webrtc/SoftwareVideoEncoderFactory.java", + "api/org/webrtc/SimulcastVideoEncoder.java", + "api/org/webrtc/SimulcastVideoEncoderFactory.java", ] deps = [ @@ -870,6 +872,21 @@ if (current_os == "linux" || is_android) { ] } + rtc_library("simulcast_jni") { + visibility = [ "*" ] + allow_poison = [ "software_video_codecs" ] + sources = [ + "src/jni/simulcast_video_encoder.cc", + "src/jni/simulcast_video_encoder.h" + ] + deps = [ + ":base_jni", + ":video_jni", + ":native_api_codecs", + "../../media:rtc_simulcast_encoder_adapter" + ] + } + rtc_library("swcodecs_jni") { visibility = [ "*" ] allow_poison = [ "software_video_codecs" ] @@ -877,6 +894,7 @@ if (current_os == "linux" || is_android) { ":libaom_av1_jni", ":libvpx_vp8_jni", ":libvpx_vp9_jni", + ":simulcast_jni", ] } diff --git a/sdk/android/api/org/webrtc/SimulcastVideoEncoder.java b/sdk/android/api/org/webrtc/SimulcastVideoEncoder.java new file mode 100644 index 0000000000..af6c8f61c7 --- /dev/null +++ b/sdk/android/api/org/webrtc/SimulcastVideoEncoder.java @@ -0,0 +1,28 @@ +package org.webrtc; + +public class SimulcastVideoEncoder extends WrappedNativeVideoEncoder { + + static native long nativeCreateEncoder(VideoEncoderFactory primary, VideoEncoderFactory fallback, VideoCodecInfo info); + + VideoEncoderFactory primary; + VideoEncoderFactory fallback; + VideoCodecInfo info; + + public SimulcastVideoEncoder(VideoEncoderFactory primary, VideoEncoderFactory fallback, VideoCodecInfo info) { + this.primary = primary; + this.fallback = fallback; + this.info = info; + } + + @Override + public long createNativeVideoEncoder() { + return nativeCreateEncoder(primary, fallback, info); + } + + @Override + public boolean isHardwareEncoder() { + return false; + } + +} + diff --git a/sdk/android/api/org/webrtc/SimulcastVideoEncoderFactory.java b/sdk/android/api/org/webrtc/SimulcastVideoEncoderFactory.java new file mode 100644 index 0000000000..97b4f32087 --- /dev/null +++ b/sdk/android/api/org/webrtc/SimulcastVideoEncoderFactory.java @@ -0,0 +1,43 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. 
All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import androidx.annotation.Nullable;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Arrays;
+
+public class SimulcastVideoEncoderFactory implements VideoEncoderFactory {
+
+    VideoEncoderFactory primary;
+    VideoEncoderFactory fallback;
+
+    public SimulcastVideoEncoderFactory(VideoEncoderFactory primary, VideoEncoderFactory fallback) {
+        this.primary = primary;
+        this.fallback = fallback;
+    }
+
+    @Nullable
+    @Override
+    public VideoEncoder createEncoder(VideoCodecInfo info) {
+        return new SimulcastVideoEncoder(primary, fallback, info);
+    }
+
+    @Override
+    public VideoCodecInfo[] getSupportedCodecs() {
+        List<VideoCodecInfo> codecs = new ArrayList<VideoCodecInfo>();
+        codecs.addAll(Arrays.asList(primary.getSupportedCodecs()));
+        codecs.addAll(Arrays.asList(fallback.getSupportedCodecs()));
+        return codecs.toArray(new VideoCodecInfo[codecs.size()]);
+    }
+
+}
diff --git a/sdk/android/src/jni/simulcast_video_encoder.cc b/sdk/android/src/jni/simulcast_video_encoder.cc
new file mode 100644
index 0000000000..da31fbbfa5
--- /dev/null
+++ b/sdk/android/src/jni/simulcast_video_encoder.cc
@@ -0,0 +1,34 @@
+#include <jni.h>
+
+#include "sdk/android/src/jni/jni_helpers.h"
+#include "sdk/android/src/jni/video_encoder_factory_wrapper.h"
+#include "sdk/android/src/jni/video_codec_info.h"
+#include "sdk/android/native_api/codecs/wrapper.h"
+#include "media/engine/simulcast_encoder_adapter.h"
+#include "rtc_base/logging.h"
+
+using namespace webrtc;
+using namespace webrtc::jni;
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+// (VideoEncoderFactory primary, VideoEncoderFactory fallback, VideoCodecInfo info)
+JNIEXPORT jlong JNICALL Java_org_webrtc_SimulcastVideoEncoder_nativeCreateEncoder(JNIEnv *env, jclass klass, jobject primary, jobject fallback, jobject info) {
+    RTC_LOG(LS_INFO) << "Create simulcast video encoder";
+    JavaParamRef<jobject> info_ref(info);
+    SdpVideoFormat format = VideoCodecInfoToSdpVideoFormat(env, info_ref);
+
+    // TODO: The impact is minor, but this can leak, so it should be fixed in the future.
+    // https://github.com/shiguredo-webrtc-build/webrtc-build/pull/16#pullrequestreview-600675795
+    return NativeToJavaPointer(std::make_unique<SimulcastEncoderAdapter>(
+        JavaToNativeVideoEncoderFactory(env, primary).release(),
+        JavaToNativeVideoEncoderFactory(env, fallback).release(),
+        format).release());
+}
+
+
+#ifdef __cplusplus
+}
+#endif
diff --git a/sdk/android/src/jni/simulcast_video_encoder.h b/sdk/android/src/jni/simulcast_video_encoder.h
new file mode 100644
index 0000000000..519be778e8
--- /dev/null
+++ b/sdk/android/src/jni/simulcast_video_encoder.h
@@ -0,0 +1,22 @@
+/* DO NOT EDIT THIS FILE - it is machine generated */
+#include <jni.h>
+/* Header for class org_webrtc_SimulcastVideoEncoder */
+
+#ifndef _Included_org_webrtc_SimulcastVideoEncoder
+#define _Included_org_webrtc_SimulcastVideoEncoder
+#ifdef __cplusplus
+extern "C" {
+#endif
+/*
+ * Class:     org_webrtc_SimulcastVideoEncoder
+ * Method:    nativeCreateEncoder
+ * Signature: (Lorg/webrtc/VideoEncoderFactory;Lorg/webrtc/VideoEncoderFactory;Lorg/webrtc/VideoCodecInfo;)J
+ */
+
+JNIEXPORT jlong JNICALL Java_org_webrtc_SimulcastVideoEncoder_nativeCreateEncoder
+  (JNIEnv *, jclass, jobject, jobject, jobject);
+
+#ifdef __cplusplus
+}
+#endif
+#endif
diff --git a/sdk/objc/api/peerconnection/RTCPeerConnectionFactory+Native.h b/sdk/objc/api/peerconnection/RTCPeerConnectionFactory+Native.h
index f361b9f0ea..b7f5e27ded 100644
---
a/sdk/objc/api/peerconnection/RTCPeerConnectionFactory+Native.h +++ b/sdk/objc/api/peerconnection/RTCPeerConnectionFactory+Native.h @@ -71,6 +71,12 @@ NS_ASSUME_NONNULL_BEGIN initWithEncoderFactory:(nullable id)encoderFactory decoderFactory:(nullable id)decoderFactory; +- (instancetype) + initWithBypassVoiceProcessing:(BOOL)bypassVoiceProcessing + encoderFactory:(nullable id)encoderFactory + decoderFactory: + (nullable id)decoderFactory; + /** Initialize an RTCPeerConnection with a configuration, constraints, and * dependencies. */ diff --git a/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.h b/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.h index 78913527c0..a566e823ce 100644 --- a/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.h +++ b/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.h @@ -41,6 +41,13 @@ RTC_OBJC_EXPORT initWithEncoderFactory:(nullable id)encoderFactory decoderFactory:(nullable id)decoderFactory; +/* Initialize object with bypass voice processing */ +- (instancetype) + initWithBypassVoiceProcessing:(BOOL)bypassVoiceProcessing + encoderFactory:(nullable id)encoderFactory + decoderFactory: + (nullable id)decoderFactory; + /** Initialize an RTCAudioSource with constraints. */ - (RTC_OBJC_TYPE(RTCAudioSource) *)audioSourceWithConstraints: (nullable RTC_OBJC_TYPE(RTCMediaConstraints) *)constraints; diff --git a/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.mm b/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.mm index 63ba934e3d..da94078152 100644 --- a/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.mm +++ b/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.mm @@ -68,9 +68,9 @@ @implementation RTC_OBJC_TYPE (RTCPeerConnectionFactory) { @synthesize nativeFactory = _nativeFactory; -- (rtc::scoped_refptr)audioDeviceModule { +- (rtc::scoped_refptr)audioDeviceModule:(BOOL)bypassVoiceProcessing { #if defined(WEBRTC_IOS) - return webrtc::CreateAudioDeviceModule(); + return webrtc::CreateAudioDeviceModule(bypassVoiceProcessing); #else return nullptr; #endif @@ -87,7 +87,7 @@ - (instancetype)init { RTCVideoEncoderFactoryH264) alloc] init]) nativeVideoDecoderFactory:webrtc::ObjCToNativeVideoDecoderFactory([[RTC_OBJC_TYPE( RTCVideoDecoderFactoryH264) alloc] init]) - audioDeviceModule:[self audioDeviceModule] + audioDeviceModule:[self audioDeviceModule:false] audioProcessingModule:nullptr]; #endif } @@ -110,10 +110,36 @@ - (instancetype)init { nativeAudioDecoderFactory:webrtc::CreateBuiltinAudioDecoderFactory() nativeVideoEncoderFactory:std::move(native_encoder_factory) nativeVideoDecoderFactory:std::move(native_decoder_factory) - audioDeviceModule:[self audioDeviceModule] + audioDeviceModule:[self audioDeviceModule:false] audioProcessingModule:nullptr]; #endif } + +- (instancetype) + initWithBypassVoiceProcessing:(BOOL)bypassVoiceProcessing + encoderFactory:(nullable id)encoderFactory + decoderFactory: + (nullable id)decoderFactory { +#ifdef HAVE_NO_MEDIA + return [self initWithNoMedia]; +#else + std::unique_ptr native_encoder_factory; + std::unique_ptr native_decoder_factory; + if (encoderFactory) { + native_encoder_factory = webrtc::ObjCToNativeVideoEncoderFactory(encoderFactory); + } + if (decoderFactory) { + native_decoder_factory = webrtc::ObjCToNativeVideoDecoderFactory(decoderFactory); + } + return [self initWithNativeAudioEncoderFactory:webrtc::CreateBuiltinAudioEncoderFactory() + nativeAudioDecoderFactory:webrtc::CreateBuiltinAudioDecoderFactory() + nativeVideoEncoderFactory:std::move(native_encoder_factory) + 
nativeVideoDecoderFactory:std::move(native_decoder_factory) + audioDeviceModule:[self audioDeviceModule:bypassVoiceProcessing] + audioProcessingModule:nullptr]; +#endif +} + - (instancetype)initNative { if (self = [super init]) { _networkThread = rtc::Thread::CreateWithSocketServer(); diff --git a/sdk/objc/api/video_codec/RTCVideoEncoderSimulcast.h b/sdk/objc/api/video_codec/RTCVideoEncoderSimulcast.h new file mode 100644 index 0000000000..c3e3d4538f --- /dev/null +++ b/sdk/objc/api/video_codec/RTCVideoEncoderSimulcast.h @@ -0,0 +1,13 @@ +#import "RTCMacros.h" +#import "RTCVideoEncoder.h" +#import "RTCVideoEncoderFactory.h" +#import "RTCVideoCodecInfo.h" + +RTC_OBJC_EXPORT +@interface RTC_OBJC_TYPE (RTCVideoEncoderSimulcast) : NSObject + ++ (id)simulcastEncoderWithPrimary:(id)primary + fallback:(id)fallback + videoCodecInfo:(RTC_OBJC_TYPE(RTCVideoCodecInfo) *)videoCodecInfo; + +@end \ No newline at end of file diff --git a/sdk/objc/api/video_codec/RTCVideoEncoderSimulcast.mm b/sdk/objc/api/video_codec/RTCVideoEncoderSimulcast.mm new file mode 100644 index 0000000000..568e1bd517 --- /dev/null +++ b/sdk/objc/api/video_codec/RTCVideoEncoderSimulcast.mm @@ -0,0 +1,26 @@ +#import + +#import "RTCMacros.h" +#import "RTCVideoEncoderSimulcast.h" +#import "RTCWrappedNativeVideoEncoder.h" +#import "api/peerconnection/RTCVideoCodecInfo+Private.h" + +#include "native/api/video_encoder_factory.h" +#include "media/engine/simulcast_encoder_adapter.h" + +@implementation RTC_OBJC_TYPE (RTCVideoEncoderSimulcast) + ++ (id)simulcastEncoderWithPrimary:(id)primary + fallback:(id)fallback + videoCodecInfo:(RTC_OBJC_TYPE(RTCVideoCodecInfo) *)videoCodecInfo { + auto nativePrimary = webrtc::ObjCToNativeVideoEncoderFactory(primary); + auto nativeFallback = webrtc::ObjCToNativeVideoEncoderFactory(fallback); + auto nativeFormat = [videoCodecInfo nativeSdpVideoFormat]; + return [[RTC_OBJC_TYPE(RTCWrappedNativeVideoEncoder) alloc] + initWithNativeEncoder: std::make_unique( + nativePrimary.release(), + nativeFallback.release(), + std::move(nativeFormat))]; +} + +@end \ No newline at end of file diff --git a/sdk/objc/components/audio/RTCAudioSession+Configuration.mm b/sdk/objc/components/audio/RTCAudioSession+Configuration.mm index 449f31e9dd..b123e2002e 100644 --- a/sdk/objc/components/audio/RTCAudioSession+Configuration.mm +++ b/sdk/objc/components/audio/RTCAudioSession+Configuration.mm @@ -55,7 +55,8 @@ - (BOOL)setConfiguration:(RTC_OBJC_TYPE(RTCAudioSessionConfiguration) *)configur if (![self setCategory:configuration.category withOptions:configuration.categoryOptions error:&categoryError]) { - RTCLogError(@"Failed to set category: %@", + RTCLogError(@"Failed to set category to %@: %@", + self.category, categoryError.localizedDescription); error = categoryError; } else { @@ -66,7 +67,8 @@ - (BOOL)setConfiguration:(RTC_OBJC_TYPE(RTCAudioSessionConfiguration) *)configur if (self.mode != configuration.mode) { NSError *modeError = nil; if (![self setMode:configuration.mode error:&modeError]) { - RTCLogError(@"Failed to set mode: %@", + RTCLogError(@"Failed to set mode to %@: %@", + self.mode, modeError.localizedDescription); error = modeError; } else { diff --git a/sdk/objc/components/audio/RTCAudioSession+Private.h b/sdk/objc/components/audio/RTCAudioSession+Private.h index 4f5107f7e9..8d5786eb0c 100644 --- a/sdk/objc/components/audio/RTCAudioSession+Private.h +++ b/sdk/objc/components/audio/RTCAudioSession+Private.h @@ -35,6 +35,8 @@ NS_ASSUME_NONNULL_BEGIN */ @property(nonatomic, assign) BOOL isInterrupted; 
+@property(nonatomic, strong) NSString *activeCategory; + /** Adds the delegate to the list of delegates, and places it at the front of * the list. This delegate will be notified before other delegates of * audio events. diff --git a/sdk/objc/components/audio/RTCAudioSession.h b/sdk/objc/components/audio/RTCAudioSession.h index 3b83b27ba5..99cd004b4d 100644 --- a/sdk/objc/components/audio/RTCAudioSession.h +++ b/sdk/objc/components/audio/RTCAudioSession.h @@ -102,6 +102,9 @@ RTC_OBJC_EXPORT - (void)audioSession:(RTC_OBJC_TYPE(RTCAudioSession) *)audioSession audioUnitStartFailedWithError:(NSError *)error; +/** Called when audio session changed from output-only to input & output */ +- (void)audioSessionDidChangeRecordingEnabled:(RTC_OBJC_TYPE(RTCAudioSession) *)audioSession; + @end /** This is a protocol used to inform RTCAudioSession when the audio session diff --git a/sdk/objc/components/audio/RTCAudioSession.mm b/sdk/objc/components/audio/RTCAudioSession.mm index 0d0db5aa5b..11772e96d0 100644 --- a/sdk/objc/components/audio/RTCAudioSession.mm +++ b/sdk/objc/components/audio/RTCAudioSession.mm @@ -114,6 +114,8 @@ - (instancetype)initWithAudioSession:(id)audioSession { options:NSKeyValueObservingOptionNew | NSKeyValueObservingOptionOld context:(__bridge void *)RTC_OBJC_TYPE(RTCAudioSession).class]; + _activeCategory = _session.category; + RTCLog(@"RTC_OBJC_TYPE(RTCAudioSession) (%p): init.", self); } return self; @@ -540,8 +542,14 @@ - (void)handleRouteChangeNotification:(NSNotification *)notification { RTCLog(@"Audio route changed: OldDeviceUnavailable"); break; case AVAudioSessionRouteChangeReasonCategoryChange: - RTCLog(@"Audio route changed: CategoryChange to :%@", - self.session.category); + RTCLog(@"Audio route changed: CategoryChange to :%@", self.session.category); + { + if (![_session.category isEqualToString:_activeCategory]) { + _activeCategory = _session.category; + RTCLog(@"Audio route changed: Restarting Audio Unit"); + [self notifyDidChangeAudioSessionRecordingEnabled]; + } + } break; case AVAudioSessionRouteChangeReasonOverride: RTCLog(@"Audio route changed: Override"); @@ -997,4 +1005,13 @@ - (void)notifyFailedToSetActive:(BOOL)active error:(NSError *)error { } } +- (void)notifyDidChangeAudioSessionRecordingEnabled { + for (auto delegate : self.delegates) { + SEL sel = @selector(audioSessionDidChangeRecordingEnabled:); + if ([delegate respondsToSelector:sel]) { + [delegate audioSessionDidChangeRecordingEnabled:self]; + } + } +} + @end diff --git a/sdk/objc/components/audio/RTCAudioSessionConfiguration.m b/sdk/objc/components/audio/RTCAudioSessionConfiguration.m index 39e9ac13ec..7591d86f7d 100644 --- a/sdk/objc/components/audio/RTCAudioSessionConfiguration.m +++ b/sdk/objc/components/audio/RTCAudioSessionConfiguration.m @@ -65,15 +65,17 @@ @implementation RTC_OBJC_TYPE (RTCAudioSessionConfiguration) - (instancetype)init { if (self = [super init]) { + // Use AVAudioSession values for default + AVAudioSession *session = [AVAudioSession sharedInstance]; // Use a category which supports simultaneous recording and playback. // By default, using this category implies that our app’s audio is // nonmixable, hence activating the session will interrupt any other // audio sessions which are also nonmixable. - _category = AVAudioSessionCategoryPlayAndRecord; - _categoryOptions = AVAudioSessionCategoryOptionAllowBluetooth; + _category = session.category; + _categoryOptions = session.categoryOptions; // Specify mode for two-way voice communication (e.g. VoIP). 
- _mode = AVAudioSessionModeVoiceChat; + _mode = session.mode; // Set the session's sample rate or the hardware sample rate. // It is essential that we use the same sample rate as stream format diff --git a/sdk/objc/components/audio/RTCNativeAudioSessionDelegateAdapter.mm b/sdk/objc/components/audio/RTCNativeAudioSessionDelegateAdapter.mm index daddf314a4..6f8978b066 100644 --- a/sdk/objc/components/audio/RTCNativeAudioSessionDelegateAdapter.mm +++ b/sdk/objc/components/audio/RTCNativeAudioSessionDelegateAdapter.mm @@ -86,4 +86,9 @@ - (void)audioSession:(RTC_OBJC_TYPE(RTCAudioSession) *)audioSession _observer->OnChangedOutputVolume(); } +- (void)audioSessionDidChangeRecordingEnabled:(RTC_OBJC_TYPE(RTCAudioSession) *)session { + // re-trigger audio unit init, by using interrupt ended callback + _observer->OnChangedRecordingEnabled(); +} + @end diff --git a/sdk/objc/components/video_codec/RTCVideoEncoderFactorySimulcast.h b/sdk/objc/components/video_codec/RTCVideoEncoderFactorySimulcast.h new file mode 100644 index 0000000000..8ba7584c8e --- /dev/null +++ b/sdk/objc/components/video_codec/RTCVideoEncoderFactorySimulcast.h @@ -0,0 +1,16 @@ +#import + +#import "RTCMacros.h" +#import "RTCVideoEncoderFactory.h" + +NS_ASSUME_NONNULL_BEGIN + +RTC_OBJC_EXPORT +@interface RTC_OBJC_TYPE (RTCVideoEncoderFactorySimulcast) : NSObject + +- (instancetype)initWithPrimary:(id)primary + fallback:(id)fallback; + +@end + +NS_ASSUME_NONNULL_END \ No newline at end of file diff --git a/sdk/objc/components/video_codec/RTCVideoEncoderFactorySimulcast.mm b/sdk/objc/components/video_codec/RTCVideoEncoderFactorySimulcast.mm new file mode 100644 index 0000000000..6ba2074768 --- /dev/null +++ b/sdk/objc/components/video_codec/RTCVideoEncoderFactorySimulcast.mm @@ -0,0 +1,39 @@ +#import + +#import "RTCMacros.h" +#import "RTCVideoCodecInfo.h" +#import "RTCVideoEncoderFactorySimulcast.h" +#import "api/video_codec/RTCVideoEncoderSimulcast.h" + +@interface RTC_OBJC_TYPE (RTCVideoEncoderFactorySimulcast) () + +@property id primary; +@property id fallback; + +@end + + +@implementation RTC_OBJC_TYPE (RTCVideoEncoderFactorySimulcast) + +@synthesize primary = _primary; +@synthesize fallback = _fallback; + +- (instancetype)initWithPrimary:(id)primary + fallback:(id)fallback { + if (self = [super init]) { + _primary = primary; + _fallback = fallback; + } + return self; +} + +- (nullable id)createEncoder: (RTC_OBJC_TYPE(RTCVideoCodecInfo) *)info { + return [RTCVideoEncoderSimulcast simulcastEncoderWithPrimary: _primary fallback: _fallback videoCodecInfo: info]; +} + +- (NSArray *)supportedCodecs { + return [[_primary supportedCodecs] arrayByAddingObjectsFromArray: [_fallback supportedCodecs]]; +} + + +@end \ No newline at end of file diff --git a/sdk/objc/components/video_codec/RTCVideoEncoderH264.mm b/sdk/objc/components/video_codec/RTCVideoEncoderH264.mm index 966eb3ee5c..1e5feae08c 100644 --- a/sdk/objc/components/video_codec/RTCVideoEncoderH264.mm +++ b/sdk/objc/components/video_codec/RTCVideoEncoderH264.mm @@ -387,8 +387,8 @@ - (NSInteger)startEncodeWithSettings:(RTC_OBJC_TYPE(RTCVideoEncoderSettings) *)s - (NSInteger)encode:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame codecSpecificInfo:(nullable id)codecSpecificInfo frameTypes:(NSArray *)frameTypes { - RTC_DCHECK_EQ(frame.width, _width); - RTC_DCHECK_EQ(frame.height, _height); + // RTC_DCHECK_EQ(frame.width, _width); + // RTC_DCHECK_EQ(frame.height, _height); if (!_callback || !_compressionSession) { return WEBRTC_VIDEO_CODEC_UNINITIALIZED; } @@ -571,6 +571,7 @@ - 
(BOOL)resetCompressionSessionIfNeededWithFrame:(RTC_OBJC_TYPE(RTCVideoFrame) * OSType framePixelFormat = [self pixelFormatOfFrame:frame]; if (_compressionSession) { + _pixelBufferPool = VTCompressionSessionGetPixelBufferPool(_compressionSession); // The pool attribute `kCVPixelBufferPixelFormatTypeKey` can contain either an array of pixel // formats or a single pixel format. NSDictionary *poolAttributes = diff --git a/sdk/objc/native/src/audio/audio_device_ios.h b/sdk/objc/native/src/audio/audio_device_ios.h index 5afc49a461..c861dc5e09 100644 --- a/sdk/objc/native/src/audio/audio_device_ios.h +++ b/sdk/objc/native/src/audio/audio_device_ios.h @@ -145,6 +145,7 @@ class AudioDeviceIOS : public AudioDeviceGeneric, void OnValidRouteChange() override; void OnCanPlayOrRecordChange(bool can_play_or_record) override; void OnChangedOutputVolume() override; + void OnChangedRecordingEnabled() override; // VoiceProcessingAudioUnitObserver methods. OSStatus OnDeliverRecordedData(AudioUnitRenderActionFlags* flags, @@ -172,6 +173,7 @@ class AudioDeviceIOS : public AudioDeviceGeneric, void HandleSampleRateChange(float sample_rate); void HandlePlayoutGlitchDetected(); void HandleOutputVolumeChange(); + void HandleAudioSessionRecordingEnabledChange(); // Uses current `playout_parameters_` and `record_parameters_` to inform the // audio device buffer (ADB) about our internal audio parameters. diff --git a/sdk/objc/native/src/audio/audio_device_ios.mm b/sdk/objc/native/src/audio/audio_device_ios.mm index 3ec7d0b75a..0983b5a89b 100644 --- a/sdk/objc/native/src/audio/audio_device_ios.mm +++ b/sdk/objc/native/src/audio/audio_device_ios.mm @@ -68,6 +68,7 @@ kMessageTypeCanPlayOrRecordChange, kMessageTypePlayoutGlitchDetected, kMessageOutputVolumeChange, + kMessageTypeRecordingEnabledChange, }; using ios::CheckAndLogError; @@ -373,6 +374,11 @@ static void LogDeviceInfo() { thread_->Post(RTC_FROM_HERE, this, kMessageOutputVolumeChange); } +void AudioDeviceIOS::OnChangedRecordingEnabled() { + RTC_DCHECK(thread_); + thread_->Post(RTC_FROM_HERE, this, kMessageTypeRecordingEnabledChange); +} + OSStatus AudioDeviceIOS::OnDeliverRecordedData(AudioUnitRenderActionFlags* flags, const AudioTimeStamp* time_stamp, UInt32 bus_number, @@ -458,7 +464,7 @@ static void LogDeviceInfo() { // Exclude extreme delta values since they do most likely not correspond // to a real glitch. Instead, the most probable cause is that a headset // has been plugged in or out. There are more direct ways to detect - // audio device changes (see HandleValidRouteChange()) but experiments + // audio device changes (see ValidRouteChange()) but experiments // show that using it leads to more complex implementations. // TODO(henrika): more tests might be needed to come up with an even // better upper limit. @@ -503,6 +509,9 @@ static void LogDeviceInfo() { case kMessageOutputVolumeChange: HandleOutputVolumeChange(); break; + case kMessageTypeRecordingEnabledChange: + HandleAudioSessionRecordingEnabledChange(); + break; } } @@ -673,6 +682,61 @@ static void LogDeviceInfo() { last_output_volume_change_time_ = rtc::TimeMillis(); } +void AudioDeviceIOS::HandleAudioSessionRecordingEnabledChange() { + RTC_DCHECK_RUN_ON(&thread_checker_); + + LOGI() << "HandleAudioSessionRecordingEnabledChange"; + + // If we don't have an audio unit yet, or the audio unit is uninitialized, + // there is no work to do. + if (!audio_unit_ || audio_unit_->GetState() < VoiceProcessingAudioUnit::kInitialized) { + return; + } + + // The audio unit is already initialized or started. 
+ // Check to see if the sample rate or buffer size has changed. + RTC_OBJC_TYPE(RTCAudioSession)* session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance]; + const double session_sample_rate = session.sampleRate; + + // Extra sanity check to ensure that the new sample rate is valid. + if (session_sample_rate <= 0.0) { + RTCLogError(@"Sample rate is invalid: %f", session_sample_rate); + LOGI() << "Sample rate is invalid " << session_sample_rate; + return; + } + // We need to adjust our format and buffer sizes. + // The stream format is about to be changed and it requires that we first + // stop and uninitialize the audio unit to deallocate its resources. + RTCLog(@"Stopping and uninitializing audio unit to adjust buffers."); + bool restart_audio_unit = false; + if (audio_unit_->GetState() == VoiceProcessingAudioUnit::kStarted) { + audio_unit_->Stop(); + restart_audio_unit = true; + PrepareForNewStart(); + } + if (audio_unit_->GetState() == VoiceProcessingAudioUnit::kInitialized) { + audio_unit_->Uninitialize(); + } + + // Allocate new buffers given the new stream format. + SetupAudioBuffersForActiveAudioSession(); + + // Initialize the audio unit again with the new sample rate. + RTC_DCHECK_EQ(playout_parameters_.sample_rate(), session_sample_rate); + if (!audio_unit_->Initialize(session_sample_rate)) { + RTCLogError(@"Failed to initialize the audio unit with sample rate: %f", session_sample_rate); + return; + } + + // Restart the audio unit if it was already running. + if (restart_audio_unit && !audio_unit_->Start()) { + RTCLogError(@"Failed to start audio unit with sample rate: %f", session_sample_rate); + return; + } + + LOGI() << "Successfully enabled audio unit for recording."; +} + void AudioDeviceIOS::UpdateAudioDeviceBuffer() { LOGI() << "UpdateAudioDevicebuffer"; // AttachAudioBuffer() is called at construction by the main class but check diff --git a/sdk/objc/native/src/audio/audio_session_observer.h b/sdk/objc/native/src/audio/audio_session_observer.h index f7c44c8184..978d8e2cdd 100644 --- a/sdk/objc/native/src/audio/audio_session_observer.h +++ b/sdk/objc/native/src/audio/audio_session_observer.h @@ -32,6 +32,8 @@ class AudioSessionObserver { virtual void OnChangedOutputVolume() = 0; + virtual void OnChangedRecordingEnabled() = 0; + protected: virtual ~AudioSessionObserver() {} }; diff --git a/sdk/objc/native/src/audio/voice_processing_audio_unit.mm b/sdk/objc/native/src/audio/voice_processing_audio_unit.mm index 661022f523..ecf7c77181 100644 --- a/sdk/objc/native/src/audio/voice_processing_audio_unit.mm +++ b/sdk/objc/native/src/audio/voice_processing_audio_unit.mm @@ -112,19 +112,6 @@ static OSStatus GetAGCState(AudioUnit audio_unit, UInt32* enabled) { return false; } - // Enable input on the input scope of the input element. - UInt32 enable_input = 1; - result = AudioUnitSetProperty(vpio_unit_, kAudioOutputUnitProperty_EnableIO, - kAudioUnitScope_Input, kInputBus, &enable_input, - sizeof(enable_input)); - if (result != noErr) { - DisposeAudioUnit(); - RTCLogError(@"Failed to enable input on input scope of input element. " - "Error=%ld.", - (long)result); - return false; - } - // Enable output on the output scope of the output element. UInt32 enable_output = 1; result = AudioUnitSetProperty(vpio_unit_, kAudioOutputUnitProperty_EnableIO, @@ -205,6 +192,27 @@ static OSStatus GetAGCState(AudioUnit audio_unit, UInt32* enabled) { LogStreamDescription(format); #endif + // Enable input on the input scope of the input element. 
+ // keep it disabled if audio session is configured for playback only + AVAudioSession* session = [AVAudioSession sharedInstance]; + UInt32 enable_input = 0; + if ([session.category isEqualToString: AVAudioSessionCategoryPlayAndRecord] || + [session.category isEqualToString: AVAudioSessionCategoryRecord]) { + enable_input = 1; + } + RTCLog(@"Initializing AudioUnit, category=%@, enable_input=%d", session.category, enable_input); + // LOGI() << "Initialize" << session.category << ", enable_input=" << enable_input; + result = AudioUnitSetProperty(vpio_unit_, kAudioOutputUnitProperty_EnableIO, + kAudioUnitScope_Input, kInputBus, &enable_input, + sizeof(enable_input)); + if (result != noErr) { + DisposeAudioUnit(); + RTCLogError(@"Failed to enable input on input scope of input element. " + "Error=%ld.", + (long)result); + return false; + } + // Set the format on the output scope of the input element/bus. result = AudioUnitSetProperty(vpio_unit_, kAudioUnitProperty_StreamFormat,
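For reference, below is a minimal sketch of how an application might wire the `SimulcastVideoEncoderFactory` added by this patch into an Android `PeerConnectionFactory`. It is not part of the patch: the choice of `HardwareVideoEncoderFactory` as primary and `SoftwareVideoEncoderFactory` as fallback, the decoder factory, and the `appContext` parameter are assumptions for illustration. The Objective-C additions mirror this via `-[RTC_OBJC_TYPE(RTCVideoEncoderFactorySimulcast) initWithPrimary:fallback:]`.

```java
// Illustrative wiring only; SimulcastVideoEncoderFactory is provided by this patch,
// the surrounding factory choices are assumptions for the example.
import org.webrtc.DefaultVideoDecoderFactory;
import org.webrtc.EglBase;
import org.webrtc.HardwareVideoEncoderFactory;
import org.webrtc.PeerConnectionFactory;
import org.webrtc.SimulcastVideoEncoderFactory;
import org.webrtc.SoftwareVideoEncoderFactory;
import org.webrtc.VideoEncoderFactory;

public final class SimulcastFactoryExample {
  public static PeerConnectionFactory create(android.content.Context appContext) {
    // One-time global initialization of the native library.
    PeerConnectionFactory.initialize(
        PeerConnectionFactory.InitializationOptions.builder(appContext)
            .createInitializationOptions());

    EglBase eglBase = EglBase.create();

    // Primary: hardware (MediaCodec) encoders; fallback: software encoders used
    // when the primary factory cannot provide an encoder for a layer.
    VideoEncoderFactory primary =
        new HardwareVideoEncoderFactory(eglBase.getEglBaseContext(),
            /* enableIntelVp8Encoder= */ true, /* enableH264HighProfile= */ true);
    VideoEncoderFactory fallback = new SoftwareVideoEncoderFactory();

    // Each encoder created by this factory is backed by the native
    // SimulcastEncoderAdapter, which delegates per-layer encoder creation
    // to the wrapped primary/fallback factories.
    VideoEncoderFactory encoderFactory = new SimulcastVideoEncoderFactory(primary, fallback);

    return PeerConnectionFactory.builder()
        .setVideoEncoderFactory(encoderFactory)
        .setVideoDecoderFactory(new DefaultVideoDecoderFactory(eglBase.getEglBaseContext()))
        .createPeerConnectionFactory();
  }
}
```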