feat: support 4.3.2.5 (#1236)
* chore: bump native sdk to 4.3.2.5

* [AUTO] Generate codes by terra (#1235)

Co-authored-by: guoxianzhe <[email protected]>

---------

Co-authored-by: sda-rob <[email protected]>
Co-authored-by: guoxianzhe <[email protected]>
3 people authored Oct 21, 2024
1 parent 9155692 commit 7bae5ff
Showing 14 changed files with 419 additions and 262 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/terra.yml
@@ -5,7 +5,7 @@ on:

jobs:
generate-codes:
runs-on: ubuntu-latest
runs-on: ubuntu-22.04
steps:
- name: Checkout
uses: actions/checkout@v4
8 changes: 4 additions & 4 deletions package.json
@@ -142,9 +142,9 @@
"yuv-canvas": "1.2.6"
},
"agora_electron": {
"iris_sdk_win": "https://download.agora.io/sdk/release/iris_4.3.2.2-build.1_DCG_Windows_Video_Standalone_20240725_0438_474.zip",
"iris_sdk_mac": "https://download.agora.io/sdk/release/iris_4.3.2.2-build.1_DCG_Mac_Video_Standalone_20240725_0406_440.zip",
"native_sdk_win": "https://download.agora.io/sdk/release/Agora_Native_SDK_for_Windows_rel.v4.3.2.2_25732_FULL_20240718_1654_320611.zip",
"native_sdk_mac": "https://download.agora.io/sdk/release/Agora_Native_SDK_for_Mac_rel.v4.3.2.2_21182_FULL_20240718_1709_320612.zip"
"iris_sdk_win": "https://download.agora.io/sdk/release/iris_4.3.2.5-build.4_DCG_Windows_Video_Standalone_20240914_0239_538.zip",
"iris_sdk_mac": "https://download.agora.io/sdk/release/iris_4.3.2.5-build.4_DCG_Mac_Video_Standalone_20240914_0239_499.zip",
"native_sdk_win": "https://download.agora.io/sdk/release/Agora_Native_SDK_for_Windows_rel.v4.3.2.5_26453_FULL_20240911_1511_334994.zip",
"native_sdk_mac": "https://download.agora.io/sdk/release/Agora_Native_SDK_for_Mac_rel.v4.3.2.5_21916_FULL_20240911_1542_334995.zip"
}
}
2 changes: 1 addition & 1 deletion scripts/terra/config/impl_config.yaml
@@ -2,7 +2,7 @@ parsers:
- name: RTCParser
package: '@agoraio-extensions/terra_shared_configs'
args:
sdkVersion: 4.3.2.2
sdkVersion: 4.3.2.5
definesMacros:
- __ELECTRON__
FixEnumConstantParser:
2 changes: 1 addition & 1 deletion scripts/terra/config/types_config.yaml
@@ -2,7 +2,7 @@ parsers:
- name: RTCParser
package: '@agoraio-extensions/terra_shared_configs'
args:
sdkVersion: 4.3.2.2
sdkVersion: 4.3.2.5
definesMacros:
- __ELECTRON__
FixEnumConstantParser:
66 changes: 55 additions & 11 deletions ts/Private/AgoraBase.ts
@@ -2026,7 +2026,7 @@ export enum VideoApplicationScenarioType {
*/
ApplicationScenarioGeneral = 0,
/**
* If set to ApplicationScenarioMeeting (1), the SDK automatically enables the following strategies:
* ApplicationScenarioMeeting (1) is suitable for meeting scenarios. The SDK automatically enables the following strategies:
* In meeting scenarios where low-quality video streams are required to have a high bitrate, the SDK automatically enables multiple technologies used to deal with network congestion, to enhance the performance of the low-quality streams and to ensure smooth reception by subscribers.
* The SDK monitors the number of subscribers to the high-quality video stream in real time and dynamically adjusts its configuration based on the number of subscribers.
* If nobody subscribes to the high-quality stream, the SDK automatically reduces its bitrate and frame rate to save upstream bandwidth.
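
To illustrate how an application opts into this scenario, here is a minimal, hedged sketch (it assumes `createAgoraRtcEngine` and `IRtcEngine.setVideoScenario` are available as in other 4.x releases of this SDK; the app ID is a placeholder):

```ts
import {
  createAgoraRtcEngine,
  VideoApplicationScenarioType,
} from 'agora-electron-sdk';

const engine = createAgoraRtcEngine();
engine.initialize({ appId: '<YOUR_APP_ID>' }); // placeholder app ID

// Enable the meeting-oriented strategies described above.
engine.setVideoScenario(
  VideoApplicationScenarioType.ApplicationScenarioMeeting
);
```
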
@@ -2292,39 +2292,39 @@ export enum LocalVideoStreamReason {
*/
LocalVideoStreamReasonScreenCaptureWindowNotSupported = 20,
/**
* @ignore
* 21: (Windows only) The screen has not captured any data available for window sharing.
*/
LocalVideoStreamReasonScreenCaptureFailure = 21,
/**
* @ignore
* 22: No permission for screen capture.
*/
LocalVideoStreamReasonScreenCaptureNoPermission = 22,
/**
* @ignore
* 24: (Windows only) An unexpected error occurred during screen sharing (possibly due to window blocking failure), resulting in decreased performance, but the screen sharing process itself was not affected.
*/
LocalVideoStreamReasonScreenCaptureAutoFallback = 24,
/**
* @ignore
* 25: (Windows only) The window for the current screen capture is hidden and not visible on the current screen.
*/
LocalVideoStreamReasonScreenCaptureWindowHidden = 25,
/**
* @ignore
* 26: (Windows only) The window for screen capture has been restored from hidden state.
*/
LocalVideoStreamReasonScreenCaptureWindowRecoverFromHidden = 26,
/**
* 27: The window for screen capture has been restored from the minimized state.
*/
LocalVideoStreamReasonScreenCaptureWindowRecoverFromMinimized = 27,
/**
* @ignore
* 28: (Windows only) Screen capture has been paused. Common scenarios reporting this error code: The current screen may have been switched to a secure desktop, such as a UAC dialog box or Winlogon desktop.
*/
LocalVideoStreamReasonScreenCapturePaused = 28,
/**
* @ignore
* 29: (Windows only) Screen capture has resumed from paused state.
*/
LocalVideoStreamReasonScreenCaptureResumed = 29,
/**
* @ignore
* 30: The displayer used for screen capture is disconnected.
*/
LocalVideoStreamReasonScreenCaptureDisplayDisconnected = 30,
}
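
For context, these screen-capture reason codes surface through the `onLocalVideoStateChanged` callback. A hedged sketch of logging a few of them (the callback name and parameter order are assumed to match the 4.3.x event handler; they may differ slightly between releases):

```ts
import {
  IRtcEngineEventHandler,
  LocalVideoStreamReason,
  LocalVideoStreamState,
  VideoSourceType,
} from 'agora-electron-sdk';

const handler: IRtcEngineEventHandler = {
  onLocalVideoStateChanged(
    source: VideoSourceType,
    state: LocalVideoStreamState,
    reason: LocalVideoStreamReason
  ) {
    // React to the screen-capture specific reasons listed above.
    switch (reason) {
      case LocalVideoStreamReason.LocalVideoStreamReasonScreenCapturePaused:
        console.warn('Screen capture paused (e.g. a secure desktop is shown)');
        break;
      case LocalVideoStreamReason.LocalVideoStreamReasonScreenCaptureResumed:
        console.info('Screen capture resumed');
        break;
      case LocalVideoStreamReason.LocalVideoStreamReasonScreenCaptureWindowHidden:
        console.warn('The shared window is hidden');
        break;
      default:
        break;
    }
  },
};

// engine.registerEventHandler(handler); // placeholder engine instance
```
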
@@ -3525,7 +3525,7 @@ export enum NetworkType {
*/
export enum VideoViewSetupMode {
/**
* 0: (Default) Replaces a view.
* 0: (Default) Clear all added views and replace with a new view.
*/
VideoViewSetupReplace = 0,
/**
@@ -3596,6 +3596,50 @@ export class VideoCanvas {
position?: VideoModulePosition;
}
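
As a rough usage sketch of the view setup types above (it assumes an initialized engine exposing `setupLocalVideo` and that `VideoSourceType.VideoSourceCamera` names the local camera track; the view handle is a placeholder):

```ts
import {
  VideoCanvas,
  VideoSourceType,
  VideoViewSetupMode,
} from 'agora-electron-sdk';

const canvas: VideoCanvas = {
  uid: 0, // 0 refers to the local user
  sourceType: VideoSourceType.VideoSourceCamera,
  // Clear any views already bound to this track and bind this one instead.
  setupMode: VideoViewSetupMode.VideoViewSetupReplace,
  // view: document.getElementById('local-video'), // platform-specific view handle
};

// engine.setupLocalVideo(canvas); // placeholder engine instance
```
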

/**
* @ignore
*/
export enum PipState {
/**
* @ignore
*/
PipStateStarted = 0,
/**
* @ignore
*/
PipStateStopped = 1,
/**
* @ignore
*/
PipStateFailed = 2,
}

/**
* @ignore
*/
export class PipOptions {
/**
* @ignore
*/
contentSource?: any;
/**
* @ignore
*/
contentWidth?: number;
/**
* @ignore
*/
contentHeight?: number;
/**
* @ignore
*/
autoEnterPip?: boolean;
/**
* @ignore
*/
canvas?: VideoCanvas;
}

/**
* The contrast level.
*/
@@ -4248,7 +4292,7 @@ export class AudioRecordingConfiguration {
*/
fileRecordingType?: AudioFileRecordingType;
/**
* Recording quality. See AudioRecordingQualityType. Note: This parameter applies to AAC files only.
* Recording quality. See AudioRecordingQualityType. This parameter applies to AAC files only.
*/
quality?: AudioRecordingQualityType;
/**
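
As a hedged sketch of where the `quality` field above comes into play (field values are placeholders; `startAudioRecording(config)` is assumed to exist on the engine as in other 4.x releases):

```ts
import {
  AudioRecordingConfiguration,
  AudioFileRecordingType,
  AudioRecordingQualityType,
} from 'agora-electron-sdk';

const recordingConfig: AudioRecordingConfiguration = {
  filePath: '/tmp/recording.aac', // placeholder path; AAC output
  encode: true,
  sampleRate: 32000,
  fileRecordingType: AudioFileRecordingType.AudioFileRecordingMixed,
  // quality only takes effect for AAC files, as noted above.
  quality: AudioRecordingQualityType.AudioRecordingQualityMedium,
};

// engine.startAudioRecording(recordingConfig); // placeholder engine instance
```
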
20 changes: 14 additions & 6 deletions ts/Private/AgoraMediaBase.ts
@@ -369,6 +369,10 @@ export class AudioPcmFrame {
* The number of audio channels.
*/
num_channels_?: number;
/**
* @ignore
*/
audio_track_number_?: number;
/**
* The number of bytes per sample.
*/
@@ -612,19 +616,21 @@ export class ExternalVideoFrame {
*/
matrix?: number[];
/**
* This parameter only applies to video data in Texture format. The MetaData buffer. The default value is NULL.
* @ignore
*/
metadata_buffer?: Uint8Array;
/**
* @ignore
*/
metadata_size?: number;
/**
* @ignore
* The alpha channel data output by the portrait segmentation algorithm. This data matches the size of the video frame, with each pixel value ranging from [0, 255], where 0 represents the background and 255 represents the foreground (portrait). By setting this parameter, you can render the video background into various effects, such as transparent, solid color, image, or video. In custom video rendering scenarios, ensure that both the video frame and alphaBuffer are of the Full Range type; other types may cause abnormal alpha data rendering.
*/
alphaBuffer?: Uint8Array;
/**
* @ignore
* This parameter only applies to video data in BGRA or RGBA format. Whether to extract the alpha channel data from the video frame and automatically fill it into alphaBuffer: true: Extract and fill the alpha channel data. false: (Default) Do not extract and fill the alpha channel data. For video data in BGRA or RGBA format, you can set the alpha channel data in either of the following ways:
* Automatically, by setting this parameter to true.
* Manually, through the alphaBuffer parameter.
*/
fillAlphaBuffer?: boolean;
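
To make the `fillAlphaBuffer` note above concrete, a minimal sketch of pushing an RGBA frame through the external video source (buffer contents and dimensions are placeholders; `pushVideoFrame` is assumed to be the custom-capture entry point on `IMediaEngine` as in other 4.x releases):

```ts
import {
  ExternalVideoFrame,
  VideoBufferType,
  VideoPixelFormat,
} from 'agora-electron-sdk';

const externalFrame: ExternalVideoFrame = {
  type: VideoBufferType.VideoBufferRawData,
  format: VideoPixelFormat.VideoPixelRgba,
  buffer: new Uint8Array(640 * 480 * 4), // placeholder RGBA pixel data
  stride: 640,
  height: 480,
  timestamp: Date.now(),
  // Ask the SDK to derive the alpha channel from the RGBA data instead of
  // supplying alphaBuffer manually.
  fillAlphaBuffer: true,
};

// mediaEngine.pushVideoFrame(externalFrame); // placeholder IMediaEngine instance
```
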
/**
@@ -700,11 +706,11 @@ export class VideoFrame {
*/
textureId?: number;
/**
* @ignore
* This parameter only applies to video data in Texture format. The incoming 4 × 4 transformation matrix. The typical value is the identity matrix.
*/
matrix?: number[];
/**
* @ignore
* The alpha channel data output by the portrait segmentation algorithm. This data matches the size of the video frame, with each pixel value ranging from [0, 255], where 0 represents the background and 255 represents the foreground (portrait). By setting this parameter, you can render the video background into various effects, such as transparent, solid color, image, or video. In custom video rendering scenarios, ensure that both the video frame and alphaBuffer are of the Full Range type; other types may cause abnormal alpha data rendering.
*/
alphaBuffer?: Uint8Array;
/**
@@ -1267,7 +1273,9 @@ export interface IFaceInfoObserver {
* pitch: Head pitch angle. A positive value means looking down, while a negative value means looking up.
* yaw: Head yaw angle. A positive value means turning left, while a negative value means turning right.
* roll: Head roll angle. A positive value means tilting to the right, while a negative value means tilting to the left.
* timestamp: String. The timestamp of the output result, in milliseconds. Here is an example of JSON: { "faces":[{ "blendshapes":{ "eyeBlinkLeft":0.9, "eyeLookDownLeft":0.0, "eyeLookInLeft":0.0, "eyeLookOutLeft":0.0, "eyeLookUpLeft":0.0, "eyeSquintLeft":0.0, "eyeWideLeft":0.0, "eyeBlinkRight":0.0, "eyeLookDownRight":0.0, "eyeLookInRight":0.0, "eyeLookOutRight":0.0, "eyeLookUpRight":0.0, "eyeSquintRight":0.0, "eyeWideRight":0.0, "jawForward":0.0, "jawLeft":0.0, "jawRight":0.0, "jawOpen":0.0, "mouthClose":0.0, "mouthFunnel":0.0, "mouthPucker":0.0, "mouthLeft":0.0, "mouthRight":0.0, "mouthSmileLeft":0.0, "mouthSmileRight":0.0, "mouthFrownLeft":0.0, "mouthFrownRight":0.0, "mouthDimpleLeft":0.0, "mouthDimpleRight":0.0, "mouthStretchLeft":0.0, "mouthStretchRight":0.0, "mouthRollLower":0.0, "mouthRollUpper":0.0, "mouthShrugLower":0.0, "mouthShrugUpper":0.0, "mouthPressLeft":0.0, "mouthPressRight":0.0, "mouthLowerDownLeft":0.0, "mouthLowerDownRight":0.0, "mouthUpperUpLeft":0.0, "mouthUpperUpRight":0.0, "browDownLeft":0.0, "browDownRight":0.0, "browInnerUp":0.0, "browOuterUpLeft":0.0, "browOuterUpRight":0.0, "cheekPuff":0.0, "cheekSquintLeft":0.0, "cheekSquintRight":0.0, "noseSneerLeft":0.0, "noseSneerRight":0.0, "tongueOut":0.0 }, "rotation":{"pitch":30.0, "yaw":25.5, "roll":-15.5}, }], "timestamp":"654879876546" }
* timestamp: String. The timestamp of the output result, in milliseconds. Here is an example of JSON:
* { "faces":[{ "blendshapes":{ "eyeBlinkLeft":0.9, "eyeLookDownLeft":0.0, "eyeLookInLeft":0.0, "eyeLookOutLeft":0.0, "eyeLookUpLeft":0.0, "eyeSquintLeft":0.0, "eyeWideLeft":0.0, "eyeBlinkRight":0.0, "eyeLookDownRight":0.0, "eyeLookInRight":0.0, "eyeLookOutRight":0.0, "eyeLookUpRight":0.0, "eyeSquintRight":0.0, "eyeWideRight":0.0, "jawForward":0.0, "jawLeft":0.0, "jawRight":0.0, "jawOpen":0.0, "mouthClose":0.0, "mouthFunnel":0.0, "mouthPucker":0.0, "mouthLeft":0.0, "mouthRight":0.0, "mouthSmileLeft":0.0, "mouthSmileRight":0.0, "mouthFrownLeft":0.0, "mouthFrownRight":0.0, "mouthDimpleLeft":0.0, "mouthDimpleRight":0.0, "mouthStretchLeft":0.0, "mouthStretchRight":0.0, "mouthRollLower":0.0, "mouthRollUpper":0.0, "mouthShrugLower":0.0, "mouthShrugUpper":0.0, "mouthPressLeft":0.0, "mouthPressRight":0.0, "mouthLowerDownLeft":0.0, "mouthLowerDownRight":0.0, "mouthUpperUpLeft":0.0, "mouthUpperUpRight":0.0, "browDownLeft":0.0, "browDownRight":0.0, "browInnerUp":0.0, "browOuterUpLeft":0.0, "browOuterUpRight":0.0, "cheekPuff":0.0, "cheekSquintLeft":0.0, "cheekSquintRight":0.0, "noseSneerLeft":0.0, "noseSneerRight":0.0, "tongueOut":0.0 }, "rotation":{"pitch":30.0, "yaw":25.5, "roll":-15.5},
* }], "timestamp":"654879876546" }
*
* @returns
* true : Facial information JSON parsing successful. false : Facial information JSON parsing failed.
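
Because the facial information arrives as a JSON string, here is a small, SDK-agnostic parsing sketch (field names follow the example JSON above; the helper itself is hypothetical and not part of the SDK):

```ts
interface FaceRotation {
  pitch: number;
  yaw: number;
  roll: number;
}

// Hypothetical helper: extract head rotation and one blendshape per face.
function parseFaceInfo(outFaceInfo: string): void {
  try {
    const info = JSON.parse(outFaceInfo);
    for (const face of info.faces ?? []) {
      const rotation: FaceRotation = face.rotation;
      const eyeBlinkLeft: number = face.blendshapes?.eyeBlinkLeft ?? 0;
      console.log(
        `pitch=${rotation.pitch} yaw=${rotation.yaw} roll=${rotation.roll}`,
        `eyeBlinkLeft=${eyeBlinkLeft}`,
        `timestamp=${info.timestamp}`
      );
    }
  } catch (e) {
    console.error('Failed to parse face info JSON', e);
  }
}
```
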
5 changes: 5 additions & 0 deletions ts/Private/IAgoraMediaEngine.ts
@@ -321,4 +321,9 @@ export abstract class IMediaEngine {
* < 0: Failure.
*/
abstract unregisterFaceInfoObserver(observer: IFaceInfoObserver): number;

/**
* @ignore
*/
abstract setExternalRemoteEglContext(eglContext: any): number;
}
23 changes: 11 additions & 12 deletions ts/Private/IAgoraMediaPlayer.ts
@@ -31,7 +31,7 @@ export abstract class IMediaPlayer {
/**
* Opens the media resource.
*
* This method is called asynchronously. If you need to play a media file, make sure you receive the onPlayerSourceStateChanged callback reporting PlayerStateOpenCompleted before calling the play method to play the file.
* This method is called asynchronously.
*
* @param url The path of the media file. Both local path and online path are supported.
* @param startPos The starting position (ms) for playback. Default value is 0.
@@ -58,8 +58,6 @@
/**
* Plays the media file.
*
* After calling open or seek, you can call this method to play the media file.
*
* @returns
* 0: Success.
* < 0: Failure.
@@ -78,6 +76,8 @@
/**
* Stops playing the media track.
*
* After calling this method to stop playback, if you want to play again, you need to call open or openWithMediaSource to open the media resource.
*
* @returns
* 0: Success.
* < 0: Failure.
@@ -96,9 +96,8 @@
/**
* Seeks to a new playback position.
*
* After successfully calling this method, you will receive the onPlayerEvent callback, reporting the result of the seek operation to the new playback position. To play the media file from a specific position, do the following:
* Call this method to seek to the position you want to begin playback.
* Call the play method to play the media file.
* If you call seek after the playback has completed (that is, after receiving the onPlayerSourceStateChanged callback reporting the playback state as PlayerStatePlaybackCompleted or PlayerStatePlaybackAllLoopsCompleted), the SDK plays the media file from the specified position. At this point, you receive the onPlayerSourceStateChanged callback reporting the playback state as PlayerStatePlaying.
* If you call seek while the playback is paused, the SDK seeks to the specified position upon a successful call of this method. To resume playback, call resume or play.
*
* @param newPos The new playback position (ms).
*
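
Putting the `open`, `play`, and `seek` notes above together, a hedged end-to-end sketch (the player comes from `createMediaPlayer()` on an initialized engine; waiting for `PlayerStateOpenCompleted` is shown only as a comment because observer callback signatures vary slightly between releases):

```ts
import { createAgoraRtcEngine } from 'agora-electron-sdk';

const engine = createAgoraRtcEngine();
engine.initialize({ appId: '<YOUR_APP_ID>' }); // placeholder app ID
const player = engine.createMediaPlayer();

// 1. Open the media resource; this call is asynchronous.
player.open('https://example.com/media/sample.mp4', 0);

// 2. Once onPlayerSourceStateChanged reports PlayerStateOpenCompleted,
//    start playback.
player.play();

// 3. Jump to the 30-second mark. If playback had already completed, this
//    restarts playback from that position; if playback is paused, call
//    resume() or play() afterwards to continue.
player.seek(30 * 1000);
```
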
@@ -152,8 +151,6 @@
/**
* Gets the detailed information of the media stream.
*
* Call this method after calling getStreamCount.
*
* @param index The index of the media stream. This parameter must be less than the return value of getStreamCount.
*
* @returns
@@ -168,6 +165,8 @@
* If you want to loop, call this method and set the number of loops. When the loop finishes, the SDK triggers onPlayerSourceStateChanged and reports the playback state as PlayerStatePlaybackAllLoopsCompleted.
*
* @param loopCount The number of times the audio effect loops:
* ≥ 0: The number of playback loops. For example, setting it to 0 means no looping (the file plays once); setting it to 1 means looping once (the file plays twice in total).
* -1: Play the audio file in an infinite loop.
*
* @returns
* 0: Success.
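
A short illustration of the loop-count semantics above, reusing the `player` instance from the earlier sketch:

```ts
player.setLoopCount(0);  // play once, no looping
player.setLoopCount(1);  // loop once, i.e. play twice in total
player.setLoopCount(-1); // loop indefinitely
```
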
@@ -569,9 +568,9 @@
abstract setSoundPositionParams(pan: number, gain: number): number;

/**
* Set media player options for providing technical previews or special customization features.
* Sets media player options.
*
* The media player supports setting options through key and value. In general, you don't need to know about the option settings. You can use the default option settings of the media player. The difference between this method and setPlayerOptionInString is that the value parameter of this method is of type Int, while the value of setPlayerOptionInString is of type String. These two methods cannot be used together. Ensure that you call this method before open or openWithMediaSource.
* The media player supports setting options through key and value. The difference between this method and setPlayerOptionInString is that the value parameter of this method is of type Int, while the value of setPlayerOptionInString is of type String. These two methods cannot be used together.
*
* @param key The key of the option.
* @param value The value of the key.
Expand All @@ -583,9 +582,9 @@ export abstract class IMediaPlayer {
abstract setPlayerOptionInInt(key: string, value: number): number;

/**
* Set media player options for providing technical previews or special customization features.
* Sets media player options.
*
* Ensure that you call this method before open or openWithMediaSource. The media player supports setting options through key and value. In general, you don't need to know about the option settings. You can use the default option settings of the media player. The difference between this method and setPlayerOptionInInt is that the value parameter of this method is of type String, while the value of setPlayerOptionInInt is of type String. These two methods cannot be used together.
* The media player supports setting options through key and value. The difference between this method and setPlayerOptionInInt is that the value parameter of this method is of type String, while the value of setPlayerOptionInInt is of type Int. These two methods cannot be used together.
*
* @param key The key of the option.
* @param value The value of the key.
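
A hedged sketch of both option setters, reusing the `player` instance from the earlier sketch; the option keys below are purely hypothetical placeholders, since real keys are not documented in this file:

```ts
// Integer-valued option (hypothetical key).
player.setPlayerOptionInInt('example_int_option', 1);

// String-valued option (hypothetical key).
player.setPlayerOptionInString('example_string_option', 'value');
```
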
14 changes: 13 additions & 1 deletion ts/Private/IAgoraMusicContentCenter.ts
@@ -289,7 +289,19 @@
): void;

/**
* @ignore
* Reports the detailed information of the music resource.
*
* The SDK triggers this callback after you call getSongSimpleInfo to get the detailed information of a music resource.
*
* @param requestId The request ID, which is the unique identifier of this request.
* @param songCode The code of the music, which is a unique identifier of the music.
* @param simpleInfo The information of the music resource, including the following:
* The start and end times (ms) of the chorus segment
* The download URL of the lyrics for the chorus segment
* The duration (ms) of the chorus segment
* The song name
* The artist name
* @param reason The request status code of the Music Content Center. See MusicContentCenterStateReason.
*/
onSongSimpleInfoResult?(
requestId: string,
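
A hedged sketch of implementing this callback (the parameter list follows the documentation above; the exact types and the registration call are assumed to match this SDK version):

```ts
import {
  IMusicContentCenterEventHandler,
  MusicContentCenterStateReason,
} from 'agora-electron-sdk';

const musicEventHandler: IMusicContentCenterEventHandler = {
  onSongSimpleInfoResult(
    requestId: string,
    songCode: number,
    simpleInfo: string,
    reason: MusicContentCenterStateReason
  ) {
    // simpleInfo is a JSON string carrying the chorus start/end times, the
    // lyric download URL, the chorus duration, the song name, and the artist.
    console.log(`request ${requestId}, song ${songCode}, reason ${reason}`);
    console.log(JSON.parse(simpleInfo));
  },
};

// musicContentCenter.registerEventHandler(musicEventHandler); // placeholder instance
```
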
