diff --git a/specification/ai/Face/examples/v1.3-preview.1/Detect.json b/specification/ai/Face/examples/v1.3-preview.1/Detect.json new file mode 100644 index 000000000000..9dcd2e753490 --- /dev/null +++ b/specification/ai/Face/examples/v1.3-preview.1/Detect.json @@ -0,0 +1,181 @@ +{ + "title": "Detect with Image", + "operationId": "FaceDetectionOperations_Detect", + "parameters": { + "apiVersion": "v1.3-preview.1", + "returnFaceId": true, + "returnFaceLandmarks": true, + "returnFaceAttributes": "glasses,headPose,occlusion,accessories,blur,exposure,noise,qualityForRecognition", + "recognitionModel": "recognition_03", + "returnRecognitionModel": true, + "detectionModel": "detection_01", + "faceIdTimeToLive": 60, + "imageContent": "" + }, + "responses": { + "200": { + "body": [ + { + "faceId": "c5c24a82-6845-4031-9d5d-978df9175426", + "recognitionModel": "recognition_03", + "faceRectangle": { + "width": 78, + "height": 78, + "left": 394, + "top": 54 + }, + "faceLandmarks": { + "pupilLeft": { + "x": 412.7, + "y": 78.4 + }, + "pupilRight": { + "x": 446.8, + "y": 74.2 + }, + "noseTip": { + "x": 437.7, + "y": 92.4 + }, + "mouthLeft": { + "x": 417.8, + "y": 114.4 + }, + "mouthRight": { + "x": 451.3, + "y": 109.3 + }, + "eyebrowLeftOuter": { + "x": 397.9, + "y": 78.5 + }, + "eyebrowLeftInner": { + "x": 425.4, + "y": 70.5 + }, + "eyeLeftOuter": { + "x": 406.7, + "y": 80.6 + }, + "eyeLeftTop": { + "x": 412.2, + "y": 76.2 + }, + "eyeLeftBottom": { + "x": 413.0, + "y": 80.1 + }, + "eyeLeftInner": { + "x": 418.9, + "y": 78.0 + }, + "eyebrowRightInner": { + "x": 4.8, + "y": 69.7 + }, + "eyebrowRightOuter": { + "x": 5.5, + "y": 68.5 + }, + "eyeRightInner": { + "x": 441.5, + "y": 75.0 + }, + "eyeRightTop": { + "x": 446.4, + "y": 71.7 + }, + "eyeRightBottom": { + "x": 447.0, + "y": 75.3 + }, + "eyeRightOuter": { + "x": 451.7, + "y": 73.4 + }, + "noseRootLeft": { + "x": 428.0, + "y": 77.1 + }, + "noseRootRight": { + "x": 435.8, + "y": 75.6 + }, + "noseLeftAlarTop": { + "x": 428.3, + 
"y": 89.7 + }, + "noseRightAlarTop": { + "x": 442.2, + "y": 87.0 + }, + "noseLeftAlarOutTip": { + "x": 424.3, + "y": 96.4 + }, + "noseRightAlarOutTip": { + "x": 446.6, + "y": 92.5 + }, + "upperLipTop": { + "x": 437.6, + "y": 105.9 + }, + "upperLipBottom": { + "x": 437.6, + "y": 108.2 + }, + "underLipTop": { + "x": 436.8, + "y": 111.4 + }, + "underLipBottom": { + "x": 437.3, + "y": 114.5 + } + }, + "faceAttributes": { + "glasses": "sunglasses", + "headPose": { + "roll": 2.1, + "yaw": 3, + "pitch": 1.6 + }, + "occlusion": { + "foreheadOccluded": false, + "eyeOccluded": false, + "mouthOccluded": false + }, + "accessories": [ + { + "type": "headwear", + "confidence": 0.99 + }, + { + "type": "glasses", + "confidence": 1.0 + }, + { + "type": "mask", + "confidence": 0.87 + } + ], + "blur": { + "blurLevel": "medium", + "value": 0.51 + }, + "exposure": { + "exposureLevel": "goodExposure", + "value": 0.55 + }, + "noise": { + "noiseLevel": "low", + "value": 0.12 + }, + "qualityForRecognition": "high" + } + } + ] + } + } +} diff --git a/specification/ai/Face/examples/v1.3-preview.1/DetectFromSessionImageId.json b/specification/ai/Face/examples/v1.3-preview.1/DetectFromSessionImageId.json new file mode 100644 index 000000000000..d5413102b43d --- /dev/null +++ b/specification/ai/Face/examples/v1.3-preview.1/DetectFromSessionImageId.json @@ -0,0 +1,183 @@ +{ + "title": "Detect From Session Image Id", + "operationId": "FaceDetectionOperations_DetectFromSessionImageId", + "parameters": { + "apiVersion": "v1.3-preview.1", + "returnFaceId": true, + "returnFaceLandmarks": true, + "returnFaceAttributes": "glasses,headPose,occlusion,accessories,blur,exposure,noise,qualityForRecognition", + "recognitionModel": "recognition_03", + "returnRecognitionModel": true, + "detectionModel": "detection_01", + "faceIdTimeToLive": 60, + "body": { + "sessionImageId": "aa93ce80-9a9b-48bd-ae1a-1c7543841e92" + } + }, + "responses": { + "200": { + "body": [ + { + "faceId": 
"c5c24a82-6845-4031-9d5d-978df9175426", + "recognitionModel": "recognition_03", + "faceRectangle": { + "width": 78, + "height": 78, + "left": 394, + "top": 54 + }, + "faceLandmarks": { + "pupilLeft": { + "x": 412.7, + "y": 78.4 + }, + "pupilRight": { + "x": 446.8, + "y": 74.2 + }, + "noseTip": { + "x": 437.7, + "y": 92.4 + }, + "mouthLeft": { + "x": 417.8, + "y": 114.4 + }, + "mouthRight": { + "x": 451.3, + "y": 109.3 + }, + "eyebrowLeftOuter": { + "x": 397.9, + "y": 78.5 + }, + "eyebrowLeftInner": { + "x": 425.4, + "y": 70.5 + }, + "eyeLeftOuter": { + "x": 406.7, + "y": 80.6 + }, + "eyeLeftTop": { + "x": 412.2, + "y": 76.2 + }, + "eyeLeftBottom": { + "x": 413.0, + "y": 80.1 + }, + "eyeLeftInner": { + "x": 418.9, + "y": 78.0 + }, + "eyebrowRightInner": { + "x": 4.8, + "y": 69.7 + }, + "eyebrowRightOuter": { + "x": 5.5, + "y": 68.5 + }, + "eyeRightInner": { + "x": 441.5, + "y": 75.0 + }, + "eyeRightTop": { + "x": 446.4, + "y": 71.7 + }, + "eyeRightBottom": { + "x": 447.0, + "y": 75.3 + }, + "eyeRightOuter": { + "x": 451.7, + "y": 73.4 + }, + "noseRootLeft": { + "x": 428.0, + "y": 77.1 + }, + "noseRootRight": { + "x": 435.8, + "y": 75.6 + }, + "noseLeftAlarTop": { + "x": 428.3, + "y": 89.7 + }, + "noseRightAlarTop": { + "x": 442.2, + "y": 87.0 + }, + "noseLeftAlarOutTip": { + "x": 424.3, + "y": 96.4 + }, + "noseRightAlarOutTip": { + "x": 446.6, + "y": 92.5 + }, + "upperLipTop": { + "x": 437.6, + "y": 105.9 + }, + "upperLipBottom": { + "x": 437.6, + "y": 108.2 + }, + "underLipTop": { + "x": 436.8, + "y": 111.4 + }, + "underLipBottom": { + "x": 437.3, + "y": 114.5 + } + }, + "faceAttributes": { + "glasses": "sunglasses", + "headPose": { + "roll": 2.1, + "yaw": 3, + "pitch": 1.6 + }, + "occlusion": { + "foreheadOccluded": false, + "eyeOccluded": false, + "mouthOccluded": false + }, + "accessories": [ + { + "type": "headwear", + "confidence": 0.99 + }, + { + "type": "glasses", + "confidence": 1.0 + }, + { + "type": "mask", + "confidence": 0.87 + } + ], + "blur": { + 
"blurLevel": "medium", + "value": 0.51 + }, + "exposure": { + "exposureLevel": "goodExposure", + "value": 0.55 + }, + "noise": { + "noiseLevel": "low", + "value": 0.12 + }, + "qualityForRecognition": "high" + } + } + ] + } + } +} diff --git a/specification/ai/Face/examples/v1.3-preview.1/DetectFromUrl.json b/specification/ai/Face/examples/v1.3-preview.1/DetectFromUrl.json new file mode 100644 index 000000000000..b45a4a6d5687 --- /dev/null +++ b/specification/ai/Face/examples/v1.3-preview.1/DetectFromUrl.json @@ -0,0 +1,183 @@ +{ + "title": "Detect with Image URL", + "operationId": "FaceDetectionOperations_DetectFromUrl", + "parameters": { + "apiVersion": "v1.3-preview.1", + "returnFaceId": true, + "returnFaceLandmarks": true, + "returnFaceAttributes": "glasses,headPose,occlusion,accessories,blur,exposure,noise,qualityForRecognition", + "recognitionModel": "recognition_03", + "returnRecognitionModel": true, + "detectionModel": "detection_01", + "faceIdTimeToLive": 60, + "body": { + "url": "https://microsoft.com/example.jpg" + } + }, + "responses": { + "200": { + "body": [ + { + "faceId": "c5c24a82-6845-4031-9d5d-978df9175426", + "recognitionModel": "recognition_03", + "faceRectangle": { + "width": 78, + "height": 78, + "left": 394, + "top": 54 + }, + "faceLandmarks": { + "pupilLeft": { + "x": 412.7, + "y": 78.4 + }, + "pupilRight": { + "x": 446.8, + "y": 74.2 + }, + "noseTip": { + "x": 437.7, + "y": 92.4 + }, + "mouthLeft": { + "x": 417.8, + "y": 114.4 + }, + "mouthRight": { + "x": 451.3, + "y": 109.3 + }, + "eyebrowLeftOuter": { + "x": 397.9, + "y": 78.5 + }, + "eyebrowLeftInner": { + "x": 425.4, + "y": 70.5 + }, + "eyeLeftOuter": { + "x": 406.7, + "y": 80.6 + }, + "eyeLeftTop": { + "x": 412.2, + "y": 76.2 + }, + "eyeLeftBottom": { + "x": 413.0, + "y": 80.1 + }, + "eyeLeftInner": { + "x": 418.9, + "y": 78.0 + }, + "eyebrowRightInner": { + "x": 4.8, + "y": 69.7 + }, + "eyebrowRightOuter": { + "x": 5.5, + "y": 68.5 + }, + "eyeRightInner": { + "x": 441.5, + "y": 75.0 
+ }, + "eyeRightTop": { + "x": 446.4, + "y": 71.7 + }, + "eyeRightBottom": { + "x": 447.0, + "y": 75.3 + }, + "eyeRightOuter": { + "x": 451.7, + "y": 73.4 + }, + "noseRootLeft": { + "x": 428.0, + "y": 77.1 + }, + "noseRootRight": { + "x": 435.8, + "y": 75.6 + }, + "noseLeftAlarTop": { + "x": 428.3, + "y": 89.7 + }, + "noseRightAlarTop": { + "x": 442.2, + "y": 87.0 + }, + "noseLeftAlarOutTip": { + "x": 424.3, + "y": 96.4 + }, + "noseRightAlarOutTip": { + "x": 446.6, + "y": 92.5 + }, + "upperLipTop": { + "x": 437.6, + "y": 105.9 + }, + "upperLipBottom": { + "x": 437.6, + "y": 108.2 + }, + "underLipTop": { + "x": 436.8, + "y": 111.4 + }, + "underLipBottom": { + "x": 437.3, + "y": 114.5 + } + }, + "faceAttributes": { + "glasses": "sunglasses", + "headPose": { + "roll": 2.1, + "yaw": 3, + "pitch": 1.6 + }, + "occlusion": { + "foreheadOccluded": false, + "eyeOccluded": false, + "mouthOccluded": false + }, + "accessories": [ + { + "type": "headwear", + "confidence": 0.99 + }, + { + "type": "glasses", + "confidence": 1.0 + }, + { + "type": "mask", + "confidence": 0.87 + } + ], + "blur": { + "blurLevel": "medium", + "value": 0.51 + }, + "exposure": { + "exposureLevel": "goodExposure", + "value": 0.55 + }, + "noise": { + "noiseLevel": "low", + "value": 0.12 + }, + "qualityForRecognition": "high" + } + } + ] + } + } +} diff --git a/specification/ai/Face/examples/v1.3-preview.1/FaceListOperations_AddFaceListFaceFromStream.json b/specification/ai/Face/examples/v1.3-preview.1/FaceListOperations_AddFaceListFaceFromStream.json new file mode 100644 index 000000000000..30dd931e0466 --- /dev/null +++ b/specification/ai/Face/examples/v1.3-preview.1/FaceListOperations_AddFaceListFaceFromStream.json @@ -0,0 +1,19 @@ +{ + "title": "Add Face to FaceList", + "operationId": "FaceListOperations_AddFaceListFace", + "parameters": { + "apiVersion": "v1.3-preview.1", + "faceListId": "your_face_list_id", + "targetFace": "10,10,100,100", + "detectionModel": "detection_01", + "userData": 
"your_user_data", + "imageContent": "" + }, + "responses": { + "200": { + "body": { + "persistedFaceId": "43897a75-8d6f-42cf-885e-74832febb055" + } + } + } +} diff --git a/specification/ai/Face/examples/v1.3-preview.1/FaceListOperations_AddFaceListFaceFromUrl.json b/specification/ai/Face/examples/v1.3-preview.1/FaceListOperations_AddFaceListFaceFromUrl.json new file mode 100644 index 000000000000..a3b526782972 --- /dev/null +++ b/specification/ai/Face/examples/v1.3-preview.1/FaceListOperations_AddFaceListFaceFromUrl.json @@ -0,0 +1,21 @@ +{ + "title": "Add Face to FaceList from Url", + "operationId": "FaceListOperations_AddFaceListFaceFromUrl", + "parameters": { + "apiVersion": "v1.3-preview.1", + "faceListId": "your_face_list_id", + "targetFace": "10,10,100,100", + "detectionModel": "detection_01", + "userData": "your_user_data", + "body": { + "url": "https://microsoft.com/example.jpg" + } + }, + "responses": { + "200": { + "body": { + "persistedFaceId": "43897a75-8d6f-42cf-885e-74832febb055" + } + } + } +} diff --git a/specification/ai/Face/examples/v1.3-preview.1/FaceListOperations_AddLargeFaceListFaceFromStream.json b/specification/ai/Face/examples/v1.3-preview.1/FaceListOperations_AddLargeFaceListFaceFromStream.json new file mode 100644 index 000000000000..60c02274964f --- /dev/null +++ b/specification/ai/Face/examples/v1.3-preview.1/FaceListOperations_AddLargeFaceListFaceFromStream.json @@ -0,0 +1,19 @@ +{ + "title": "Add Face to LargeFaceList", + "operationId": "FaceListOperations_AddLargeFaceListFace", + "parameters": { + "apiVersion": "v1.3-preview.1", + "largeFaceListId": "your_large_face_list_id", + "targetFace": "10,10,100,100", + "detectionModel": "detection_01", + "userData": "your_user_data", + "imageContent": "" + }, + "responses": { + "200": { + "body": { + "persistedFaceId": "43897a75-8d6f-42cf-885e-74832febb055" + } + } + } +} diff --git a/specification/ai/Face/examples/v1.3-preview.1/FaceListOperations_AddLargeFaceListFaceFromUrl.json 
b/specification/ai/Face/examples/v1.3-preview.1/FaceListOperations_AddLargeFaceListFaceFromUrl.json new file mode 100644 index 000000000000..a39e9e95dc20 --- /dev/null +++ b/specification/ai/Face/examples/v1.3-preview.1/FaceListOperations_AddLargeFaceListFaceFromUrl.json @@ -0,0 +1,21 @@ +{ + "title": "Add Face to LargeFaceList from Url", + "operationId": "FaceListOperations_AddLargeFaceListFaceFromUrl", + "parameters": { + "apiVersion": "v1.3-preview.1", + "largeFaceListId": "your_large_face_list_id", + "targetFace": "10,10,100,100", + "detectionModel": "detection_01", + "userData": "your_user_data", + "body": { + "url": "https://microsoft.com/example.jpg" + } + }, + "responses": { + "200": { + "body": { + "persistedFaceId": "43897a75-8d6f-42cf-885e-74832febb055" + } + } + } +} diff --git a/specification/ai/Face/examples/v1.3-preview.1/FaceListOperations_CreateFaceList.json b/specification/ai/Face/examples/v1.3-preview.1/FaceListOperations_CreateFaceList.json new file mode 100644 index 000000000000..26a8f77ce462 --- /dev/null +++ b/specification/ai/Face/examples/v1.3-preview.1/FaceListOperations_CreateFaceList.json @@ -0,0 +1,16 @@ +{ + "title": "Create FaceList", + "operationId": "FaceListOperations_CreateFaceList", + "parameters": { + "apiVersion": "v1.3-preview.1", + "faceListId": "your_face_list_id", + "body": { + "name": "your_face_list_name", + "userData": "your_user_data", + "recognitionModel": "recognition_01" + } + }, + "responses": { + "200": {} + } +} diff --git a/specification/ai/Face/examples/v1.3-preview.1/FaceListOperations_CreateLargeFaceList.json b/specification/ai/Face/examples/v1.3-preview.1/FaceListOperations_CreateLargeFaceList.json new file mode 100644 index 000000000000..8f1a5c572ddd --- /dev/null +++ b/specification/ai/Face/examples/v1.3-preview.1/FaceListOperations_CreateLargeFaceList.json @@ -0,0 +1,16 @@ +{ + "title": "Create LargeFaceList", + "operationId": "FaceListOperations_CreateLargeFaceList", + "parameters": { + "apiVersion": 
"v1.3-preview.1", + "largeFaceListId": "your_large_face_list_id", + "body": { + "name": "your_large_face_list_name", + "userData": "your_user_data", + "recognitionModel": "recognition_01" + } + }, + "responses": { + "200": {} + } +} diff --git a/specification/ai/Face/examples/v1.3-preview.1/FaceListOperations_DeleteFaceList.json b/specification/ai/Face/examples/v1.3-preview.1/FaceListOperations_DeleteFaceList.json new file mode 100644 index 000000000000..9da1fd3098f1 --- /dev/null +++ b/specification/ai/Face/examples/v1.3-preview.1/FaceListOperations_DeleteFaceList.json @@ -0,0 +1,11 @@ +{ + "title": "Delete FaceList", + "operationId": "FaceListOperations_DeleteFaceList", + "parameters": { + "apiVersion": "v1.3-preview.1", + "faceListId": "your_face_list_id" + }, + "responses": { + "200": {} + } +} diff --git a/specification/ai/Face/examples/v1.3-preview.1/FaceListOperations_DeleteFaceListFace.json b/specification/ai/Face/examples/v1.3-preview.1/FaceListOperations_DeleteFaceListFace.json new file mode 100644 index 000000000000..00d207310be0 --- /dev/null +++ b/specification/ai/Face/examples/v1.3-preview.1/FaceListOperations_DeleteFaceListFace.json @@ -0,0 +1,12 @@ +{ + "title": "Delete Face from FaceList", + "operationId": "FaceListOperations_DeleteFaceListFace", + "parameters": { + "apiVersion": "v1.3-preview.1", + "faceListId": "your_face_list_id", + "persistedFaceId": "43897a75-8d6f-42cf-885e-74832febb055" + }, + "responses": { + "200": {} + } +} diff --git a/specification/ai/Face/examples/v1.3-preview.1/FaceListOperations_DeleteLargeFaceList.json b/specification/ai/Face/examples/v1.3-preview.1/FaceListOperations_DeleteLargeFaceList.json new file mode 100644 index 000000000000..60b3618d2319 --- /dev/null +++ b/specification/ai/Face/examples/v1.3-preview.1/FaceListOperations_DeleteLargeFaceList.json @@ -0,0 +1,11 @@ +{ + "title": "Delete LargeFaceList", + "operationId": "FaceListOperations_DeleteLargeFaceList", + "parameters": { + "apiVersion": "v1.3-preview.1", 
+ "largeFaceListId": "your_large_face_list_id" + }, + "responses": { + "200": {} + } +} diff --git a/specification/ai/Face/examples/v1.3-preview.1/FaceListOperations_DeleteLargeFaceListFace.json b/specification/ai/Face/examples/v1.3-preview.1/FaceListOperations_DeleteLargeFaceListFace.json new file mode 100644 index 000000000000..cf773a7d3f0e --- /dev/null +++ b/specification/ai/Face/examples/v1.3-preview.1/FaceListOperations_DeleteLargeFaceListFace.json @@ -0,0 +1,12 @@ +{ + "title": "Delete Face From LargeFaceList", + "operationId": "FaceListOperations_DeleteLargeFaceListFace", + "parameters": { + "apiVersion": "v1.3-preview.1", + "largeFaceListId": "your_large_face_list_id", + "persistedFaceId": "43897a75-8d6f-42cf-885e-74832febb055" + }, + "responses": { + "200": {} + } +} diff --git a/specification/ai/Face/examples/v1.3-preview.1/FaceListOperations_GetFaceList.json b/specification/ai/Face/examples/v1.3-preview.1/FaceListOperations_GetFaceList.json new file mode 100644 index 000000000000..177c4756f9e5 --- /dev/null +++ b/specification/ai/Face/examples/v1.3-preview.1/FaceListOperations_GetFaceList.json @@ -0,0 +1,19 @@ +{ + "title": "Get FaceList", + "operationId": "FaceListOperations_GetFaceList", + "parameters": { + "apiVersion": "v1.3-preview.1", + "faceListId": "your_face_list_id", + "returnRecognitionModel": true + }, + "responses": { + "200": { + "body": { + "name": "your_face_list_name", + "userData": "your_user_data", + "recognitionModel": "recognition_01", + "faceListId": "your_face_list_id" + } + } + } +} diff --git a/specification/ai/Face/examples/v1.3-preview.1/FaceListOperations_GetFaceLists.json b/specification/ai/Face/examples/v1.3-preview.1/FaceListOperations_GetFaceLists.json new file mode 100644 index 000000000000..336e510315a8 --- /dev/null +++ b/specification/ai/Face/examples/v1.3-preview.1/FaceListOperations_GetFaceLists.json @@ -0,0 +1,20 @@ +{ + "title": "Get FaceLists", + "operationId": "FaceListOperations_GetFaceLists", + "parameters": { 
+ "apiVersion": "v1.3-preview.1", + "returnRecognitionModel": true + }, + "responses": { + "200": { + "body": [ + { + "name": "your_face_list_name", + "userData": "your_user_data", + "recognitionModel": "recognition_01", + "faceListId": "your_face_list_id" + } + ] + } + } +} diff --git a/specification/ai/Face/examples/v1.3-preview.1/FaceListOperations_GetLargeFaceList.json b/specification/ai/Face/examples/v1.3-preview.1/FaceListOperations_GetLargeFaceList.json new file mode 100644 index 000000000000..ee7562801402 --- /dev/null +++ b/specification/ai/Face/examples/v1.3-preview.1/FaceListOperations_GetLargeFaceList.json @@ -0,0 +1,19 @@ +{ + "title": "Get LargeFaceList", + "operationId": "FaceListOperations_GetLargeFaceList", + "parameters": { + "apiVersion": "v1.3-preview.1", + "largeFaceListId": "your_large_face_list_id", + "returnRecognitionModel": true + }, + "responses": { + "200": { + "body": { + "name": "your_large_face_list_name", + "userData": "your_user_data", + "recognitionModel": "recognition_01", + "largeFaceListId": "your_large_face_list_id" + } + } + } +} diff --git a/specification/ai/Face/examples/v1.3-preview.1/FaceListOperations_GetLargeFaceListFace.json b/specification/ai/Face/examples/v1.3-preview.1/FaceListOperations_GetLargeFaceListFace.json new file mode 100644 index 000000000000..46943a5f2ffe --- /dev/null +++ b/specification/ai/Face/examples/v1.3-preview.1/FaceListOperations_GetLargeFaceListFace.json @@ -0,0 +1,17 @@ +{ + "title": "Get Face from LargeFaceList", + "operationId": "FaceListOperations_GetLargeFaceListFace", + "parameters": { + "apiVersion": "v1.3-preview.1", + "largeFaceListId": "your_large_face_list_id", + "persistedFaceId": "43897a75-8d6f-42cf-885e-74832febb055" + }, + "responses": { + "200": { + "body": { + "persistedFaceId": "43897a75-8d6f-42cf-885e-74832febb055", + "userData": "your_user_data" + } + } + } +} diff --git a/specification/ai/Face/examples/v1.3-preview.1/FaceListOperations_GetLargeFaceListFaces.json 
b/specification/ai/Face/examples/v1.3-preview.1/FaceListOperations_GetLargeFaceListFaces.json new file mode 100644 index 000000000000..7330d90aa137 --- /dev/null +++ b/specification/ai/Face/examples/v1.3-preview.1/FaceListOperations_GetLargeFaceListFaces.json @@ -0,0 +1,20 @@ +{ + "title": "Get Faces from LargeFaceList", + "operationId": "FaceListOperations_GetLargeFaceListFaces", + "parameters": { + "apiVersion": "v1.3-preview.1", + "largeFaceListId": "your_large_face_list_id", + "start": "00000000-0000-0000-0000-000000000000", + "top": 20 + }, + "responses": { + "200": { + "body": [ + { + "persistedFaceId": "43897a75-8d6f-42cf-885e-74832febb055", + "userData": "your_user_data" + } + ] + } + } +} diff --git a/specification/ai/Face/examples/v1.3-preview.1/FaceListOperations_GetLargeFaceListTrainingStatus.json b/specification/ai/Face/examples/v1.3-preview.1/FaceListOperations_GetLargeFaceListTrainingStatus.json new file mode 100644 index 000000000000..7aa0970b3e13 --- /dev/null +++ b/specification/ai/Face/examples/v1.3-preview.1/FaceListOperations_GetLargeFaceListTrainingStatus.json @@ -0,0 +1,19 @@ +{ + "title": "Get Training Status of LargeFaceList", + "operationId": "FaceListOperations_GetLargeFaceListTrainingStatus", + "parameters": { + "apiVersion": "v1.3-preview.1", + "largeFaceListId": "your_large_face_list_id" + }, + "responses": { + "200": { + "body": { + "status": "notStarted", + "createdDateTime": "2024-03-05T11:07:58.371Z", + "lastActionDateTime": "2024-03-05T11:07:58.371Z", + "lastSuccessfulTrainingDateTime": "2024-03-05T11:07:58.371Z", + "message": null + } + } + } +} diff --git a/specification/ai/Face/examples/v1.3-preview.1/FaceListOperations_GetLargeFaceLists.json b/specification/ai/Face/examples/v1.3-preview.1/FaceListOperations_GetLargeFaceLists.json new file mode 100644 index 000000000000..e7e8978da228 --- /dev/null +++ b/specification/ai/Face/examples/v1.3-preview.1/FaceListOperations_GetLargeFaceLists.json @@ -0,0 +1,22 @@ +{ + "title": "Get 
LargeFaceLists", + "operationId": "FaceListOperations_GetLargeFaceLists", + "parameters": { + "apiVersion": "v1.3-preview.1", + "start": "my_list_id", + "top": 20, + "returnRecognitionModel": true + }, + "responses": { + "200": { + "body": [ + { + "name": "your_large_face_list_name", + "userData": "your_user_data", + "recognitionModel": "recognition_01", + "largeFaceListId": "your_large_face_list_id" + } + ] + } + } +} diff --git a/specification/ai/Face/examples/v1.3-preview.1/FaceListOperations_TrainLargeFaceList.json b/specification/ai/Face/examples/v1.3-preview.1/FaceListOperations_TrainLargeFaceList.json new file mode 100644 index 000000000000..1c7de71dfd8b --- /dev/null +++ b/specification/ai/Face/examples/v1.3-preview.1/FaceListOperations_TrainLargeFaceList.json @@ -0,0 +1,15 @@ +{ + "title": "Train LargeFaceList", + "operationId": "FaceListOperations_TrainLargeFaceList", + "parameters": { + "apiVersion": "v1.3-preview.1", + "largeFaceListId": "your_large_face_list_id" + }, + "responses": { + "202": { + "headers": { + "operation-Location": "https://contoso.com/operationstatus" + } + } + } +} diff --git a/specification/ai/Face/examples/v1.3-preview.1/FaceListOperations_UpdateFaceList.json b/specification/ai/Face/examples/v1.3-preview.1/FaceListOperations_UpdateFaceList.json new file mode 100644 index 000000000000..521bd9d54298 --- /dev/null +++ b/specification/ai/Face/examples/v1.3-preview.1/FaceListOperations_UpdateFaceList.json @@ -0,0 +1,15 @@ +{ + "title": "Update FaceList", + "operationId": "FaceListOperations_UpdateFaceList", + "parameters": { + "apiVersion": "v1.3-preview.1", + "faceListId": "your_face_list_id", + "body": { + "name": "your_face_list_name", + "userData": "your_user_data" + } + }, + "responses": { + "200": {} + } +} diff --git a/specification/ai/Face/examples/v1.3-preview.1/FaceListOperations_UpdateLargeFaceList.json b/specification/ai/Face/examples/v1.3-preview.1/FaceListOperations_UpdateLargeFaceList.json new file mode 100644 index 
000000000000..1f87bd146bc8 --- /dev/null +++ b/specification/ai/Face/examples/v1.3-preview.1/FaceListOperations_UpdateLargeFaceList.json @@ -0,0 +1,15 @@ +{ + "title": "Update LargeFaceList", + "operationId": "FaceListOperations_UpdateLargeFaceList", + "parameters": { + "apiVersion": "v1.3-preview.1", + "largeFaceListId": "your_large_face_list_id", + "body": { + "name": "your_large_face_list_name", + "userData": "your_user_data" + } + }, + "responses": { + "200": {} + } +} diff --git a/specification/ai/Face/examples/v1.3-preview.1/FaceListOperations_UpdateLargeFaceListFace.json b/specification/ai/Face/examples/v1.3-preview.1/FaceListOperations_UpdateLargeFaceListFace.json new file mode 100644 index 000000000000..bdb1cf8f47bf --- /dev/null +++ b/specification/ai/Face/examples/v1.3-preview.1/FaceListOperations_UpdateLargeFaceListFace.json @@ -0,0 +1,15 @@ +{ + "title": "Update Face in LargeFaceList", + "operationId": "FaceListOperations_UpdateLargeFaceListFace", + "parameters": { + "apiVersion": "v1.3-preview.1", + "largeFaceListId": "your_large_face_list_id", + "persistedFaceId": "43897a75-8d6f-42cf-885e-74832febb055", + "body": { + "userData": "your_user_data" + } + }, + "responses": { + "200": {} + } +} diff --git a/specification/ai/Face/examples/v1.3-preview.1/FaceRecognitionOperations_FindSimilar.json b/specification/ai/Face/examples/v1.3-preview.1/FaceRecognitionOperations_FindSimilar.json new file mode 100644 index 000000000000..3dcb8c7e67d5 --- /dev/null +++ b/specification/ai/Face/examples/v1.3-preview.1/FaceRecognitionOperations_FindSimilar.json @@ -0,0 +1,26 @@ +{ + "title": "Find Similar among Face IDs", + "operationId": "FaceRecognitionOperations_FindSimilar", + "parameters": { + "apiVersion": "v1.3-preview.1", + "body": { + "faceId": "c5c24a82-6845-4031-9d5d-978df9175426", + "maxNumOfCandidatesReturned": 3, + "mode": "matchPerson", + "faceIds": [ + "015839fb-fbd9-4f79-ace9-7675fc2f1dd9", + "be386ab3-af91-4104-9e6d-4dae4c9fddb7" + ] + } + }, + 
"responses": { + "200": { + "body": [ + { + "confidence": 0.9, + "faceId": "015839fb-fbd9-4f79-ace9-7675fc2f1dd9" + } + ] + } + } +} diff --git a/specification/ai/Face/examples/v1.3-preview.1/FaceRecognitionOperations_FindSimilarFromFaceList.json b/specification/ai/Face/examples/v1.3-preview.1/FaceRecognitionOperations_FindSimilarFromFaceList.json new file mode 100644 index 000000000000..871b3e0c56b1 --- /dev/null +++ b/specification/ai/Face/examples/v1.3-preview.1/FaceRecognitionOperations_FindSimilarFromFaceList.json @@ -0,0 +1,23 @@ +{ + "title": "Find Similar from FaceList", + "operationId": "FaceRecognitionOperations_FindSimilarFromFaceList", + "parameters": { + "apiVersion": "v1.3-preview.1", + "body": { + "faceId": "c5c24a82-6845-4031-9d5d-978df9175426", + "maxNumOfCandidatesReturned": 3, + "mode": "matchPerson", + "faceListId": "your_face_list_id" + } + }, + "responses": { + "200": { + "body": [ + { + "confidence": 0.8, + "persistedFaceId": "015839fb-fbd9-4f79-ace9-7675fc2f1dd9" + } + ] + } + } +} diff --git a/specification/ai/Face/examples/v1.3-preview.1/FaceRecognitionOperations_FindSimilarFromLargeFaceList.json b/specification/ai/Face/examples/v1.3-preview.1/FaceRecognitionOperations_FindSimilarFromLargeFaceList.json new file mode 100644 index 000000000000..f4ba3d86f3ec --- /dev/null +++ b/specification/ai/Face/examples/v1.3-preview.1/FaceRecognitionOperations_FindSimilarFromLargeFaceList.json @@ -0,0 +1,23 @@ +{ + "title": "Find Similar from LargeFaceList", + "operationId": "FaceRecognitionOperations_FindSimilarFromLargeFaceList", + "parameters": { + "apiVersion": "v1.3-preview.1", + "body": { + "faceId": "c5c24a82-6845-4031-9d5d-978df9175426", + "maxNumOfCandidatesReturned": 3, + "mode": "matchPerson", + "largeFaceListId": "your_large_face_list_id" + } + }, + "responses": { + "200": { + "body": [ + { + "confidence": 0.8, + "persistedFaceId": "015839fb-fbd9-4f79-ace9-7675fc2f1dd9" + } + ] + } + } +} diff --git 
a/specification/ai/Face/examples/v1.3-preview.1/FaceRecognitionOperations_Group.json b/specification/ai/Face/examples/v1.3-preview.1/FaceRecognitionOperations_Group.json new file mode 100644 index 000000000000..96eac4adec94 --- /dev/null +++ b/specification/ai/Face/examples/v1.3-preview.1/FaceRecognitionOperations_Group.json @@ -0,0 +1,41 @@ +{ + "title": "Group Face IDs", + "operationId": "FaceRecognitionOperations_Group", + "parameters": { + "apiVersion": "v1.3-preview.1", + "body": { + "faceIds": [ + "c5c24a82-6845-4031-9d5d-978df9175426", + "015839fb-fbd9-4f79-ace9-7675fc2f1dd9", + "65d083d4-9447-47d1-af30-b626144bf0fb", + "fce92aed-d578-4d2e-8114-068f8af4492e", + "30ea1073-cc9e-4652-b1e3-d08fb7b95315", + "be386ab3-af91-4104-9e6d-4dae4c9fddb7", + "fbd2a038-dbff-452c-8e79-2ee81b1aa84e", + "b64d5e15-8257-4af2-b20a-5a750f8940e7" + ] + } + }, + "responses": { + "200": { + "body": { + "groups": [ + [ + "c5c24a82-6845-4031-9d5d-978df9175426", + "015839fb-fbd9-4f79-ace9-7675fc2f1dd9", + "fce92aed-d578-4d2e-8114-068f8af4492e", + "b64d5e15-8257-4af2-b20a-5a750f8940e7" + ], + [ + "65d083d4-9447-47d1-af30-b626144bf0fb", + "30ea1073-cc9e-4652-b1e3-d08fb7b95315" + ] + ], + "messyGroup": [ + "be386ab3-af91-4104-9e6d-4dae4c9fddb7", + "fbd2a038-dbff-452c-8e79-2ee81b1aa84e" + ] + } + } + } +} diff --git a/specification/ai/Face/examples/v1.3-preview.1/FaceRecognitionOperations_IdentifyFromDynamicPersonGroup.json b/specification/ai/Face/examples/v1.3-preview.1/FaceRecognitionOperations_IdentifyFromDynamicPersonGroup.json new file mode 100644 index 000000000000..658d283b0a18 --- /dev/null +++ b/specification/ai/Face/examples/v1.3-preview.1/FaceRecognitionOperations_IdentifyFromDynamicPersonGroup.json @@ -0,0 +1,30 @@ +{ + "title": "Identify from DynamicPersonGroup", + "operationId": "FaceRecognitionOperations_IdentifyFromDynamicPersonGroup", + "parameters": { + "apiVersion": "v1.3-preview.1", + "body": { + "faceIds": [ + "c5c24a82-6845-4031-9d5d-978df9175426" + ], + 
"dynamicPersonGroupId": "your_dynamic_person_group_id", + "maxNumOfCandidatesReturned": 9, + "confidenceThreshold": 0.7 + } + }, + "responses": { + "200": { + "body": [ + { + "faceId": "c5c24a82-6845-4031-9d5d-978df9175426", + "candidates": [ + { + "personId": "85c0c630-c9c9-40f8-8a4e-f9ae4f926ea5", + "confidence": 0.8 + } + ] + } + ] + } + } +} diff --git a/specification/ai/Face/examples/v1.3-preview.1/FaceRecognitionOperations_IdentifyFromLargePersonGroup.json b/specification/ai/Face/examples/v1.3-preview.1/FaceRecognitionOperations_IdentifyFromLargePersonGroup.json new file mode 100644 index 000000000000..1733b5003bcf --- /dev/null +++ b/specification/ai/Face/examples/v1.3-preview.1/FaceRecognitionOperations_IdentifyFromLargePersonGroup.json @@ -0,0 +1,30 @@ +{ + "title": "Identify from LargePersonGroup", + "operationId": "FaceRecognitionOperations_IdentifyFromLargePersonGroup", + "parameters": { + "apiVersion": "v1.3-preview.1", + "body": { + "faceIds": [ + "c5c24a82-6845-4031-9d5d-978df9175426" + ], + "largePersonGroupId": "your_large_person_group_id", + "maxNumOfCandidatesReturned": 9, + "confidenceThreshold": 0.7 + } + }, + "responses": { + "200": { + "body": [ + { + "faceId": "c5c24a82-6845-4031-9d5d-978df9175426", + "candidates": [ + { + "personId": "85c0c630-c9c9-40f8-8a4e-f9ae4f926ea5", + "confidence": 0.8 + } + ] + } + ] + } + } +} diff --git a/specification/ai/Face/examples/v1.3-preview.1/FaceRecognitionOperations_IdentifyFromPersonDirectory.json b/specification/ai/Face/examples/v1.3-preview.1/FaceRecognitionOperations_IdentifyFromPersonDirectory.json new file mode 100644 index 000000000000..ecdf4a2f339c --- /dev/null +++ b/specification/ai/Face/examples/v1.3-preview.1/FaceRecognitionOperations_IdentifyFromPersonDirectory.json @@ -0,0 +1,32 @@ +{ + "title": "Identify from PersonDirectory", + "operationId": "FaceRecognitionOperations_IdentifyFromPersonDirectory", + "parameters": { + "apiVersion": "v1.3-preview.1", + "body": { + "faceIds": [ + 
"c5c24a82-6845-4031-9d5d-978df9175426" + ], + "personIds": [ + "85c0c630-c9c9-40f8-8a4e-f9ae4f926ea5" + ], + "maxNumOfCandidatesReturned": 9, + "confidenceThreshold": 0.7 + } + }, + "responses": { + "200": { + "body": [ + { + "faceId": "c5c24a82-6845-4031-9d5d-978df9175426", + "candidates": [ + { + "personId": "85c0c630-c9c9-40f8-8a4e-f9ae4f926ea5", + "confidence": 0.8 + } + ] + } + ] + } + } +} diff --git a/specification/ai/Face/examples/v1.3-preview.1/FaceRecognitionOperations_IdentifyFromPersonGroup.json b/specification/ai/Face/examples/v1.3-preview.1/FaceRecognitionOperations_IdentifyFromPersonGroup.json new file mode 100644 index 000000000000..7d0cd9a3cecc --- /dev/null +++ b/specification/ai/Face/examples/v1.3-preview.1/FaceRecognitionOperations_IdentifyFromPersonGroup.json @@ -0,0 +1,30 @@ +{ + "title": "Identify from PersonGroup", + "operationId": "FaceRecognitionOperations_IdentifyFromPersonGroup", + "parameters": { + "apiVersion": "v1.3-preview.1", + "body": { + "faceIds": [ + "c5c24a82-6845-4031-9d5d-978df9175426" + ], + "personGroupId": "your_person_group_id", + "maxNumOfCandidatesReturned": 9, + "confidenceThreshold": 0.7 + } + }, + "responses": { + "200": { + "body": [ + { + "faceId": "c5c24a82-6845-4031-9d5d-978df9175426", + "candidates": [ + { + "personId": "85c0c630-c9c9-40f8-8a4e-f9ae4f926ea5", + "confidence": 0.8 + } + ] + } + ] + } + } +} diff --git a/specification/ai/Face/examples/v1.3-preview.1/FaceRecognitionOperations_VerifyFaceToFace.json b/specification/ai/Face/examples/v1.3-preview.1/FaceRecognitionOperations_VerifyFaceToFace.json new file mode 100644 index 000000000000..c1860d8c7e3c --- /dev/null +++ b/specification/ai/Face/examples/v1.3-preview.1/FaceRecognitionOperations_VerifyFaceToFace.json @@ -0,0 +1,19 @@ +{ + "title": "Verify Face to Face", + "operationId": "FaceRecognitionOperations_VerifyFaceToFace", + "parameters": { + "apiVersion": "v1.3-preview.1", + "body": { + "faceId1": "c5c24a82-6845-4031-9d5d-978df9175426", + "faceId2": 
"3aa87e30-b380-48eb-ad9e-1aa54fc52bd3" + } + }, + "responses": { + "200": { + "body": { + "isIdentical": true, + "confidence": 0.8 + } + } + } +} diff --git a/specification/ai/Face/examples/v1.3-preview.1/FaceRecognitionOperations_VerifyFromLargePersonGroup.json b/specification/ai/Face/examples/v1.3-preview.1/FaceRecognitionOperations_VerifyFromLargePersonGroup.json new file mode 100644 index 000000000000..cf6c5bfeca7c --- /dev/null +++ b/specification/ai/Face/examples/v1.3-preview.1/FaceRecognitionOperations_VerifyFromLargePersonGroup.json @@ -0,0 +1,20 @@ +{ + "title": "Verify from LargePersonGroup", + "operationId": "FaceRecognitionOperations_VerifyFromLargePersonGroup", + "parameters": { + "apiVersion": "v1.3-preview.1", + "body": { + "faceId": "c5c24a82-6845-4031-9d5d-978df9175426", + "personId": "815df99c-598f-4926-930a-a734b3fd651c", + "largePersonGroupId": "your_large_person_group" + } + }, + "responses": { + "200": { + "body": { + "isIdentical": true, + "confidence": 0.8 + } + } + } +} diff --git a/specification/ai/Face/examples/v1.3-preview.1/FaceRecognitionOperations_VerifyFromPersonDirectory.json b/specification/ai/Face/examples/v1.3-preview.1/FaceRecognitionOperations_VerifyFromPersonDirectory.json new file mode 100644 index 000000000000..eb37fb3c2f21 --- /dev/null +++ b/specification/ai/Face/examples/v1.3-preview.1/FaceRecognitionOperations_VerifyFromPersonDirectory.json @@ -0,0 +1,19 @@ +{ + "title": "Verify from PersonDirectory", + "operationId": "FaceRecognitionOperations_VerifyFromPersonDirectory", + "parameters": { + "apiVersion": "v1.3-preview.1", + "body": { + "faceId": "c5c24a82-6845-4031-9d5d-978df9175426", + "personId": "815df99c-598f-4926-930a-a734b3fd651c" + } + }, + "responses": { + "200": { + "body": { + "isIdentical": true, + "confidence": 0.8 + } + } + } +} diff --git a/specification/ai/Face/examples/v1.3-preview.1/FaceRecognitionOperations_VerifyFromPersonGroup.json 
b/specification/ai/Face/examples/v1.3-preview.1/FaceRecognitionOperations_VerifyFromPersonGroup.json new file mode 100644 index 000000000000..1591fa7396ef --- /dev/null +++ b/specification/ai/Face/examples/v1.3-preview.1/FaceRecognitionOperations_VerifyFromPersonGroup.json @@ -0,0 +1,20 @@ +{ + "title": "Verify from PersonGroup", + "operationId": "FaceRecognitionOperations_VerifyFromPersonGroup", + "parameters": { + "apiVersion": "v1.3-preview.1", + "body": { + "faceId": "c5c24a82-6845-4031-9d5d-978df9175426", + "personId": "815df99c-598f-4926-930a-a734b3fd651c", + "personGroupId": "your_person_group" + } + }, + "responses": { + "200": { + "body": { + "isIdentical": true, + "confidence": 0.8 + } + } + } +} diff --git a/specification/ai/Face/examples/v1.3-preview.1/LivenessSessionOperations_CreateLivenessSession.json b/specification/ai/Face/examples/v1.3-preview.1/LivenessSessionOperations_CreateLivenessSession.json new file mode 100644 index 000000000000..77529b46ef8a --- /dev/null +++ b/specification/ai/Face/examples/v1.3-preview.1/LivenessSessionOperations_CreateLivenessSession.json @@ -0,0 +1,32 @@ +{ + "title": "Create Liveness Session", + "operationId": "LivenessSessionOperations_CreateLivenessSession", + "parameters": { + "apiVersion": "v1.3-preview.1", + "body": { + "livenessOperationMode": "PassiveActive", + "deviceCorrelationIdSetInClient": false, + "deviceCorrelationId": "your_device_correlation_id", + "userCorrelationIdSetInClient": false, + "userCorrelationId": "your_user_correlation_id", + "authTokenTimeToLiveInSeconds": 60, + "numberOfClientAttemptsAllowed": 1, + "expectedClientIpAddress": "1.2.3.4" + } + }, + "responses": { + "200": { + "body": { + "sessionId": "b12e033e-bda7-4b83-a211-e721c661f30e", + "authToken": "eyJhbGciOiJFUzI1NiIsIm", + "status": "NotStarted", + "modelVersion": "2024-11-15", + "isAbuseMonitoringEnabled": true, + "expectedClientIpAddress": "1.2.3.4", + "results": { + "attempts": [] + } + } + } + } +} diff --git 
a/specification/ai/Face/examples/v1.3-preview.1/LivenessSessionOperations_CreateLivenessWithVerifySession.json b/specification/ai/Face/examples/v1.3-preview.1/LivenessSessionOperations_CreateLivenessWithVerifySession.json new file mode 100644 index 000000000000..958236a457b3 --- /dev/null +++ b/specification/ai/Face/examples/v1.3-preview.1/LivenessSessionOperations_CreateLivenessWithVerifySession.json @@ -0,0 +1,42 @@ +{ + "title": "Create LivenessWithVerify Session", + "operationId": "LivenessSessionOperations_CreateLivenessWithVerifySession", + "parameters": { + "apiVersion": "v1.3-preview.1", + "livenessOperationMode": "PassiveActive", + "deviceCorrelationIdSetInClient": false, + "deviceCorrelationId": "your_device_correlation_id", + "userCorrelationIdSetInClient": false, + "userCorrelationId": "your_user_correlation_id", + "authTokenTimeToLiveInSeconds": 60, + "numberOfClientAttemptsAllowed": 1, + "expectedClientIpAddress": "1.2.3.4", + "verifyImage": "" + }, + "responses": { + "200": { + "body": { + "sessionId": "b12e033e-bda7-4b83-a211-e721c661f30e", + "authToken": "eyJhbGciOiJFUzI1NiIsIm", + "status": "NotStarted", + "modelVersion": "2024-11-15", + "isAbuseMonitoringEnabled": true, + "results": { + "attempts": [], + "verifyReferences": [ + { + "referenceType": "image", + "faceRectangle": { + "top": 316, + "left": 131, + "width": 498, + "height": 677 + }, + "qualityForRecognition": "high" + } + ] + } + } + } + } +} diff --git a/specification/ai/Face/examples/v1.3-preview.1/LivenessSessionOperations_DeleteLivenessSession.json b/specification/ai/Face/examples/v1.3-preview.1/LivenessSessionOperations_DeleteLivenessSession.json new file mode 100644 index 000000000000..929aa7ced311 --- /dev/null +++ b/specification/ai/Face/examples/v1.3-preview.1/LivenessSessionOperations_DeleteLivenessSession.json @@ -0,0 +1,11 @@ +{ + "title": "Delete Liveness Session", + "operationId": "LivenessSessionOperations_DeleteLivenessSession", + "parameters": { + "apiVersion": 
"v1.3-preview.1", + "sessionId": "b12e033e-bda7-4b83-a211-e721c661f30e" + }, + "responses": { + "204": {} + } +} diff --git a/specification/ai/Face/examples/v1.3-preview.1/LivenessSessionOperations_DeleteLivenessWithVerifySession.json b/specification/ai/Face/examples/v1.3-preview.1/LivenessSessionOperations_DeleteLivenessWithVerifySession.json new file mode 100644 index 000000000000..d86b75b1fbfc --- /dev/null +++ b/specification/ai/Face/examples/v1.3-preview.1/LivenessSessionOperations_DeleteLivenessWithVerifySession.json @@ -0,0 +1,11 @@ +{ + "title": "Delete LivenessWithVerify Session", + "operationId": "LivenessSessionOperations_DeleteLivenessWithVerifySession", + "parameters": { + "apiVersion": "v1.3-preview.1", + "sessionId": "b12e033e-bda7-4b83-a211-e721c661f30e" + }, + "responses": { + "204": {} + } +} diff --git a/specification/ai/Face/examples/v1.3-preview.1/LivenessSessionOperations_GetClientAssetsAccessToken.json b/specification/ai/Face/examples/v1.3-preview.1/LivenessSessionOperations_GetClientAssetsAccessToken.json new file mode 100644 index 000000000000..14dd93beae62 --- /dev/null +++ b/specification/ai/Face/examples/v1.3-preview.1/LivenessSessionOperations_GetClientAssetsAccessToken.json @@ -0,0 +1,16 @@ +{ + "title": "Get LivenessSessionOperations Settings ClientAssetsAccessToken", + "operationId": "LivenessSessionOperations_GetClientAssetsAccessToken", + "parameters": { + "apiVersion": "v1.3-preview.1" + }, + "responses": { + "200": { + "body": { + "expiry": "2025-07-03T15:30:00.000Z", + "accessToken": "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9", + "base64AccessToken": "ZXlKaGJHY2lPaUpJVXpJMU5pSXNJblI1=" + } + } + } +} diff --git a/specification/ai/Face/examples/v1.3-preview.1/LivenessSessionOperations_GetLivenessSessionResult.json b/specification/ai/Face/examples/v1.3-preview.1/LivenessSessionOperations_GetLivenessSessionResult.json new file mode 100644 index 000000000000..f6042d5ee73c --- /dev/null +++ 
b/specification/ai/Face/examples/v1.3-preview.1/LivenessSessionOperations_GetLivenessSessionResult.json @@ -0,0 +1,80 @@ +{ + "title": "Get LivenessSession Result", + "operationId": "LivenessSessionOperations_GetLivenessSessionResult", + "parameters": { + "apiVersion": "v1.3-preview.1", + "sessionId": "b12e033e-bda7-4b83-a211-e721c661f30e" + }, + "responses": { + "200": { + "body": { + "sessionId": "b12e033e-bda7-4b83-a211-e721c661f30e", + "authToken": "eyJhbGciOiJFUzI1NiIsIm", + "status": "NotStarted", + "modelVersion": "2024-11-15", + "isAbuseMonitoringEnabled": true, + "results": { + "attempts": [ + { + "attemptId": 2, + "attemptStatus": "Succeeded", + "result": { + "livenessDecision": "realface", + "targets": { + "color": { + "faceRectangle": { + "top": 669, + "left": 203, + "width": 646, + "height": 724 + } + } + }, + "digest": "B0A803BB7B26F3C8F29CD36030F8E63ED3FAF955FEEF8E01C88AB8FD89CCF761", + "sessionImageId": "Ae3PVWlXAmVAnXgkAFt1QSjGUWONKzWiSr2iPh9p9G4I" + }, + "clientInformation": [ + { + "ip": "73.21.34.122" + } + ], + "abuseMonitoringResult": { + "isAbuseDetected": true, + "otherFlaggedSessions": [ + { + "attemptId": 1, + "sessionId": "b12e033e-bda7-4b83-a211-e721c661f30e", + "sessionImageId": "Ae3PVWlXAmVAnXgkAFt1QSjGUWONKzWiSr2iPh9p9G4I" + } + ] + } + }, + { + "attemptId": 1, + "attemptStatus": "Failed", + "error": { + "code": "FaceWithMaskDetected", + "message": "Mask detected on face image.", + "targets": { + "color": { + "faceRectangle": { + "top": 669, + "left": 203, + "width": 646, + "height": 724 + } + } + } + }, + "clientInformation": [ + { + "ip": "73.21.34.122" + } + ] + } + ] + } + } + } + } +} diff --git a/specification/ai/Face/examples/v1.3-preview.1/LivenessSessionOperations_GetLivenessWithVerifySessionResult.json b/specification/ai/Face/examples/v1.3-preview.1/LivenessSessionOperations_GetLivenessWithVerifySessionResult.json new file mode 100644 index 000000000000..8b8bb97f9359 --- /dev/null +++ 
b/specification/ai/Face/examples/v1.3-preview.1/LivenessSessionOperations_GetLivenessWithVerifySessionResult.json @@ -0,0 +1,98 @@ +{ + "title": "Get LivenessWithVerify Session Result", + "operationId": "LivenessSessionOperations_GetLivenessWithVerifySessionResult", + "parameters": { + "apiVersion": "v1.3-preview.1", + "sessionId": "b12e033e-bda7-4b83-a211-e721c661f30e" + }, + "responses": { + "200": { + "body": { + "sessionId": "b12e033e-bda7-4b83-a211-e721c661f30e", + "authToken": "eyJhbGciOiJFUzI1NiIsIm", + "status": "NotStarted", + "modelVersion": "2024-11-15", + "isAbuseMonitoringEnabled": true, + "expectedClientIpAddress": "1.2.3.4", + "results": { + "attempts": [ + { + "attemptId": 2, + "attemptStatus": "Succeeded", + "result": { + "livenessDecision": "realface", + "targets": { + "color": { + "faceRectangle": { + "top": 669, + "left": 203, + "width": 646, + "height": 724 + } + } + }, + "verifyResult": { + "matchConfidence": 0.08871888, + "isIdentical": false + }, + "digest": "B0A803BB7B26F3C8F29CD36030F8E63ED3FAF955FEEF8E01C88AB8FD89CCF761", + "sessionImageId": "Ae3PVWlXAmVAnXgkAFt1QSjGUWONKzWiSr2iPh9p9G4I", + "verifyImageHash": "43B7D8E8769533C3290DBD37A84D821B2C28CB4381DF9C6784DBC4AAF7E45018" + }, + "clientInformation": [ + { + "ip": "73.21.34.122" + } + ], + "abuseMonitoringResult": { + "isAbuseDetected": true, + "otherFlaggedSessions": [ + { + "attemptId": 1, + "sessionId": "b12e033e-bda7-4b83-a211-e721c661f30e", + "sessionImageId": "Ae3PVWlXAmVAnXgkAFt1QSjGUWONKzWiSr2iPh9p9G4I" + } + ] + } + }, + { + "attemptId": 1, + "attemptStatus": "Failed", + "error": { + "code": "FaceWithMaskDetected", + "message": "Mask detected on face image.", + "targets": { + "color": { + "faceRectangle": { + "top": 669, + "left": 203, + "width": 646, + "height": 724 + } + } + } + }, + "clientInformation": [ + { + "ip": "73.21.34.122" + } + ] + } + ], + "verifyReferences": [ + { + "referenceType": "image", + "faceRectangle": { + "top": 316, + "left": 131, + "width": 498, + 
"height": 677 + }, + "qualityForRecognition": "high" + } + ] + } + } + } + } +} diff --git a/specification/ai/Face/examples/v1.3-preview.1/LivenessSessionOperations_GetSessionImage.json b/specification/ai/Face/examples/v1.3-preview.1/LivenessSessionOperations_GetSessionImage.json new file mode 100644 index 000000000000..577b4a7b8ccc --- /dev/null +++ b/specification/ai/Face/examples/v1.3-preview.1/LivenessSessionOperations_GetSessionImage.json @@ -0,0 +1,13 @@ +{ + "title": "Get Session Image", + "operationId": "LivenessSessionOperations_GetSessionImage", + "parameters": { + "apiVersion": "v1.3-preview.1", + "sessionImageId": "3d035d35-2e01-4ed4-8935-577afde9caaa" + }, + "responses": { + "200": { + "body": "" + } + } +} diff --git a/specification/ai/Face/examples/v1.3-preview.1/LivenessSessionOperations_GetSettings.json b/specification/ai/Face/examples/v1.3-preview.1/LivenessSessionOperations_GetSettings.json new file mode 100644 index 000000000000..0ad339fbe901 --- /dev/null +++ b/specification/ai/Face/examples/v1.3-preview.1/LivenessSessionOperations_GetSettings.json @@ -0,0 +1,16 @@ +{ + "title": "Get LivenessSessionOperations Settings", + "operationId": "LivenessSessionOperations_GetSettings", + "parameters": { + "apiVersion": "v1.3-preview.1" + }, + "responses": { + "200": { + "body": { + "livenessAbuseMonitoring": { + "enabled": true + } + } + } + } +} diff --git a/specification/ai/Face/examples/v1.3-preview.1/LivenessSessionOperations_PatchSettings.json b/specification/ai/Face/examples/v1.3-preview.1/LivenessSessionOperations_PatchSettings.json new file mode 100644 index 000000000000..6264247b6d9e --- /dev/null +++ b/specification/ai/Face/examples/v1.3-preview.1/LivenessSessionOperations_PatchSettings.json @@ -0,0 +1,21 @@ +{ + "title": "Patch LivenessSessionOperations Settings", + "operationId": "LivenessSessionOperations_PatchSettings", + "parameters": { + "apiVersion": "v1.3-preview.1", + "body": { + "livenessAbuseMonitoring": { + "enabled": true + } + } 
+ }, + "responses": { + "200": { + "body": { + "livenessAbuseMonitoring": { + "enabled": true + } + } + } + } +} diff --git a/specification/ai/Face/examples/v1.3-preview.1/PersonGroupOperations_AddLargePersonGroupPersonFaceFromStream.json b/specification/ai/Face/examples/v1.3-preview.1/PersonGroupOperations_AddLargePersonGroupPersonFaceFromStream.json new file mode 100644 index 000000000000..e5b5233766b8 --- /dev/null +++ b/specification/ai/Face/examples/v1.3-preview.1/PersonGroupOperations_AddLargePersonGroupPersonFaceFromStream.json @@ -0,0 +1,20 @@ +{ + "title": "Add Face in LargePersonGroup Person", + "operationId": "PersonGroupOperations_AddLargePersonGroupPersonFace", + "parameters": { + "apiVersion": "v1.3-preview.1", + "largePersonGroupId": "your_large_person_group_id", + "personId": "25985303-c537-4467-b41d-bdb45cd95ca1", + "targetFace": "10,10,100,100", + "detectionModel": "detection_01", + "userData": "your_user_data", + "imageContent": "" + }, + "responses": { + "200": { + "body": { + "persistedFaceId": "43897a75-8d6f-42cf-885e-74832febb055" + } + } + } +} diff --git a/specification/ai/Face/examples/v1.3-preview.1/PersonGroupOperations_AddLargePersonGroupPersonFaceFromUrl.json b/specification/ai/Face/examples/v1.3-preview.1/PersonGroupOperations_AddLargePersonGroupPersonFaceFromUrl.json new file mode 100644 index 000000000000..5010cc3524fc --- /dev/null +++ b/specification/ai/Face/examples/v1.3-preview.1/PersonGroupOperations_AddLargePersonGroupPersonFaceFromUrl.json @@ -0,0 +1,22 @@ +{ + "title": "Add Face in LargePersonGroup Person from Url", + "operationId": "PersonGroupOperations_AddLargePersonGroupPersonFaceFromUrl", + "parameters": { + "apiVersion": "v1.3-preview.1", + "largePersonGroupId": "your_large_person_group_id", + "personId": "25985303-c537-4467-b41d-bdb45cd95ca1", + "targetFace": "10,10,100,100", + "detectionModel": "detection_01", + "userData": "your_user_data", + "body": { + "url": "https://microsoft.com/example.jpg" + } + }, + 
"responses": { + "200": { + "body": { + "persistedFaceId": "43897a75-8d6f-42cf-885e-74832febb055" + } + } + } +} diff --git a/specification/ai/Face/examples/v1.3-preview.1/PersonGroupOperations_AddPersonGroupPersonFaceFromStream.json b/specification/ai/Face/examples/v1.3-preview.1/PersonGroupOperations_AddPersonGroupPersonFaceFromStream.json new file mode 100644 index 000000000000..94a27103d0f2 --- /dev/null +++ b/specification/ai/Face/examples/v1.3-preview.1/PersonGroupOperations_AddPersonGroupPersonFaceFromStream.json @@ -0,0 +1,20 @@ +{ + "title": "Add Face to PersonGroup Person", + "operationId": "PersonGroupOperations_AddPersonGroupPersonFace", + "parameters": { + "apiVersion": "v1.3-preview.1", + "personGroupId": "your_person_group_id", + "personId": "25985303-c537-4467-b41d-bdb45cd95ca1", + "targetFace": "10,10,100,100", + "detectionModel": "detection_01", + "userData": "your_user_data", + "imageContent": "" + }, + "responses": { + "200": { + "body": { + "persistedFaceId": "43897a75-8d6f-42cf-885e-74832febb055" + } + } + } +} diff --git a/specification/ai/Face/examples/v1.3-preview.1/PersonGroupOperations_AddPersonGroupPersonFaceFromUrl.json b/specification/ai/Face/examples/v1.3-preview.1/PersonGroupOperations_AddPersonGroupPersonFaceFromUrl.json new file mode 100644 index 000000000000..3d0961bad0f7 --- /dev/null +++ b/specification/ai/Face/examples/v1.3-preview.1/PersonGroupOperations_AddPersonGroupPersonFaceFromUrl.json @@ -0,0 +1,22 @@ +{ + "title": "Add Face to PersonGroupPerson from Url", + "operationId": "PersonGroupOperations_AddPersonGroupPersonFaceFromUrl", + "parameters": { + "apiVersion": "v1.3-preview.1", + "personGroupId": "your_person_group_id", + "personId": "25985303-c537-4467-b41d-bdb45cd95ca1", + "targetFace": "10,10,100,100", + "detectionModel": "detection_01", + "userData": "your_user_data", + "body": { + "url": "https://microsoft.com/example.jpg" + } + }, + "responses": { + "200": { + "body": { + "persistedFaceId": 
"43897a75-8d6f-42cf-885e-74832febb055" + } + } + } +} diff --git a/specification/ai/Face/examples/v1.3-preview.1/PersonGroupOperations_CreateLargePersonGroup.json b/specification/ai/Face/examples/v1.3-preview.1/PersonGroupOperations_CreateLargePersonGroup.json new file mode 100644 index 000000000000..ef5b10d2063b --- /dev/null +++ b/specification/ai/Face/examples/v1.3-preview.1/PersonGroupOperations_CreateLargePersonGroup.json @@ -0,0 +1,16 @@ +{ + "title": "Create LargePersonGroup", + "operationId": "PersonGroupOperations_CreateLargePersonGroup", + "parameters": { + "apiVersion": "v1.3-preview.1", + "largePersonGroupId": "your_large_person_group_id", + "body": { + "name": "your_large_person_group_name", + "userData": "your_user_data", + "recognitionModel": "recognition_01" + } + }, + "responses": { + "200": {} + } +} diff --git a/specification/ai/Face/examples/v1.3-preview.1/PersonGroupOperations_CreateLargePersonGroupPerson.json b/specification/ai/Face/examples/v1.3-preview.1/PersonGroupOperations_CreateLargePersonGroupPerson.json new file mode 100644 index 000000000000..7dd4680cf354 --- /dev/null +++ b/specification/ai/Face/examples/v1.3-preview.1/PersonGroupOperations_CreateLargePersonGroupPerson.json @@ -0,0 +1,19 @@ +{ + "title": "Create Person in LargePersonGroup", + "operationId": "PersonGroupOperations_CreateLargePersonGroupPerson", + "parameters": { + "apiVersion": "v1.3-preview.1", + "largePersonGroupId": "your_large_person_group_id", + "body": { + "name": "your_large_person_group_person_name", + "userData": "your_user_data" + } + }, + "responses": { + "200": { + "body": { + "personId": "25985303-c537-4467-b41d-bdb45cd95ca1" + } + } + } +} diff --git a/specification/ai/Face/examples/v1.3-preview.1/PersonGroupOperations_CreatePersonGroup.json b/specification/ai/Face/examples/v1.3-preview.1/PersonGroupOperations_CreatePersonGroup.json new file mode 100644 index 000000000000..b532487c5cd1 --- /dev/null +++ 
b/specification/ai/Face/examples/v1.3-preview.1/PersonGroupOperations_CreatePersonGroup.json @@ -0,0 +1,16 @@ +{ + "title": "Create PersonGroup", + "operationId": "PersonGroupOperations_CreatePersonGroup", + "parameters": { + "apiVersion": "v1.3-preview.1", + "personGroupId": "your_person_group_id", + "body": { + "name": "your_person_group_name", + "userData": "your_user_data", + "recognitionModel": "recognition_01" + } + }, + "responses": { + "200": {} + } +} diff --git a/specification/ai/Face/examples/v1.3-preview.1/PersonGroupOperations_CreatePersonGroupPerson.json b/specification/ai/Face/examples/v1.3-preview.1/PersonGroupOperations_CreatePersonGroupPerson.json new file mode 100644 index 000000000000..cc335edb11aa --- /dev/null +++ b/specification/ai/Face/examples/v1.3-preview.1/PersonGroupOperations_CreatePersonGroupPerson.json @@ -0,0 +1,19 @@ +{ + "title": "Create Person in PersonGroup", + "operationId": "PersonGroupOperations_CreatePersonGroupPerson", + "parameters": { + "apiVersion": "v1.3-preview.1", + "personGroupId": "your_person_group_id", + "body": { + "name": "your_person_group_person_name", + "userData": "your_user_data" + } + }, + "responses": { + "200": { + "body": { + "personId": "25985303-c537-4467-b41d-bdb45cd95ca1" + } + } + } +} diff --git a/specification/ai/Face/examples/v1.3-preview.1/PersonGroupOperations_DeleteLargePersonGroup.json b/specification/ai/Face/examples/v1.3-preview.1/PersonGroupOperations_DeleteLargePersonGroup.json new file mode 100644 index 000000000000..16a49c6b756c --- /dev/null +++ b/specification/ai/Face/examples/v1.3-preview.1/PersonGroupOperations_DeleteLargePersonGroup.json @@ -0,0 +1,11 @@ +{ + "title": "Delete LargePersonGroup", + "operationId": "PersonGroupOperations_DeleteLargePersonGroup", + "parameters": { + "apiVersion": "v1.3-preview.1", + "largePersonGroupId": "your_large_person_group_id" + }, + "responses": { + "200": {} + } +} diff --git 
a/specification/ai/Face/examples/v1.3-preview.1/PersonGroupOperations_DeleteLargePersonGroupPerson.json b/specification/ai/Face/examples/v1.3-preview.1/PersonGroupOperations_DeleteLargePersonGroupPerson.json new file mode 100644 index 000000000000..65f099335b57 --- /dev/null +++ b/specification/ai/Face/examples/v1.3-preview.1/PersonGroupOperations_DeleteLargePersonGroupPerson.json @@ -0,0 +1,12 @@ +{ + "title": "Delete Person from LargePersonGroup", + "operationId": "PersonGroupOperations_DeleteLargePersonGroupPerson", + "parameters": { + "apiVersion": "v1.3-preview.1", + "largePersonGroupId": "your_large_person_group_id", + "personId": "25985303-c537-4467-b41d-bdb45cd95ca1" + }, + "responses": { + "200": {} + } +} diff --git a/specification/ai/Face/examples/v1.3-preview.1/PersonGroupOperations_DeleteLargePersonGroupPersonFace.json b/specification/ai/Face/examples/v1.3-preview.1/PersonGroupOperations_DeleteLargePersonGroupPersonFace.json new file mode 100644 index 000000000000..6f9439d4afb0 --- /dev/null +++ b/specification/ai/Face/examples/v1.3-preview.1/PersonGroupOperations_DeleteLargePersonGroupPersonFace.json @@ -0,0 +1,13 @@ +{ + "title": "Delete Face from LargePersonGroup Person", + "operationId": "PersonGroupOperations_DeleteLargePersonGroupPersonFace", + "parameters": { + "apiVersion": "v1.3-preview.1", + "largePersonGroupId": "your_large_person_group_id", + "personId": "25985303-c537-4467-b41d-bdb45cd95ca1", + "persistedFaceId": "43897a75-8d6f-42cf-885e-74832febb055" + }, + "responses": { + "200": {} + } +} diff --git a/specification/ai/Face/examples/v1.3-preview.1/PersonGroupOperations_DeletePersonGroup.json b/specification/ai/Face/examples/v1.3-preview.1/PersonGroupOperations_DeletePersonGroup.json new file mode 100644 index 000000000000..4ff1d5c78777 --- /dev/null +++ b/specification/ai/Face/examples/v1.3-preview.1/PersonGroupOperations_DeletePersonGroup.json @@ -0,0 +1,11 @@ +{ + "title": "Delete PersonGroup", + "operationId": 
"PersonGroupOperations_DeletePersonGroup", + "parameters": { + "apiVersion": "v1.3-preview.1", + "personGroupId": "your_person_group_id" + }, + "responses": { + "200": {} + } +} diff --git a/specification/ai/Face/examples/v1.3-preview.1/PersonGroupOperations_DeletePersonGroupPerson.json b/specification/ai/Face/examples/v1.3-preview.1/PersonGroupOperations_DeletePersonGroupPerson.json new file mode 100644 index 000000000000..2a13f7abf443 --- /dev/null +++ b/specification/ai/Face/examples/v1.3-preview.1/PersonGroupOperations_DeletePersonGroupPerson.json @@ -0,0 +1,12 @@ +{ + "title": "Delete Person from PersonGroup", + "operationId": "PersonGroupOperations_DeletePersonGroupPerson", + "parameters": { + "apiVersion": "v1.3-preview.1", + "personGroupId": "your_person_group_id", + "personId": "25985303-c537-4467-b41d-bdb45cd95ca1" + }, + "responses": { + "200": {} + } +} diff --git a/specification/ai/Face/examples/v1.3-preview.1/PersonGroupOperations_DeletePersonGroupPersonFace.json b/specification/ai/Face/examples/v1.3-preview.1/PersonGroupOperations_DeletePersonGroupPersonFace.json new file mode 100644 index 000000000000..c17b75b9eccb --- /dev/null +++ b/specification/ai/Face/examples/v1.3-preview.1/PersonGroupOperations_DeletePersonGroupPersonFace.json @@ -0,0 +1,13 @@ +{ + "title": "Delete Face from PersonGroup Person", + "operationId": "PersonGroupOperations_DeletePersonGroupPersonFace", + "parameters": { + "apiVersion": "v1.3-preview.1", + "personGroupId": "your_person_group_id", + "personId": "25985303-c537-4467-b41d-bdb45cd95ca1", + "persistedFaceId": "43897a75-8d6f-42cf-885e-74832febb055" + }, + "responses": { + "200": {} + } +} diff --git a/specification/ai/Face/examples/v1.3-preview.1/PersonGroupOperations_GetLargePersonGroup.json b/specification/ai/Face/examples/v1.3-preview.1/PersonGroupOperations_GetLargePersonGroup.json new file mode 100644 index 000000000000..8fd27f06b3a5 --- /dev/null +++ 
b/specification/ai/Face/examples/v1.3-preview.1/PersonGroupOperations_GetLargePersonGroup.json @@ -0,0 +1,19 @@ +{ + "title": "Get LargePersonGroup", + "operationId": "PersonGroupOperations_GetLargePersonGroup", + "parameters": { + "apiVersion": "v1.3-preview.1", + "largePersonGroupId": "your_large_person_group_id", + "returnRecognitionModel": true + }, + "responses": { + "200": { + "body": { + "name": "your_large_person_group_name", + "userData": "your_user_data", + "recognitionModel": "recognition_01", + "largePersonGroupId": "your_large_person_group_id" + } + } + } +} diff --git a/specification/ai/Face/examples/v1.3-preview.1/PersonGroupOperations_GetLargePersonGroupPerson.json b/specification/ai/Face/examples/v1.3-preview.1/PersonGroupOperations_GetLargePersonGroupPerson.json new file mode 100644 index 000000000000..ae17b5113282 --- /dev/null +++ b/specification/ai/Face/examples/v1.3-preview.1/PersonGroupOperations_GetLargePersonGroupPerson.json @@ -0,0 +1,21 @@ +{ + "title": "Get Person from LargePersonGroup", + "operationId": "PersonGroupOperations_GetLargePersonGroupPerson", + "parameters": { + "apiVersion": "v1.3-preview.1", + "largePersonGroupId": "your_large_person_group_id", + "personId": "25985303-c537-4467-b41d-bdb45cd95ca1" + }, + "responses": { + "200": { + "body": { + "personId": "25985303-c537-4467-b41d-bdb45cd95ca1", + "name": "your_large_person_group_person_name", + "userData": "your_user_data", + "persistedFaceIds": [ + "43897a75-8d6f-42cf-885e-74832febb055" + ] + } + } + } +} diff --git a/specification/ai/Face/examples/v1.3-preview.1/PersonGroupOperations_GetLargePersonGroupPersonFace.json b/specification/ai/Face/examples/v1.3-preview.1/PersonGroupOperations_GetLargePersonGroupPersonFace.json new file mode 100644 index 000000000000..562db1c19045 --- /dev/null +++ b/specification/ai/Face/examples/v1.3-preview.1/PersonGroupOperations_GetLargePersonGroupPersonFace.json @@ -0,0 +1,18 @@ +{ + "title": "Get Face from LargePersonGroup Person", + 
"operationId": "PersonGroupOperations_GetLargePersonGroupPersonFace", + "parameters": { + "apiVersion": "v1.3-preview.1", + "largePersonGroupId": "your_large_person_group_id", + "personId": "25985303-c537-4467-b41d-bdb45cd95ca1", + "persistedFaceId": "43897a75-8d6f-42cf-885e-74832febb055" + }, + "responses": { + "200": { + "body": { + "persistedFaceId": "43897a75-8d6f-42cf-885e-74832febb055", + "userData": "your_user_data" + } + } + } +} diff --git a/specification/ai/Face/examples/v1.3-preview.1/PersonGroupOperations_GetLargePersonGroupPersons.json b/specification/ai/Face/examples/v1.3-preview.1/PersonGroupOperations_GetLargePersonGroupPersons.json new file mode 100644 index 000000000000..9c8262286f16 --- /dev/null +++ b/specification/ai/Face/examples/v1.3-preview.1/PersonGroupOperations_GetLargePersonGroupPersons.json @@ -0,0 +1,24 @@ +{ + "title": "Get Persons from LargePersonGroup", + "operationId": "PersonGroupOperations_GetLargePersonGroupPersons", + "parameters": { + "apiVersion": "v1.3-preview.1", + "largePersonGroupId": "your_large_person_group_id", + "start": "00000000-0000-0000-0000-000000000000", + "top": 20 + }, + "responses": { + "200": { + "body": [ + { + "personId": "25985303-c537-4467-b41d-bdb45cd95ca1", + "name": "your_large_person_group_person_name", + "userData": "your_user_data", + "persistedFaceIds": [ + "43897a75-8d6f-42cf-885e-74832febb055" + ] + } + ] + } + } +} diff --git a/specification/ai/Face/examples/v1.3-preview.1/PersonGroupOperations_GetLargePersonGroupTrainingStatus.json b/specification/ai/Face/examples/v1.3-preview.1/PersonGroupOperations_GetLargePersonGroupTrainingStatus.json new file mode 100644 index 000000000000..f43d627feec0 --- /dev/null +++ b/specification/ai/Face/examples/v1.3-preview.1/PersonGroupOperations_GetLargePersonGroupTrainingStatus.json @@ -0,0 +1,19 @@ +{ + "title": "Get Training Status of LargePersonGroup", + "operationId": "PersonGroupOperations_GetLargePersonGroupTrainingStatus", + "parameters": { + 
"apiVersion": "v1.3-preview.1", + "largePersonGroupId": "your_large_person_group_id" + }, + "responses": { + "200": { + "body": { + "status": "notStarted", + "createdDateTime": "2024-03-05T11:07:58.371Z", + "lastActionDateTime": "2024-03-05T11:07:58.371Z", + "lastSuccessfulTrainingDateTime": "2024-03-05T11:07:58.371Z", + "message": null + } + } + } +} diff --git a/specification/ai/Face/examples/v1.3-preview.1/PersonGroupOperations_GetLargePersonGroups.json b/specification/ai/Face/examples/v1.3-preview.1/PersonGroupOperations_GetLargePersonGroups.json new file mode 100644 index 000000000000..7dd496aa32e2 --- /dev/null +++ b/specification/ai/Face/examples/v1.3-preview.1/PersonGroupOperations_GetLargePersonGroups.json @@ -0,0 +1,22 @@ +{ + "title": "Get LargePersonGroups", + "operationId": "PersonGroupOperations_GetLargePersonGroups", + "parameters": { + "apiVersion": "v1.3-preview.1", + "start": "00000000-0000-0000-0000-000000000000", + "top": 20, + "returnRecognitionModel": true + }, + "responses": { + "200": { + "body": [ + { + "name": "your_large_person_group_name", + "userData": "your_user_data", + "recognitionModel": "recognition_01", + "largePersonGroupId": "your_large_person_group_id" + } + ] + } + } +} diff --git a/specification/ai/Face/examples/v1.3-preview.1/PersonGroupOperations_GetPersonGroup.json b/specification/ai/Face/examples/v1.3-preview.1/PersonGroupOperations_GetPersonGroup.json new file mode 100644 index 000000000000..4100a7353fd5 --- /dev/null +++ b/specification/ai/Face/examples/v1.3-preview.1/PersonGroupOperations_GetPersonGroup.json @@ -0,0 +1,19 @@ +{ + "title": "Get PersonGroup", + "operationId": "PersonGroupOperations_GetPersonGroup", + "parameters": { + "apiVersion": "v1.3-preview.1", + "personGroupId": "your_person_group_id", + "returnRecognitionModel": true + }, + "responses": { + "200": { + "body": { + "name": "your_person_group_name", + "userData": "your_user_data", + "recognitionModel": "recognition_01", + "personGroupId": 
"your_person_group_id" + } + } + } +} diff --git a/specification/ai/Face/examples/v1.3-preview.1/PersonGroupOperations_GetPersonGroupPerson.json b/specification/ai/Face/examples/v1.3-preview.1/PersonGroupOperations_GetPersonGroupPerson.json new file mode 100644 index 000000000000..6f994263f961 --- /dev/null +++ b/specification/ai/Face/examples/v1.3-preview.1/PersonGroupOperations_GetPersonGroupPerson.json @@ -0,0 +1,21 @@ +{ + "title": "Get Person from PersonGroup", + "operationId": "PersonGroupOperations_GetPersonGroupPerson", + "parameters": { + "apiVersion": "v1.3-preview.1", + "personGroupId": "your_person_group_id", + "personId": "25985303-c537-4467-b41d-bdb45cd95ca1" + }, + "responses": { + "200": { + "body": { + "personId": "25985303-c537-4467-b41d-bdb45cd95ca1", + "name": "your_person_group_person_name", + "userData": "your_user_data", + "persistedFaceIds": [ + "43897a75-8d6f-42cf-885e-74832febb055" + ] + } + } + } +} diff --git a/specification/ai/Face/examples/v1.3-preview.1/PersonGroupOperations_GetPersonGroupPersonFace.json b/specification/ai/Face/examples/v1.3-preview.1/PersonGroupOperations_GetPersonGroupPersonFace.json new file mode 100644 index 000000000000..ec3e2bce1e57 --- /dev/null +++ b/specification/ai/Face/examples/v1.3-preview.1/PersonGroupOperations_GetPersonGroupPersonFace.json @@ -0,0 +1,18 @@ +{ + "title": "Get Face from PersonGroup Person", + "operationId": "PersonGroupOperations_GetPersonGroupPersonFace", + "parameters": { + "apiVersion": "v1.3-preview.1", + "personGroupId": "your_person_group_id", + "personId": "25985303-c537-4467-b41d-bdb45cd95ca1", + "persistedFaceId": "43897a75-8d6f-42cf-885e-74832febb055" + }, + "responses": { + "200": { + "body": { + "persistedFaceId": "43897a75-8d6f-42cf-885e-74832febb055", + "userData": "your_user_data" + } + } + } +} diff --git a/specification/ai/Face/examples/v1.3-preview.1/PersonGroupOperations_GetPersonGroupPersons.json 
b/specification/ai/Face/examples/v1.3-preview.1/PersonGroupOperations_GetPersonGroupPersons.json new file mode 100644 index 000000000000..424721fd7a3d --- /dev/null +++ b/specification/ai/Face/examples/v1.3-preview.1/PersonGroupOperations_GetPersonGroupPersons.json @@ -0,0 +1,24 @@ +{ + "title": "Get Persons from PersonGroup", + "operationId": "PersonGroupOperations_GetPersonGroupPersons", + "parameters": { + "apiVersion": "v1.3-preview.1", + "personGroupId": "your_person_group_id", + "start": "00000000-0000-0000-0000-000000000000", + "top": 20 + }, + "responses": { + "200": { + "body": [ + { + "personId": "25985303-c537-4467-b41d-bdb45cd95ca1", + "name": "your_person_group_person_name", + "userData": "your_user_data", + "persistedFaceIds": [ + "43897a75-8d6f-42cf-885e-74832febb055" + ] + } + ] + } + } +} diff --git a/specification/ai/Face/examples/v1.3-preview.1/PersonGroupOperations_GetPersonGroupTrainingStatus.json b/specification/ai/Face/examples/v1.3-preview.1/PersonGroupOperations_GetPersonGroupTrainingStatus.json new file mode 100644 index 000000000000..ab2ef36a910c --- /dev/null +++ b/specification/ai/Face/examples/v1.3-preview.1/PersonGroupOperations_GetPersonGroupTrainingStatus.json @@ -0,0 +1,19 @@ +{ + "title": "Get Training Status of PersonGroup", + "operationId": "PersonGroupOperations_GetPersonGroupTrainingStatus", + "parameters": { + "apiVersion": "v1.3-preview.1", + "personGroupId": "your_person_group_id" + }, + "responses": { + "200": { + "body": { + "status": "notStarted", + "createdDateTime": "2024-03-05T11:07:58.371Z", + "lastActionDateTime": "2024-03-05T11:07:58.371Z", + "lastSuccessfulTrainingDateTime": "2024-03-05T11:07:58.371Z", + "message": null + } + } + } +} diff --git a/specification/ai/Face/examples/v1.3-preview.1/PersonGroupOperations_GetPersonGroups.json b/specification/ai/Face/examples/v1.3-preview.1/PersonGroupOperations_GetPersonGroups.json new file mode 100644 index 000000000000..a92fb21ec63e --- /dev/null +++ 
b/specification/ai/Face/examples/v1.3-preview.1/PersonGroupOperations_GetPersonGroups.json @@ -0,0 +1,22 @@ +{ + "title": "Get PersonGroups", + "operationId": "PersonGroupOperations_GetPersonGroups", + "parameters": { + "apiVersion": "v1.3-preview.1", + "start": "00000000-0000-0000-0000-000000000000", + "top": 20, + "returnRecognitionModel": true + }, + "responses": { + "200": { + "body": [ + { + "name": "your_person_group_name", + "userData": "your_user_data", + "recognitionModel": "recognition_01", + "personGroupId": "your_person_group_id" + } + ] + } + } +} diff --git a/specification/ai/Face/examples/v1.3-preview.1/PersonGroupOperations_TrainLargePersonGroup.json b/specification/ai/Face/examples/v1.3-preview.1/PersonGroupOperations_TrainLargePersonGroup.json new file mode 100644 index 000000000000..a7aba2f271d1 --- /dev/null +++ b/specification/ai/Face/examples/v1.3-preview.1/PersonGroupOperations_TrainLargePersonGroup.json @@ -0,0 +1,15 @@ +{ + "title": "Train LargePersonGroup", + "operationId": "PersonGroupOperations_TrainLargePersonGroup", + "parameters": { + "apiVersion": "v1.3-preview.1", + "largePersonGroupId": "your_large_person_group_id" + }, + "responses": { + "202": { + "headers": { + "operation-Location": "https://contoso.com/operationstatus" + } + } + } +} diff --git a/specification/ai/Face/examples/v1.3-preview.1/PersonGroupOperations_TrainPersonGroup.json b/specification/ai/Face/examples/v1.3-preview.1/PersonGroupOperations_TrainPersonGroup.json new file mode 100644 index 000000000000..7174943ec9f6 --- /dev/null +++ b/specification/ai/Face/examples/v1.3-preview.1/PersonGroupOperations_TrainPersonGroup.json @@ -0,0 +1,15 @@ +{ + "title": "Train PersonGroup", + "operationId": "PersonGroupOperations_TrainPersonGroup", + "parameters": { + "apiVersion": "v1.3-preview.1", + "personGroupId": "your_person_group_id" + }, + "responses": { + "202": { + "headers": { + "operation-Location": "https://contoso.com/operationstatus" + } + } + } +} diff --git 
a/specification/ai/Face/examples/v1.3-preview.1/PersonGroupOperations_UpdateLargePersonGroup.json b/specification/ai/Face/examples/v1.3-preview.1/PersonGroupOperations_UpdateLargePersonGroup.json new file mode 100644 index 000000000000..6bce2e96a32b --- /dev/null +++ b/specification/ai/Face/examples/v1.3-preview.1/PersonGroupOperations_UpdateLargePersonGroup.json @@ -0,0 +1,15 @@ +{ + "title": "Update LargePersonGroup", + "operationId": "PersonGroupOperations_UpdateLargePersonGroup", + "parameters": { + "apiVersion": "v1.3-preview.1", + "largePersonGroupId": "your_large_person_group_id", + "body": { + "name": "your_large_person_group_name", + "userData": "your_user_data" + } + }, + "responses": { + "200": {} + } +} diff --git a/specification/ai/Face/examples/v1.3-preview.1/PersonGroupOperations_UpdateLargePersonGroupPerson.json b/specification/ai/Face/examples/v1.3-preview.1/PersonGroupOperations_UpdateLargePersonGroupPerson.json new file mode 100644 index 000000000000..f50e6d43cb65 --- /dev/null +++ b/specification/ai/Face/examples/v1.3-preview.1/PersonGroupOperations_UpdateLargePersonGroupPerson.json @@ -0,0 +1,16 @@ +{ + "title": "Update Person in LargePersonGroup", + "operationId": "PersonGroupOperations_UpdateLargePersonGroupPerson", + "parameters": { + "apiVersion": "v1.3-preview.1", + "largePersonGroupId": "your_large_person_group_id", + "personId": "25985303-c537-4467-b41d-bdb45cd95ca1", + "body": { + "name": "your_large_person_group_person_name", + "userData": "your_user_data" + } + }, + "responses": { + "200": {} + } +} diff --git a/specification/ai/Face/examples/v1.3-preview.1/PersonGroupOperations_UpdateLargePersonGroupPersonFace.json b/specification/ai/Face/examples/v1.3-preview.1/PersonGroupOperations_UpdateLargePersonGroupPersonFace.json new file mode 100644 index 000000000000..3688f758ce99 --- /dev/null +++ b/specification/ai/Face/examples/v1.3-preview.1/PersonGroupOperations_UpdateLargePersonGroupPersonFace.json @@ -0,0 +1,16 @@ +{ + "title": 
"Update Face in LargePersonGroup Person", + "operationId": "PersonGroupOperations_UpdateLargePersonGroupPersonFace", + "parameters": { + "apiVersion": "v1.3-preview.1", + "largePersonGroupId": "your_large_person_group_id", + "personId": "25985303-c537-4467-b41d-bdb45cd95ca1", + "persistedFaceId": "43897a75-8d6f-42cf-885e-74832febb055", + "body": { + "userData": "your_user_data" + } + }, + "responses": { + "200": {} + } +} diff --git a/specification/ai/Face/examples/v1.3-preview.1/PersonGroupOperations_UpdatePersonGroup.json b/specification/ai/Face/examples/v1.3-preview.1/PersonGroupOperations_UpdatePersonGroup.json new file mode 100644 index 000000000000..a909f776c4a0 --- /dev/null +++ b/specification/ai/Face/examples/v1.3-preview.1/PersonGroupOperations_UpdatePersonGroup.json @@ -0,0 +1,15 @@ +{ + "title": "Update PersonGroup", + "operationId": "PersonGroupOperations_UpdatePersonGroup", + "parameters": { + "apiVersion": "v1.3-preview.1", + "personGroupId": "your_person_group_id", + "body": { + "name": "your_person_group_name", + "userData": "your_user_data" + } + }, + "responses": { + "200": {} + } +} diff --git a/specification/ai/Face/examples/v1.3-preview.1/PersonGroupOperations_UpdatePersonGroupPerson.json b/specification/ai/Face/examples/v1.3-preview.1/PersonGroupOperations_UpdatePersonGroupPerson.json new file mode 100644 index 000000000000..fc3597e8186f --- /dev/null +++ b/specification/ai/Face/examples/v1.3-preview.1/PersonGroupOperations_UpdatePersonGroupPerson.json @@ -0,0 +1,16 @@ +{ + "title": "Update PersonGroup Person", + "operationId": "PersonGroupOperations_UpdatePersonGroupPerson", + "parameters": { + "apiVersion": "v1.3-preview.1", + "personGroupId": "your_person_group_id", + "personId": "25985303-c537-4467-b41d-bdb45cd95ca1", + "body": { + "name": "your_person_group_person_name", + "userData": "your_user_data" + } + }, + "responses": { + "200": {} + } +} diff --git 
a/specification/ai/Face/examples/v1.3-preview.1/PersonGroupOperations_UpdatePersonGroupPersonFace.json b/specification/ai/Face/examples/v1.3-preview.1/PersonGroupOperations_UpdatePersonGroupPersonFace.json new file mode 100644 index 000000000000..2c6eccce0140 --- /dev/null +++ b/specification/ai/Face/examples/v1.3-preview.1/PersonGroupOperations_UpdatePersonGroupPersonFace.json @@ -0,0 +1,16 @@ +{ + "title": "Update Face in PersonGroup Person", + "operationId": "PersonGroupOperations_UpdatePersonGroupPersonFace", + "parameters": { + "apiVersion": "v1.3-preview.1", + "personGroupId": "your_person_group_id", + "personId": "25985303-c537-4467-b41d-bdb45cd95ca1", + "persistedFaceId": "43897a75-8d6f-42cf-885e-74832febb055", + "body": { + "userData": "your_user_data" + } + }, + "responses": { + "200": {} + } +} diff --git a/specification/ai/Face/main.tsp b/specification/ai/Face/main.tsp index 8db754e34662..db120a285017 100644 --- a/specification/ai/Face/main.tsp +++ b/specification/ai/Face/main.tsp @@ -61,4 +61,8 @@ enum Versions { @useDependency(Azure.Core.Versions.v1_0_Preview_2) @doc("v1.2") v1_2: "v1.2", + + @useDependency(Azure.Core.Versions.v1_0_Preview_2) + @doc("v1.3-preview.1") + v1_3_preview_1: "v1.3-preview.1", } diff --git a/specification/ai/Face/models.session.tsp b/specification/ai/Face/models.session.tsp index e36545ed2232..ef8625520771 100644 --- a/specification/ai/Face/models.session.tsp +++ b/specification/ai/Face/models.session.tsp @@ -68,6 +68,22 @@ model LivenessSessionData { @minValue(60) @maxValue(86400) authTokenTimeToLiveInSeconds?: int32 = 600; + + @added(Versions.v1_3_preview_1) + @doc("The number of times a client can attempt a liveness check using the same authToken. Default value is 1. Maximum value is 3.") + numberOfClientAttemptsAllowed?: HttpPart; // = 1; + + @added(Versions.v1_3_preview_1) + @doc("Unique Guid per each end-user. This is to provide rate limiting and anti-hammering. 
If 'userCorrelationIdSetInClient' is true in this request, this 'userCorrelationId' must be null.") + userCorrelationId?: string; + + @added(Versions.v1_3_preview_1) + @doc("Whether or not to allow client to set their own 'userCorrelationId' via the Vision SDK. Default is false, and 'userCorrelationId' must be set in this request body.") + userCorrelationIdSetInClient?: boolean; + + @added(Versions.v1_3_preview_1) + @doc("Specify the expected IP address or CIDR block of the client that runs the liveness check.") + expectedClientIpAddress?: string; } @doc("Session data returned for enumeration.") @@ -361,6 +377,14 @@ model LivenessSessionCommonData { @doc("The model version used for liveness classification. This is an optional parameter, and if this is not specified, then the latest supported model version will be chosen") modelVersion?: LivenessModel; + + @added(Versions.v1_3_preview_1) + @doc("Denotes if the abuse monitoring feature was enabled during this session.") + isAbuseMonitoringEnabled?: HttpPart; + + @added(Versions.v1_3_preview_1) + @doc("The expected IP address or CIDR block of the client that runs the liveness check.") + expectedClientIpAddress?: string; } @added(Versions.v1_2) @@ -410,6 +434,14 @@ model LivenessSessionAttempt { @doc("The error of the liveness call, will be null if there is result.") error?: LivenessError; + + @added(Versions.v1_3_preview_1) + @doc("The client information gathered during the liveness attempt.") + clientInformation?: ClientInformation[]; + + @added(Versions.v1_3_preview_1) + @doc("The abuse monitoring result for the liveness attempt.") + abuseMonitoringResult?: AbuseMonitoringResult; } @added(Versions.v1_2) @@ -426,6 +458,14 @@ model LivenessWithVerifySessionAttempt { @doc("The error of the liveness with verify call, will be null if there is result.") error?: LivenessError; + + @added(Versions.v1_3_preview_1) + @doc("The client information gathered during the liveness attempt.") + clientInformation?: 
ClientInformation[]; + + @added(Versions.v1_3_preview_1) + @doc("The abuse monitoring result for the liveness attempt.") + abuseMonitoringResult?: AbuseMonitoringResult; } @added(Versions.v1_2) @@ -482,6 +522,36 @@ model LivenessError { targets: LivenessDecisionTargets; } +@added(Versions.v1_3_preview_1) +@doc("The client information gathered during the liveness attempt.") +model ClientInformation { + @doc("The client ip address seen during the liveness attempt.") + ip: string; +} + +@added(Versions.v1_3_preview_1) +@doc("The abuse monitoring result for the liveness attempt.") +model AbuseMonitoringResult { + @doc("Denotes if abuse detection triggered during this liveness attempt.") + isAbuseDetected: HttpPart; + + @doc("The other sessions flagged as abuse based on the information gathered during this attempt.") + otherFlaggedSessions: OtherFlaggedSessions[]; +} + +@added(Versions.v1_3_preview_1) +@doc("The other sessions flagged as abuse based on the information gathered during this attempt.") +model OtherFlaggedSessions { + @doc("The attempt ID, start from 1.") + attemptId: int32; + + @doc("The unique session ID of the flagged session.") + sessionId: string; + + @doc("The image ID from the flagged session.") + sessionImageId?: string; +} + @added(Versions.v1_2) @doc("The targets used for liveness classification.") model LivenessDecisionTargets { @@ -538,6 +608,37 @@ model CreateLivenessWithVerifySessionContent { @doc("Seconds the session should last for. Range is 60 to 86400 seconds. Default value is 600.") authTokenTimeToLiveInSeconds?: HttpPart; // = 600; + + @added(Versions.v1_3_preview_1) + @doc("The number of times a client can attempt a liveness check using the same authToken. Default value is 1. 
Maximum value is 3.") + numberOfClientAttemptsAllowed?: HttpPart; // = 1; +} + +@added(Versions.v1_3_preview_1) +@doc("Settings for liveness abuse monitoring.") +model LivenessAbuseMonitoringSetting { + /** Whether liveness abuse monitoring is enabled */ + enabled: HttpPart; +} + +@added(Versions.v1_3_preview_1) +@doc("Response model for client assets access token.") +model ClientAssetsAccessTokenResponse { + @doc("The expiry time of the access token.") + expiry: utcDateTime; + + @doc("The access token for client assets.") + accessToken: string; + + @doc("The base64 encoded access token.") + base64AccessToken: string; +} + +@added(Versions.v1_3_preview_1) +@doc("Response model for settings.") +model Settings { + /** Liveness abuse monitoring settings */ + livenessAbuseMonitoring: LivenessAbuseMonitoringSetting; } @minValue(60) @@ -547,3 +648,7 @@ scalar authTokenLifetimeInSeconds extends int32; @minValue(0) @maxValue(1) scalar confidenceScore extends float32; + +@minValue(1) +@maxValue(3) +scalar numberOfClientAttemptsAllowed extends int32; diff --git a/specification/ai/Face/routes.session.tsp b/specification/ai/Face/routes.session.tsp index dc7c9608b5f9..cdfe3ce1ca60 100644 --- a/specification/ai/Face/routes.session.tsp +++ b/specification/ai/Face/routes.session.tsp @@ -19,7 +19,6 @@ alias LivenessSessionWithVerifyDescription = """ Permissions includes... > * - * Ability to call /detectLivenessWithVerify/singleModal for up to 3 retries. * A token lifetime of 10 minutes. > [!NOTE] @@ -36,7 +35,6 @@ alias CreateLivenessSessionDescription = """ Permissions includes... > * - * Ability to call /detectLiveness/singleModal for up to 3 retries. * A token lifetime of 10 minutes. 
> [!NOTE] @@ -74,7 +72,10 @@ alias CreateLivenessWithVerifySessionSummary = "Create a new liveness session wi alias GetLivenessWithVerifySessionResultDescription = "Get session result of detectLivenessWithVerify/singleModal call."; alias ListLivenessWithVerifySessionsSummary = "Lists sessions for /detectLivenessWithVerify/SingleModal."; alias GetSessionImageDescription = "Get session image stored during the liveness session."; +alias GetLivenessSessionSettingDescription = "Get the liveness sessions setting object."; +alias PatchLivenessSessionSettingDescription = "Update the liveness setting object."; alias SuccessfullyDeletedSession = "Successfully deleted session and all correlated data."; +alias GetClientAssetsAccessTokenDescription = "Get access token to get access to client AI model assets."; @get @action("audit") @@ -258,4 +259,37 @@ interface LivenessSessionOperations { ServiceTraits, FaceErrorResponse >; + + @added(Versions.v1_3_preview_1) + @doc(GetClientAssetsAccessTokenDescription) + @get + @route("settings/getClientAssetsAccessToken") + getClientAssetsAccessToken is Azure.Core.RpcOperation< + {}, + ClientAssetsAccessTokenResponse, + ServiceTraits, + FaceErrorResponse + >; + + @added(Versions.v1_3_preview_1) + @doc(GetLivenessSessionSettingDescription) + @get + @route("settings") + getSettings is Azure.Core.RpcOperation< + {}, + Settings, + ServiceTraits, + FaceErrorResponse + >; + + @added(Versions.v1_3_preview_1) + @doc(PatchLivenessSessionSettingDescription) + @patch(#{ implicitOptionality: true }) + @route("settings") + patchSettings is Azure.Core.RpcOperation< + BodyParameter, + Settings, + ServiceTraits, + FaceErrorResponse + >; } diff --git a/specification/ai/Face/suppressions.yaml b/specification/ai/Face/suppressions.yaml new file mode 100644 index 000000000000..cc9f02f9df9b --- /dev/null +++ b/specification/ai/Face/suppressions.yaml @@ -0,0 +1,12 @@ +- tool: TypeSpecValidation + paths: + - tspconfig.yaml + rules: + - SdkTspConfigValidation + 
sub-rules: + # Suppress validation for Go emitter options + - options.@azure-tools/typespec-go.generate-fakes + - options.@azure-tools/typespec-go.inject-spans + - options.@azure-tools/typespec-go.service-dir + - options.@azure-tools/typespec-go.package-dir + reason: 'Face APIs do not support a Go SDK currently' \ No newline at end of file diff --git a/specification/ai/data-plane/Face/preview/v1.1-preview.1/Face.json b/specification/ai/data-plane/Face/preview/v1.1-preview.1/Face.json index ff2d80e238c4..01c6e9e27c5e 100644 --- a/specification/ai/data-plane/Face/preview/v1.1-preview.1/Face.json +++ b/specification/ai/data-plane/Face/preview/v1.1-preview.1/Face.json @@ -402,7 +402,7 @@ "post": { "operationId": "LivenessSessionOperations_CreateLivenessSession", "summary": "Create a new detect liveness session.", - "description": "A session is best for client device scenarios where developers want to authorize a client device to perform only a liveness detection without granting full access to their resource. Created sessions have a limited life span and only authorize clients to perform the desired action before access is expired.\n\nPermissions includes...\n>\n*\n * Ability to call /detectLiveness/singleModal for up to 3 retries.\n * A token lifetime of 10 minutes.\n\n> [!NOTE]\n> Client access can be revoked by deleting the session using the Delete Liveness Session operation. To retrieve a result, use the Get Liveness Session. To audit the individual requests that a client has made to your resource, use the List Liveness Session Audit Entries.", + "description": "A session is best for client device scenarios where developers want to authorize a client device to perform only a liveness detection without granting full access to their resource. 
Created sessions have a limited life span and only authorize clients to perform the desired action before access is expired.\n\nPermissions includes...\n>\n*\n * A token lifetime of 10 minutes.\n\n> [!NOTE]\n> Client access can be revoked by deleting the session using the Delete Liveness Session operation. To retrieve a result, use the Get Liveness Session. To audit the individual requests that a client has made to your resource, use the List Liveness Session Audit Entries.", "parameters": [ { "name": "body", @@ -635,7 +635,7 @@ "post": { "operationId": "LivenessSessionOperations_CreateLivenessWithVerifySessionWithVerifyImage", "summary": "Create a new liveness session with verify. Provide the verify image during session creation.", - "description": "A session is best for client device scenarios where developers want to authorize a client device to perform only a liveness detection without granting full access to their resource. Created sessions have a limited life span and only authorize clients to perform the desired action before access is expired.\n\nPermissions includes...\n>\n*\n * Ability to call /detectLivenessWithVerify/singleModal for up to 3 retries.\n * A token lifetime of 10 minutes.\n\n> [!NOTE]\n>\n> *\n> * Client access can be revoked by deleting the session using the Delete Liveness With Verify Session operation.\n> * To retrieve a result, use the Get Liveness With Verify Session.\n> * To audit the individual requests that a client has made to your resource, use the List Liveness With Verify Session Audit Entries.\n\nRecommended Option: VerifyImage is provided during session creation.", + "description": "A session is best for client device scenarios where developers want to authorize a client device to perform only a liveness detection without granting full access to their resource. 
Created sessions have a limited life span and only authorize clients to perform the desired action before access is expired.\n\nPermissions includes...\n>\n*\n * A token lifetime of 10 minutes.\n\n> [!NOTE]\n>\n> *\n> * Client access can be revoked by deleting the session using the Delete Liveness With Verify Session operation.\n> * To retrieve a result, use the Get Liveness With Verify Session.\n> * To audit the individual requests that a client has made to your resource, use the List Liveness With Verify Session Audit Entries.\n\nRecommended Option: VerifyImage is provided during session creation.", "consumes": [ "multipart/form-data" ], @@ -5183,7 +5183,7 @@ "post": { "operationId": "LivenessSessionOperations_CreateLivenessWithVerifySession", "summary": "Create a new liveness session with verify. Client device submits VerifyImage during the /detectLivenessWithVerify/singleModal call.", - "description": "A session is best for client device scenarios where developers want to authorize a client device to perform only a liveness detection without granting full access to their resource. 
Created sessions have a limited life span and only authorize clients to perform the desired action before access is expired.\n\nPermissions includes...\n>\n*\n * Ability to call /detectLivenessWithVerify/singleModal for up to 3 retries.\n * A token lifetime of 10 minutes.\n\n> [!NOTE]\n>\n> *\n> * Client access can be revoked by deleting the session using the Delete Liveness With Verify Session operation.\n> * To retrieve a result, use the Get Liveness With Verify Session.\n> * To audit the individual requests that a client has made to your resource, use the List Liveness With Verify Session Audit Entries.\n\nAlternative Option: Client device submits VerifyImage during the /detectLivenessWithVerify/singleModal call.\n> [!NOTE]\n> Extra measures should be taken to validate that the client is sending the expected VerifyImage.", + "description": "A session is best for client device scenarios where developers want to authorize a client device to perform only a liveness detection without granting full access to their resource. 
Created sessions have a limited life span and only authorize clients to perform the desired action before access is expired.\n\nPermissions includes...\n>\n*\n * A token lifetime of 10 minutes.\n\n> [!NOTE]\n>\n> *\n> * Client access can be revoked by deleting the session using the Delete Liveness With Verify Session operation.\n> * To retrieve a result, use the Get Liveness With Verify Session.\n> * To audit the individual requests that a client has made to your resource, use the List Liveness With Verify Session Audit Entries.\n\nAlternative Option: Client device submits VerifyImage during the /detectLivenessWithVerify/singleModal call.\n> [!NOTE]\n> Extra measures should be taken to validate that the client is sending the expected VerifyImage.", "parameters": [ { "name": "body", diff --git a/specification/ai/data-plane/Face/preview/v1.2-preview.1/Face.json b/specification/ai/data-plane/Face/preview/v1.2-preview.1/Face.json index 26b454d4288b..611d0feb0bd0 100644 --- a/specification/ai/data-plane/Face/preview/v1.2-preview.1/Face.json +++ b/specification/ai/data-plane/Face/preview/v1.2-preview.1/Face.json @@ -402,7 +402,7 @@ "post": { "operationId": "LivenessSessionOperations_CreateLivenessSession", "summary": "Create a new detect liveness session.", - "description": "A session is best for client device scenarios where developers want to authorize a client device to perform only a liveness detection without granting full access to their resource. Created sessions have a limited life span and only authorize clients to perform the desired action before access is expired.\n\nPermissions includes...\n>\n*\n * Ability to call /detectLiveness/singleModal for up to 3 retries.\n * A token lifetime of 10 minutes.\n\n> [!NOTE]\n> Client access can be revoked by deleting the session using the Delete Liveness Session operation. To retrieve a result, use the Get Liveness Session. 
To audit the individual requests that a client has made to your resource, use the List Liveness Session Audit Entries.", + "description": "A session is best for client device scenarios where developers want to authorize a client device to perform only a liveness detection without granting full access to their resource. Created sessions have a limited life span and only authorize clients to perform the desired action before access is expired.\n\nPermissions includes...\n>\n*\n * A token lifetime of 10 minutes.\n\n> [!NOTE]\n> Client access can be revoked by deleting the session using the Delete Liveness Session operation. To retrieve a result, use the Get Liveness Session. To audit the individual requests that a client has made to your resource, use the List Liveness Session Audit Entries.", "parameters": [ { "name": "body", @@ -635,7 +635,7 @@ "post": { "operationId": "LivenessSessionOperations_CreateLivenessWithVerifySessionWithVerifyImage", "summary": "Create a new liveness session with verify. Provide the verify image during session creation.", - "description": "A session is best for client device scenarios where developers want to authorize a client device to perform only a liveness detection without granting full access to their resource. 
Created sessions have a limited life span and only authorize clients to perform the desired action before access is expired.\n\nPermissions includes...\n>\n*\n * Ability to call /detectLivenessWithVerify/singleModal for up to 3 retries.\n * A token lifetime of 10 minutes.\n\n> [!NOTE]\n>\n> *\n> * Client access can be revoked by deleting the session using the Delete Liveness With Verify Session operation.\n> * To retrieve a result, use the Get Liveness With Verify Session.\n> * To audit the individual requests that a client has made to your resource, use the List Liveness With Verify Session Audit Entries.\n\nRecommended Option: VerifyImage is provided during session creation.", + "description": "A session is best for client device scenarios where developers want to authorize a client device to perform only a liveness detection without granting full access to their resource. Created sessions have a limited life span and only authorize clients to perform the desired action before access is expired.\n\nPermissions includes...\n>\n*\n * A token lifetime of 10 minutes.\n\n> [!NOTE]\n>\n> *\n> * Client access can be revoked by deleting the session using the Delete Liveness With Verify Session operation.\n> * To retrieve a result, use the Get Liveness With Verify Session.\n> * To audit the individual requests that a client has made to your resource, use the List Liveness With Verify Session Audit Entries.\n\nRecommended Option: VerifyImage is provided during session creation.", "consumes": [ "multipart/form-data" ], @@ -5487,7 +5487,7 @@ "post": { "operationId": "LivenessSessionOperations_CreateLivenessWithVerifySession", "summary": "Create a new liveness session with verify. Client device submits VerifyImage during the /detectLivenessWithVerify/singleModal call.", - "description": "A session is best for client device scenarios where developers want to authorize a client device to perform only a liveness detection without granting full access to their resource. 
Created sessions have a limited life span and only authorize clients to perform the desired action before access is expired.\n\nPermissions includes...\n>\n*\n * Ability to call /detectLivenessWithVerify/singleModal for up to 3 retries.\n * A token lifetime of 10 minutes.\n\n> [!NOTE]\n>\n> *\n> * Client access can be revoked by deleting the session using the Delete Liveness With Verify Session operation.\n> * To retrieve a result, use the Get Liveness With Verify Session.\n> * To audit the individual requests that a client has made to your resource, use the List Liveness With Verify Session Audit Entries.\n\nAlternative Option: Client device submits VerifyImage during the /detectLivenessWithVerify/singleModal call.\n> [!NOTE]\n> Extra measures should be taken to validate that the client is sending the expected VerifyImage.", + "description": "A session is best for client device scenarios where developers want to authorize a client device to perform only a liveness detection without granting full access to their resource. 
Created sessions have a limited life span and only authorize clients to perform the desired action before access is expired.\n\nPermissions includes...\n>\n*\n * A token lifetime of 10 minutes.\n\n> [!NOTE]\n>\n> *\n> * Client access can be revoked by deleting the session using the Delete Liveness With Verify Session operation.\n> * To retrieve a result, use the Get Liveness With Verify Session.\n> * To audit the individual requests that a client has made to your resource, use the List Liveness With Verify Session Audit Entries.\n\nAlternative Option: Client device submits VerifyImage during the /detectLivenessWithVerify/singleModal call.\n> [!NOTE]\n> Extra measures should be taken to validate that the client is sending the expected VerifyImage.", "parameters": [ { "name": "body", diff --git a/specification/ai/data-plane/Face/preview/v1.3-preview.1/Face.json b/specification/ai/data-plane/Face/preview/v1.3-preview.1/Face.json new file mode 100644 index 000000000000..0ee91ee4b098 --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.3-preview.1/Face.json @@ -0,0 +1,7403 @@ +{ + "swagger": "2.0", + "info": { + "title": "Azure AI Face API", + "version": "v1.3-preview.1", + "x-typespec-generated": [ + { + "emitter": "@azure-tools/typespec-autorest" + } + ] + }, + "schemes": [ + "https" + ], + "x-ms-parameterized-host": { + "hostTemplate": "{endpoint}/face/{apiVersion}", + "useSchemePrefix": false, + "parameters": [ + { + "name": "endpoint", + "in": "path", + "description": "Supported Cognitive Services endpoints (protocol and hostname, for example:\nhttps://{resource-name}.cognitiveservices.azure.com).", + "required": true, + "type": "string", + "format": "uri", + "x-ms-skip-url-encoding": true + }, + { + "name": "apiVersion", + "in": "path", + "description": "API Version", + "required": true, + "type": "string", + "enum": [ + "v1.3-preview.1" + ], + "x-ms-enum": { + "name": "Versions", + "modelAsString": true, + "values": [ + { + "name": "v1_3_preview_1", + 
"value": "v1.3-preview.1", + "description": "v1.3-preview.1" + } + ] + } + } + ] + }, + "produces": [ + "application/json" + ], + "consumes": [ + "application/json" + ], + "security": [ + { + "KeyAuth": [] + }, + { + "AADToken": [ + "https://cognitiveservices.azure.com/.default" + ] + } + ], + "securityDefinitions": { + "AADToken": { + "type": "oauth2", + "description": "The Azure Active Directory OAuth2 Flow", + "flow": "accessCode", + "authorizationUrl": "https://api.example.com/oauth2/authorize", + "scopes": { + "https://cognitiveservices.azure.com/.default": "" + }, + "tokenUrl": "https://api.example.com/oauth2/token" + }, + "KeyAuth": { + "type": "apiKey", + "description": "The secret key for your Azure AI Face subscription.", + "name": "Ocp-Apim-Subscription-Key", + "in": "header" + } + }, + "tags": [], + "paths": { + "/detect": { + "post": { + "operationId": "FaceDetectionOperations_DetectFromUrl", + "summary": "Detect human faces in an image, return face rectangles, and optionally with faceIds, landmarks, and attributes.", + "description": "> [!IMPORTANT]\n> Microsoft has retired or limited facial recognition capabilities that can be used to try to infer emotional states and identity attributes which, if misused, can subject people to stereotyping, discrimination or unfair denial of services. The retired capabilities are emotion and gender. The limited capabilities are age, smile, facial hair, hair and makeup. Email [Azure Face API](mailto:azureface@microsoft.com) if you have a responsible use case that would benefit from the use of any of the limited capabilities. Read more about this decision [here](https://azure.microsoft.com/blog/responsible-ai-investments-and-safeguards-for-facial-recognition/).\n\n*\n * No image will be stored. Only the extracted face feature(s) will be stored on server. The faceId is an identifier of the face feature and will be used in \"Identify\", \"Verify\", and \"Find Similar\". 
The stored face features will expire and be deleted at the time specified by faceIdTimeToLive after the original detection call.\n * Optional parameters include faceId, landmarks, and attributes. Attributes include headPose, glasses, occlusion, accessories, blur, exposure, noise, mask, and qualityForRecognition. Some of the results returned for specific attributes may not be highly accurate.\n * JPEG, PNG, GIF (the first frame), and BMP format are supported. The allowed image file size is from 1KB to 6MB.\n * The minimum detectable face size is 36x36 pixels in an image no larger than 1920x1080 pixels. Images with dimensions higher than 1920x1080 pixels will need a proportionally larger minimum face size.\n * Up to 100 faces can be returned for an image. Faces are ranked by face rectangle size from large to small.\n * For optimal results when querying \"Identify\", \"Verify\", and \"Find Similar\" ('returnFaceId' is true), please use faces that are: frontal, clear, and with a minimum size of 200x200 pixels (100 pixels between eyes).\n * Different 'detectionModel' values can be provided. The availability of landmarks and supported attributes depends on the detection model specified. To use and compare different detection models, please refer to [here](https://learn.microsoft.com/azure/ai-services/computer-vision/how-to/specify-detection-model).\n * Different 'recognitionModel' values are provided. If follow-up operations like \"Verify\", \"Identify\", \"Find Similar\" are needed, please specify the recognition model with 'recognitionModel' parameter. The default value for 'recognitionModel' is 'recognition_01', if latest model needed, please explicitly specify the model you need in this parameter. Once specified, the detected faceIds will be associated with the specified recognition model. 
More details, please refer to [here](https://learn.microsoft.com/azure/ai-services/computer-vision/how-to/specify-recognition-model).", + "parameters": [ + { + "name": "detectionModel", + "in": "query", + "description": "The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. 'detection_03' is recommended since its accuracy is improved on smaller faces (64x64 pixels) and rotated face orientations.", + "required": false, + "type": "string", + "default": "detection_01", + "enum": [ + "detection_01", + "detection_02", + "detection_03" + ], + "x-ms-enum": { + "name": "DetectionModel", + "modelAsString": true, + "values": [ + { + "name": "detection_01", + "value": "detection_01", + "description": "The default detection model. Recommend for near frontal face detection. For scenarios with exceptionally large angle (head-pose) faces, occluded faces or wrong image orientation, the faces in such cases may not be detected." + }, + { + "name": "detection_02", + "value": "detection_02", + "description": "Detection model released in 2019 May with improved accuracy especially on small, side and blurry faces." + }, + { + "name": "detection_03", + "value": "detection_03", + "description": "Detection model released in 2021 February with improved accuracy especially on small faces." + } + ] + } + }, + { + "name": "recognitionModel", + "in": "query", + "description": "The 'recognitionModel' associated with the detected faceIds. Supported 'recognitionModel' values include 'recognition_01', 'recognition_02', 'recognition_03' or 'recognition_04'. The default value is 'recognition_01'. 
'recognition_04' is recommended since its accuracy is improved on faces wearing masks compared with 'recognition_03', and its overall accuracy is improved compared with 'recognition_01' and 'recognition_02'.", + "required": false, + "type": "string", + "default": "recognition_01", + "enum": [ + "recognition_01", + "recognition_02", + "recognition_03", + "recognition_04" + ], + "x-ms-enum": { + "name": "RecognitionModel", + "modelAsString": true, + "values": [ + { + "name": "recognition_01", + "value": "recognition_01", + "description": "The default recognition model for \"Detect\". All those faceIds created before 2019 March are bonded with this recognition model." + }, + { + "name": "recognition_02", + "value": "recognition_02", + "description": "Recognition model released in 2019 March." + }, + { + "name": "recognition_03", + "value": "recognition_03", + "description": "Recognition model released in 2020 May." + }, + { + "name": "recognition_04", + "value": "recognition_04", + "description": "Recognition model released in 2021 February. It's recommended to use this recognition model for better recognition accuracy." + } + ] + } + }, + { + "name": "returnFaceId", + "in": "query", + "description": "Return faceIds of the detected faces or not. The default value is true.", + "required": false, + "type": "boolean", + "default": true + }, + { + "name": "returnFaceAttributes", + "in": "query", + "description": "Analyze and return the one or more specified face attributes in the comma-separated string like 'returnFaceAttributes=headPose,glasses'. 
Face attribute analysis has additional computational and time cost.", + "required": false, + "type": "array", + "items": { + "type": "string", + "enum": [ + "headPose", + "glasses", + "occlusion", + "accessories", + "blur", + "exposure", + "noise", + "mask", + "qualityForRecognition", + "age", + "smile", + "facialHair", + "hair" + ], + "x-ms-enum": { + "name": "FaceAttributeType", + "modelAsString": true, + "values": [ + { + "name": "headPose", + "value": "headPose", + "description": "3-D roll/yaw/pitch angles for face direction." + }, + { + "name": "glasses", + "value": "glasses", + "description": "Glasses type. Values include 'NoGlasses', 'ReadingGlasses', 'Sunglasses', 'SwimmingGoggles'." + }, + { + "name": "occlusion", + "value": "occlusion", + "description": "Whether each facial area is occluded, including forehead, eyes and mouth." + }, + { + "name": "accessories", + "value": "accessories", + "description": "Accessories around face, including 'headwear', 'glasses' and 'mask'. Empty array means no accessories detected. Note this is after a face is detected. Large mask could result in no face to be detected." + }, + { + "name": "blur", + "value": "blur", + "description": "Face is blurry or not. Level returns 'Low', 'Medium' or 'High'. Value returns a number between [0,1], the larger the blurrier." + }, + { + "name": "exposure", + "value": "exposure", + "description": "Face exposure level. Level returns 'GoodExposure', 'OverExposure' or 'UnderExposure'." + }, + { + "name": "noise", + "value": "noise", + "description": "Noise level of face pixels. Level returns 'Low', 'Medium' and 'High'. Value returns a number between [0,1], the larger the noisier" + }, + { + "name": "mask", + "value": "mask", + "description": "Whether each face is wearing a mask. Mask type returns 'noMask', 'faceMask', 'otherMaskOrOcclusion', or 'uncertain'. Value returns a boolean 'noseAndMouthCovered' indicating whether nose and mouth are covered." 
+ }, + { + "name": "qualityForRecognition", + "value": "qualityForRecognition", + "description": "The overall image quality regarding whether the image being used in the detection is of sufficient quality to attempt face recognition on. The value is an informal rating of low, medium, or high. Only 'high' quality images are recommended for person enrollment and quality at or above 'medium' is recommended for identification scenarios. The attribute is only available when using recognition models recognition_03 or recognition_04." + }, + { + "name": "age", + "value": "age", + "description": "Age in years." + }, + { + "name": "smile", + "value": "smile", + "description": "Smile intensity, a number between [0,1]." + }, + { + "name": "facialHair", + "value": "facialHair", + "description": "Properties describing facial hair attributes." + }, + { + "name": "hair", + "value": "hair", + "description": "Properties describing hair attributes." + } + ] + } + }, + "collectionFormat": "csv" + }, + { + "name": "returnFaceLandmarks", + "in": "query", + "description": "Return face landmarks of the detected faces or not. The default value is false.", + "required": false, + "type": "boolean", + "default": false + }, + { + "name": "returnRecognitionModel", + "in": "query", + "description": "Return 'recognitionModel' or not. The default value is false. This is only applicable when returnFaceId = true.", + "required": false, + "type": "boolean", + "default": false + }, + { + "name": "faceIdTimeToLive", + "in": "query", + "description": "The number of seconds for the face ID being cached. Supported range from 60 seconds up to 86400 seconds. 
The default value is 86400 (24 hours).", + "required": false, + "type": "integer", + "format": "int32", + "default": 86400, + "minimum": 60, + "maximum": 86400 + }, + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "type": "object", + "properties": { + "url": { + "type": "string", + "format": "uri", + "description": "URL of input image." + } + }, + "required": [ + "url" + ] + } + } + ], + "responses": { + "200": { + "description": "A successful call returns an array of face entries ranked by face rectangle size in descending order. An empty response indicates no faces detected.", + "schema": { + "type": "array", + "items": { + "$ref": "#/definitions/FaceDetectionResult" + } + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Detect with Image URL": { + "$ref": "./examples/DetectFromUrl.json" + } + } + } + }, + "/detectLiveness-sessions": { + "post": { + "operationId": "LivenessSessionOperations_CreateLivenessSession", + "summary": "Create a new detect liveness session.", + "description": "A session is best for client device scenarios where developers want to authorize a client device to perform only a liveness detection without granting full access to their resource. Created sessions have a limited life span and only authorize clients to perform the desired action before access is expired.\n\nPermissions includes...\n>\n*\n * A token lifetime of 10 minutes.\n\n> [!NOTE]\n> Client access can be revoked by deleting the session using the Delete Liveness Session operation. To retrieve a result, use the Get Liveness Session. 
To audit the individual requests that a client has made to your resource, use the List Liveness Session Audit Entries.", + "parameters": [ + { + "name": "body", + "in": "body", + "description": "Body parameter.", + "required": true, + "schema": { + "$ref": "#/definitions/CreateLivenessSessionContent" + } + } + ], + "responses": { + "200": { + "description": "A successful call creates a session for a client device and provides an authorization token for use by the client application for a limited purpose and time.", + "schema": { + "$ref": "#/definitions/LivenessSession" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Create Liveness Session": { + "$ref": "./examples/LivenessSessionOperations_CreateLivenessSession.json" + } + } + } + }, + "/detectLiveness-sessions/{sessionId}": { + "get": { + "operationId": "LivenessSessionOperations_GetLivenessSessionResult", + "description": "Get session result of detectLiveness/singleModal call.", + "parameters": [ + { + "name": "sessionId", + "in": "path", + "description": "The unique ID to reference this session.", + "required": true, + "type": "string" + } + ], + "responses": { + "200": { + "description": "The request has succeeded.", + "schema": { + "$ref": "#/definitions/LivenessSession" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." 
+ } + } + } + }, + "x-ms-examples": { + "Get LivenessSession Result": { + "$ref": "./examples/LivenessSessionOperations_GetLivenessSessionResult.json" + } + } + }, + "delete": { + "operationId": "LivenessSessionOperations_DeleteLivenessSession", + "summary": "Delete all session related information for matching the specified session id.", + "description": "> [!NOTE]\n> Deleting a session deactivates the Session Auth Token by blocking future API calls made with that Auth Token. While this can be used to remove any access for that token, those requests will still count towards overall resource rate limits. It's best to leverage TokenTTL to limit length of tokens in the case that it is misused.", + "parameters": [ + { + "name": "sessionId", + "in": "path", + "description": "The unique ID to reference this session.", + "required": true, + "type": "string" + } + ], + "responses": { + "204": { + "description": "Successfully deleted session and all correlated data." + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Delete Liveness Session": { + "$ref": "./examples/LivenessSessionOperations_DeleteLivenessSession.json" + } + } + } + }, + "/detectLivenessWithVerify-sessions": { + "post": { + "operationId": "LivenessSessionOperations_CreateLivenessWithVerifySession", + "summary": "Create a new liveness session with verify. Provide the verify image during session creation.", + "description": "A session is best for client device scenarios where developers want to authorize a client device to perform only a liveness detection without granting full access to their resource. 
Created sessions have a limited life span and only authorize clients to perform the desired action before access is expired.\n\nPermissions includes...\n>\n*\n * A token lifetime of 10 minutes.\n\n> [!NOTE]\n>\n> *\n> * Client access can be revoked by deleting the session using the Delete Liveness With Verify Session operation.\n> * To retrieve a result, use the Get Liveness With Verify Session.\n> * To audit the individual requests that a client has made to your resource, use the List Liveness With Verify Session Audit Entries.", + "consumes": [ + "multipart/form-data" + ], + "parameters": [ + { + "name": "livenessOperationMode", + "in": "formData", + "description": "Type of liveness mode the client should follow.", + "required": true, + "type": "string", + "enum": [ + "Passive", + "PassiveActive" + ], + "x-ms-enum": { + "name": "LivenessOperationMode", + "modelAsString": true, + "values": [ + { + "name": "Passive", + "value": "Passive", + "description": "Utilizes a passive liveness technique that requires no additional actions from the user. Requires normal indoor lighting and high screen brightness for optimal performance. And thus, this mode has a narrow operational envelope and will not be suitable for scenarios that require the end-user to be in bright lighting conditions. Note: this is the only supported mode for the Mobile (iOS and Android) solution." + }, + { + "name": "PassiveActive", + "value": "PassiveActive", + "description": "This mode utilizes a hybrid passive or active liveness technique that necessitates user cooperation. It is optimized to require active motion only under suboptimal lighting conditions. Unlike the passive mode, this mode has no lighting restrictions, and thus offers a broader operational envelope. This mode is preferable on Web based solutions due to the lack of automatic screen brightness control available on browsers which hinders the Passive mode's operational envelope on Web based solutions." 
+ } + ] + } + }, + { + "name": "deviceCorrelationIdSetInClient", + "in": "formData", + "description": "Whether or not to allow client to set their own 'deviceCorrelationId' via the Vision SDK. Default is false, and 'deviceCorrelationId' must be set in this request body.", + "required": false, + "type": "boolean" + }, + { + "name": "enableSessionImage", + "in": "formData", + "description": "Whether or not to store the session image.", + "required": false, + "type": "boolean" + }, + { + "name": "livenessModelVersion", + "in": "formData", + "description": "The model version used for liveness classification. This is an optional parameter, and if this is not specified, then the latest supported model version will be chosen.", + "required": false, + "type": "string", + "enum": [ + "2024-11-15" + ], + "x-ms-enum": { + "name": "LivenessModel", + "modelAsString": true, + "values": [ + { + "name": "v2024_11_15", + "value": "2024-11-15" + } + ] + } + }, + { + "name": "returnVerifyImageHash", + "in": "formData", + "description": "Whether or not to return the verify image hash.", + "required": false, + "type": "boolean" + }, + { + "name": "verifyConfidenceThreshold", + "in": "formData", + "description": "Threshold for confidence of the face verification. Please refer to the documentation for more details. https://learn.microsoft.com/legal/cognitive-services/face/characteristics-and-limitations?context=%2Fazure%2Fai-services%2Fcomputer-vision%2Fcontext%2Fcontext#recognition-confidence-score", + "required": false, + "type": "number", + "format": "float", + "minimum": 0, + "maximum": 1 + }, + { + "name": "verifyImage", + "in": "formData", + "description": "The image stream for verify. Content-Disposition header field for this part must have filename.", + "required": true, + "type": "file" + }, + { + "name": "deviceCorrelationId", + "in": "formData", + "description": "Unique Guid per each end-user device. This is to provide rate limiting and anti-hammering. 
If 'deviceCorrelationIdSetInClient' is true in this request, this 'deviceCorrelationId' must be null.", + "required": false, + "type": "string" + }, + { + "name": "authTokenTimeToLiveInSeconds", + "in": "formData", + "description": "Seconds the session should last for. Range is 60 to 86400 seconds. Default value is 600.", + "required": false, + "type": "integer", + "format": "int32", + "minimum": 60, + "maximum": 86400 + }, + { + "name": "numberOfClientAttemptsAllowed", + "in": "formData", + "description": "The number of times a client can attempt a liveness check using the same authToken. Default value is 1. Maximum value is 3.", + "required": false, + "type": "integer", + "format": "int32", + "minimum": 1, + "maximum": 3 + } + ], + "responses": { + "200": { + "description": "A successful call creates a session for a client device and provides an authorization token for use by the client application for a limited purpose and time.", + "schema": { + "$ref": "#/definitions/LivenessWithVerifySession" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." 
+ } + } + } + }, + "x-ms-examples": { + "Create LivenessWithVerify Session": { + "$ref": "./examples/LivenessSessionOperations_CreateLivenessWithVerifySession.json" + } + } + } + }, + "/detectLivenessWithVerify-sessions/{sessionId}": { + "get": { + "operationId": "LivenessSessionOperations_GetLivenessWithVerifySessionResult", + "description": "Get session result of detectLivenessWithVerify/singleModal call.", + "parameters": [ + { + "name": "sessionId", + "in": "path", + "description": "The unique ID to reference this session.", + "required": true, + "type": "string" + } + ], + "responses": { + "200": { + "description": "The request has succeeded.", + "schema": { + "$ref": "#/definitions/LivenessWithVerifySession" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Get LivenessWithVerify Session Result": { + "$ref": "./examples/LivenessSessionOperations_GetLivenessWithVerifySessionResult.json" + } + } + }, + "delete": { + "operationId": "LivenessSessionOperations_DeleteLivenessWithVerifySession", + "summary": "Delete all session related information for matching the specified session id.", + "description": "> [!NOTE]\n> Deleting a session deactivates the Session Auth Token by blocking future API calls made with that Auth Token. While this can be used to remove any access for that token, those requests will still count towards overall resource rate limits. It's best to leverage TokenTTL to limit length of tokens in the case that it is misused.", + "parameters": [ + { + "name": "sessionId", + "in": "path", + "description": "The unique ID to reference this session.", + "required": true, + "type": "string" + } + ], + "responses": { + "204": { + "description": "Successfully deleted session and all correlated data." 
+ }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Delete LivenessWithVerify Session": { + "$ref": "./examples/LivenessSessionOperations_DeleteLivenessWithVerifySession.json" + } + } + } + }, + "/facelists": { + "get": { + "operationId": "FaceListOperations_GetFaceLists", + "description": "List Face Lists' faceListId, name, userData and recognitionModel.\n\nTo get face information inside Face List use \"Get Face List\".", + "parameters": [ + { + "name": "returnRecognitionModel", + "in": "query", + "description": "Return 'recognitionModel' or not. The default value is false.", + "required": false, + "type": "boolean", + "default": false + } + ], + "responses": { + "200": { + "description": "A successful call returns an array of Face Lists.", + "schema": { + "type": "array", + "items": { + "$ref": "#/definitions/FaceListItem" + } + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." 
+ } + } + } + }, + "x-ms-examples": { + "Get FaceLists": { + "$ref": "./examples/FaceListOperations_GetFaceLists.json" + } + } + } + }, + "/facelists/{faceListId}": { + "get": { + "operationId": "FaceListOperations_GetFaceList", + "description": "Retrieve a Face List's faceListId, name, userData, recognitionModel and faces in the Face List.", + "parameters": [ + { + "name": "faceListId", + "in": "path", + "description": "Valid character is letter in lower case or digit or '-' or '_', maximum length is 64.", + "required": true, + "type": "string", + "minLength": 1, + "maxLength": 64, + "pattern": "^[a-z0-9-_]+$" + }, + { + "name": "returnRecognitionModel", + "in": "query", + "description": "Return 'recognitionModel' or not. The default value is false.", + "required": false, + "type": "boolean", + "default": false + } + ], + "responses": { + "200": { + "description": "A successful call returns the Face List's information.", + "schema": { + "$ref": "#/definitions/FaceList" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Get FaceList": { + "$ref": "./examples/FaceListOperations_GetFaceList.json" + } + } + }, + "put": { + "operationId": "FaceListOperations_CreateFaceList", + "summary": "Create an empty Face List with user-specified faceListId, name, an optional userData and recognitionModel.", + "description": "Up to 64 Face Lists are allowed in one subscription.\n\nFace List is a list of faces, up to 1,000 faces, and used by \"Find Similar From Face List\".\n\nAfter creation, user should use \"Add Face List Face\" to import the faces. No image will be stored. 
Only the extracted face feature(s) will be stored on server until \"Delete Face List\" is called.\n\n\"Find Similar\" is used for scenario like finding celebrity-like faces, similar face filtering, or as a light way face identification. But if the actual use is to identify person, please use Person Group / Large Person Group and \"Identify\".\n\nPlease consider Large Face List when the face number is large. It can support up to 1,000,000 faces.", + "parameters": [ + { + "name": "faceListId", + "in": "path", + "description": "Valid character is letter in lower case or digit or '-' or '_', maximum length is 64.", + "required": true, + "type": "string", + "minLength": 1, + "maxLength": 64, + "pattern": "^[a-z0-9-_]+$" + }, + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/CreateCollectionRequest" + } + } + ], + "responses": { + "200": { + "description": "The request has succeeded." + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Create FaceList": { + "$ref": "./examples/FaceListOperations_CreateFaceList.json" + } + } + }, + "patch": { + "operationId": "FaceListOperations_UpdateFaceList", + "description": "Update information of a Face List, including name and userData.", + "parameters": [ + { + "name": "faceListId", + "in": "path", + "description": "Valid character is letter in lower case or digit or '-' or '_', maximum length is 64.", + "required": true, + "type": "string", + "minLength": 1, + "maxLength": 64, + "pattern": "^[a-z0-9-_]+$" + }, + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/UserDefinedFieldsForUpdate" + } + } + ], + "responses": { + "200": { + "description": "The request has succeeded." 
+ }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Update FaceList": { + "$ref": "./examples/FaceListOperations_UpdateFaceList.json" + } + } + }, + "delete": { + "operationId": "FaceListOperations_DeleteFaceList", + "description": "Delete a specified Face List.", + "parameters": [ + { + "name": "faceListId", + "in": "path", + "description": "Valid character is letter in lower case or digit or '-' or '_', maximum length is 64.", + "required": true, + "type": "string", + "minLength": 1, + "maxLength": 64, + "pattern": "^[a-z0-9-_]+$" + } + ], + "responses": { + "200": { + "description": "The request has succeeded." + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Delete FaceList": { + "$ref": "./examples/FaceListOperations_DeleteFaceList.json" + } + } + } + }, + "/facelists/{faceListId}/persistedfaces": { + "post": { + "operationId": "FaceListOperations_AddFaceListFaceFromUrl", + "summary": "Add a face to a specified Face List, up to 1,000 faces.", + "description": "To deal with an image containing multiple faces, input face can be specified as an image with a targetFace rectangle. It returns a persistedFaceId representing the added face. No image will be stored. Only the extracted face feature(s) will be stored on server until \"Delete Face List Face\" or \"Delete Face List\" is called.\n\nNote that persistedFaceId is different from faceId generated by \"Detect\".\n\n>\n*\n * Higher face image quality means better recognition precision. 
Please consider high-quality faces: frontal, clear, and face size is 200x200 pixels (100 pixels between eyes) or bigger.\n * JPEG, PNG, GIF (the first frame), and BMP format are supported. The allowed image file size is from 1KB to 6MB.\n * \"targetFace\" rectangle should contain one face. Zero or multiple faces will be regarded as an error. If the provided \"targetFace\" rectangle is not returned from \"Detect\", there's no guarantee to detect and add the face successfully.\n * Out of detectable face size (36x36 - 4096x4096 pixels), large head-pose, or large occlusions will cause failures.\n * The minimum detectable face size is 36x36 pixels in an image no larger than 1920x1080 pixels. Images with dimensions higher than 1920x1080 pixels will need a proportionally larger minimum face size.\n * Different 'detectionModel' values can be provided. To use and compare different detection models, please refer to [here](https://learn.microsoft.com/azure/ai-services/computer-vision/how-to/specify-detection-model).", + "parameters": [ + { + "name": "faceListId", + "in": "path", + "description": "Valid character is letter in lower case or digit or '-' or '_', maximum length is 64.", + "required": true, + "type": "string", + "minLength": 1, + "maxLength": 64, + "pattern": "^[a-z0-9-_]+$" + }, + { + "$ref": "#/parameters/AddFaceOptions.targetFace" + }, + { + "$ref": "#/parameters/AddFaceOptions.detectionModel" + }, + { + "$ref": "#/parameters/AddFaceOptions.userData" + }, + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/AddFaceFromUrlRequest" + } + } + ], + "responses": { + "200": { + "description": "A successful call returns a new persistedFaceId.", + "schema": { + "$ref": "#/definitions/AddFaceResult" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code 
indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Add Face to FaceList from Url": { + "$ref": "./examples/FaceListOperations_AddFaceListFaceFromUrl.json" + } + } + } + }, + "/facelists/{faceListId}/persistedfaces/{persistedFaceId}": { + "delete": { + "operationId": "FaceListOperations_DeleteFaceListFace", + "summary": "Delete a face from a Face List by specified faceListId and persistedFaceId.", + "description": "Adding/deleting faces to/from a same Face List are processed sequentially and to/from different Face Lists are in parallel.", + "parameters": [ + { + "name": "faceListId", + "in": "path", + "description": "Valid character is letter in lower case or digit or '-' or '_', maximum length is 64.", + "required": true, + "type": "string", + "minLength": 1, + "maxLength": 64, + "pattern": "^[a-z0-9-_]+$" + }, + { + "name": "persistedFaceId", + "in": "path", + "description": "Face ID of the face.", + "required": true, + "type": "string", + "format": "uuid" + } + ], + "responses": { + "200": { + "description": "The request has succeeded." + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Delete Face from FaceList": { + "$ref": "./examples/FaceListOperations_DeleteFaceListFace.json" + } + } + } + }, + "/findsimilars": { + "post": { + "operationId": "FaceRecognitionOperations_FindSimilar", + "summary": "Given query face's faceId, to search the similar-looking faces from a faceId array. A faceId array contains the faces created by Detect.", + "description": "Depending on the input the returned similar faces list contains faceIds or persistedFaceIds ranked by similarity.\n\nFind similar has two working modes, \"matchPerson\" and \"matchFace\". 
\"matchPerson\" is the default mode that it tries to find faces of the same person as possible by using internal same-person thresholds. It is useful to find a known person's other photos. Note that an empty list will be returned if no faces pass the internal thresholds. \"matchFace\" mode ignores same-person thresholds and returns ranked similar faces anyway, even the similarity is low. It can be used in the cases like searching celebrity-looking faces.\n\nThe 'recognitionModel' associated with the query faceId should be the same as the 'recognitionModel' used by the target faceId array.", + "parameters": [ + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "type": "object", + "properties": { + "faceId": { + "$ref": "#/definitions/Azure.Core.uuid", + "description": "faceId of the query face. User needs to call \"Detect\" first to get a valid faceId. Note that this faceId is not persisted and will expire 24 hours after the detection call." + }, + "maxNumOfCandidatesReturned": { + "type": "integer", + "format": "int32", + "description": "The number of top similar faces returned. The valid range is [1, 1000]. Default value is 20.", + "default": 20, + "minimum": 1, + "maximum": 1000 + }, + "mode": { + "type": "string", + "description": "Similar face searching mode. It can be 'matchPerson' or 'matchFace'. Default value is 'matchPerson'.", + "default": "matchPerson", + "enum": [ + "matchPerson", + "matchFace" + ], + "x-ms-enum": { + "name": "FindSimilarMatchMode", + "modelAsString": true, + "values": [ + { + "name": "matchPerson", + "value": "matchPerson", + "description": "Match person." + }, + { + "name": "matchFace", + "value": "matchFace", + "description": "Match face." + } + ] + } + }, + "faceIds": { + "type": "array", + "description": "An array of candidate faceIds. All of them are created by \"Detect\" and the faceIds will expire 24 hours after the detection call. 
The number of faceIds is limited to 1000.", + "items": { + "$ref": "#/definitions/Azure.Core.uuid" + } + } + }, + "required": [ + "faceId", + "faceIds" + ] + } + } + ], + "responses": { + "200": { + "description": "A successful call returns an array of the most similar faces represented in faceId if the input parameter is faceIds or persistedFaceId if the input parameter is faceListId or largeFaceListId.", + "schema": { + "type": "array", + "items": { + "$ref": "#/definitions/FindSimilarResult" + } + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Find Similar among Face IDs": { + "$ref": "./examples/FaceRecognitionOperations_FindSimilar.json" + } + } + } + }, + "/group": { + "post": { + "operationId": "FaceRecognitionOperations_Group", + "summary": "Divide candidate faces into groups based on face similarity.", + "description": ">\n*\n * The output is one or more disjointed face groups and a messyGroup. A face group contains faces that have similar looking, often of the same person. Face groups are ranked by group size, i.e. number of faces. Notice that faces belonging to a same person might be split into several groups in the result.\n * MessyGroup is a special face group containing faces that cannot find any similar counterpart face from original faces. The messyGroup will not appear in the result if all faces found their counterparts.\n * Group API needs at least 2 candidate faces and 1000 at most. 
We suggest to try \"Verify Face To Face\" when you only have 2 candidate faces.\n * The 'recognitionModel' associated with the query faces' faceIds should be the same.", + "parameters": [ + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "type": "object", + "properties": { + "faceIds": { + "type": "array", + "description": "Array of candidate faceIds created by \"Detect\". The maximum is 1000 faces.", + "items": { + "$ref": "#/definitions/Azure.Core.uuid" + } + } + }, + "required": [ + "faceIds" + ] + } + } + ], + "responses": { + "200": { + "description": "A successful call returns one or more groups of similar faces (rank by group size) and a messyGroup.", + "schema": { + "$ref": "#/definitions/GroupingResult" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Group Face IDs": { + "$ref": "./examples/FaceRecognitionOperations_Group.json" + } + } + } + }, + "/identify": { + "post": { + "operationId": "FaceRecognitionOperations_IdentifyFromPersonGroup", + "summary": "1-to-many identification to find the closest matches of the specific query person face from a Person Group.", + "description": "For each face in the faceIds array, Face Identify will compute similarities between the query face and all the faces in the Person Group (given by personGroupId), and return candidate person(s) for that face ranked by similarity confidence. The Person Group should be trained to make it ready for identification. 
See more in \"Train Person Group\".\n> [!NOTE]\n>\n> *\n> * The algorithm allows more than one face to be identified independently at the same request, but no more than 10 faces.\n> * Each person could have more than one face, but no more than 248 faces.\n> * Higher face image quality means better identification precision. Please consider high-quality faces: frontal, clear, and face size is 200x200 pixels (100 pixels between eyes) or bigger.\n> * Number of candidates returned is restricted by maxNumOfCandidatesReturned and confidenceThreshold. If no person is identified, the returned candidates will be an empty array.\n> * Try \"Find Similar\" when you need to find similar faces from a Face List/Large Face List instead of a Person Group.\n> * The 'recognitionModel' associated with the query faces' faceIds should be the same as the 'recognitionModel' used by the target Person Group.", + "parameters": [ + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "type": "object", + "properties": { + "faceIds": { + "type": "array", + "description": "Array of query faces faceIds, created by the \"Detect\". Each of the faces are identified independently. The valid number of faceIds is between [1, 10].", + "minItems": 1, + "maxItems": 10, + "items": { + "$ref": "#/definitions/Azure.Core.uuid" + } + }, + "personGroupId": { + "type": "string", + "description": "personGroupId of the target Person Group, created by \"Create Person Group\". Parameter personGroupId and largePersonGroupId should not be provided at the same time." + }, + "maxNumOfCandidatesReturned": { + "type": "integer", + "format": "int32", + "description": "The range of maxNumOfCandidatesReturned is between 1 and 100. Default value is 10.", + "default": 10, + "minimum": 1, + "maximum": 100 + }, + "confidenceThreshold": { + "type": "number", + "format": "float", + "description": "Customized identification confidence threshold, in the range of [0, 1]. 
Advanced user can tweak this value to override default internal threshold for better precision on their scenario data. Note there is no guarantee of this threshold value working on other data and after algorithm updates.", + "minimum": 0, + "maximum": 1 + } + }, + "required": [ + "faceIds", + "personGroupId" + ] + } + } + ], + "responses": { + "200": { + "description": "A successful call returns the identified candidate person(s) for each query face.", + "schema": { + "type": "array", + "items": { + "$ref": "#/definitions/IdentificationResult" + } + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Identify from PersonGroup": { + "$ref": "./examples/FaceRecognitionOperations_IdentifyFromPersonGroup.json" + } + } + } + }, + "/largefacelists": { + "get": { + "operationId": "FaceListOperations_GetLargeFaceLists", + "summary": "List Large Face Lists' information of largeFaceListId, name, userData and recognitionModel.", + "description": "To get face information inside largeFaceList use \"Get Large Face List Face\".\n\nLarge Face Lists are stored in alphabetical order of largeFaceListId.\n>\n*\n * \"start\" parameter (string, optional) specifies an ID value from which returned entries will have larger IDs based on string comparison. Setting \"start\" to an empty value indicates that entries should be returned starting from the first item.\n * \"top\" parameter (int, optional) determines the maximum number of entries to be returned, with a limit of up to 1000 entries per call. 
To retrieve additional entries beyond this limit, specify \"start\" with the personId of the last entry returned in the current call.\n\n> [!TIP]\n>\n> * For example, there are total 5 items with their IDs: \"itemId1\", ..., \"itemId5\".\n> * \"start=&top=\" will return all 5 items.\n> * \"start=&top=2\" will return \"itemId1\", \"itemId2\".\n> * \"start=itemId2&top=3\" will return \"itemId3\", \"itemId4\", \"itemId5\".", + "parameters": [ + { + "name": "start", + "in": "query", + "description": "List resources greater than the \"start\". It contains no more than 64 characters. Default is empty.", + "required": false, + "type": "string" + }, + { + "name": "top", + "in": "query", + "description": "The number of items to list, ranging in [1, 1000]. Default is 1000.", + "required": false, + "type": "integer", + "format": "int32", + "default": 1000, + "minimum": 1, + "maximum": 1000 + }, + { + "name": "returnRecognitionModel", + "in": "query", + "description": "Return 'recognitionModel' or not. The default value is false.", + "required": false, + "type": "boolean", + "default": false + } + ], + "responses": { + "200": { + "description": "A successful call returns an array of Large Face Lists and their information (largeFaceListId, name and userData).", + "schema": { + "type": "array", + "items": { + "$ref": "#/definitions/LargeFaceList" + } + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." 
+ } + } + } + }, + "x-ms-examples": { + "Get LargeFaceLists": { + "$ref": "./examples/FaceListOperations_GetLargeFaceLists.json" + } + } + } + }, + "/largefacelists/{largeFaceListId}": { + "get": { + "operationId": "FaceListOperations_GetLargeFaceList", + "description": "Retrieve a Large Face List's largeFaceListId, name, userData and recognitionModel.", + "parameters": [ + { + "name": "largeFaceListId", + "in": "path", + "description": "Valid character is letter in lower case or digit or '-' or '_', maximum length is 64.", + "required": true, + "type": "string", + "minLength": 1, + "maxLength": 64, + "pattern": "^[a-z0-9-_]+$" + }, + { + "name": "returnRecognitionModel", + "in": "query", + "description": "Return 'recognitionModel' or not. The default value is false.", + "required": false, + "type": "boolean", + "default": false + } + ], + "responses": { + "200": { + "description": "A successful call returns the Large Face List's information.", + "schema": { + "$ref": "#/definitions/LargeFaceList" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Get LargeFaceList": { + "$ref": "./examples/FaceListOperations_GetLargeFaceList.json" + } + } + }, + "put": { + "operationId": "FaceListOperations_CreateLargeFaceList", + "summary": "Create an empty Large Face List with user-specified largeFaceListId, name, an optional userData and recognitionModel.", + "description": "Large Face List is a list of faces, up to 1,000,000 faces, and used by \"Find Similar From Large Face List\".\n\nAfter creation, user should use Add Large Face List Face to import the faces and Train Large Face List to make it ready for \"Find Similar\". No image will be stored. 
Only the extracted face feature(s) will be stored on server until Delete Large Face List is called.\n\n\"Find Similar\" is used for scenario like finding celebrity-like faces, similar face filtering, or as a light way face identification. But if the actual use is to identify person, please use Person Group / Large Person Group and \"Identify\".\n\n> [!NOTE]\n>\n> *\n> * Free-tier subscription quota: 64 Large Face Lists.\n> * S0-tier subscription quota: 1,000,000 Large Face Lists.", + "parameters": [ + { + "name": "largeFaceListId", + "in": "path", + "description": "Valid character is letter in lower case or digit or '-' or '_', maximum length is 64.", + "required": true, + "type": "string", + "minLength": 1, + "maxLength": 64, + "pattern": "^[a-z0-9-_]+$" + }, + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/CreateCollectionRequest" + } + } + ], + "responses": { + "200": { + "description": "The request has succeeded." + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." 
+ } + } + } + }, + "x-ms-examples": { + "Create LargeFaceList": { + "$ref": "./examples/FaceListOperations_CreateLargeFaceList.json" + } + } + }, + "patch": { + "operationId": "FaceListOperations_UpdateLargeFaceList", + "description": "Update information of a Large Face List, including name and userData.", + "parameters": [ + { + "name": "largeFaceListId", + "in": "path", + "description": "Valid character is letter in lower case or digit or '-' or '_', maximum length is 64.", + "required": true, + "type": "string", + "minLength": 1, + "maxLength": 64, + "pattern": "^[a-z0-9-_]+$" + }, + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/UserDefinedFieldsForUpdate" + } + } + ], + "responses": { + "200": { + "description": "The request has succeeded." + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Update LargePersonGroup": { + "$ref": "./examples/FaceListOperations_UpdateLargeFaceList.json" + } + } + }, + "delete": { + "operationId": "FaceListOperations_DeleteLargeFaceList", + "summary": "Delete a specified Large Face List with its associated persisted faces.", + "description": "Adding/deleting faces to/from a same Large Face List are processed sequentially and to/from different Large Face Lists are in parallel.", + "parameters": [ + { + "name": "largeFaceListId", + "in": "path", + "description": "Valid character is letter in lower case or digit or '-' or '_', maximum length is 64.", + "required": true, + "type": "string", + "minLength": 1, + "maxLength": 64, + "pattern": "^[a-z0-9-_]+$" + } + ], + "responses": { + "200": { + "description": "The request has succeeded." 
+ }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Delete LargeFaceList": { + "$ref": "./examples/FaceListOperations_DeleteLargeFaceList.json" + } + } + } + }, + "/largefacelists/{largeFaceListId}/persistedfaces": { + "get": { + "operationId": "FaceListOperations_GetLargeFaceListFaces", + "summary": "List faces' persistedFaceId and userData in a specified Large Face List.", + "description": "Faces are stored in alphabetical order of persistedFaceId created in \"Add Large Face List Face\".\n>\n*\n * \"start\" parameter (string, optional) specifies an ID value from which returned entries will have larger IDs based on string comparison. Setting \"start\" to an empty value indicates that entries should be returned starting from the first item.\n * \"top\" parameter (int, optional) determines the maximum number of entries to be returned, with a limit of up to 1000 entries per call. To retrieve additional entries beyond this limit, specify \"start\" with the personId of the last entry returned in the current call.\n\n> [!TIP]\n>\n> * For example, there are total 5 items with their IDs: \"itemId1\", ..., \"itemId5\".\n> * \"start=&top=\" will return all 5 items.\n> * \"start=&top=2\" will return \"itemId1\", \"itemId2\".\n> * \"start=itemId2&top=3\" will return \"itemId3\", \"itemId4\", \"itemId5\".", + "parameters": [ + { + "name": "largeFaceListId", + "in": "path", + "description": "Valid character is letter in lower case or digit or '-' or '_', maximum length is 64.", + "required": true, + "type": "string", + "minLength": 1, + "maxLength": 64, + "pattern": "^[a-z0-9-_]+$" + }, + { + "name": "start", + "in": "query", + "description": "List resources greater than the \"start\". It contains no more than 64 characters. 
Default is empty.", + "required": false, + "type": "string" + }, + { + "name": "top", + "in": "query", + "description": "The number of items to list, ranging in [1, 1000]. Default is 1000.", + "required": false, + "type": "integer", + "format": "int32", + "default": 1000, + "minimum": 1, + "maximum": 1000 + } + ], + "responses": { + "200": { + "description": "A successful call returns an array of persisted faces and their information (persistedFaceId and userData).", + "schema": { + "type": "array", + "items": { + "$ref": "#/definitions/LargeFaceListFace" + } + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Get Faces from LargeFaceList": { + "$ref": "./examples/FaceListOperations_GetLargeFaceListFaces.json" + } + } + }, + "post": { + "operationId": "FaceListOperations_AddLargeFaceListFaceFromUrl", + "summary": "Add a face to a specified Large Face List, up to 1,000,000 faces.", + "description": "To deal with an image containing multiple faces, input face can be specified as an image with a targetFace rectangle. It returns a persistedFaceId representing the added face. No image will be stored. Only the extracted face feature(s) will be stored on server until \"Delete Large Face List Face\" or \"Delete Large Face List\" is called.\n\nNote that persistedFaceId is different from faceId generated by \"Detect\".\n\n>\n*\n * Higher face image quality means better recognition precision. Please consider high-quality faces: frontal, clear, and face size is 200x200 pixels (100 pixels between eyes) or bigger.\n * JPEG, PNG, GIF (the first frame), and BMP format are supported. The allowed image file size is from 1KB to 6MB.\n * \"targetFace\" rectangle should contain one face. Zero or multiple faces will be regarded as an error. 
If the provided \"targetFace\" rectangle is not returned from \"Detect\", there's no guarantee to detect and add the face successfully.\n * Out of detectable face size (36x36 - 4096x4096 pixels), large head-pose, or large occlusions will cause failures.\n * The minimum detectable face size is 36x36 pixels in an image no larger than 1920x1080 pixels. Images with dimensions higher than 1920x1080 pixels will need a proportionally larger minimum face size.\n * Different 'detectionModel' values can be provided. To use and compare different detection models, please refer to [here](https://learn.microsoft.com/azure/ai-services/computer-vision/how-to/specify-detection-model).\n\n> [!NOTE]\n>\n> *\n> * Free-tier subscription quota: 1,000 faces per Large Face List.\n> * S0-tier subscription quota: 1,000,000 faces per Large Face List.", + "parameters": [ + { + "name": "largeFaceListId", + "in": "path", + "description": "Valid character is letter in lower case or digit or '-' or '_', maximum length is 64.", + "required": true, + "type": "string", + "minLength": 1, + "maxLength": 64, + "pattern": "^[a-z0-9-_]+$" + }, + { + "$ref": "#/parameters/AddFaceOptions.targetFace" + }, + { + "$ref": "#/parameters/AddFaceOptions.detectionModel" + }, + { + "$ref": "#/parameters/AddFaceOptions.userData" + }, + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/AddFaceFromUrlRequest" + } + } + ], + "responses": { + "200": { + "description": "A successful call returns a new persistedFaceId.", + "schema": { + "$ref": "#/definitions/AddFaceResult" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." 
+ } + } + } + }, + "x-ms-examples": { + "Add Face to LargeFaceList from Url": { + "$ref": "./examples/FaceListOperations_AddLargeFaceListFaceFromUrl.json" + } + } + } + }, + "/largefacelists/{largeFaceListId}/persistedfaces/{persistedFaceId}": { + "get": { + "operationId": "FaceListOperations_GetLargeFaceListFace", + "description": "Retrieve persisted face in Large Face List by largeFaceListId and persistedFaceId.", + "parameters": [ + { + "name": "largeFaceListId", + "in": "path", + "description": "Valid character is letter in lower case or digit or '-' or '_', maximum length is 64.", + "required": true, + "type": "string", + "minLength": 1, + "maxLength": 64, + "pattern": "^[a-z0-9-_]+$" + }, + { + "name": "persistedFaceId", + "in": "path", + "description": "Face ID of the face.", + "required": true, + "type": "string", + "format": "uuid" + } + ], + "responses": { + "200": { + "description": "A successful call returns target persisted face's information (persistedFaceId and userData).", + "schema": { + "$ref": "#/definitions/LargeFaceListFace" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." 
+ } + } + } + }, + "x-ms-examples": { + "Get Face from LargeFaceList": { + "$ref": "./examples/FaceListOperations_GetLargeFaceListFace.json" + } + } + }, + "patch": { + "operationId": "FaceListOperations_UpdateLargeFaceListFace", + "description": "Update a specified face's userData field in a Large Face List by its persistedFaceId.", + "parameters": [ + { + "name": "largeFaceListId", + "in": "path", + "description": "Valid character is letter in lower case or digit or '-' or '_', maximum length is 64.", + "required": true, + "type": "string", + "minLength": 1, + "maxLength": 64, + "pattern": "^[a-z0-9-_]+$" + }, + { + "name": "persistedFaceId", + "in": "path", + "description": "Face ID of the face.", + "required": true, + "type": "string", + "format": "uuid" + }, + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/FaceUserData" + } + } + ], + "responses": { + "200": { + "description": "The request has succeeded." + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." 
+ } + } + } + }, + "x-ms-examples": { + "Update Face in LargeFaceList": { + "$ref": "./examples/FaceListOperations_UpdateLargeFaceListFace.json" + } + } + }, + "delete": { + "operationId": "FaceListOperations_DeleteLargeFaceListFace", + "description": "Delete a face from a Large Face List by specified largeFaceListId and persistedFaceId.", + "parameters": [ + { + "name": "largeFaceListId", + "in": "path", + "description": "Valid character is letter in lower case or digit or '-' or '_', maximum length is 64.", + "required": true, + "type": "string", + "minLength": 1, + "maxLength": 64, + "pattern": "^[a-z0-9-_]+$" + }, + { + "name": "persistedFaceId", + "in": "path", + "description": "Face ID of the face.", + "required": true, + "type": "string", + "format": "uuid" + } + ], + "responses": { + "200": { + "description": "The request has succeeded." + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Delete Face From LargeFaceList": { + "$ref": "./examples/FaceListOperations_DeleteLargeFaceListFace.json" + } + } + } + }, + "/largefacelists/{largeFaceListId}/train": { + "post": { + "operationId": "FaceListOperations_TrainLargeFaceList", + "summary": "Submit a Large Face List training task.", + "description": "\nTraining is a crucial step that only a trained Large Face List can be used by \"Find Similar From Large Face List\".\n\nThe training task is an asynchronous task. Training time depends on the number of face entries in a Large Face List. It could be in seconds, or up to half an hour for 1,000,000 faces. 
To check training completion, please use \"Get Large Face List Training Status\".", + "parameters": [ + { + "name": "largeFaceListId", + "in": "path", + "description": "Valid character is letter in lower case or digit or '-' or '_', maximum length is 64.", + "required": true, + "type": "string", + "minLength": 1, + "maxLength": 64, + "pattern": "^[a-z0-9-_]+$" + } + ], + "responses": { + "202": { + "description": "A successful call returns an empty response body.", + "headers": { + "operation-Location": { + "type": "string", + "format": "uri", + "description": "The location of an instance of TrainingResult" + } + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Train LargeFaceList": { + "$ref": "./examples/FaceListOperations_TrainLargeFaceList.json" + } + }, + "x-ms-long-running-operation": true + } + }, + "/largefacelists/{largeFaceListId}/training": { + "get": { + "operationId": "FaceListOperations_GetLargeFaceListTrainingStatus", + "description": "To check the Large Face List training status completed or still ongoing. Large Face List training is an asynchronous operation triggered by \"Train Large Face List\".\n\nTraining time depends on the number of face entries in a Large Face List. 
It could be in seconds, or up to half an hour for 1,000,000 faces.", + "parameters": [ + { + "name": "largeFaceListId", + "in": "path", + "description": "Valid character is letter in lower case or digit or '-' or '_', maximum length is 64.", + "required": true, + "type": "string", + "minLength": 1, + "maxLength": 64, + "pattern": "^[a-z0-9-_]+$" + } + ], + "responses": { + "200": { + "description": "A successful call returns the Large Face List's training status.", + "schema": { + "$ref": "#/definitions/TrainingResult" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Get Training Status of LargeFaceList": { + "$ref": "./examples/FaceListOperations_GetLargeFaceListTrainingStatus.json" + } + } + } + }, + "/largepersongroups": { + "get": { + "operationId": "PersonGroupOperations_GetLargePersonGroups", + "summary": "List all existing Large Person Groups' largePersonGroupId, name, userData and recognitionModel.", + "description": "Large Person Groups are stored in alphabetical order of largePersonGroupId.\n>\n*\n * \"start\" parameter (string, optional) specifies an ID value from which returned entries will have larger IDs based on string comparison. Setting \"start\" to an empty value indicates that entries should be returned starting from the first item.\n * \"top\" parameter (int, optional) determines the maximum number of entries to be returned, with a limit of up to 1000 entries per call. 
To retrieve additional entries beyond this limit, specify \"start\" with the personId of the last entry returned in the current call.\n\n> [!TIP]\n>\n> * For example, there are total 5 items with their IDs: \"itemId1\", ..., \"itemId5\".\n> * \"start=&top=\" will return all 5 items.\n> * \"start=&top=2\" will return \"itemId1\", \"itemId2\".\n> * \"start=itemId2&top=3\" will return \"itemId3\", \"itemId4\", \"itemId5\".", + "parameters": [ + { + "name": "start", + "in": "query", + "description": "List resources greater than the \"start\". It contains no more than 64 characters. Default is empty.", + "required": false, + "type": "string" + }, + { + "name": "top", + "in": "query", + "description": "The number of items to list, ranging in [1, 1000]. Default is 1000.", + "required": false, + "type": "integer", + "format": "int32", + "default": 1000, + "minimum": 1, + "maximum": 1000 + }, + { + "name": "returnRecognitionModel", + "in": "query", + "description": "Return 'recognitionModel' or not. The default value is false.", + "required": false, + "type": "boolean", + "default": false + } + ], + "responses": { + "200": { + "description": "A successful call returns an array of Large Person Groups and their information (largePersonGroupId, name and userData).", + "schema": { + "type": "array", + "items": { + "$ref": "#/definitions/LargePersonGroup" + } + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." 
+ } + } + } + }, + "x-ms-examples": { + "Get LargePersonGroups": { + "$ref": "./examples/PersonGroupOperations_GetLargePersonGroups.json" + } + } + } + }, + "/largepersongroups/{largePersonGroupId}": { + "get": { + "operationId": "PersonGroupOperations_GetLargePersonGroup", + "description": "Retrieve the information of a Large Person Group, including its name, userData and recognitionModel. This API returns Large Person Group information only, use \"Get Large Person Group Persons\" instead to retrieve person information under the Large Person Group.", + "parameters": [ + { + "name": "largePersonGroupId", + "in": "path", + "description": "ID of the container.", + "required": true, + "type": "string", + "minLength": 1, + "maxLength": 64, + "pattern": "^[a-z0-9-_]+$" + }, + { + "name": "returnRecognitionModel", + "in": "query", + "description": "Return 'recognitionModel' or not. The default value is false.", + "required": false, + "type": "boolean", + "default": false + } + ], + "responses": { + "200": { + "description": "A successful call returns the Large Person Group's information.", + "schema": { + "$ref": "#/definitions/LargePersonGroup" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Get LargePersonGroup": { + "$ref": "./examples/PersonGroupOperations_GetLargePersonGroup.json" + } + } + }, + "put": { + "operationId": "PersonGroupOperations_CreateLargePersonGroup", + "summary": "Create a new Large Person Group with user-specified largePersonGroupId, name, an optional userData and recognitionModel.", + "description": "A Large Person Group is a container holding the uploaded person data, including the face recognition features. 
It can hold up to 1,000,000 entities.\n\nAfter creation, use \"Create Large Person Group Person\" to add person into the group, and call \"Train Large Person Group\" to get this group ready for \"Identify From Large Person Group\".\n\nNo image will be stored. Only the person's extracted face feature(s) and userData will be stored on server until \"Delete Large Person Group Person\" or \"Delete Large Person Group\" is called.\n\n'recognitionModel' should be specified to associate with this Large Person Group. The default value for 'recognitionModel' is 'recognition_01', if the latest model needed, please explicitly specify the model you need in this parameter. New faces that are added to an existing Large Person Group will use the recognition model that's already associated with the collection. Existing face feature(s) in a Large Person Group can't be updated to features extracted by another version of recognition model.\n\n> [!NOTE]\n>\n> *\n> * Free-tier subscription quota: 1,000 Large Person Groups.\n> * S0-tier subscription quota: 1,000,000 Large Person Groups.", + "parameters": [ + { + "name": "largePersonGroupId", + "in": "path", + "description": "ID of the container.", + "required": true, + "type": "string", + "minLength": 1, + "maxLength": 64, + "pattern": "^[a-z0-9-_]+$" + }, + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/CreateCollectionRequest" + } + } + ], + "responses": { + "200": { + "description": "The request has succeeded." + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." 
+ } + } + } + }, + "x-ms-examples": { + "Create LargePersonGroup": { + "$ref": "./examples/PersonGroupOperations_CreateLargePersonGroup.json" + } + } + }, + "patch": { + "operationId": "PersonGroupOperations_UpdateLargePersonGroup", + "description": "Update an existing Large Person Group's name and userData. The properties keep unchanged if they are not in request body.", + "parameters": [ + { + "name": "largePersonGroupId", + "in": "path", + "description": "ID of the container.", + "required": true, + "type": "string", + "minLength": 1, + "maxLength": 64, + "pattern": "^[a-z0-9-_]+$" + }, + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/UserDefinedFieldsForUpdate" + } + } + ], + "responses": { + "200": { + "description": "The request has succeeded." + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Update LargePersonGroup": { + "$ref": "./examples/PersonGroupOperations_UpdateLargePersonGroup.json" + } + } + }, + "delete": { + "operationId": "PersonGroupOperations_DeleteLargePersonGroup", + "description": "Delete an existing Large Person Group with specified largePersonGroupId. Persisted data in this Large Person Group will be deleted.", + "parameters": [ + { + "name": "largePersonGroupId", + "in": "path", + "description": "ID of the container.", + "required": true, + "type": "string", + "minLength": 1, + "maxLength": 64, + "pattern": "^[a-z0-9-_]+$" + } + ], + "responses": { + "200": { + "description": "The request has succeeded." 
+ }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Delete LargePersonGroup": { + "$ref": "./examples/PersonGroupOperations_DeleteLargePersonGroup.json" + } + } + } + }, + "/largepersongroups/{largePersonGroupId}/persons": { + "get": { + "operationId": "PersonGroupOperations_GetLargePersonGroupPersons", + "summary": "List all persons' information in the specified Large Person Group, including personId, name, userData and persistedFaceIds of registered person faces.", + "description": "Persons are stored in alphabetical order of personId created in \"Create Large Person Group Person\".\n>\n*\n * \"start\" parameter (string, optional) specifies an ID value from which returned entries will have larger IDs based on string comparison. Setting \"start\" to an empty value indicates that entries should be returned starting from the first item.\n * \"top\" parameter (int, optional) determines the maximum number of entries to be returned, with a limit of up to 1000 entries per call. To retrieve additional entries beyond this limit, specify \"start\" with the personId of the last entry returned in the current call.\n\n> [!TIP]\n>\n> * For example, there are total 5 items with their IDs: \"itemId1\", ..., \"itemId5\".\n> * \"start=&top=\" will return all 5 items.\n> * \"start=&top=2\" will return \"itemId1\", \"itemId2\".\n> * \"start=itemId2&top=3\" will return \"itemId3\", \"itemId4\", \"itemId5\".", + "parameters": [ + { + "name": "largePersonGroupId", + "in": "path", + "description": "ID of the container.", + "required": true, + "type": "string", + "minLength": 1, + "maxLength": 64, + "pattern": "^[a-z0-9-_]+$" + }, + { + "name": "start", + "in": "query", + "description": "List resources greater than the \"start\". 
It contains no more than 64 characters. Default is empty.", + "required": false, + "type": "string" + }, + { + "name": "top", + "in": "query", + "description": "The number of items to list, ranging in [1, 1000]. Default is 1000.", + "required": false, + "type": "integer", + "format": "int32", + "default": 1000, + "minimum": 1, + "maximum": 1000 + } + ], + "responses": { + "200": { + "description": "A successful call returns an array of person information that belong to the Large Person Group.", + "schema": { + "type": "array", + "items": { + "$ref": "#/definitions/LargePersonGroupPerson" + } + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Get Persons from LargePersonGroup": { + "$ref": "./examples/PersonGroupOperations_GetLargePersonGroupPersons.json" + } + } + }, + "post": { + "operationId": "PersonGroupOperations_CreateLargePersonGroupPerson", + "summary": "Create a new person in a specified Large Person Group. To add face to this person, please call \"Add Large Person Group Person Face\".", + "description": "> [!NOTE]\n>\n> *\n> * Free-tier subscription quota:\n> * 1,000 persons in all Large Person Groups.\n> * S0-tier subscription quota:\n> * 1,000,000 persons per Large Person Group.\n> * 1,000,000 Large Person Groups.\n> * 1,000,000,000 persons in all Large Person Groups. 
", + "parameters": [ + { + "name": "largePersonGroupId", + "in": "path", + "description": "ID of the container.", + "required": true, + "type": "string", + "minLength": 1, + "maxLength": 64, + "pattern": "^[a-z0-9-_]+$" + }, + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/UserDefinedFields" + } + } + ], + "responses": { + "200": { + "description": "A successful call returns a new personId created.", + "schema": { + "$ref": "#/definitions/CreatePersonResult" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Create Person in LargePersonGroup": { + "$ref": "./examples/PersonGroupOperations_CreateLargePersonGroupPerson.json" + } + } + } + }, + "/largepersongroups/{largePersonGroupId}/persons/{personId}": { + "get": { + "operationId": "PersonGroupOperations_GetLargePersonGroupPerson", + "description": "Retrieve a person's name and userData, and the persisted faceIds representing the registered person face feature(s).", + "parameters": [ + { + "name": "largePersonGroupId", + "in": "path", + "description": "ID of the container.", + "required": true, + "type": "string", + "minLength": 1, + "maxLength": 64, + "pattern": "^[a-z0-9-_]+$" + }, + { + "name": "personId", + "in": "path", + "description": "ID of the person.", + "required": true, + "type": "string", + "format": "uuid" + } + ], + "responses": { + "200": { + "description": "A successful call returns the person's information.", + "schema": { + "$ref": "#/definitions/LargePersonGroupPerson" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code 
indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Get Person from LargePersonGroup": { + "$ref": "./examples/PersonGroupOperations_GetLargePersonGroupPerson.json" + } + } + }, + "patch": { + "operationId": "PersonGroupOperations_UpdateLargePersonGroupPerson", + "description": "Update name or userData of a person.", + "parameters": [ + { + "name": "largePersonGroupId", + "in": "path", + "description": "ID of the container.", + "required": true, + "type": "string", + "minLength": 1, + "maxLength": 64, + "pattern": "^[a-z0-9-_]+$" + }, + { + "name": "personId", + "in": "path", + "description": "ID of the person.", + "required": true, + "type": "string", + "format": "uuid" + }, + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/UserDefinedFieldsForUpdate" + } + } + ], + "responses": { + "200": { + "description": "The request has succeeded." + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Update Person in LargePersonGroup": { + "$ref": "./examples/PersonGroupOperations_UpdateLargePersonGroupPerson.json" + } + } + }, + "delete": { + "operationId": "PersonGroupOperations_DeleteLargePersonGroupPerson", + "description": "Delete an existing person from a Large Person Group. 
The persistedFaceId, userData, person name and face feature(s) in the person entry will all be deleted.", + "parameters": [ + { + "name": "largePersonGroupId", + "in": "path", + "description": "ID of the container.", + "required": true, + "type": "string", + "minLength": 1, + "maxLength": 64, + "pattern": "^[a-z0-9-_]+$" + }, + { + "name": "personId", + "in": "path", + "description": "ID of the person.", + "required": true, + "type": "string", + "format": "uuid" + } + ], + "responses": { + "200": { + "description": "The request has succeeded." + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Delete Person from LargePersonGroup": { + "$ref": "./examples/PersonGroupOperations_DeleteLargePersonGroupPerson.json" + } + } + } + }, + "/largepersongroups/{largePersonGroupId}/persons/{personId}/persistedfaces": { + "post": { + "operationId": "PersonGroupOperations_AddLargePersonGroupPersonFaceFromUrl", + "summary": "Add a face to a person into a Large Person Group for face identification or verification.", + "description": "To deal with an image containing multiple faces, input face can be specified as an image with a targetFace rectangle. It returns a persistedFaceId representing the added face. No image will be stored. Only the extracted face feature(s) will be stored on server until \"Delete Large Person Group Person Face\", \"Delete Large Person Group Person\" or \"Delete Large Person Group\" is called.\n\nNote that persistedFaceId is different from faceId generated by \"Detect\".\n\n>\n*\n * Each person entry can hold up to 248 faces.\n * Higher face image quality means better recognition precision. 
Please consider high-quality faces: frontal, clear, and face size is 200x200 pixels (100 pixels between eyes) or bigger.\n * JPEG, PNG, GIF (the first frame), and BMP format are supported. The allowed image file size is from 1KB to 6MB.\n * \"targetFace\" rectangle should contain one face. Zero or multiple faces will be regarded as an error. If the provided \"targetFace\" rectangle is not returned from \"Detect\", there's no guarantee to detect and add the face successfully.\n * Out of detectable face size (36x36 - 4096x4096 pixels), large head-pose, or large occlusions will cause failures.\n * The minimum detectable face size is 36x36 pixels in an image no larger than 1920x1080 pixels. Images with dimensions higher than 1920x1080 pixels will need a proportionally larger minimum face size.\n * Different 'detectionModel' values can be provided. To use and compare different detection models, please refer to [here](https://learn.microsoft.com/azure/ai-services/computer-vision/how-to/specify-detection-model).", + "parameters": [ + { + "name": "largePersonGroupId", + "in": "path", + "description": "ID of the container.", + "required": true, + "type": "string", + "minLength": 1, + "maxLength": 64, + "pattern": "^[a-z0-9-_]+$" + }, + { + "name": "personId", + "in": "path", + "description": "ID of the person.", + "required": true, + "type": "string", + "format": "uuid" + }, + { + "$ref": "#/parameters/AddFaceOptions.targetFace" + }, + { + "$ref": "#/parameters/AddFaceOptions.detectionModel" + }, + { + "$ref": "#/parameters/AddFaceOptions.userData" + }, + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/AddFaceFromUrlRequest" + } + } + ], + "responses": { + "200": { + "description": "A successful call returns a new persistedFaceId.", + "schema": { + "$ref": "#/definitions/AddFaceResult" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + 
"headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Add Face in LargePersonGroup Person from Url": { + "$ref": "./examples/PersonGroupOperations_AddLargePersonGroupPersonFaceFromUrl.json" + } + } + } + }, + "/largepersongroups/{largePersonGroupId}/persons/{personId}/persistedfaces/{persistedFaceId}": { + "get": { + "operationId": "PersonGroupOperations_GetLargePersonGroupPersonFace", + "description": "Retrieve person face information. The persisted person face is specified by its largePersonGroupId, personId and persistedFaceId.", + "parameters": [ + { + "name": "largePersonGroupId", + "in": "path", + "description": "ID of the container.", + "required": true, + "type": "string", + "minLength": 1, + "maxLength": 64, + "pattern": "^[a-z0-9-_]+$" + }, + { + "name": "personId", + "in": "path", + "description": "ID of the person.", + "required": true, + "type": "string", + "format": "uuid" + }, + { + "name": "persistedFaceId", + "in": "path", + "description": "Face ID of the face.", + "required": true, + "type": "string", + "format": "uuid" + } + ], + "responses": { + "200": { + "description": "A successful call returns target persisted face's information (persistedFaceId and userData).", + "schema": { + "$ref": "#/definitions/LargePersonGroupPersonFace" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." 
+ } + } + } + }, + "x-ms-examples": { + "Get Face from LargePersonGroup Person": { + "$ref": "./examples/PersonGroupOperations_GetLargePersonGroupPersonFace.json" + } + } + }, + "patch": { + "operationId": "PersonGroupOperations_UpdateLargePersonGroupPersonFace", + "description": "Update a person persisted face's userData field.", + "parameters": [ + { + "name": "largePersonGroupId", + "in": "path", + "description": "ID of the container.", + "required": true, + "type": "string", + "minLength": 1, + "maxLength": 64, + "pattern": "^[a-z0-9-_]+$" + }, + { + "name": "personId", + "in": "path", + "description": "ID of the person.", + "required": true, + "type": "string", + "format": "uuid" + }, + { + "name": "persistedFaceId", + "in": "path", + "description": "Face ID of the face.", + "required": true, + "type": "string", + "format": "uuid" + }, + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/FaceUserData" + } + } + ], + "responses": { + "200": { + "description": "The request has succeeded." + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Update Face in LargePersonGroup Person": { + "$ref": "./examples/PersonGroupOperations_UpdateLargePersonGroupPersonFace.json" + } + } + }, + "delete": { + "operationId": "PersonGroupOperations_DeleteLargePersonGroupPersonFace", + "summary": "Delete a face from a person in a Large Person Group by specified largePersonGroupId, personId and persistedFaceId.", + "description": "Adding/deleting faces to/from a same person will be processed sequentially. 
Adding/deleting faces to/from different persons are processed in parallel.", + "parameters": [ + { + "name": "largePersonGroupId", + "in": "path", + "description": "ID of the container.", + "required": true, + "type": "string", + "minLength": 1, + "maxLength": 64, + "pattern": "^[a-z0-9-_]+$" + }, + { + "name": "personId", + "in": "path", + "description": "ID of the person.", + "required": true, + "type": "string", + "format": "uuid" + }, + { + "name": "persistedFaceId", + "in": "path", + "description": "Face ID of the face.", + "required": true, + "type": "string", + "format": "uuid" + } + ], + "responses": { + "200": { + "description": "The request has succeeded." + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Delete Face from LargePersonGroup Person": { + "$ref": "./examples/PersonGroupOperations_DeleteLargePersonGroupPersonFace.json" + } + } + } + }, + "/largepersongroups/{largePersonGroupId}/train": { + "post": { + "operationId": "PersonGroupOperations_TrainLargePersonGroup", + "summary": "Submit a Large Person Group training task. Training is a crucial step that only a trained Large Person Group can be used by \"Identify From Large Person Group\".", + "description": "The training task is an asynchronous task. Training time depends on the number of person entries, and their faces in a Large Person Group. It could be in several seconds, or up to half an hour for 1,000,000 persons. 
To check training status, please use \"Get Large Person Group Training Status\".", + "parameters": [ + { + "name": "largePersonGroupId", + "in": "path", + "description": "ID of the container.", + "required": true, + "type": "string", + "minLength": 1, + "maxLength": 64, + "pattern": "^[a-z0-9-_]+$" + } + ], + "responses": { + "202": { + "description": "A successful call returns an empty response body.", + "headers": { + "operation-Location": { + "type": "string", + "format": "uri", + "description": "The location of an instance of TrainingResult" + } + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Train LargePersonGroup": { + "$ref": "./examples/PersonGroupOperations_TrainLargePersonGroup.json" + } + }, + "x-ms-long-running-operation": true + } + }, + "/largepersongroups/{largePersonGroupId}/training": { + "get": { + "operationId": "PersonGroupOperations_GetLargePersonGroupTrainingStatus", + "summary": "To check Large Person Group training status completed or still ongoing. Large Person Group training is an asynchronous operation triggered by \"Train Large Person Group\" API.", + "description": "Training time depends on the number of person entries, and their faces in a Large Person Group. 
It could be in seconds, or up to half an hour for 1,000,000 persons.", + "parameters": [ + { + "name": "largePersonGroupId", + "in": "path", + "description": "ID of the container.", + "required": true, + "type": "string", + "minLength": 1, + "maxLength": 64, + "pattern": "^[a-z0-9-_]+$" + } + ], + "responses": { + "200": { + "description": "A successful call returns the Large Person Group's training status.", + "schema": { + "$ref": "#/definitions/TrainingResult" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Get Training Status of LargePersonGroup": { + "$ref": "./examples/PersonGroupOperations_GetLargePersonGroupTrainingStatus.json" + } + } + } + }, + "/persongroups": { + "get": { + "operationId": "PersonGroupOperations_GetPersonGroups", + "summary": "List Person Groups' personGroupId, name, userData and recognitionModel.", + "description": "Person Groups are stored in alphabetical order of personGroupId.\n>\n*\n * \"start\" parameter (string, optional) specifies an ID value from which returned entries will have larger IDs based on string comparison. Setting \"start\" to an empty value indicates that entries should be returned starting from the first item.\n * \"top\" parameter (int, optional) determines the maximum number of entries to be returned, with a limit of up to 1000 entries per call. 
To retrieve additional entries beyond this limit, specify \"start\" with the personId of the last entry returned in the current call.\n\n> [!TIP]\n>\n> * For example, there are total 5 items with their IDs: \"itemId1\", ..., \"itemId5\".\n> * \"start=&top=\" will return all 5 items.\n> * \"start=&top=2\" will return \"itemId1\", \"itemId2\".\n> * \"start=itemId2&top=3\" will return \"itemId3\", \"itemId4\", \"itemId5\".", + "parameters": [ + { + "name": "start", + "in": "query", + "description": "List resources greater than the \"start\". It contains no more than 64 characters. Default is empty.", + "required": false, + "type": "string" + }, + { + "name": "top", + "in": "query", + "description": "The number of items to list, ranging in [1, 1000]. Default is 1000.", + "required": false, + "type": "integer", + "format": "int32", + "default": 1000, + "minimum": 1, + "maximum": 1000 + }, + { + "name": "returnRecognitionModel", + "in": "query", + "description": "Return 'recognitionModel' or not. The default value is false.", + "required": false, + "type": "boolean", + "default": false + } + ], + "responses": { + "200": { + "description": "A successful call returns an array of Person Groups and their information (personGroupId, name and userData).", + "schema": { + "type": "array", + "items": { + "$ref": "#/definitions/PersonGroup" + } + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Get PersonGroups": { + "$ref": "./examples/PersonGroupOperations_GetPersonGroups.json" + } + } + } + }, + "/persongroups/{personGroupId}": { + "get": { + "operationId": "PersonGroupOperations_GetPersonGroup", + "description": "Retrieve Person Group name, userData and recognitionModel. 
To get person information under this personGroup, use \"Get Person Group Persons\".", + "parameters": [ + { + "name": "personGroupId", + "in": "path", + "description": "ID of the container.", + "required": true, + "type": "string", + "minLength": 1, + "maxLength": 64, + "pattern": "^[a-z0-9-_]+$" + }, + { + "name": "returnRecognitionModel", + "in": "query", + "description": "Return 'recognitionModel' or not. The default value is false.", + "required": false, + "type": "boolean", + "default": false + } + ], + "responses": { + "200": { + "description": "A successful call returns the Person Group's information.", + "schema": { + "$ref": "#/definitions/PersonGroup" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Get PersonGroup": { + "$ref": "./examples/PersonGroupOperations_GetPersonGroup.json" + } + } + }, + "put": { + "operationId": "PersonGroupOperations_CreatePersonGroup", + "summary": "Create a new Person Group with specified personGroupId, name, user-provided userData and recognitionModel.", + "description": "A Person Group is a container holding the uploaded person data, including face recognition features.\n\nAfter creation, use \"Create Person Group Person\" to add persons into the group, and then call \"Train Person Group\" to get this group ready for \"Identify From Person Group\".\n\nNo image will be stored. Only the person's extracted face feature(s) and userData will be stored on server until \"Delete Person Group Person\" or \"Delete Person Group\" is called.\n\n'recognitionModel' should be specified to associate with this Person Group. The default value for 'recognitionModel' is 'recognition_01', if the latest model needed, please explicitly specify the model you need in this parameter. 
New faces that are added to an existing Person Group will use the recognition model that's already associated with the collection. Existing face feature(s) in a Person Group can't be updated to features extracted by another version of recognition model.\n\n> [!NOTE]\n>\n> *\n> * Free-tier subscription quota: 1,000 Person Groups. Each holds up to 1,000 persons.\n> * S0-tier subscription quota: 1,000,000 Person Groups. Each holds up to 10,000 persons.\n> * to handle larger scale face identification problem, please consider using Large Person Group.", + "parameters": [ + { + "name": "personGroupId", + "in": "path", + "description": "ID of the container.", + "required": true, + "type": "string", + "minLength": 1, + "maxLength": 64, + "pattern": "^[a-z0-9-_]+$" + }, + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/CreateCollectionRequest" + } + } + ], + "responses": { + "200": { + "description": "The request has succeeded." + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Create PersonGroup": { + "$ref": "./examples/PersonGroupOperations_CreatePersonGroup.json" + } + } + }, + "patch": { + "operationId": "PersonGroupOperations_UpdatePersonGroup", + "description": "Update an existing Person Group's name and userData. 
The properties keep unchanged if they are not in request body.", + "parameters": [ + { + "name": "personGroupId", + "in": "path", + "description": "ID of the container.", + "required": true, + "type": "string", + "minLength": 1, + "maxLength": 64, + "pattern": "^[a-z0-9-_]+$" + }, + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/UserDefinedFieldsForUpdate" + } + } + ], + "responses": { + "200": { + "description": "The request has succeeded." + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Update PersonGroup": { + "$ref": "./examples/PersonGroupOperations_UpdatePersonGroup.json" + } + } + }, + "delete": { + "operationId": "PersonGroupOperations_DeletePersonGroup", + "description": "Delete an existing Person Group with specified personGroupId. Persisted data in this Person Group will be deleted.", + "parameters": [ + { + "name": "personGroupId", + "in": "path", + "description": "ID of the container.", + "required": true, + "type": "string", + "minLength": 1, + "maxLength": 64, + "pattern": "^[a-z0-9-_]+$" + } + ], + "responses": { + "200": { + "description": "The request has succeeded." + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." 
+ } + } + } + }, + "x-ms-examples": { + "Delete PersonGroup": { + "$ref": "./examples/PersonGroupOperations_DeletePersonGroup.json" + } + } + } + }, + "/persongroups/{personGroupId}/persons": { + "get": { + "operationId": "PersonGroupOperations_GetPersonGroupPersons", + "summary": "List all persons' information in the specified Person Group, including personId, name, userData and persistedFaceIds of registered person faces.", + "description": "Persons are stored in alphabetical order of personId created in \"Create Person Group Person\".\n>\n*\n * \"start\" parameter (string, optional) specifies an ID value from which returned entries will have larger IDs based on string comparison. Setting \"start\" to an empty value indicates that entries should be returned starting from the first item.\n * \"top\" parameter (int, optional) determines the maximum number of entries to be returned, with a limit of up to 1000 entries per call. To retrieve additional entries beyond this limit, specify \"start\" with the personId of the last entry returned in the current call.\n\n> [!TIP]\n>\n> * For example, there are total 5 items with their IDs: \"itemId1\", ..., \"itemId5\".\n> * \"start=&top=\" will return all 5 items.\n> * \"start=&top=2\" will return \"itemId1\", \"itemId2\".\n> * \"start=itemId2&top=3\" will return \"itemId3\", \"itemId4\", \"itemId5\".", + "parameters": [ + { + "name": "personGroupId", + "in": "path", + "description": "ID of the container.", + "required": true, + "type": "string", + "minLength": 1, + "maxLength": 64, + "pattern": "^[a-z0-9-_]+$" + }, + { + "name": "start", + "in": "query", + "description": "List resources greater than the \"start\". It contains no more than 64 characters. Default is empty.", + "required": false, + "type": "string" + }, + { + "name": "top", + "in": "query", + "description": "The number of items to list, ranging in [1, 1000]. 
Default is 1000.", + "required": false, + "type": "integer", + "format": "int32", + "default": 1000, + "minimum": 1, + "maximum": 1000 + } + ], + "responses": { + "200": { + "description": "A successful call returns an array of person information that belong to the Person Group.", + "schema": { + "type": "array", + "items": { + "$ref": "#/definitions/PersonGroupPerson" + } + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Get Persons from PersonGroup": { + "$ref": "./examples/PersonGroupOperations_GetPersonGroupPersons.json" + } + } + }, + "post": { + "operationId": "PersonGroupOperations_CreatePersonGroupPerson", + "summary": "Create a new person in a specified Person Group. To add face to this person, please call \"Add Person Group Person Face\".", + "description": "> [!NOTE]\n>\n> *\n> * Free-tier subscription quota:\n> * 1,000 persons in all Person Groups.\n> * S0-tier subscription quota:\n> * 10,000 persons per Person Group.\n> * 1,000,000 Person Groups.\n> * 100,000,000 persons in all Person Groups.", + "parameters": [ + { + "name": "personGroupId", + "in": "path", + "description": "ID of the container.", + "required": true, + "type": "string", + "minLength": 1, + "maxLength": 64, + "pattern": "^[a-z0-9-_]+$" + }, + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/UserDefinedFields" + } + } + ], + "responses": { + "200": { + "description": "A successful call returns a new personId created.", + "schema": { + "$ref": "#/definitions/CreatePersonResult" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String 
error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Create Person in PersonGroup": { + "$ref": "./examples/PersonGroupOperations_CreatePersonGroupPerson.json" + } + } + } + }, + "/persongroups/{personGroupId}/persons/{personId}": { + "get": { + "operationId": "PersonGroupOperations_GetPersonGroupPerson", + "description": "Retrieve a person's name and userData, and the persisted faceIds representing the registered person face feature(s).", + "parameters": [ + { + "name": "personGroupId", + "in": "path", + "description": "ID of the container.", + "required": true, + "type": "string", + "minLength": 1, + "maxLength": 64, + "pattern": "^[a-z0-9-_]+$" + }, + { + "name": "personId", + "in": "path", + "description": "ID of the person.", + "required": true, + "type": "string", + "format": "uuid" + } + ], + "responses": { + "200": { + "description": "A successful call returns the person's information.", + "schema": { + "$ref": "#/definitions/PersonGroupPerson" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." 
+ } + } + } + }, + "x-ms-examples": { + "Get Person from PersonGroup": { + "$ref": "./examples/PersonGroupOperations_GetPersonGroupPerson.json" + } + } + }, + "patch": { + "operationId": "PersonGroupOperations_UpdatePersonGroupPerson", + "description": "Update name or userData of a person.", + "parameters": [ + { + "name": "personGroupId", + "in": "path", + "description": "ID of the container.", + "required": true, + "type": "string", + "minLength": 1, + "maxLength": 64, + "pattern": "^[a-z0-9-_]+$" + }, + { + "name": "personId", + "in": "path", + "description": "ID of the person.", + "required": true, + "type": "string", + "format": "uuid" + }, + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/UserDefinedFieldsForUpdate" + } + } + ], + "responses": { + "200": { + "description": "The request has succeeded." + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Update PersonGroup Person": { + "$ref": "./examples/PersonGroupOperations_UpdatePersonGroupPerson.json" + } + } + }, + "delete": { + "operationId": "PersonGroupOperations_DeletePersonGroupPerson", + "description": "Delete an existing person from a Person Group. The persistedFaceId, userData, person name and face feature(s) in the person entry will all be deleted.", + "parameters": [ + { + "name": "personGroupId", + "in": "path", + "description": "ID of the container.", + "required": true, + "type": "string", + "minLength": 1, + "maxLength": 64, + "pattern": "^[a-z0-9-_]+$" + }, + { + "name": "personId", + "in": "path", + "description": "ID of the person.", + "required": true, + "type": "string", + "format": "uuid" + } + ], + "responses": { + "200": { + "description": "The request has succeeded." 
+ }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Delete Person from PersonGroup": { + "$ref": "./examples/PersonGroupOperations_DeletePersonGroupPerson.json" + } + } + } + }, + "/persongroups/{personGroupId}/persons/{personId}/persistedfaces": { + "post": { + "operationId": "PersonGroupOperations_AddPersonGroupPersonFaceFromUrl", + "summary": "Add a face to a person into a Person Group for face identification or verification.", + "description": "To deal with an image containing multiple faces, input face can be specified as an image with a targetFace rectangle. It returns a persistedFaceId representing the added face. No image will be stored. Only the extracted face feature(s) will be stored on server until \"Delete Person Group Person Face\", \"Delete Person Group Person\" or \"Delete Person Group\" is called.\n\nNote that persistedFaceId is different from faceId generated by \"Detect\".\n\n>\n* \n * Each person entry can hold up to 248 faces.\n * Higher face image quality means better recognition precision. Please consider high-quality faces: frontal, clear, and face size is 200x200 pixels (100 pixels between eyes) or bigger.\n * JPEG, PNG, GIF (the first frame), and BMP format are supported. The allowed image file size is from 1KB to 6MB.\n * \"targetFace\" rectangle should contain one face. Zero or multiple faces will be regarded as an error. If the provided \"targetFace\" rectangle is not returned from \"Detect\", there's no guarantee to detect and add the face successfully.\n * Out of detectable face size (36x36 - 4096x4096 pixels), large head-pose, or large occlusions will cause failures.\n * The minimum detectable face size is 36x36 pixels in an image no larger than 1920x1080 pixels. 
Images with dimensions higher than 1920x1080 pixels will need a proportionally larger minimum face size.\n * Different 'detectionModel' values can be provided. To use and compare different detection models, please refer to [here](https://learn.microsoft.com/azure/ai-services/computer-vision/how-to/specify-detection-model).", + "parameters": [ + { + "name": "personGroupId", + "in": "path", + "description": "ID of the container.", + "required": true, + "type": "string", + "minLength": 1, + "maxLength": 64, + "pattern": "^[a-z0-9-_]+$" + }, + { + "name": "personId", + "in": "path", + "description": "ID of the person.", + "required": true, + "type": "string", + "format": "uuid" + }, + { + "$ref": "#/parameters/AddFaceOptions.targetFace" + }, + { + "$ref": "#/parameters/AddFaceOptions.detectionModel" + }, + { + "$ref": "#/parameters/AddFaceOptions.userData" + }, + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/AddFaceFromUrlRequest" + } + } + ], + "responses": { + "200": { + "description": "A successful call returns a new persistedFaceId.", + "schema": { + "$ref": "#/definitions/AddFaceResult" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Add Face to PersonGroupPerson from Url": { + "$ref": "./examples/PersonGroupOperations_AddPersonGroupPersonFaceFromUrl.json" + } + } + } + }, + "/persongroups/{personGroupId}/persons/{personId}/persistedfaces/{persistedFaceId}": { + "get": { + "operationId": "PersonGroupOperations_GetPersonGroupPersonFace", + "description": "Retrieve person face information. 
The persisted person face is specified by its personGroupId, personId and persistedFaceId.", + "parameters": [ + { + "name": "personGroupId", + "in": "path", + "description": "ID of the container.", + "required": true, + "type": "string", + "minLength": 1, + "maxLength": 64, + "pattern": "^[a-z0-9-_]+$" + }, + { + "name": "personId", + "in": "path", + "description": "ID of the person.", + "required": true, + "type": "string", + "format": "uuid" + }, + { + "name": "persistedFaceId", + "in": "path", + "description": "Face ID of the face.", + "required": true, + "type": "string", + "format": "uuid" + } + ], + "responses": { + "200": { + "description": "A successful call returns target persisted face's information (persistedFaceId and userData).", + "schema": { + "$ref": "#/definitions/PersonGroupPersonFace" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." 
+ } + } + } + }, + "x-ms-examples": { + "Get Face form PersonGroup Person": { + "$ref": "./examples/PersonGroupOperations_GetPersonGroupPersonFace.json" + } + } + }, + "patch": { + "operationId": "PersonGroupOperations_UpdatePersonGroupPersonFace", + "description": "Update a person persisted face's userData field.", + "parameters": [ + { + "name": "personGroupId", + "in": "path", + "description": "ID of the container.", + "required": true, + "type": "string", + "minLength": 1, + "maxLength": 64, + "pattern": "^[a-z0-9-_]+$" + }, + { + "name": "personId", + "in": "path", + "description": "ID of the person.", + "required": true, + "type": "string", + "format": "uuid" + }, + { + "name": "persistedFaceId", + "in": "path", + "description": "Face ID of the face.", + "required": true, + "type": "string", + "format": "uuid" + }, + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/FaceUserData" + } + } + ], + "responses": { + "200": { + "description": "The request has succeeded." + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Update Face in PersonGroup Person": { + "$ref": "./examples/PersonGroupOperations_UpdatePersonGroupPersonFace.json" + } + } + }, + "delete": { + "operationId": "PersonGroupOperations_DeletePersonGroupPersonFace", + "summary": "Delete a face from a person in a Person Group by specified personGroupId, personId and persistedFaceId.", + "description": "Adding/deleting faces to/from a same person will be processed sequentially. 
Adding/deleting faces to/from different persons are processed in parallel.", + "parameters": [ + { + "name": "personGroupId", + "in": "path", + "description": "ID of the container.", + "required": true, + "type": "string", + "minLength": 1, + "maxLength": 64, + "pattern": "^[a-z0-9-_]+$" + }, + { + "name": "personId", + "in": "path", + "description": "ID of the person.", + "required": true, + "type": "string", + "format": "uuid" + }, + { + "name": "persistedFaceId", + "in": "path", + "description": "Face ID of the face.", + "required": true, + "type": "string", + "format": "uuid" + } + ], + "responses": { + "200": { + "description": "The request has succeeded." + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Delete Face from PersonGroup Person": { + "$ref": "./examples/PersonGroupOperations_DeletePersonGroupPersonFace.json" + } + } + } + }, + "/persongroups/{personGroupId}/train": { + "post": { + "operationId": "PersonGroupOperations_TrainPersonGroup", + "summary": "Submit a Person Group training task. Training is a crucial step that only a trained Person Group can be used by \"Identify From Person Group\".", + "description": "The training task is an asynchronous task. Training time depends on the number of person entries, and their faces in a Person Group. It could be several seconds to minutes. 
To check training status, please use \"Get Person Group Training Status\".", + "parameters": [ + { + "name": "personGroupId", + "in": "path", + "description": "ID of the container.", + "required": true, + "type": "string", + "minLength": 1, + "maxLength": 64, + "pattern": "^[a-z0-9-_]+$" + } + ], + "responses": { + "202": { + "description": "A successful call returns an empty response body.", + "headers": { + "operation-Location": { + "type": "string", + "format": "uri", + "description": "The location of an instance of TrainingResult" + } + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Train PersonGroup": { + "$ref": "./examples/PersonGroupOperations_TrainPersonGroup.json" + } + }, + "x-ms-long-running-operation": true + } + }, + "/persongroups/{personGroupId}/training": { + "get": { + "operationId": "PersonGroupOperations_GetPersonGroupTrainingStatus", + "description": "To check Person Group training status completed or still ongoing. Person Group training is an asynchronous operation triggered by \"Train Person Group\" API.", + "parameters": [ + { + "name": "personGroupId", + "in": "path", + "description": "ID of the container.", + "required": true, + "type": "string", + "minLength": 1, + "maxLength": 64, + "pattern": "^[a-z0-9-_]+$" + } + ], + "responses": { + "200": { + "description": "A successful call returns the Person Group's training status.", + "schema": { + "$ref": "#/definitions/TrainingResult" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." 
+ } + } + } + }, + "x-ms-examples": { + "Get Training Status of PersonGroup": { + "$ref": "./examples/PersonGroupOperations_GetPersonGroupTrainingStatus.json" + } + } + } + }, + "/sessionImages/{sessionImageId}": { + "get": { + "operationId": "LivenessSessionOperations_GetSessionImage", + "description": "Get session image stored during the liveness session.", + "produces": [ + "application/octet-stream", + "application/json" + ], + "parameters": [ + { + "name": "sessionImageId", + "in": "path", + "description": "The request ID of the image to be retrieved", + "required": true, + "type": "string" + } + ], + "responses": { + "200": { + "description": "The request has succeeded.", + "schema": { + "type": "file" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Get Session Image": { + "$ref": "./examples/LivenessSessionOperations_GetSessionImage.json" + } + } + } + }, + "/settings": { + "get": { + "operationId": "LivenessSessionOperations_GetSettings", + "description": "Get the liveness sessions setting object.", + "parameters": [], + "responses": { + "200": { + "description": "The request has succeeded.", + "schema": { + "$ref": "#/definitions/Settings" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." 
+ } + } + } + }, + "x-ms-examples": { + "Get LivenessSessionOperations Settings": { + "$ref": "./examples/LivenessSessionOperations_GetSettings.json" + } + } + }, + "patch": { + "operationId": "LivenessSessionOperations_PatchSettings", + "description": "Update the liveness setting object.", + "parameters": [ + { + "name": "body", + "in": "body", + "description": "Body parameter.", + "required": true, + "schema": { + "$ref": "#/definitions/SettingsUpdate" + } + } + ], + "responses": { + "200": { + "description": "The request has succeeded.", + "schema": { + "$ref": "#/definitions/Settings" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Patch LivenessSessionOperations Settings": { + "$ref": "./examples/LivenessSessionOperations_PatchSettings.json" + } + } + } + }, + "/settings/getClientAssetsAccessToken": { + "get": { + "operationId": "LivenessSessionOperations_GetClientAssetsAccessToken", + "description": "Get access token to get access to client AI model assets.", + "parameters": [], + "responses": { + "200": { + "description": "The request has succeeded.", + "schema": { + "$ref": "#/definitions/ClientAssetsAccessTokenResponse" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." 
+ } + } + } + }, + "x-ms-examples": { + "Get LivenessSessionOperations Settings ClientAssetsAccessToken": { + "$ref": "./examples/LivenessSessionOperations_GetClientAssetsAccessToken.json" + } + } + } + }, + "/verify": { + "post": { + "operationId": "FaceRecognitionOperations_VerifyFaceToFace", + "summary": "Verify whether two faces belong to a same person.", + "description": "> [!NOTE]\n>\n> *\n> * Higher face image quality means better identification precision. Please consider high-quality faces: frontal, clear, and face size is 200x200 pixels (100 pixels between eyes) or bigger.\n> * For the scenarios that are sensitive to accuracy please make your own judgment.\n> * The 'recognitionModel' associated with the both faces should be the same.", + "parameters": [ + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "type": "object", + "properties": { + "faceId1": { + "$ref": "#/definitions/Azure.Core.uuid", + "description": "The faceId of one face, come from \"Detect\"." + }, + "faceId2": { + "$ref": "#/definitions/Azure.Core.uuid", + "description": "The faceId of another face, come from \"Detect\"." + } + }, + "required": [ + "faceId1", + "faceId2" + ] + } + } + ], + "responses": { + "200": { + "description": "A successful call returns the verification result.", + "schema": { + "$ref": "#/definitions/VerificationResult" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." 
+ } + } + } + }, + "x-ms-examples": { + "Verify Face to Face": { + "$ref": "./examples/FaceRecognitionOperations_VerifyFaceToFace.json" + } + } + } + } + }, + "x-ms-paths": { + "/detect?_overload=detect": { + "post": { + "operationId": "FaceDetectionOperations_Detect", + "summary": "Detect human faces in an image, return face rectangles, and optionally with faceIds, landmarks, and attributes.", + "description": "> [!IMPORTANT]\n> Microsoft has retired or limited facial recognition capabilities that can be used to try to infer emotional states and identity attributes which, if misused, can subject people to stereotyping, discrimination or unfair denial of services. The retired capabilities are emotion and gender. The limited capabilities are age, smile, facial hair, hair and makeup. Email [Azure Face API](mailto:azureface@microsoft.com) if you have a responsible use case that would benefit from the use of any of the limited capabilities. Read more about this decision [here](https://azure.microsoft.com/blog/responsible-ai-investments-and-safeguards-for-facial-recognition/).\n\n*\n * No image will be stored. Only the extracted face feature(s) will be stored on server. The faceId is an identifier of the face feature and will be used in \"Identify\", \"Verify\", and \"Find Similar\". The stored face features will expire and be deleted at the time specified by faceIdTimeToLive after the original detection call.\n * Optional parameters include faceId, landmarks, and attributes. Attributes include headPose, glasses, occlusion, accessories, blur, exposure, noise, mask, and qualityForRecognition. Some of the results returned for specific attributes may not be highly accurate.\n * JPEG, PNG, GIF (the first frame), and BMP format are supported. The allowed image file size is from 1KB to 6MB.\n * The minimum detectable face size is 36x36 pixels in an image no larger than 1920x1080 pixels. 
Images with dimensions higher than 1920x1080 pixels will need a proportionally larger minimum face size.\n * Up to 100 faces can be returned for an image. Faces are ranked by face rectangle size from large to small.\n * For optimal results when querying \"Identify\", \"Verify\", and \"Find Similar\" ('returnFaceId' is true), please use faces that are: frontal, clear, and with a minimum size of 200x200 pixels (100 pixels between eyes).\n * Different 'detectionModel' values can be provided. The availability of landmarks and supported attributes depends on the detection model specified. To use and compare different detection models, please refer to [here](https://learn.microsoft.com/azure/ai-services/computer-vision/how-to/specify-detection-model).\n * Different 'recognitionModel' values are provided. If follow-up operations like \"Verify\", \"Identify\", \"Find Similar\" are needed, please specify the recognition model with 'recognitionModel' parameter. The default value for 'recognitionModel' is 'recognition_01', if latest model needed, please explicitly specify the model you need in this parameter. Once specified, the detected faceIds will be associated with the specified recognition model. More details, please refer to [here](https://learn.microsoft.com/azure/ai-services/computer-vision/how-to/specify-recognition-model).", + "consumes": [ + "application/octet-stream" + ], + "parameters": [ + { + "name": "detectionModel", + "in": "query", + "description": "The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. 
'detection_03' is recommended since its accuracy is improved on smaller faces (64x64 pixels) and rotated face orientations.", + "required": false, + "type": "string", + "default": "detection_01", + "enum": [ + "detection_01", + "detection_02", + "detection_03" + ], + "x-ms-enum": { + "name": "DetectionModel", + "modelAsString": true, + "values": [ + { + "name": "detection_01", + "value": "detection_01", + "description": "The default detection model. Recommend for near frontal face detection. For scenarios with exceptionally large angle (head-pose) faces, occluded faces or wrong image orientation, the faces in such cases may not be detected." + }, + { + "name": "detection_02", + "value": "detection_02", + "description": "Detection model released in 2019 May with improved accuracy especially on small, side and blurry faces." + }, + { + "name": "detection_03", + "value": "detection_03", + "description": "Detection model released in 2021 February with improved accuracy especially on small faces." + } + ] + } + }, + { + "name": "recognitionModel", + "in": "query", + "description": "The 'recognitionModel' associated with the detected faceIds. Supported 'recognitionModel' values include 'recognition_01', 'recognition_02', 'recognition_03' or 'recognition_04'. The default value is 'recognition_01'. 'recognition_04' is recommended since its accuracy is improved on faces wearing masks compared with 'recognition_03', and its overall accuracy is improved compared with 'recognition_01' and 'recognition_02'.", + "required": false, + "type": "string", + "default": "recognition_01", + "enum": [ + "recognition_01", + "recognition_02", + "recognition_03", + "recognition_04" + ], + "x-ms-enum": { + "name": "RecognitionModel", + "modelAsString": true, + "values": [ + { + "name": "recognition_01", + "value": "recognition_01", + "description": "The default recognition model for \"Detect\". All those faceIds created before 2019 March are bonded with this recognition model." 
+ }, + { + "name": "recognition_02", + "value": "recognition_02", + "description": "Recognition model released in 2019 March." + }, + { + "name": "recognition_03", + "value": "recognition_03", + "description": "Recognition model released in 2020 May." + }, + { + "name": "recognition_04", + "value": "recognition_04", + "description": "Recognition model released in 2021 February. It's recommended to use this recognition model for better recognition accuracy." + } + ] + } + }, + { + "name": "returnFaceId", + "in": "query", + "description": "Return faceIds of the detected faces or not. The default value is true.", + "required": false, + "type": "boolean", + "default": true + }, + { + "name": "returnFaceAttributes", + "in": "query", + "description": "Analyze and return the one or more specified face attributes in the comma-separated string like 'returnFaceAttributes=headPose,glasses'. Face attribute analysis has additional computational and time cost.", + "required": false, + "type": "array", + "items": { + "type": "string", + "enum": [ + "headPose", + "glasses", + "occlusion", + "accessories", + "blur", + "exposure", + "noise", + "mask", + "qualityForRecognition", + "age", + "smile", + "facialHair", + "hair" + ], + "x-ms-enum": { + "name": "FaceAttributeType", + "modelAsString": true, + "values": [ + { + "name": "headPose", + "value": "headPose", + "description": "3-D roll/yaw/pitch angles for face direction." + }, + { + "name": "glasses", + "value": "glasses", + "description": "Glasses type. Values include 'NoGlasses', 'ReadingGlasses', 'Sunglasses', 'SwimmingGoggles'." + }, + { + "name": "occlusion", + "value": "occlusion", + "description": "Whether each facial area is occluded, including forehead, eyes and mouth." + }, + { + "name": "accessories", + "value": "accessories", + "description": "Accessories around face, including 'headwear', 'glasses' and 'mask'. Empty array means no accessories detected. Note this is after a face is detected. 
Large mask could result in no face to be detected." + }, + { + "name": "blur", + "value": "blur", + "description": "Face is blurry or not. Level returns 'Low', 'Medium' or 'High'. Value returns a number between [0,1], the larger the blurrier." + }, + { + "name": "exposure", + "value": "exposure", + "description": "Face exposure level. Level returns 'GoodExposure', 'OverExposure' or 'UnderExposure'." + }, + { + "name": "noise", + "value": "noise", + "description": "Noise level of face pixels. Level returns 'Low', 'Medium' and 'High'. Value returns a number between [0,1], the larger the noisier" + }, + { + "name": "mask", + "value": "mask", + "description": "Whether each face is wearing a mask. Mask type returns 'noMask', 'faceMask', 'otherMaskOrOcclusion', or 'uncertain'. Value returns a boolean 'noseAndMouthCovered' indicating whether nose and mouth are covered." + }, + { + "name": "qualityForRecognition", + "value": "qualityForRecognition", + "description": "The overall image quality regarding whether the image being used in the detection is of sufficient quality to attempt face recognition on. The value is an informal rating of low, medium, or high. Only 'high' quality images are recommended for person enrollment and quality at or above 'medium' is recommended for identification scenarios. The attribute is only available when using recognition models recognition_03 or recognition_04." + }, + { + "name": "age", + "value": "age", + "description": "Age in years." + }, + { + "name": "smile", + "value": "smile", + "description": "Smile intensity, a number between [0,1]." + }, + { + "name": "facialHair", + "value": "facialHair", + "description": "Properties describing facial hair attributes." + }, + { + "name": "hair", + "value": "hair", + "description": "Properties describing hair attributes." + } + ] + } + }, + "collectionFormat": "csv" + }, + { + "name": "returnFaceLandmarks", + "in": "query", + "description": "Return face landmarks of the detected faces or not. 
The default value is false.", + "required": false, + "type": "boolean", + "default": false + }, + { + "name": "returnRecognitionModel", + "in": "query", + "description": "Return 'recognitionModel' or not. The default value is false. This is only applicable when returnFaceId = true.", + "required": false, + "type": "boolean", + "default": false + }, + { + "name": "faceIdTimeToLive", + "in": "query", + "description": "The number of seconds for the face ID being cached. Supported range from 60 seconds up to 86400 seconds. The default value is 86400 (24 hours).", + "required": false, + "type": "integer", + "format": "int32", + "default": 86400, + "minimum": 60, + "maximum": 86400 + }, + { + "name": "imageContent", + "in": "body", + "description": "The input image binary.", + "required": true, + "schema": { + "type": "string", + "format": "binary" + } + } + ], + "responses": { + "200": { + "description": "A successful call returns an array of face entries ranked by face rectangle size in descending order. An empty response indicates no faces detected.", + "schema": { + "type": "array", + "items": { + "$ref": "#/definitions/FaceDetectionResult" + } + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." 
+ } + } + } + }, + "x-ms-examples": { + "Detect with Image": { + "$ref": "./examples/Detect.json" + } + } + } + }, + "/detect?_overload=detectFromSessionImageId": { + "post": { + "operationId": "FaceDetectionOperations_DetectFromSessionImageId", + "summary": "Detect human faces in an image, return face rectangles, and optionally with faceIds, landmarks, and attributes.", + "description": "> [!IMPORTANT]\n> Microsoft has retired or limited facial recognition capabilities that can be used to try to infer emotional states and identity attributes which, if misused, can subject people to stereotyping, discrimination or unfair denial of services. The retired capabilities are emotion and gender. The limited capabilities are age, smile, facial hair, hair and makeup. Email [Azure Face API](mailto:azureface@microsoft.com) if you have a responsible use case that would benefit from the use of any of the limited capabilities. Read more about this decision [here](https://azure.microsoft.com/blog/responsible-ai-investments-and-safeguards-for-facial-recognition/).\n\n*\n * No image will be stored. Only the extracted face feature(s) will be stored on server. The faceId is an identifier of the face feature and will be used in \"Identify\", \"Verify\", and \"Find Similar\". The stored face features will expire and be deleted at the time specified by faceIdTimeToLive after the original detection call.\n * Optional parameters include faceId, landmarks, and attributes. Attributes include headPose, glasses, occlusion, accessories, blur, exposure, noise, mask, and qualityForRecognition. Some of the results returned for specific attributes may not be highly accurate.\n * JPEG, PNG, GIF (the first frame), and BMP format are supported. The allowed image file size is from 1KB to 6MB.\n * The minimum detectable face size is 36x36 pixels in an image no larger than 1920x1080 pixels. 
Images with dimensions higher than 1920x1080 pixels will need a proportionally larger minimum face size.\n * Up to 100 faces can be returned for an image. Faces are ranked by face rectangle size from large to small.\n * For optimal results when querying \"Identify\", \"Verify\", and \"Find Similar\" ('returnFaceId' is true), please use faces that are: frontal, clear, and with a minimum size of 200x200 pixels (100 pixels between eyes).\n * Different 'detectionModel' values can be provided. The availability of landmarks and supported attributes depends on the detection model specified. To use and compare different detection models, please refer to [here](https://learn.microsoft.com/azure/ai-services/computer-vision/how-to/specify-detection-model).\n * Different 'recognitionModel' values are provided. If follow-up operations like \"Verify\", \"Identify\", \"Find Similar\" are needed, please specify the recognition model with 'recognitionModel' parameter. The default value for 'recognitionModel' is 'recognition_01', if latest model needed, please explicitly specify the model you need in this parameter. Once specified, the detected faceIds will be associated with the specified recognition model. More details, please refer to [here](https://learn.microsoft.com/azure/ai-services/computer-vision/how-to/specify-recognition-model).", + "parameters": [ + { + "name": "detectionModel", + "in": "query", + "description": "The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. 
'detection_03' is recommended since its accuracy is improved on smaller faces (64x64 pixels) and rotated face orientations.", + "required": false, + "type": "string", + "default": "detection_01", + "enum": [ + "detection_01", + "detection_02", + "detection_03" + ], + "x-ms-enum": { + "name": "DetectionModel", + "modelAsString": true, + "values": [ + { + "name": "detection_01", + "value": "detection_01", + "description": "The default detection model. Recommended for near frontal face detection. For scenarios with exceptionally large angle (head-pose) faces, occluded faces or wrong image orientation, the faces in such cases may not be detected." + }, + { + "name": "detection_02", + "value": "detection_02", + "description": "Detection model released in 2019 May with improved accuracy especially on small, side and blurry faces." + }, + { + "name": "detection_03", + "value": "detection_03", + "description": "Detection model released in 2021 February with improved accuracy especially on small faces." + } + ] + } + }, + { + "name": "recognitionModel", + "in": "query", + "description": "The 'recognitionModel' associated with the detected faceIds. Supported 'recognitionModel' values include 'recognition_01', 'recognition_02', 'recognition_03' or 'recognition_04'. The default value is 'recognition_01'. 'recognition_04' is recommended since its accuracy is improved on faces wearing masks compared with 'recognition_03', and its overall accuracy is improved compared with 'recognition_01' and 'recognition_02'.", + "required": false, + "type": "string", + "default": "recognition_01", + "enum": [ + "recognition_01", + "recognition_02", + "recognition_03", + "recognition_04" + ], + "x-ms-enum": { + "name": "RecognitionModel", + "modelAsString": true, + "values": [ + { + "name": "recognition_01", + "value": "recognition_01", + "description": "The default recognition model for \"Detect\". All those faceIds created before 2019 March are bonded with this recognition model." 
+ }, + { + "name": "recognition_02", + "value": "recognition_02", + "description": "Recognition model released in 2019 March." + }, + { + "name": "recognition_03", + "value": "recognition_03", + "description": "Recognition model released in 2020 May." + }, + { + "name": "recognition_04", + "value": "recognition_04", + "description": "Recognition model released in 2021 February. It's recommended to use this recognition model for better recognition accuracy." + } + ] + } + }, + { + "name": "returnFaceId", + "in": "query", + "description": "Return faceIds of the detected faces or not. The default value is true.", + "required": false, + "type": "boolean", + "default": true + }, + { + "name": "returnFaceAttributes", + "in": "query", + "description": "Analyze and return the one or more specified face attributes in the comma-separated string like 'returnFaceAttributes=headPose,glasses'. Face attribute analysis has additional computational and time cost.", + "required": false, + "type": "array", + "items": { + "type": "string", + "enum": [ + "headPose", + "glasses", + "occlusion", + "accessories", + "blur", + "exposure", + "noise", + "mask", + "qualityForRecognition", + "age", + "smile", + "facialHair", + "hair" + ], + "x-ms-enum": { + "name": "FaceAttributeType", + "modelAsString": true, + "values": [ + { + "name": "headPose", + "value": "headPose", + "description": "3-D roll/yaw/pitch angles for face direction." + }, + { + "name": "glasses", + "value": "glasses", + "description": "Glasses type. Values include 'NoGlasses', 'ReadingGlasses', 'Sunglasses', 'SwimmingGoggles'." + }, + { + "name": "occlusion", + "value": "occlusion", + "description": "Whether each facial area is occluded, including forehead, eyes and mouth." + }, + { + "name": "accessories", + "value": "accessories", + "description": "Accessories around face, including 'headwear', 'glasses' and 'mask'. Empty array means no accessories detected. Note this is after a face is detected. 
Large mask could result in no face to be detected." + }, + { + "name": "blur", + "value": "blur", + "description": "Face is blurry or not. Level returns 'Low', 'Medium' or 'High'. Value returns a number between [0,1], the larger the blurrier." + }, + { + "name": "exposure", + "value": "exposure", + "description": "Face exposure level. Level returns 'GoodExposure', 'OverExposure' or 'UnderExposure'." + }, + { + "name": "noise", + "value": "noise", + "description": "Noise level of face pixels. Level returns 'Low', 'Medium' and 'High'. Value returns a number between [0,1], the larger the noisier." + }, + { + "name": "mask", + "value": "mask", + "description": "Whether each face is wearing a mask. Mask type returns 'noMask', 'faceMask', 'otherMaskOrOcclusion', or 'uncertain'. Value returns a boolean 'noseAndMouthCovered' indicating whether nose and mouth are covered." + }, + { + "name": "qualityForRecognition", + "value": "qualityForRecognition", + "description": "The overall image quality regarding whether the image being used in the detection is of sufficient quality to attempt face recognition on. The value is an informal rating of low, medium, or high. Only 'high' quality images are recommended for person enrollment and quality at or above 'medium' is recommended for identification scenarios. The attribute is only available when using recognition models recognition_03 or recognition_04." + }, + { + "name": "age", + "value": "age", + "description": "Age in years." + }, + { + "name": "smile", + "value": "smile", + "description": "Smile intensity, a number between [0,1]." + }, + { + "name": "facialHair", + "value": "facialHair", + "description": "Properties describing facial hair attributes." + }, + { + "name": "hair", + "value": "hair", + "description": "Properties describing hair attributes." + } + ] + } + }, + "collectionFormat": "csv" + }, + { + "name": "returnFaceLandmarks", + "in": "query", + "description": "Return face landmarks of the detected faces or not. 
The default value is false.", + "required": false, + "type": "boolean", + "default": false + }, + { + "name": "returnRecognitionModel", + "in": "query", + "description": "Return 'recognitionModel' or not. The default value is false. This is only applicable when returnFaceId = true.", + "required": false, + "type": "boolean", + "default": false + }, + { + "name": "faceIdTimeToLive", + "in": "query", + "description": "The number of seconds for the face ID being cached. Supported range from 60 seconds up to 86400 seconds. The default value is 86400 (24 hours).", + "required": false, + "type": "integer", + "format": "int32", + "default": 86400, + "minimum": 60, + "maximum": 86400 + }, + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "type": "object", + "properties": { + "sessionImageId": { + "type": "string", + "description": "Id of session image." + } + }, + "required": [ + "sessionImageId" + ] + } + } + ], + "responses": { + "200": { + "description": "A successful call returns an array of face entries ranked by face rectangle size in descending order. An empty response indicates no faces detected.", + "schema": { + "type": "array", + "items": { + "$ref": "#/definitions/FaceDetectionResult" + } + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Detect From Session Image Id": { + "$ref": "./examples/DetectFromSessionImageId.json" + } + } + } + }, + "/facelists/{faceListId}/persistedfaces?_overload=addFaceListFace": { + "post": { + "operationId": "FaceListOperations_AddFaceListFace", + "summary": "Add a face to a specified Face List, up to 1,000 faces.", + "description": "To deal with an image containing multiple faces, input face can be specified as an image with a targetFace rectangle. 
It returns a persistedFaceId representing the added face. No image will be stored. Only the extracted face feature(s) will be stored on server until \"Delete Face List Face\" or \"Delete Face List\" is called.\n\nNote that persistedFaceId is different from faceId generated by \"Detect\".\n\n>\n*\n * Higher face image quality means better recognition precision. Please consider high-quality faces: frontal, clear, and face size is 200x200 pixels (100 pixels between eyes) or bigger.\n * JPEG, PNG, GIF (the first frame), and BMP format are supported. The allowed image file size is from 1KB to 6MB.\n * \"targetFace\" rectangle should contain one face. Zero or multiple faces will be regarded as an error. If the provided \"targetFace\" rectangle is not returned from \"Detect\", there's no guarantee to detect and add the face successfully.\n * Out of detectable face size (36x36 - 4096x4096 pixels), large head-pose, or large occlusions will cause failures.\n * The minimum detectable face size is 36x36 pixels in an image no larger than 1920x1080 pixels. Images with dimensions higher than 1920x1080 pixels will need a proportionally larger minimum face size.\n * Different 'detectionModel' values can be provided. 
To use and compare different detection models, please refer to [here](https://learn.microsoft.com/azure/ai-services/computer-vision/how-to/specify-detection-model).", + "consumes": [ + "application/octet-stream" + ], + "parameters": [ + { + "name": "faceListId", + "in": "path", + "description": "Valid character is letter in lower case or digit or '-' or '_', maximum length is 64.", + "required": true, + "type": "string", + "minLength": 1, + "maxLength": 64, + "pattern": "^[a-z0-9-_]+$" + }, + { + "$ref": "#/parameters/AddFaceOptions.targetFace" + }, + { + "$ref": "#/parameters/AddFaceOptions.detectionModel" + }, + { + "$ref": "#/parameters/AddFaceOptions.userData" + }, + { + "$ref": "#/parameters/AddFaceRequest.imageContent" + } + ], + "responses": { + "200": { + "description": "A successful call returns a new persistedFaceId.", + "schema": { + "$ref": "#/definitions/AddFaceResult" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Add Face to FaceList": { + "$ref": "./examples/FaceListOperations_AddFaceListFaceFromStream.json" + } + } + } + }, + "/findsimilars?_overload=findSimilarFromFaceList": { + "post": { + "operationId": "FaceRecognitionOperations_FindSimilarFromFaceList", + "summary": "Given query face's faceId, to search the similar-looking faces from a Face List. A 'faceListId' is created by Create Face List.", + "description": "Depending on the input the returned similar faces list contains faceIds or persistedFaceIds ranked by similarity.\n\nFind similar has two working modes, \"matchPerson\" and \"matchFace\". \"matchPerson\" is the default mode that it tries to find faces of the same person as possible by using internal same-person thresholds. It is useful to find a known person's other photos. 
Note that an empty list will be returned if no faces pass the internal thresholds. \"matchFace\" mode ignores same-person thresholds and returns ranked similar faces anyway, even the similarity is low. It can be used in the cases like searching celebrity-looking faces.\n\nThe 'recognitionModel' associated with the query faceId should be the same as the 'recognitionModel' used by the target Face List.", + "parameters": [ + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "type": "object", + "properties": { + "faceId": { + "$ref": "#/definitions/Azure.Core.uuid", + "description": "faceId of the query face. User needs to call \"Detect\" first to get a valid faceId. Note that this faceId is not persisted and will expire 24 hours after the detection call." + }, + "maxNumOfCandidatesReturned": { + "type": "integer", + "format": "int32", + "description": "The number of top similar faces returned. The valid range is [1, 1000]. Default value is 20.", + "default": 20, + "minimum": 1, + "maximum": 1000 + }, + "mode": { + "type": "string", + "description": "Similar face searching mode. It can be 'matchPerson' or 'matchFace'. Default value is 'matchPerson'.", + "default": "matchPerson", + "enum": [ + "matchPerson", + "matchFace" + ], + "x-ms-enum": { + "name": "FindSimilarMatchMode", + "modelAsString": true, + "values": [ + { + "name": "matchPerson", + "value": "matchPerson", + "description": "Match person." + }, + { + "name": "matchFace", + "value": "matchFace", + "description": "Match face." + } + ] + } + }, + "faceListId": { + "type": "string", + "description": "An existing user-specified unique candidate Face List, created in \"Create Face List\". Face List contains a set of persistedFaceIds which are persisted and will never expire." 
+ } + }, + "required": [ + "faceId", + "faceListId" + ] + } + } + ], + "responses": { + "200": { + "description": "A successful call returns an array of the most similar faces represented in faceId if the input parameter is faceIds or persistedFaceId if the input parameter is faceListId or largeFaceListId.", + "schema": { + "type": "array", + "items": { + "$ref": "#/definitions/FindSimilarResult" + } + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Find Similar from FaceList": { + "$ref": "./examples/FaceRecognitionOperations_FindSimilarFromFaceList.json" + } + } + } + }, + "/findsimilars?_overload=findSimilarFromLargeFaceList": { + "post": { + "operationId": "FaceRecognitionOperations_FindSimilarFromLargeFaceList", + "summary": "Given query face's faceId, to search the similar-looking faces from a Large Face List. A 'largeFaceListId' is created by Create Large Face List.", + "description": "Depending on the input the returned similar faces list contains faceIds or persistedFaceIds ranked by similarity.\n\nFind similar has two working modes, \"matchPerson\" and \"matchFace\". \"matchPerson\" is the default mode that it tries to find faces of the same person as possible by using internal same-person thresholds. It is useful to find a known person's other photos. Note that an empty list will be returned if no faces pass the internal thresholds. \"matchFace\" mode ignores same-person thresholds and returns ranked similar faces anyway, even the similarity is low. 
It can be used in the cases like searching celebrity-looking faces.\n\nThe 'recognitionModel' associated with the query faceId should be the same as the 'recognitionModel' used by the target Large Face List.", + "parameters": [ + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "type": "object", + "properties": { + "faceId": { + "$ref": "#/definitions/Azure.Core.uuid", + "description": "faceId of the query face. User needs to call \"Detect\" first to get a valid faceId. Note that this faceId is not persisted and will expire 24 hours after the detection call." + }, + "maxNumOfCandidatesReturned": { + "type": "integer", + "format": "int32", + "description": "The number of top similar faces returned. The valid range is [1, 1000]. Default value is 20.", + "default": 20, + "minimum": 1, + "maximum": 1000 + }, + "mode": { + "type": "string", + "description": "Similar face searching mode. It can be 'matchPerson' or 'matchFace'. Default value is 'matchPerson'.", + "default": "matchPerson", + "enum": [ + "matchPerson", + "matchFace" + ], + "x-ms-enum": { + "name": "FindSimilarMatchMode", + "modelAsString": true, + "values": [ + { + "name": "matchPerson", + "value": "matchPerson", + "description": "Match person." + }, + { + "name": "matchFace", + "value": "matchFace", + "description": "Match face." + } + ] + } + }, + "largeFaceListId": { + "type": "string", + "description": "An existing user-specified unique candidate Large Face List, created in \"Create Large Face List\". Large Face List contains a set of persistedFaceIds which are persisted and will never expire." 
+ } + }, + "required": [ + "faceId", + "largeFaceListId" + ] + } + } + ], + "responses": { + "200": { + "description": "A successful call returns an array of the most similar faces represented in faceId if the input parameter is faceIds or persistedFaceId if the input parameter is faceListId or largeFaceListId.", + "schema": { + "type": "array", + "items": { + "$ref": "#/definitions/FindSimilarResult" + } + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Find Similar from LargeFaceList": { + "$ref": "./examples/FaceRecognitionOperations_FindSimilarFromLargeFaceList.json" + } + } + } + }, + "/identify?_overload=identifyFromDynamicPersonGroup": { + "post": { + "operationId": "FaceRecognitionOperations_IdentifyFromDynamicPersonGroup", + "summary": "1-to-many identification to find the closest matches of the specific query person face from a Dynamic Person Group.", + "description": "For each face in the faceIds array, Face Identify will compute similarities between the query face and all the faces in the Dynamic Person Group (given by dynamicPersonGroupId), and return candidate person(s) for that face ranked by similarity confidence.\n> [!NOTE]\n>\n> *\n> * The algorithm allows more than one face to be identified independently at the same request, but no more than 10 faces.\n> * Each person could have more than one face, but no more than 248 faces.\n> * Higher face image quality means better identification precision. Please consider high-quality faces: frontal, clear, and face size is 200x200 pixels (100 pixels between eyes) or bigger.\n> * Number of candidates returned is restricted by maxNumOfCandidatesReturned and confidenceThreshold. 
If no person is identified, the returned candidates will be an empty array.\n> * The Identify operation can only match faces obtained with the same recognition model, that is associated with the query faces.", + "parameters": [ + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "type": "object", + "properties": { + "faceIds": { + "type": "array", + "description": "Array of query faces faceIds, created by the \"Detect\". Each of the faces are identified independently. The valid number of faceIds is between [1, 10].", + "minItems": 1, + "maxItems": 10, + "items": { + "$ref": "#/definitions/Azure.Core.uuid" + } + }, + "dynamicPersonGroupId": { + "type": "string", + "description": "DynamicPersonGroupId of the target PersonDirectory DynamicPersonGroup to match against." + }, + "maxNumOfCandidatesReturned": { + "type": "integer", + "format": "int32", + "description": "The range of maxNumOfCandidatesReturned is between 1 and 100. Default value is 10.", + "default": 10, + "minimum": 1, + "maximum": 100 + }, + "confidenceThreshold": { + "type": "number", + "format": "float", + "description": "Customized identification confidence threshold, in the range of [0, 1]. Advanced user can tweak this value to override default internal threshold for better precision on their scenario data. 
Note there is no guarantee of this threshold value working on other data and after algorithm updates.", + "minimum": 0, + "maximum": 1 + } + }, + "required": [ + "faceIds", + "dynamicPersonGroupId" + ] + } + } + ], + "responses": { + "200": { + "description": "A successful call returns the identified candidate person(s) for each query face.", + "schema": { + "type": "array", + "items": { + "$ref": "#/definitions/IdentificationResult" + } + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Identify from DynamicPersonGroup": { + "$ref": "./examples/FaceRecognitionOperations_IdentifyFromDynamicPersonGroup.json" + } + } + } + }, + "/identify?_overload=identifyFromLargePersonGroup": { + "post": { + "operationId": "FaceRecognitionOperations_IdentifyFromLargePersonGroup", + "summary": "1-to-many identification to find the closest matches of the specific query person face from a Large Person Group.", + "description": "For each face in the faceIds array, Face Identify will compute similarities between the query face and all the faces in the Large Person Group (given by largePersonGroupId), and return candidate person(s) for that face ranked by similarity confidence. The Large Person Group should be trained to make it ready for identification. See more in \"Train Large Person Group\".\n> [!NOTE]\n>\n> *\n> * The algorithm allows more than one face to be identified independently at the same request, but no more than 10 faces.\n> * Each person could have more than one face, but no more than 248 faces.\n> * Higher face image quality means better identification precision. 
Please consider high-quality faces: frontal, clear, and face size is 200x200 pixels (100 pixels between eyes) or bigger.\n> * Number of candidates returned is restricted by maxNumOfCandidatesReturned and confidenceThreshold. If no person is identified, the returned candidates will be an empty array.\n> * Try \"Find Similar\" when you need to find similar faces from a Face List/Large Face List instead of a Person Group/Large Person Group.\n> * The 'recognitionModel' associated with the query faces' faceIds should be the same as the 'recognitionModel' used by the target Person Group or Large Person Group.", + "parameters": [ + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "type": "object", + "properties": { + "faceIds": { + "type": "array", + "description": "Array of query faces faceIds, created by the \"Detect\". Each of the faces are identified independently. The valid number of faceIds is between [1, 10].", + "minItems": 1, + "maxItems": 10, + "items": { + "$ref": "#/definitions/Azure.Core.uuid" + } + }, + "largePersonGroupId": { + "type": "string", + "description": "largePersonGroupId of the target Large Person Group, created by \"Create Large Person Group\". Parameter personGroupId and largePersonGroupId should not be provided at the same time." + }, + "maxNumOfCandidatesReturned": { + "type": "integer", + "format": "int32", + "description": "The range of maxNumOfCandidatesReturned is between 1 and 100. Default value is 10.", + "default": 10, + "minimum": 1, + "maximum": 100 + }, + "confidenceThreshold": { + "type": "number", + "format": "float", + "description": "Customized identification confidence threshold, in the range of [0, 1]. Advanced user can tweak this value to override default internal threshold for better precision on their scenario data. 
Note there is no guarantee of this threshold value working on other data and after algorithm updates.", + "minimum": 0, + "maximum": 1 + } + }, + "required": [ + "faceIds", + "largePersonGroupId" + ] + } + } + ], + "responses": { + "200": { + "description": "A successful call returns the identified candidate person(s) for each query face.", + "schema": { + "type": "array", + "items": { + "$ref": "#/definitions/IdentificationResult" + } + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Identify from LargePersonGroup": { + "$ref": "./examples/FaceRecognitionOperations_IdentifyFromLargePersonGroup.json" + } + } + } + }, + "/identify?_overload=identifyFromPersonDirectory": { + "post": { + "operationId": "FaceRecognitionOperations_IdentifyFromPersonDirectory", + "summary": "1-to-many identification to find the closest matches of the specific query person face from a person directory personIds array.", + "description": "For each face in the faceIds array, Face Identify will compute similarities between the query face and all the faces in the Person Directory Persons (given by personIds), and return candidate person(s) for that face ranked by similarity confidence.\nPassing personIds with an array with one element \"*\" can perform the operation over entire person directory.\n> [!NOTE]\n>\n> *\n> * The algorithm allows more than one face to be identified independently at the same request, but no more than 10 faces.\n> * Each person could have more than one face, but no more than 248 faces.\n> * Higher face image quality means better identification precision. 
Please consider high-quality faces: frontal, clear, and face size is 200x200 pixels (100 pixels between eyes) or bigger.\n> * Number of candidates returned is restricted by maxNumOfCandidatesReturned and confidenceThreshold. If no person is identified, the returned candidates will be an empty array.\n> * The Identify operation can only match faces obtained with the same recognition model, that is associated with the query faces.", + "parameters": [ + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "type": "object", + "properties": { + "faceIds": { + "type": "array", + "description": "Array of query faces faceIds, created by the \"Detect\". Each of the faces are identified independently. The valid number of faceIds is between [1, 10].", + "minItems": 1, + "maxItems": 10, + "items": { + "$ref": "#/definitions/Azure.Core.uuid" + } + }, + "personIds": { + "type": "array", + "description": "Array of personIds created in Person Directory \"Create Person\". The valid number of personIds is between [1,30].", + "minItems": 1, + "maxItems": 30, + "items": { + "$ref": "#/definitions/Azure.Core.uuid" + } + }, + "maxNumOfCandidatesReturned": { + "type": "integer", + "format": "int32", + "description": "The range of maxNumOfCandidatesReturned is between 1 and 100. Default value is 10.", + "default": 10, + "minimum": 1, + "maximum": 100 + }, + "confidenceThreshold": { + "type": "number", + "format": "float", + "description": "Customized identification confidence threshold, in the range of [0, 1]. Advanced user can tweak this value to override default internal threshold for better precision on their scenario data. 
Note there is no guarantee of this threshold value working on other data and after algorithm updates.", + "minimum": 0, + "maximum": 1 + } + }, + "required": [ + "faceIds", + "personIds" + ] + } + } + ], + "responses": { + "200": { + "description": "A successful call returns the identified candidate person(s) for each query face.", + "schema": { + "type": "array", + "items": { + "$ref": "#/definitions/IdentificationResult" + } + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Identify from PersonDirectory": { + "$ref": "./examples/FaceRecognitionOperations_IdentifyFromPersonDirectory.json" + } + } + } + }, + "/largefacelists/{largeFaceListId}/persistedfaces?_overload=addLargeFaceListFace": { + "post": { + "operationId": "FaceListOperations_AddLargeFaceListFace", + "summary": "Add a face to a specified Large Face List, up to 1,000,000 faces.", + "description": "To deal with an image containing multiple faces, input face can be specified as an image with a targetFace rectangle. It returns a persistedFaceId representing the added face. No image will be stored. Only the extracted face feature(s) will be stored on server until \"Delete Large Face List Face\" or \"Delete Large Face List\" is called.\n\nNote that persistedFaceId is different from faceId generated by \"Detect\".\n\n>\n*\n * Higher face image quality means better recognition precision. Please consider high-quality faces: frontal, clear, and face size is 200x200 pixels (100 pixels between eyes) or bigger.\n * JPEG, PNG, GIF (the first frame), and BMP format are supported. The allowed image file size is from 1KB to 6MB.\n * \"targetFace\" rectangle should contain one face. Zero or multiple faces will be regarded as an error. 
If the provided \"targetFace\" rectangle is not returned from \"Detect\", there's no guarantee to detect and add the face successfully.\n * Out of detectable face size (36x36 - 4096x4096 pixels), large head-pose, or large occlusions will cause failures.\n * The minimum detectable face size is 36x36 pixels in an image no larger than 1920x1080 pixels. Images with dimensions higher than 1920x1080 pixels will need a proportionally larger minimum face size.\n * Different 'detectionModel' values can be provided. To use and compare different detection models, please refer to [here](https://learn.microsoft.com/azure/ai-services/computer-vision/how-to/specify-detection-model).\n\n> [!NOTE]\n>\n> *\n> * Free-tier subscription quota: 1,000 faces per Large Face List.\n> * S0-tier subscription quota: 1,000,000 faces per Large Face List.", + "consumes": [ + "application/octet-stream" + ], + "parameters": [ + { + "name": "largeFaceListId", + "in": "path", + "description": "Valid character is letter in lower case or digit or '-' or '_', maximum length is 64.", + "required": true, + "type": "string", + "minLength": 1, + "maxLength": 64, + "pattern": "^[a-z0-9-_]+$" + }, + { + "$ref": "#/parameters/AddFaceOptions.targetFace" + }, + { + "$ref": "#/parameters/AddFaceOptions.detectionModel" + }, + { + "$ref": "#/parameters/AddFaceOptions.userData" + }, + { + "$ref": "#/parameters/AddFaceRequest.imageContent" + } + ], + "responses": { + "200": { + "description": "A successful call returns a new persistedFaceId.", + "schema": { + "$ref": "#/definitions/AddFaceResult" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." 
+ } + } + } + }, + "x-ms-examples": { + "Add Face to LargeFaceList": { + "$ref": "./examples/FaceListOperations_AddLargeFaceListFaceFromStream.json" + } + } + } + }, + "/largepersongroups/{largePersonGroupId}/persons/{personId}/persistedfaces?_overload=addLargePersonGroupPersonFace": { + "post": { + "operationId": "PersonGroupOperations_AddLargePersonGroupPersonFace", + "summary": "Add a face to a person into a Large Person Group for face identification or verification.", + "description": "To deal with an image containing multiple faces, input face can be specified as an image with a targetFace rectangle. It returns a persistedFaceId representing the added face. No image will be stored. Only the extracted face feature(s) will be stored on server until \"Delete Large Person Group Person Face\", \"Delete Large Person Group Person\" or \"Delete Large Person Group\" is called.\n\nNote that persistedFaceId is different from faceId generated by \"Detect\".\n\n>\n*\n * Each person entry can hold up to 248 faces.\n * Higher face image quality means better recognition precision. Please consider high-quality faces: frontal, clear, and face size is 200x200 pixels (100 pixels between eyes) or bigger.\n * JPEG, PNG, GIF (the first frame), and BMP format are supported. The allowed image file size is from 1KB to 6MB.\n * \"targetFace\" rectangle should contain one face. Zero or multiple faces will be regarded as an error. If the provided \"targetFace\" rectangle is not returned from \"Detect\", there's no guarantee to detect and add the face successfully.\n * Out of detectable face size (36x36 - 4096x4096 pixels), large head-pose, or large occlusions will cause failures.\n * The minimum detectable face size is 36x36 pixels in an image no larger than 1920x1080 pixels. Images with dimensions higher than 1920x1080 pixels will need a proportionally larger minimum face size.\n * Different 'detectionModel' values can be provided. 
To use and compare different detection models, please refer to [here](https://learn.microsoft.com/azure/ai-services/computer-vision/how-to/specify-detection-model).", + "consumes": [ + "application/octet-stream" + ], + "parameters": [ + { + "name": "largePersonGroupId", + "in": "path", + "description": "ID of the container.", + "required": true, + "type": "string", + "minLength": 1, + "maxLength": 64, + "pattern": "^[a-z0-9-_]+$" + }, + { + "name": "personId", + "in": "path", + "description": "ID of the person.", + "required": true, + "type": "string", + "format": "uuid" + }, + { + "$ref": "#/parameters/AddFaceOptions.targetFace" + }, + { + "$ref": "#/parameters/AddFaceOptions.detectionModel" + }, + { + "$ref": "#/parameters/AddFaceOptions.userData" + }, + { + "$ref": "#/parameters/AddFaceRequest.imageContent" + } + ], + "responses": { + "200": { + "description": "A successful call returns a new persistedFaceId.", + "schema": { + "$ref": "#/definitions/AddFaceResult" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Add Face in LargePersonGroup Person": { + "$ref": "./examples/PersonGroupOperations_AddLargePersonGroupPersonFaceFromStream.json" + } + } + } + }, + "/persongroups/{personGroupId}/persons/{personId}/persistedfaces?_overload=addPersonGroupPersonFace": { + "post": { + "operationId": "PersonGroupOperations_AddPersonGroupPersonFace", + "summary": "Add a face to a person into a Person Group for face identification or verification.", + "description": "To deal with an image containing multiple faces, input face can be specified as an image with a targetFace rectangle. It returns a persistedFaceId representing the added face. No image will be stored. 
Only the extracted face feature(s) will be stored on server until \"Delete Person Group Person Face\", \"Delete Person Group Person\" or \"Delete Person Group\" is called.\n\nNote that persistedFaceId is different from faceId generated by \"Detect\".\n\n>\n* \n * Each person entry can hold up to 248 faces.\n * Higher face image quality means better recognition precision. Please consider high-quality faces: frontal, clear, and face size is 200x200 pixels (100 pixels between eyes) or bigger.\n * JPEG, PNG, GIF (the first frame), and BMP format are supported. The allowed image file size is from 1KB to 6MB.\n * \"targetFace\" rectangle should contain one face. Zero or multiple faces will be regarded as an error. If the provided \"targetFace\" rectangle is not returned from \"Detect\", there's no guarantee to detect and add the face successfully.\n * Out of detectable face size (36x36 - 4096x4096 pixels), large head-pose, or large occlusions will cause failures.\n * The minimum detectable face size is 36x36 pixels in an image no larger than 1920x1080 pixels. Images with dimensions higher than 1920x1080 pixels will need a proportionally larger minimum face size.\n * Different 'detectionModel' values can be provided. 
To use and compare different detection models, please refer to [here](https://learn.microsoft.com/azure/ai-services/computer-vision/how-to/specify-detection-model).", + "consumes": [ + "application/octet-stream" + ], + "parameters": [ + { + "name": "personGroupId", + "in": "path", + "description": "ID of the container.", + "required": true, + "type": "string", + "minLength": 1, + "maxLength": 64, + "pattern": "^[a-z0-9-_]+$" + }, + { + "name": "personId", + "in": "path", + "description": "ID of the person.", + "required": true, + "type": "string", + "format": "uuid" + }, + { + "$ref": "#/parameters/AddFaceOptions.targetFace" + }, + { + "$ref": "#/parameters/AddFaceOptions.detectionModel" + }, + { + "$ref": "#/parameters/AddFaceOptions.userData" + }, + { + "$ref": "#/parameters/AddFaceRequest.imageContent" + } + ], + "responses": { + "200": { + "description": "A successful call returns a new persistedFaceId.", + "schema": { + "$ref": "#/definitions/AddFaceResult" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Add Face to PersonGroup Person": { + "$ref": "./examples/PersonGroupOperations_AddPersonGroupPersonFaceFromStream.json" + } + } + } + }, + "/verify?_overload=verifyFromLargePersonGroup": { + "post": { + "operationId": "FaceRecognitionOperations_VerifyFromLargePersonGroup", + "summary": "Verify whether a face belongs to a person in a Large Person Group.", + "description": "> [!NOTE]\n>\n> *\n> * Higher face image quality means better identification precision. 
Please consider high-quality faces: frontal, clear, and face size is 200x200 pixels (100 pixels between eyes) or bigger.\n> * For the scenarios that are sensitive to accuracy please make your own judgment.\n> * The 'recognitionModel' associated with the query face should be the same as the 'recognitionModel' used by the Large Person Group.", + "parameters": [ + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "type": "object", + "properties": { + "faceId": { + "$ref": "#/definitions/Azure.Core.uuid", + "description": "The faceId of the face, come from \"Detect\"." + }, + "largePersonGroupId": { + "type": "string", + "description": "Using existing largePersonGroupId and personId for fast loading a specified person. largePersonGroupId is created in \"Create Large Person Group\"." + }, + "personId": { + "$ref": "#/definitions/Azure.Core.uuid", + "description": "Specify a certain person in Large Person Group." + } + }, + "required": [ + "faceId", + "largePersonGroupId", + "personId" + ] + } + } + ], + "responses": { + "200": { + "description": "A successful call returns the verification result.", + "schema": { + "$ref": "#/definitions/VerificationResult" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Verify from LargePersonGroup": { + "$ref": "./examples/FaceRecognitionOperations_VerifyFromLargePersonGroup.json" + } + } + } + }, + "/verify?_overload=verifyFromPersonDirectory": { + "post": { + "operationId": "FaceRecognitionOperations_VerifyFromPersonDirectory", + "summary": "Verify whether a face belongs to a person in Person Directory.", + "description": "> [!NOTE]\n>\n> *\n> * Higher face image quality means better identification precision. 
Please consider high-quality faces: frontal, clear, and face size is 200x200 pixels (100 pixels between eyes) or bigger.\n> * For the scenarios that are sensitive to accuracy please make your own judgment.\n> * The Verify operation can only match faces obtained with the same recognition model, that is associated with the query face.", + "parameters": [ + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "type": "object", + "properties": { + "faceId": { + "$ref": "#/definitions/Azure.Core.uuid", + "description": "The faceId of the face, come from \"Detect\"." + }, + "personId": { + "$ref": "#/definitions/Azure.Core.uuid", + "description": "Specify a certain person in PersonDirectory Person." + } + }, + "required": [ + "faceId", + "personId" + ] + } + } + ], + "responses": { + "200": { + "description": "A successful call returns the verification result.", + "schema": { + "$ref": "#/definitions/VerificationResult" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Verify from PersonDirectory": { + "$ref": "./examples/FaceRecognitionOperations_VerifyFromPersonDirectory.json" + } + } + } + }, + "/verify?_overload=verifyFromPersonGroup": { + "post": { + "operationId": "FaceRecognitionOperations_VerifyFromPersonGroup", + "summary": "Verify whether a face belongs to a person in a Person Group.", + "description": "> [!NOTE]\n>\n> *\n> * Higher face image quality means better identification precision. 
Please consider high-quality faces: frontal, clear, and face size is 200x200 pixels (100 pixels between eyes) or bigger.\n> * For the scenarios that are sensitive to accuracy please make your own judgment.\n> * The 'recognitionModel' associated with the query face should be the same as the 'recognitionModel' used by the Person Group.", + "parameters": [ + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "type": "object", + "properties": { + "faceId": { + "$ref": "#/definitions/Azure.Core.uuid", + "description": "The faceId of the face, come from \"Detect\"." + }, + "personGroupId": { + "type": "string", + "description": "Using existing personGroupId and personId for fast loading a specified person. personGroupId is created in \"Create Person Group\"." + }, + "personId": { + "$ref": "#/definitions/Azure.Core.uuid", + "description": "Specify a certain person in Person Group." + } + }, + "required": [ + "faceId", + "personGroupId", + "personId" + ] + } + } + ], + "responses": { + "200": { + "description": "A successful call returns the verification result.", + "schema": { + "$ref": "#/definitions/VerificationResult" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Verify from PersonGroup": { + "$ref": "./examples/FaceRecognitionOperations_VerifyFromPersonGroup.json" + } + } + } + } + }, + "definitions": { + "AbuseMonitoringResult": { + "type": "object", + "description": "The abuse monitoring result for the liveness attempt.", + "properties": { + "isAbuseDetected": { + "type": "object", + "description": "Denotes if abuse detection triggered during this liveness attempt." 
+ }, + "otherFlaggedSessions": { + "type": "array", + "description": "The other sessions that were also flagged during abuse monitoring.", + "items": { + "$ref": "#/definitions/OtherFlaggedSessions" + } + } + }, + "required": [ + "isAbuseDetected", + "otherFlaggedSessions" + ] + }, + "AccessoryItem": { + "type": "object", + "description": "Accessory item and corresponding confidence level.", + "properties": { + "type": { + "$ref": "#/definitions/AccessoryType", + "description": "Type of the accessory." + }, + "confidence": { + "type": "number", + "format": "float", + "description": "Confidence level of the accessory type. Range between [0,1].", + "minimum": 0, + "maximum": 1 + } + }, + "required": [ + "type", + "confidence" + ] + }, + "AccessoryType": { + "type": "string", + "description": "Type of the accessory.", + "enum": [ + "headwear", + "glasses", + "mask" + ], + "x-ms-enum": { + "name": "AccessoryType", + "modelAsString": true, + "values": [ + { + "name": "headwear", + "value": "headwear", + "description": "Head wear." + }, + { + "name": "glasses", + "value": "glasses", + "description": "Glasses." + }, + { + "name": "mask", + "value": "mask", + "description": "Mask." + } + ] + } + }, + "AddFaceFromUrlRequest": { + "type": "object", + "description": "Add face from url request.", + "properties": { + "url": { + "type": "string", + "format": "uri", + "description": "URL of input image." + } + }, + "required": [ + "url" + ] + }, + "AddFaceResult": { + "type": "object", + "description": "Response body for adding face.", + "properties": { + "persistedFaceId": { + "$ref": "#/definitions/Azure.Core.uuid", + "description": "Persisted Face ID of the added face, which is persisted and will not expire. Different from faceId which is created in \"Detect\" and will expire in 24 hours after the detection call." 
+ } + }, + "required": [ + "persistedFaceId" + ] + }, + "Azure.Core.Foundations.OperationState": { + "type": "string", + "description": "Enum describing allowed operation states.", + "enum": [ + "NotStarted", + "Running", + "Succeeded", + "Failed", + "Canceled" + ], + "x-ms-enum": { + "name": "OperationState", + "modelAsString": true, + "values": [ + { + "name": "NotStarted", + "value": "NotStarted", + "description": "The operation has not started." + }, + { + "name": "Running", + "value": "Running", + "description": "The operation is in progress." + }, + { + "name": "Succeeded", + "value": "Succeeded", + "description": "The operation has completed successfully." + }, + { + "name": "Failed", + "value": "Failed", + "description": "The operation has failed." + }, + { + "name": "Canceled", + "value": "Canceled", + "description": "The operation has been canceled by the user." + } + ] + } + }, + "Azure.Core.uuid": { + "type": "string", + "format": "uuid", + "description": "Universally Unique Identifier" + }, + "BlurLevel": { + "type": "string", + "description": "Indicates level of blurriness.", + "enum": [ + "low", + "medium", + "high" + ], + "x-ms-enum": { + "name": "BlurLevel", + "modelAsString": true, + "values": [ + { + "name": "low", + "value": "low", + "description": "Low blur level." + }, + { + "name": "medium", + "value": "medium", + "description": "Medium blur level." + }, + { + "name": "high", + "value": "high", + "description": "High blur level." + } + ] + } + }, + "BlurProperties": { + "type": "object", + "description": "Properties describing any presence of blur within the image.", + "properties": { + "blurLevel": { + "$ref": "#/definitions/BlurLevel", + "description": "An enum value indicating level of blurriness." 
+ }, + "value": { + "type": "number", + "format": "float", + "description": "A number indicating level of blurriness ranging from 0 to 1.", + "minimum": 0, + "maximum": 1 + } + }, + "required": [ + "blurLevel", + "value" + ] + }, + "ClientAssetsAccessTokenResponse": { + "type": "object", + "description": "Response model for client assets access token.", + "properties": { + "expiry": { + "type": "string", + "format": "date-time", + "description": "The expiry time of the access token." + }, + "accessToken": { + "type": "string", + "description": "The access token for client assets." + }, + "base64AccessToken": { + "type": "string", + "description": "The base64 encoded access token." + } + }, + "required": [ + "expiry", + "accessToken", + "base64AccessToken" + ] + }, + "ClientInformation": { + "type": "object", + "description": "The client information gathered during the liveness attempt.", + "properties": { + "ip": { + "type": "string", + "description": "The client ip address seen during the liveness attempt." + } + }, + "required": [ + "ip" + ] + }, + "CreateCollectionRequest": { + "type": "object", + "description": "Model for creating face collection.", + "properties": { + "name": { + "type": "string", + "description": "User defined name, maximum length is 128.", + "minLength": 1, + "maxLength": 128 + }, + "userData": { + "type": "string", + "description": "Optional user defined data. Length should not exceed 16K.", + "maxLength": 16384 + }, + "recognitionModel": { + "type": "string", + "description": "The 'recognitionModel' associated with this face list. Supported 'recognitionModel' values include 'recognition_01', 'recognition_02', 'recognition_03', and 'recognition_04'. The default value is 'recognition_01'. 
'recognition_04' is recommended since its accuracy is improved on faces wearing masks compared with 'recognition_03', and its overall accuracy is improved compared with 'recognition_01' and 'recognition_02'.", + "default": "recognition_01", + "enum": [ + "recognition_01", + "recognition_02", + "recognition_03", + "recognition_04" + ], + "x-ms-enum": { + "name": "RecognitionModel", + "modelAsString": true, + "values": [ + { + "name": "recognition_01", + "value": "recognition_01", + "description": "The default recognition model for \"Detect\". All those faceIds created before 2019 March are bonded with this recognition model." + }, + { + "name": "recognition_02", + "value": "recognition_02", + "description": "Recognition model released in 2019 March." + }, + { + "name": "recognition_03", + "value": "recognition_03", + "description": "Recognition model released in 2020 May." + }, + { + "name": "recognition_04", + "value": "recognition_04", + "description": "Recognition model released in 2021 February. It's recommended to use this recognition model for better recognition accuracy." + } + ] + } + } + }, + "required": [ + "name" + ] + }, + "CreateLivenessSessionContent": { + "type": "object", + "description": "Request model for creating liveness session.", + "properties": { + "livenessOperationMode": { + "$ref": "#/definitions/LivenessOperationMode", + "description": "Type of liveness mode the client should follow." + }, + "deviceCorrelationIdSetInClient": { + "type": "boolean", + "description": "Whether or not to allow client to set their own 'deviceCorrelationId' via the Vision SDK. Default is false, and 'deviceCorrelationId' must be set in this request body." + }, + "enableSessionImage": { + "type": "boolean", + "description": "Whether or not store the session image." + }, + "livenessModelVersion": { + "$ref": "#/definitions/LivenessModel", + "description": "The model version used for liveness classification. 
This is an optional parameter, and if this is not specified, then the latest supported model version will be chosen." + }, + "deviceCorrelationId": { + "type": "string", + "description": "Unique Guid per each end-user device. This is to provide rate limiting and anti-hammering. If 'deviceCorrelationIdSetInClient' is true in this request, this 'deviceCorrelationId' must be null." + }, + "authTokenTimeToLiveInSeconds": { + "type": "integer", + "format": "int32", + "description": "Seconds the session should last for. Range is 60 to 86400 seconds. Default value is 600.", + "default": 600, + "minimum": 60, + "maximum": 86400 + }, + "numberOfClientAttemptsAllowed": { + "type": "integer", + "description": "The number of times a client can attempt a liveness check using the same authToken. Default value is 1. Maximum value is 3." + }, + "userCorrelationId": { + "type": "string", + "description": "Unique Guid per each end-user. This is to provide rate limiting and anti-hammering. If 'userCorrelationIdSetInClient' is true in this request, this 'userCorrelationId' must be null." + }, + "userCorrelationIdSetInClient": { + "type": "boolean", + "description": "Whether or not to allow client to set their own 'userCorrelationId' via the Vision SDK. Default is false, and 'userCorrelationId' must be set in this request body." + }, + "expectedClientIpAddress": { + "type": "string", + "description": "Specify the expected IP address or CIDR block of the client that runs the liveness check." + } + }, + "required": [ + "livenessOperationMode" + ] + }, + "CreatePersonResult": { + "type": "object", + "description": "Response of create person.", + "properties": { + "personId": { + "$ref": "#/definitions/Azure.Core.uuid", + "description": "Person ID of the person." 
+ } + }, + "required": [ + "personId" + ] + }, + "ExposureLevel": { + "type": "string", + "description": "Indicates level of exposure.", + "enum": [ + "underExposure", + "goodExposure", + "overExposure" + ], + "x-ms-enum": { + "name": "ExposureLevel", + "modelAsString": true, + "values": [ + { + "name": "underExposure", + "value": "underExposure", + "description": "Low exposure level." + }, + { + "name": "goodExposure", + "value": "goodExposure", + "description": "Good exposure level." + }, + { + "name": "overExposure", + "value": "overExposure", + "description": "High exposure level." + } + ] + } + }, + "ExposureProperties": { + "type": "object", + "description": "Properties describing exposure level of the image.", + "properties": { + "exposureLevel": { + "$ref": "#/definitions/ExposureLevel", + "description": "An enum value indicating level of exposure." + }, + "value": { + "type": "number", + "format": "float", + "description": "A number indicating level of exposure level ranging from 0 to 1. [0, 0.25) is under exposure. [0.25, 0.75) is good exposure. [0.75, 1] is over exposure.", + "minimum": 0, + "maximum": 1 + } + }, + "required": [ + "exposureLevel", + "value" + ] + }, + "FaceAttributes": { + "type": "object", + "description": "Face attributes for the detected face.", + "properties": { + "age": { + "type": "number", + "format": "float", + "description": "Age in years." + }, + "smile": { + "type": "number", + "format": "float", + "description": "Smile intensity, a number between [0,1].", + "minimum": 0, + "maximum": 1 + }, + "facialHair": { + "$ref": "#/definitions/FacialHair", + "description": "Properties describing facial hair attributes." + }, + "glasses": { + "$ref": "#/definitions/GlassesType", + "description": "Glasses type if any of the face." + }, + "headPose": { + "$ref": "#/definitions/HeadPose", + "description": "3-D roll/yaw/pitch angles for face direction." 
+ }, + "hair": { + "$ref": "#/definitions/HairProperties", + "description": "Properties describing hair attributes." + }, + "occlusion": { + "$ref": "#/definitions/OcclusionProperties", + "description": "Properties describing occlusions on a given face." + }, + "accessories": { + "type": "array", + "description": "Properties describing any accessories on a given face.", + "items": { + "$ref": "#/definitions/AccessoryItem" + } + }, + "blur": { + "$ref": "#/definitions/BlurProperties", + "description": "Properties describing any presence of blur within the image." + }, + "exposure": { + "$ref": "#/definitions/ExposureProperties", + "description": "Properties describing exposure level of the image." + }, + "noise": { + "$ref": "#/definitions/NoiseProperties", + "description": "Properties describing noise level of the image." + }, + "mask": { + "$ref": "#/definitions/MaskProperties", + "description": "Properties describing the presence of a mask on a given face." + }, + "qualityForRecognition": { + "$ref": "#/definitions/QualityForRecognition", + "description": "Properties describing the overall image quality regarding whether the image being used in the detection is of sufficient quality to attempt face recognition on." + } + } + }, + "FaceDetectionResult": { + "type": "object", + "description": "Response for detect API.", + "properties": { + "faceId": { + "$ref": "#/definitions/Azure.Core.uuid", + "description": "Unique faceId of the detected face, created by detection API and it will expire 24 hours after the detection call. To return this, it requires 'returnFaceId' parameter to be true." + }, + "recognitionModel": { + "$ref": "#/definitions/RecognitionModel", + "description": "The 'recognitionModel' associated with this faceId. This is only returned when 'returnRecognitionModel' is explicitly set as true." + }, + "faceRectangle": { + "$ref": "#/definitions/FaceRectangle", + "description": "A rectangle area for the face location on image." 
+ }, + "faceLandmarks": { + "$ref": "#/definitions/FaceLandmarks", + "description": "An array of 27-point face landmarks pointing to the important positions of face components. To return this, it requires 'returnFaceLandmarks' parameter to be true." + }, + "faceAttributes": { + "$ref": "#/definitions/FaceAttributes", + "description": "Face attributes for detected face." + } + }, + "required": [ + "faceRectangle" + ] + }, + "FaceError": { + "type": "object", + "description": "The error object. For comprehensive details on error codes and messages returned by the Face Service, please refer to the following link: https://aka.ms/face-error-codes-and-messages.", + "properties": { + "code": { + "type": "string", + "description": "One of a server-defined set of error codes." + }, + "message": { + "type": "string", + "description": "A human-readable representation of the error." + } + }, + "required": [ + "code", + "message" + ] + }, + "FaceErrorResponse": { + "type": "object", + "description": "A response containing error details.", + "properties": { + "error": { + "$ref": "#/definitions/FaceError", + "description": "The error object." + } + }, + "required": [ + "error" + ] + }, + "FaceLandmarks": { + "type": "object", + "description": "A collection of 27-point face landmarks pointing to the important positions of face components.", + "properties": { + "pupilLeft": { + "$ref": "#/definitions/LandmarkCoordinate", + "description": "The coordinates of the left eye pupil." + }, + "pupilRight": { + "$ref": "#/definitions/LandmarkCoordinate", + "description": "The coordinates of the right eye pupil." + }, + "noseTip": { + "$ref": "#/definitions/LandmarkCoordinate", + "description": "The coordinates of the nose tip." + }, + "mouthLeft": { + "$ref": "#/definitions/LandmarkCoordinate", + "description": "The coordinates of the mouth left." + }, + "mouthRight": { + "$ref": "#/definitions/LandmarkCoordinate", + "description": "The coordinates of the mouth right." 
+ }, + "eyebrowLeftOuter": { + "$ref": "#/definitions/LandmarkCoordinate", + "description": "The coordinates of the left eyebrow outer." + }, + "eyebrowLeftInner": { + "$ref": "#/definitions/LandmarkCoordinate", + "description": "The coordinates of the left eyebrow inner." + }, + "eyeLeftOuter": { + "$ref": "#/definitions/LandmarkCoordinate", + "description": "The coordinates of the left eye outer." + }, + "eyeLeftTop": { + "$ref": "#/definitions/LandmarkCoordinate", + "description": "The coordinates of the left eye top." + }, + "eyeLeftBottom": { + "$ref": "#/definitions/LandmarkCoordinate", + "description": "The coordinates of the left eye bottom." + }, + "eyeLeftInner": { + "$ref": "#/definitions/LandmarkCoordinate", + "description": "The coordinates of the left eye inner." + }, + "eyebrowRightInner": { + "$ref": "#/definitions/LandmarkCoordinate", + "description": "The coordinates of the right eyebrow inner." + }, + "eyebrowRightOuter": { + "$ref": "#/definitions/LandmarkCoordinate", + "description": "The coordinates of the right eyebrow outer." + }, + "eyeRightInner": { + "$ref": "#/definitions/LandmarkCoordinate", + "description": "The coordinates of the right eye inner." + }, + "eyeRightTop": { + "$ref": "#/definitions/LandmarkCoordinate", + "description": "The coordinates of the right eye top." + }, + "eyeRightBottom": { + "$ref": "#/definitions/LandmarkCoordinate", + "description": "The coordinates of the right eye bottom." + }, + "eyeRightOuter": { + "$ref": "#/definitions/LandmarkCoordinate", + "description": "The coordinates of the right eye outer." + }, + "noseRootLeft": { + "$ref": "#/definitions/LandmarkCoordinate", + "description": "The coordinates of the nose root left." + }, + "noseRootRight": { + "$ref": "#/definitions/LandmarkCoordinate", + "description": "The coordinates of the nose root right." + }, + "noseLeftAlarTop": { + "$ref": "#/definitions/LandmarkCoordinate", + "description": "The coordinates of the nose left alar top." 
+ }, + "noseRightAlarTop": { + "$ref": "#/definitions/LandmarkCoordinate", + "description": "The coordinates of the nose right alar top." + }, + "noseLeftAlarOutTip": { + "$ref": "#/definitions/LandmarkCoordinate", + "description": "The coordinates of the nose left alar out tip." + }, + "noseRightAlarOutTip": { + "$ref": "#/definitions/LandmarkCoordinate", + "description": "The coordinates of the nose right alar out tip." + }, + "upperLipTop": { + "$ref": "#/definitions/LandmarkCoordinate", + "description": "The coordinates of the upper lip top." + }, + "upperLipBottom": { + "$ref": "#/definitions/LandmarkCoordinate", + "description": "The coordinates of the upper lip bottom." + }, + "underLipTop": { + "$ref": "#/definitions/LandmarkCoordinate", + "description": "The coordinates of the under lip top." + }, + "underLipBottom": { + "$ref": "#/definitions/LandmarkCoordinate", + "description": "The coordinates of the under lip bottom." + } + }, + "required": [ + "pupilLeft", + "pupilRight", + "noseTip", + "mouthLeft", + "mouthRight", + "eyebrowLeftOuter", + "eyebrowLeftInner", + "eyeLeftOuter", + "eyeLeftTop", + "eyeLeftBottom", + "eyeLeftInner", + "eyebrowRightInner", + "eyebrowRightOuter", + "eyeRightInner", + "eyeRightTop", + "eyeRightBottom", + "eyeRightOuter", + "noseRootLeft", + "noseRootRight", + "noseLeftAlarTop", + "noseRightAlarTop", + "noseLeftAlarOutTip", + "noseRightAlarOutTip", + "upperLipTop", + "upperLipBottom", + "underLipTop", + "underLipBottom" + ] + }, + "FaceList": { + "type": "object", + "description": "Face list is a list of faces, up to 1,000 faces.", + "properties": { + "name": { + "type": "string", + "description": "User defined name, maximum length is 128.", + "minLength": 1, + "maxLength": 128 + }, + "userData": { + "type": "string", + "description": "Optional user defined data. 
Length should not exceed 16K.", + "maxLength": 16384 + }, + "recognitionModel": { + "$ref": "#/definitions/RecognitionModel", + "description": "Name of recognition model. Recognition model is used when the face features are extracted and associated with detected faceIds." + }, + "faceListId": { + "$ref": "#/definitions/collectionId", + "description": "Valid character is letter in lower case or digit or '-' or '_', maximum length is 64.", + "readOnly": true + }, + "persistedFaces": { + "type": "array", + "description": "Face ids of registered faces in the face list.", + "items": { + "$ref": "#/definitions/FaceListFace" + } + } + }, + "required": [ + "name", + "faceListId" + ] + }, + "FaceListFace": { + "type": "object", + "description": "Face resource for face list.", + "properties": { + "persistedFaceId": { + "$ref": "#/definitions/Azure.Core.uuid", + "description": "Face ID of the face.", + "readOnly": true + }, + "userData": { + "type": "string", + "description": "User-provided data attached to the face. The length limit is 1K.", + "maxLength": 1024 + } + }, + "required": [ + "persistedFaceId" + ] + }, + "FaceListItem": { + "type": "object", + "description": "Face list item for list face list.", + "properties": { + "name": { + "type": "string", + "description": "User defined name, maximum length is 128.", + "minLength": 1, + "maxLength": 128 + }, + "userData": { + "type": "string", + "description": "Optional user defined data. Length should not exceed 16K.", + "maxLength": 16384 + }, + "recognitionModel": { + "$ref": "#/definitions/RecognitionModel", + "description": "Name of recognition model. Recognition model is used when the face features are extracted and associated with detected faceIds." + }, + "faceListId": { + "$ref": "#/definitions/collectionId", + "description": "Valid character is letter in lower case or digit or '-' or '_', maximum length is 64." 
+ } + }, + "required": [ + "name", + "faceListId" + ] + }, + "FaceRectangle": { + "type": "object", + "description": "A rectangle within which a face can be found.", + "properties": { + "top": { + "type": "integer", + "format": "int32", + "description": "The distance from the top edge of the image to the top edge of the rectangle, in pixels." + }, + "left": { + "type": "integer", + "format": "int32", + "description": "The distance from the left edge of the image to the left edge of the rectangle, in pixels." + }, + "width": { + "type": "integer", + "format": "int32", + "description": "The width of the rectangle, in pixels." + }, + "height": { + "type": "integer", + "format": "int32", + "description": "The height of the rectangle, in pixels." + } + }, + "required": [ + "top", + "left", + "width", + "height" + ] + }, + "FaceUserData": { + "type": "object", + "description": "User defined data for persisted face.", + "properties": { + "userData": { + "type": "string", + "description": "User-provided data attached to the face. 
The length limit is 1K.", + "maxLength": 1024 + } + } + }, + "FacialHair": { + "type": "object", + "description": "Properties describing facial hair attributes.", + "properties": { + "moustache": { + "type": "number", + "format": "float", + "description": "A number ranging from 0 to 1 indicating a level of confidence associated with a property.", + "minimum": 0, + "maximum": 1 + }, + "beard": { + "type": "number", + "format": "float", + "description": "A number ranging from 0 to 1 indicating a level of confidence associated with a property.", + "minimum": 0, + "maximum": 1 + }, + "sideburns": { + "type": "number", + "format": "float", + "description": "A number ranging from 0 to 1 indicating a level of confidence associated with a property.", + "minimum": 0, + "maximum": 1 + } + }, + "required": [ + "moustache", + "beard", + "sideburns" + ] + }, + "FindSimilarResult": { + "type": "object", + "description": "Response body for find similar face operation.", + "properties": { + "confidence": { + "type": "number", + "format": "float", + "description": "Confidence value of the candidate. The higher confidence, the more similar. Range between [0,1].", + "minimum": 0, + "maximum": 1 + }, + "faceId": { + "$ref": "#/definitions/Azure.Core.uuid", + "description": "faceId of candidate face when find by faceIds. faceId is created by \"Detect\" and will expire 24 hours after the detection call." + }, + "persistedFaceId": { + "$ref": "#/definitions/Azure.Core.uuid", + "description": "persistedFaceId of candidate face when find by faceListId or largeFaceListId. persistedFaceId in face list/large face list is persisted and will not expire." 
+ } + }, + "required": [ + "confidence" + ] + }, + "GlassesType": { + "type": "string", + "description": "Glasses type of the face.", + "enum": [ + "noGlasses", + "readingGlasses", + "sunglasses", + "swimmingGoggles" + ], + "x-ms-enum": { + "name": "GlassesType", + "modelAsString": true, + "values": [ + { + "name": "noGlasses", + "value": "noGlasses", + "description": "No glasses on the face." + }, + { + "name": "readingGlasses", + "value": "readingGlasses", + "description": "Normal glasses on the face." + }, + { + "name": "sunglasses", + "value": "sunglasses", + "description": "Sunglasses on the face." + }, + { + "name": "swimmingGoggles", + "value": "swimmingGoggles", + "description": "Swimming goggles on the face." + } + ] + } + }, + "GroupingResult": { + "type": "object", + "description": "Response body for group face operation.", + "properties": { + "groups": { + "type": "array", + "description": "A partition of the original faces based on face similarity. Groups are ranked by number of faces.", + "items": { + "type": "array", + "items": { + "$ref": "#/definitions/Azure.Core.uuid" + } + } + }, + "messyGroup": { + "type": "array", + "description": "Face ids array of faces that cannot find any similar faces from original faces.", + "items": { + "$ref": "#/definitions/Azure.Core.uuid" + } + } + }, + "required": [ + "groups", + "messyGroup" + ] + }, + "HairColor": { + "type": "object", + "description": "An array of candidate colors and confidence level in the presence of each.", + "properties": { + "color": { + "$ref": "#/definitions/HairColorType", + "description": "Name of the hair color." + }, + "confidence": { + "type": "number", + "format": "float", + "description": "Confidence level of the color. 
Range between [0,1].", + "minimum": 0, + "maximum": 1 + } + }, + "required": [ + "color", + "confidence" + ] + }, + "HairColorType": { + "type": "string", + "description": "Name of the hair color.", + "enum": [ + "unknown", + "white", + "gray", + "blond", + "brown", + "red", + "black", + "other" + ], + "x-ms-enum": { + "name": "HairColorType", + "modelAsString": true, + "values": [ + { + "name": "unknownHairColor", + "value": "unknown", + "description": "Unknown." + }, + { + "name": "white", + "value": "white", + "description": "White." + }, + { + "name": "gray", + "value": "gray", + "description": "Gray." + }, + { + "name": "blond", + "value": "blond", + "description": "Blond." + }, + { + "name": "brown", + "value": "brown", + "description": "Brown." + }, + { + "name": "red", + "value": "red", + "description": "Red." + }, + { + "name": "black", + "value": "black", + "description": "Black." + }, + { + "name": "other", + "value": "other", + "description": "Other." + } + ] + } + }, + "HairProperties": { + "type": "object", + "description": "Properties describing hair attributes.", + "properties": { + "bald": { + "type": "number", + "format": "float", + "description": "A number describing confidence level of whether the person is bald.", + "minimum": 0, + "maximum": 1 + }, + "invisible": { + "type": "boolean", + "description": "A boolean value describing whether the hair is visible in the image." + }, + "hairColor": { + "type": "array", + "description": "An array of candidate colors and confidence level in the presence of each.", + "items": { + "$ref": "#/definitions/HairColor" + } + } + }, + "required": [ + "bald", + "invisible", + "hairColor" + ] + }, + "HeadPose": { + "type": "object", + "description": "3-D roll/yaw/pitch angles for face direction.", + "properties": { + "pitch": { + "type": "number", + "format": "float", + "description": "Value of angles." + }, + "roll": { + "type": "number", + "format": "float", + "description": "Value of angles." 
+ }, + "yaw": { + "type": "number", + "format": "float", + "description": "Value of angles." + } + }, + "required": [ + "pitch", + "roll", + "yaw" + ] + }, + "IdentificationCandidate": { + "type": "object", + "description": "Candidate for identify call.", + "properties": { + "personId": { + "$ref": "#/definitions/Azure.Core.uuid", + "description": "personId of candidate person." + }, + "confidence": { + "type": "number", + "format": "float", + "description": "Confidence value of the candidate. The higher confidence, the more similar. Range between [0,1].", + "minimum": 0, + "maximum": 1 + } + }, + "required": [ + "personId", + "confidence" + ] + }, + "IdentificationResult": { + "type": "object", + "description": "Identify result.", + "properties": { + "faceId": { + "$ref": "#/definitions/Azure.Core.uuid", + "description": "faceId of the query face." + }, + "candidates": { + "type": "array", + "description": "Identified person candidates for that face (ranked by confidence). Array size should be no larger than input maxNumOfCandidatesReturned. If no person is identified, will return an empty array.", + "items": { + "$ref": "#/definitions/IdentificationCandidate" + } + } + }, + "required": [ + "faceId", + "candidates" + ] + }, + "ImageType": { + "type": "string", + "description": "The type of image.", + "enum": [ + "Color", + "Infrared", + "Depth" + ], + "x-ms-enum": { + "name": "ImageType", + "modelAsString": true, + "values": [ + { + "name": "Color", + "value": "Color", + "description": "Color image." + }, + { + "name": "Infrared", + "value": "Infrared", + "description": "Infrared image." + }, + { + "name": "Depth", + "value": "Depth", + "description": "Depth image." + } + ] + } + }, + "LandmarkCoordinate": { + "type": "object", + "description": "Landmark coordinates within an image.", + "properties": { + "x": { + "type": "number", + "format": "float", + "description": "The horizontal component, in pixels." 
+ }, + "y": { + "type": "number", + "format": "float", + "description": "The vertical component, in pixels." + } + }, + "required": [ + "x", + "y" + ] + }, + "LargeFaceList": { + "type": "object", + "description": "Large face list is a list of faces, up to 1,000,000 faces.", + "properties": { + "name": { + "type": "string", + "description": "User defined name, maximum length is 128.", + "minLength": 1, + "maxLength": 128 + }, + "userData": { + "type": "string", + "description": "Optional user defined data. Length should not exceed 16K.", + "maxLength": 16384 + }, + "recognitionModel": { + "$ref": "#/definitions/RecognitionModel", + "description": "Name of recognition model. Recognition model is used when the face features are extracted and associated with detected faceIds." + }, + "largeFaceListId": { + "$ref": "#/definitions/collectionId", + "description": "Valid character is letter in lower case or digit or '-' or '_', maximum length is 64.", + "readOnly": true + } + }, + "required": [ + "name", + "largeFaceListId" + ] + }, + "LargeFaceListFace": { + "type": "object", + "description": "Face resource for large face list.", + "properties": { + "persistedFaceId": { + "$ref": "#/definitions/Azure.Core.uuid", + "description": "Face ID of the face.", + "readOnly": true + }, + "userData": { + "type": "string", + "description": "User-provided data attached to the face. The length limit is 1K.", + "maxLength": 1024 + } + }, + "required": [ + "persistedFaceId" + ] + }, + "LargePersonGroup": { + "type": "object", + "description": "The container of the uploaded person data, including face recognition feature, and up to 1,000,000 people.", + "properties": { + "name": { + "type": "string", + "description": "User defined name, maximum length is 128.", + "minLength": 1, + "maxLength": 128 + }, + "userData": { + "type": "string", + "description": "Optional user defined data. 
Length should not exceed 16K.", + "maxLength": 16384 + }, + "recognitionModel": { + "$ref": "#/definitions/RecognitionModel", + "description": "Name of recognition model. Recognition model is used when the face features are extracted and associated with detected faceIds." + }, + "largePersonGroupId": { + "$ref": "#/definitions/collectionId", + "description": "ID of the container.", + "readOnly": true + } + }, + "required": [ + "name", + "largePersonGroupId" + ] + }, + "LargePersonGroupPerson": { + "type": "object", + "description": "The person in a specified large person group. To add face to this person, please call \"Add Large Person Group Person Face\".", + "properties": { + "personId": { + "$ref": "#/definitions/Azure.Core.uuid", + "description": "ID of the person.", + "readOnly": true + }, + "name": { + "type": "string", + "description": "User defined name, maximum length is 128.", + "minLength": 1, + "maxLength": 128 + }, + "userData": { + "type": "string", + "description": "Optional user defined data. Length should not exceed 16K.", + "maxLength": 16384 + }, + "persistedFaceIds": { + "type": "array", + "description": "Face ids of registered faces in the person.", + "items": { + "$ref": "#/definitions/Azure.Core.uuid" + } + } + }, + "required": [ + "personId", + "name" + ] + }, + "LargePersonGroupPersonFace": { + "type": "object", + "description": "Face resource for large person group person.", + "properties": { + "persistedFaceId": { + "$ref": "#/definitions/Azure.Core.uuid", + "description": "Face ID of the face.", + "readOnly": true + }, + "userData": { + "type": "string", + "description": "User-provided data attached to the face. 
The length limit is 1K.", + "maxLength": 1024 + } + }, + "required": [ + "persistedFaceId" + ] + }, + "LivenessAbuseMonitoringSetting": { + "type": "object", + "description": "Settings for liveness abuse monitoring.", + "properties": { + "enabled": { + "type": "boolean", + "description": "Whether liveness abuse monitoring is enabled." + } + }, + "required": [ + "enabled" + ] + }, + "LivenessAbuseMonitoringSettingUpdate": { + "type": "object", + "description": "Settings for liveness abuse monitoring.", + "properties": { + "enabled": { + "type": "boolean", + "description": "Whether liveness abuse monitoring is enabled." + } + } + }, + "LivenessColorDecisionTarget": { + "type": "object", + "description": "The target from color image used for liveness classification.", + "properties": { + "faceRectangle": { + "$ref": "#/definitions/FaceRectangle", + "description": "The face region where the liveness classification was made on." + } + }, + "required": [ + "faceRectangle" + ] + }, + "LivenessDecision": { + "type": "string", + "description": "The outcome of the liveness classification.", + "enum": [ + "uncertain", + "realface", + "spoofface" + ], + "x-ms-enum": { + "name": "LivenessDecision", + "modelAsString": true, + "values": [ + { + "name": "uncertain", + "value": "uncertain", + "description": "The algorithm could not classify the target face as either real or spoof." + }, + { + "name": "realFace", + "value": "realface", + "description": "The algorithm has classified the target face as real." + }, + { + "name": "spoofFace", + "value": "spoofface", + "description": "The algorithm has classified the target face as a spoof." + } + ] + } + }, + "LivenessDecisionTargets": { + "type": "object", + "description": "The targets used for liveness classification.", + "properties": { + "color": { + "$ref": "#/definitions/LivenessColorDecisionTarget", + "description": "The target from color image used for liveness classification." 
+ } + }, + "required": [ + "color" + ] + }, + "LivenessError": { + "type": "object", + "description": "The error of the liveness classification.", + "properties": { + "code": { + "type": "string", + "description": "The error code." + }, + "message": { + "type": "string", + "description": "The error message." + }, + "targets": { + "$ref": "#/definitions/LivenessDecisionTargets", + "description": "Targets used for liveness classification." + } + }, + "required": [ + "code", + "message", + "targets" + ] + }, + "LivenessModel": { + "type": "string", + "description": "The model version used for liveness classification.", + "enum": [ + "2024-11-15" + ], + "x-ms-enum": { + "name": "LivenessModel", + "modelAsString": true, + "values": [ + { + "name": "v2024_11_15", + "value": "2024-11-15" + } + ] + } + }, + "LivenessOperationMode": { + "type": "string", + "description": "The liveness operation mode to drive the client's end-user experience.", + "enum": [ + "Passive", + "PassiveActive" + ], + "x-ms-enum": { + "name": "LivenessOperationMode", + "modelAsString": true, + "values": [ + { + "name": "Passive", + "value": "Passive", + "description": "Utilizes a passive liveness technique that requires no additional actions from the user. Requires normal indoor lighting and high screen brightness for optimal performance. And thus, this mode has a narrow operational envelope and will not be suitable for scenarios that requires the end-user's to be in bright lighting conditions. Note: this is the only supported mode for the Mobile (iOS and Android) solution." + }, + { + "name": "PassiveActive", + "value": "PassiveActive", + "description": "This mode utilizes a hybrid passive or active liveness technique that necessitates user cooperation. It is optimized to require active motion only under suboptimal lighting conditions. Unlike the passive mode, this mode has no lighting restrictions, and thus offering a broader operational envelope. 
This mode is preferable on Web based solutions due to the lack of automatic screen brightness control available on browsers which hinders the Passive mode's operational envelope on Web based solutions." + } + ] + } + }, + "LivenessResult": { + "type": "object", + "description": "The results of the liveness classification.", + "properties": { + "livenessDecision": { + "$ref": "#/definitions/LivenessDecision", + "description": "The liveness classification for the target face." + }, + "targets": { + "$ref": "#/definitions/LivenessDecisionTargets", + "description": "Targets used for liveness classification." + }, + "digest": { + "type": "string", + "description": "The server calculated digest for this request. If the client reported digest differs from the server calculated digest, then the message integrity between the client and service has been compromised and the result should not be trusted. For more information, see how to guides on how to leverage this value to secure your end-to-end solution." + }, + "sessionImageId": { + "type": "string", + "description": "The image ID of the session request." + } + }, + "required": [ + "targets", + "digest" + ] + }, + "LivenessSession": { + "type": "object", + "description": "Session result of detect liveness.", + "properties": { + "sessionId": { + "type": "string", + "description": "The unique ID to reference this session.", + "readOnly": true + }, + "authToken": { + "type": "string", + "description": "Bearer token to provide authentication for the Vision SDK running on a client application. This Bearer token has limited permissions to perform only the required action and expires after the TTL time. It is also auditable." + }, + "status": { + "$ref": "#/definitions/Azure.Core.Foundations.OperationState", + "description": "The current status of the session." + }, + "modelVersion": { + "$ref": "#/definitions/LivenessModel", + "description": "The model version used for liveness classification. 
This is an optional parameter, and if this is not specified, then the latest supported model version will be chosen." + }, + "isAbuseMonitoringEnabled": { + "type": "boolean", + "description": "Denotes if the abuse monitoring feature was enabled during this session." + }, + "expectedClientIpAddress": { + "type": "string", + "description": "The expected IP address or CIDR block of the client that runs the liveness check." + }, + "results": { + "$ref": "#/definitions/LivenessSessionResults", + "description": "The results of the liveness session." + } + }, + "required": [ + "sessionId", + "authToken", + "status", + "results" + ] + }, + "LivenessSessionAttempt": { + "type": "object", + "description": "The liveness session attempt.", + "properties": { + "attemptId": { + "type": "integer", + "format": "int32", + "description": "The attempt ID, start from 1." + }, + "attemptStatus": { + "$ref": "#/definitions/Azure.Core.Foundations.OperationState", + "description": "The status of the attempt." + }, + "result": { + "$ref": "#/definitions/LivenessResult", + "description": "The result of the liveness call, will be null if there is error." + }, + "error": { + "$ref": "#/definitions/LivenessError", + "description": "The error of the liveness call, will be null if there is result." + }, + "clientInformation": { + "type": "array", + "description": "The client information gathered during the liveness attempt.", + "items": { + "$ref": "#/definitions/ClientInformation" + } + }, + "abuseMonitoringResult": { + "$ref": "#/definitions/AbuseMonitoringResult", + "description": "The abuse monitoring result for the liveness attempt." 
+ } + }, + "required": [ + "attemptId", + "attemptStatus" + ] + }, + "LivenessSessionResults": { + "type": "object", + "description": "The results of the liveness session.", + "properties": { + "attempts": { + "type": "array", + "description": "The attempts data of underlying liveness call with the session.", + "items": { + "$ref": "#/definitions/LivenessSessionAttempt" + } + } + }, + "required": [ + "attempts" + ] + }, + "LivenessWithVerifyOutputs": { + "type": "object", + "description": "The face verification output.", + "properties": { + "matchConfidence": { + "type": "number", + "format": "float", + "description": "The target face liveness face and comparison image face verification confidence.", + "minimum": 0, + "maximum": 1 + }, + "isIdentical": { + "type": "boolean", + "description": "Whether the target liveness face and comparison image face match." + } + }, + "required": [ + "matchConfidence", + "isIdentical" + ] + }, + "LivenessWithVerifyReference": { + "type": "object", + "description": "The detail of face for verification.", + "properties": { + "referenceType": { + "$ref": "#/definitions/ImageType", + "description": "The image type which contains the face rectangle where the liveness classification was made on." + }, + "faceRectangle": { + "$ref": "#/definitions/FaceRectangle", + "description": "The face region where the comparison image's classification was made." + }, + "qualityForRecognition": { + "$ref": "#/definitions/QualityForRecognition", + "description": "Quality of face image for recognition." + } + }, + "required": [ + "referenceType", + "faceRectangle", + "qualityForRecognition" + ] + }, + "LivenessWithVerifyResult": { + "type": "object", + "description": "The results of the liveness with verify call.", + "properties": { + "livenessDecision": { + "$ref": "#/definitions/LivenessDecision", + "description": "The liveness classification for the target face." 
+ }, + "targets": { + "$ref": "#/definitions/LivenessDecisionTargets", + "description": "Targets used for liveness classification." + }, + "digest": { + "type": "string", + "description": "The server calculated digest for this request. If the client reported digest differs from the server calculated digest, then the message integrity between the client and service has been compromised and the result should not be trusted. For more information, see how to guides on how to leverage this value to secure your end-to-end solution." + }, + "sessionImageId": { + "type": "string", + "description": "The image ID of the session request." + }, + "verifyResult": { + "$ref": "#/definitions/LivenessWithVerifyOutputs", + "description": "The face verification output. Only available when the request is liveness with verify." + }, + "verifyImageHash": { + "type": "string", + "description": "The sha256 hash of the verify-image in the request." + } + }, + "required": [ + "targets", + "digest" + ] + }, + "LivenessWithVerifySession": { + "type": "object", + "description": "Session result of detect liveness with verify.", + "properties": { + "sessionId": { + "type": "string", + "description": "The unique ID to reference this session.", + "readOnly": true + }, + "authToken": { + "type": "string", + "description": "Bearer token to provide authentication for the Vision SDK running on a client application. This Bearer token has limited permissions to perform only the required action and expires after the TTL time. It is also auditable." + }, + "status": { + "$ref": "#/definitions/Azure.Core.Foundations.OperationState", + "description": "The current status of the session." + }, + "modelVersion": { + "$ref": "#/definitions/LivenessModel", + "description": "The model version used for liveness classification. 
This is an optional parameter, and if this is not specified, then the latest supported model version will be chosen." + }, + "isAbuseMonitoringEnabled": { + "type": "boolean", + "description": "Denotes if the abuse monitoring feature was enabled during this session." + }, + "expectedClientIpAddress": { + "type": "string", + "description": "The expected IP address or CIDR block of the client that runs the liveness check." + }, + "results": { + "$ref": "#/definitions/LivenessWithVerifySessionResults", + "description": "The results of the liveness with verify session." + } + }, + "required": [ + "sessionId", + "authToken", + "status", + "results" + ] + }, + "LivenessWithVerifySessionAttempt": { + "type": "object", + "description": "The liveness with verify session attempt.", + "properties": { + "attemptId": { + "type": "integer", + "format": "int32", + "description": "The attempt ID, start from 1." + }, + "attemptStatus": { + "$ref": "#/definitions/Azure.Core.Foundations.OperationState", + "description": "The status of the attempt." + }, + "result": { + "$ref": "#/definitions/LivenessWithVerifyResult", + "description": "The result of the liveness with verify call, will be null if there is error." + }, + "error": { + "$ref": "#/definitions/LivenessError", + "description": "The error of the liveness with verify call, will be null if there is result." + }, + "clientInformation": { + "type": "array", + "description": "The client information gathered during the liveness attempt.", + "items": { + "$ref": "#/definitions/ClientInformation" + } + }, + "abuseMonitoringResult": { + "$ref": "#/definitions/AbuseMonitoringResult", + "description": "The abuse monitoring result for the liveness attempt." 
+ } + }, + "required": [ + "attemptId", + "attemptStatus" + ] + }, + "LivenessWithVerifySessionResults": { + "type": "object", + "description": "The results of the liveness with verify session.", + "properties": { + "verifyReferences": { + "type": "array", + "description": "The references used for face verification.", + "items": { + "$ref": "#/definitions/LivenessWithVerifyReference" + } + }, + "attempts": { + "type": "array", + "description": "The attempts data of underlying liveness with verify call with the session.", + "items": { + "$ref": "#/definitions/LivenessWithVerifySessionAttempt" + } + } + }, + "required": [ + "verifyReferences", + "attempts" + ] + }, + "MaskProperties": { + "type": "object", + "description": "Properties describing the presence of a mask on a given face.", + "properties": { + "noseAndMouthCovered": { + "type": "boolean", + "description": "A boolean value indicating whether nose and mouth are covered." + }, + "type": { + "$ref": "#/definitions/MaskType", + "description": "Type of the mask." + } + }, + "required": [ + "noseAndMouthCovered", + "type" + ] + }, + "MaskType": { + "type": "string", + "description": "Type of the mask.", + "enum": [ + "faceMask", + "noMask", + "otherMaskOrOcclusion", + "uncertain" + ], + "x-ms-enum": { + "name": "MaskType", + "modelAsString": true, + "values": [ + { + "name": "faceMask", + "value": "faceMask", + "description": "Face mask." + }, + { + "name": "noMask", + "value": "noMask", + "description": "No mask." + }, + { + "name": "otherMaskOrOcclusion", + "value": "otherMaskOrOcclusion", + "description": "Other types of mask or occlusion." + }, + { + "name": "uncertain", + "value": "uncertain", + "description": "Uncertain." 
+ } + ] + } + }, + "NoiseLevel": { + "type": "string", + "description": "Indicates level of noise.", + "enum": [ + "low", + "medium", + "high" + ], + "x-ms-enum": { + "name": "NoiseLevel", + "modelAsString": true, + "values": [ + { + "name": "low", + "value": "low", + "description": "Low noise level." + }, + { + "name": "medium", + "value": "medium", + "description": "Medium noise level." + }, + { + "name": "high", + "value": "high", + "description": "High noise level." + } + ] + } + }, + "NoiseProperties": { + "type": "object", + "description": "Properties describing noise level of the image.", + "properties": { + "noiseLevel": { + "$ref": "#/definitions/NoiseLevel", + "description": "An enum value indicating level of noise." + }, + "value": { + "type": "number", + "format": "float", + "description": "A number indicating level of noise level ranging from 0 to 1. [0, 0.25) is under exposure. [0.25, 0.75) is good exposure. [0.75, 1] is over exposure. [0, 0.3) is low noise level. [0.3, 0.7) is medium noise level. [0.7, 1] is high noise level.", + "minimum": 0, + "maximum": 1 + } + }, + "required": [ + "noiseLevel", + "value" + ] + }, + "OcclusionProperties": { + "type": "object", + "description": "Properties describing occlusions on a given face.", + "properties": { + "foreheadOccluded": { + "type": "boolean", + "description": "A boolean value indicating whether forehead is occluded." + }, + "eyeOccluded": { + "type": "boolean", + "description": "A boolean value indicating whether eyes are occluded." + }, + "mouthOccluded": { + "type": "boolean", + "description": "A boolean value indicating whether the mouth is occluded." 
+ } + }, + "required": [ + "foreheadOccluded", + "eyeOccluded", + "mouthOccluded" + ] + }, + "OperationStatus": { + "type": "string", + "description": "The status of long running operation.", + "enum": [ + "notStarted", + "running", + "succeeded", + "failed" + ], + "x-ms-enum": { + "name": "OperationStatus", + "modelAsString": true, + "values": [ + { + "name": "notStarted", + "value": "notStarted", + "description": "The operation is not started." + }, + { + "name": "running", + "value": "running", + "description": "The operation is still running." + }, + { + "name": "succeeded", + "value": "succeeded", + "description": "The operation is succeeded." + }, + { + "name": "failed", + "value": "failed", + "description": "The operation is failed." + } + ] + } + }, + "OtherFlaggedSessions": { + "type": "object", + "description": "The other sessions flagged as abuse based on the information gathered during this attempt.", + "properties": { + "attemptId": { + "type": "integer", + "format": "int32", + "description": "The attempt ID, start from 1." + }, + "sessionId": { + "type": "string", + "description": "The unique session ID of the flagged session." + }, + "sessionImageId": { + "type": "string", + "description": "The image ID from the flagged session." + } + }, + "required": [ + "attemptId", + "sessionId" + ] + }, + "PersonGroup": { + "type": "object", + "description": "The container of the uploaded person data, including face recognition feature, and up to 10,000 persons. To handle larger scale face identification problem, please consider using Large Person Group.", + "properties": { + "name": { + "type": "string", + "description": "User defined name, maximum length is 128.", + "minLength": 1, + "maxLength": 128 + }, + "userData": { + "type": "string", + "description": "Optional user defined data. Length should not exceed 16K.", + "maxLength": 16384 + }, + "recognitionModel": { + "$ref": "#/definitions/RecognitionModel", + "description": "Name of recognition model. 
Recognition model is used when the face features are extracted and associated with detected faceIds." + }, + "personGroupId": { + "$ref": "#/definitions/collectionId", + "description": "ID of the container.", + "readOnly": true + } + }, + "required": [ + "name", + "personGroupId" + ] + }, + "PersonGroupPerson": { + "type": "object", + "description": "The person in a specified person group. To add face to this person, please call \"Add Large Person Group Person Face\".", + "properties": { + "personId": { + "$ref": "#/definitions/Azure.Core.uuid", + "description": "ID of the person.", + "readOnly": true + }, + "name": { + "type": "string", + "description": "User defined name, maximum length is 128.", + "minLength": 1, + "maxLength": 128 + }, + "userData": { + "type": "string", + "description": "Optional user defined data. Length should not exceed 16K.", + "maxLength": 16384 + }, + "persistedFaceIds": { + "type": "array", + "description": "Face ids of registered faces in the person.", + "items": { + "$ref": "#/definitions/Azure.Core.uuid" + } + } + }, + "required": [ + "personId", + "name" + ] + }, + "PersonGroupPersonFace": { + "type": "object", + "description": "Face resource for person group person.", + "properties": { + "persistedFaceId": { + "$ref": "#/definitions/Azure.Core.uuid", + "description": "Face ID of the face.", + "readOnly": true + }, + "userData": { + "type": "string", + "description": "User-provided data attached to the face. The length limit is 1K.", + "maxLength": 1024 + } + }, + "required": [ + "persistedFaceId" + ] + }, + "QualityForRecognition": { + "type": "string", + "description": "Indicates quality of image for recognition.", + "enum": [ + "low", + "medium", + "high" + ], + "x-ms-enum": { + "name": "QualityForRecognition", + "modelAsString": true, + "values": [ + { + "name": "low", + "value": "low", + "description": "Low quality." + }, + { + "name": "medium", + "value": "medium", + "description": "Medium quality." 
+ }, + { + "name": "high", + "value": "high", + "description": "High quality." + } + ] + } + }, + "RecognitionModel": { + "type": "string", + "description": "The recognition model for the face.", + "enum": [ + "recognition_01", + "recognition_02", + "recognition_03", + "recognition_04" + ], + "x-ms-enum": { + "name": "RecognitionModel", + "modelAsString": true, + "values": [ + { + "name": "recognition_01", + "value": "recognition_01", + "description": "The default recognition model for \"Detect\". All those faceIds created before 2019 March are bonded with this recognition model." + }, + { + "name": "recognition_02", + "value": "recognition_02", + "description": "Recognition model released in 2019 March." + }, + { + "name": "recognition_03", + "value": "recognition_03", + "description": "Recognition model released in 2020 May." + }, + { + "name": "recognition_04", + "value": "recognition_04", + "description": "Recognition model released in 2021 February. It's recommended to use this recognition model for better recognition accuracy." + } + ] + } + }, + "Settings": { + "type": "object", + "description": "Response model for settings.", + "properties": { + "livenessAbuseMonitoring": { + "$ref": "#/definitions/LivenessAbuseMonitoringSetting", + "description": "Liveness abuse monitoring settings" + } + }, + "required": [ + "livenessAbuseMonitoring" + ] + }, + "SettingsUpdate": { + "type": "object", + "description": "Response model for settings.", + "properties": { + "livenessAbuseMonitoring": { + "$ref": "#/definitions/LivenessAbuseMonitoringSettingUpdate", + "description": "Liveness abuse monitoring settings" + } + } + }, + "TrainingResult": { + "type": "object", + "description": "Training result of a container", + "properties": { + "status": { + "$ref": "#/definitions/OperationStatus", + "description": "Training status of the container." 
+ }, + "createdDateTime": { + "type": "string", + "format": "date-time", + "description": "A combined UTC date and time string that describes the created time of the person group, large person group or large face list." + }, + "lastActionDateTime": { + "type": "string", + "format": "date-time", + "description": "A combined UTC date and time string that describes the last modify time of the person group, large person group or large face list, could be null value when the group is not successfully trained." + }, + "lastSuccessfulTrainingDateTime": { + "type": "string", + "format": "date-time", + "description": "A combined UTC date and time string that describes the last successful training time of the person group, large person group or large face list." + }, + "message": { + "type": "string", + "description": "Show failure message when training failed (omitted when training succeed)." + } + }, + "required": [ + "status", + "createdDateTime", + "lastActionDateTime", + "lastSuccessfulTrainingDateTime" + ] + }, + "UserDefinedFields": { + "type": "object", + "description": "User defined fields for object creation.", + "properties": { + "name": { + "type": "string", + "description": "User defined name, maximum length is 128.", + "minLength": 1, + "maxLength": 128 + }, + "userData": { + "type": "string", + "description": "Optional user defined data. Length should not exceed 16K.", + "maxLength": 16384 + } + }, + "required": [ + "name" + ] + }, + "UserDefinedFieldsForUpdate": { + "type": "object", + "description": "User defined fields for object update.", + "properties": { + "name": { + "type": "string", + "description": "User defined name, maximum length is 128.", + "minLength": 1, + "maxLength": 128 + }, + "userData": { + "type": "string", + "description": "Optional user defined data. 
Length should not exceed 16K.", + "maxLength": 16384 + } + } + }, + "VerificationResult": { + "type": "object", + "description": "Verify result.", + "properties": { + "isIdentical": { + "type": "boolean", + "description": "True if the two faces belong to the same person or the face belongs to the person, otherwise false." + }, + "confidence": { + "type": "number", + "format": "float", + "description": "A number indicates the similarity confidence of whether two faces belong to the same person, or whether the face belongs to the person. By default, isIdentical is set to True if similarity confidence is greater than or equal to 0.5. This is useful for advanced users to override 'isIdentical' and fine-tune the result on their own data.", + "minimum": 0, + "maximum": 1 + } + }, + "required": [ + "isIdentical", + "confidence" + ] + }, + "collectionId": { + "type": "string", + "minLength": 1, + "maxLength": 64, + "pattern": "^[a-z0-9-_]+$" + } + }, + "parameters": { + "AddFaceOptions.detectionModel": { + "name": "detectionModel", + "in": "query", + "description": "The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'.", + "required": false, + "type": "string", + "default": "detection_01", + "enum": [ + "detection_01", + "detection_02", + "detection_03" + ], + "x-ms-enum": { + "name": "DetectionModel", + "modelAsString": true, + "values": [ + { + "name": "detection_01", + "value": "detection_01", + "description": "The default detection model. Recommend for near frontal face detection. For scenarios with exceptionally large angle (head-pose) faces, occluded faces or wrong image orientation, the faces in such cases may not be detected." + }, + { + "name": "detection_02", + "value": "detection_02", + "description": "Detection model released in 2019 May with improved accuracy especially on small, side and blurry faces." 
+ }, + { + "name": "detection_03", + "value": "detection_03", + "description": "Detection model released in 2021 February with improved accuracy especially on small faces." + } + ] + }, + "x-ms-parameter-location": "method" + }, + "AddFaceOptions.targetFace": { + "name": "targetFace", + "in": "query", + "description": "A face rectangle to specify the target face to be added to a person, in the format of 'targetFace=left,top,width,height'.", + "required": false, + "type": "array", + "items": { + "type": "integer", + "format": "int32" + }, + "collectionFormat": "csv", + "minItems": 4, + "maxItems": 4, + "x-ms-parameter-location": "method" + }, + "AddFaceOptions.userData": { + "name": "userData", + "in": "query", + "description": "User-provided data attached to the face. The size limit is 1K.", + "required": false, + "type": "string", + "maxLength": 1024, + "x-ms-parameter-location": "method" + }, + "AddFaceRequest.imageContent": { + "name": "imageContent", + "in": "body", + "description": "The image to be analyzed", + "required": true, + "schema": { + "type": "string", + "format": "binary" + }, + "x-ms-parameter-location": "method" + } + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/Detect.json b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/Detect.json new file mode 100644 index 000000000000..9dcd2e753490 --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/Detect.json @@ -0,0 +1,181 @@ +{ + "title": "Detect with Image", + "operationId": "FaceDetectionOperations_Detect", + "parameters": { + "apiVersion": "v1.3-preview.1", + "returnFaceId": true, + "returnFaceLandmarks": true, + "returnFaceAttributes": "glasses,headPose,occlusion,accessories,blur,exposure,noise,qualityForRecognition", + "recognitionModel": "recognition_03", + "returnRecognitionModel": true, + "detectionModel": "detection_01", + "faceIdTimeToLive": 60, + "imageContent": "" + }, + "responses": { + "200": { + "body": 
[ + { + "faceId": "c5c24a82-6845-4031-9d5d-978df9175426", + "recognitionModel": "recognition_03", + "faceRectangle": { + "width": 78, + "height": 78, + "left": 394, + "top": 54 + }, + "faceLandmarks": { + "pupilLeft": { + "x": 412.7, + "y": 78.4 + }, + "pupilRight": { + "x": 446.8, + "y": 74.2 + }, + "noseTip": { + "x": 437.7, + "y": 92.4 + }, + "mouthLeft": { + "x": 417.8, + "y": 114.4 + }, + "mouthRight": { + "x": 451.3, + "y": 109.3 + }, + "eyebrowLeftOuter": { + "x": 397.9, + "y": 78.5 + }, + "eyebrowLeftInner": { + "x": 425.4, + "y": 70.5 + }, + "eyeLeftOuter": { + "x": 406.7, + "y": 80.6 + }, + "eyeLeftTop": { + "x": 412.2, + "y": 76.2 + }, + "eyeLeftBottom": { + "x": 413.0, + "y": 80.1 + }, + "eyeLeftInner": { + "x": 418.9, + "y": 78.0 + }, + "eyebrowRightInner": { + "x": 4.8, + "y": 69.7 + }, + "eyebrowRightOuter": { + "x": 5.5, + "y": 68.5 + }, + "eyeRightInner": { + "x": 441.5, + "y": 75.0 + }, + "eyeRightTop": { + "x": 446.4, + "y": 71.7 + }, + "eyeRightBottom": { + "x": 447.0, + "y": 75.3 + }, + "eyeRightOuter": { + "x": 451.7, + "y": 73.4 + }, + "noseRootLeft": { + "x": 428.0, + "y": 77.1 + }, + "noseRootRight": { + "x": 435.8, + "y": 75.6 + }, + "noseLeftAlarTop": { + "x": 428.3, + "y": 89.7 + }, + "noseRightAlarTop": { + "x": 442.2, + "y": 87.0 + }, + "noseLeftAlarOutTip": { + "x": 424.3, + "y": 96.4 + }, + "noseRightAlarOutTip": { + "x": 446.6, + "y": 92.5 + }, + "upperLipTop": { + "x": 437.6, + "y": 105.9 + }, + "upperLipBottom": { + "x": 437.6, + "y": 108.2 + }, + "underLipTop": { + "x": 436.8, + "y": 111.4 + }, + "underLipBottom": { + "x": 437.3, + "y": 114.5 + } + }, + "faceAttributes": { + "glasses": "sunglasses", + "headPose": { + "roll": 2.1, + "yaw": 3, + "pitch": 1.6 + }, + "occlusion": { + "foreheadOccluded": false, + "eyeOccluded": false, + "mouthOccluded": false + }, + "accessories": [ + { + "type": "headwear", + "confidence": 0.99 + }, + { + "type": "glasses", + "confidence": 1.0 + }, + { + "type": "mask", + "confidence": 0.87 + } + ], 
+ "blur": { + "blurLevel": "medium", + "value": 0.51 + }, + "exposure": { + "exposureLevel": "goodExposure", + "value": 0.55 + }, + "noise": { + "noiseLevel": "low", + "value": 0.12 + }, + "qualityForRecognition": "high" + } + } + ] + } + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/DetectFromSessionImageId.json b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/DetectFromSessionImageId.json new file mode 100644 index 000000000000..d5413102b43d --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/DetectFromSessionImageId.json @@ -0,0 +1,183 @@ +{ + "title": "Detect From Session Image Id", + "operationId": "FaceDetectionOperations_DetectFromSessionImageId", + "parameters": { + "apiVersion": "v1.3-preview.1", + "returnFaceId": true, + "returnFaceLandmarks": true, + "returnFaceAttributes": "glasses,headPose,occlusion,accessories,blur,exposure,noise,qualityForRecognition", + "recognitionModel": "recognition_03", + "returnRecognitionModel": true, + "detectionModel": "detection_01", + "faceIdTimeToLive": 60, + "body": { + "sessionImageId": "aa93ce80-9a9b-48bd-ae1a-1c7543841e92" + } + }, + "responses": { + "200": { + "body": [ + { + "faceId": "c5c24a82-6845-4031-9d5d-978df9175426", + "recognitionModel": "recognition_03", + "faceRectangle": { + "width": 78, + "height": 78, + "left": 394, + "top": 54 + }, + "faceLandmarks": { + "pupilLeft": { + "x": 412.7, + "y": 78.4 + }, + "pupilRight": { + "x": 446.8, + "y": 74.2 + }, + "noseTip": { + "x": 437.7, + "y": 92.4 + }, + "mouthLeft": { + "x": 417.8, + "y": 114.4 + }, + "mouthRight": { + "x": 451.3, + "y": 109.3 + }, + "eyebrowLeftOuter": { + "x": 397.9, + "y": 78.5 + }, + "eyebrowLeftInner": { + "x": 425.4, + "y": 70.5 + }, + "eyeLeftOuter": { + "x": 406.7, + "y": 80.6 + }, + "eyeLeftTop": { + "x": 412.2, + "y": 76.2 + }, + "eyeLeftBottom": { + "x": 413.0, + "y": 80.1 + }, + "eyeLeftInner": { + "x": 418.9, + "y": 78.0 + }, + 
"eyebrowRightInner": { + "x": 4.8, + "y": 69.7 + }, + "eyebrowRightOuter": { + "x": 5.5, + "y": 68.5 + }, + "eyeRightInner": { + "x": 441.5, + "y": 75.0 + }, + "eyeRightTop": { + "x": 446.4, + "y": 71.7 + }, + "eyeRightBottom": { + "x": 447.0, + "y": 75.3 + }, + "eyeRightOuter": { + "x": 451.7, + "y": 73.4 + }, + "noseRootLeft": { + "x": 428.0, + "y": 77.1 + }, + "noseRootRight": { + "x": 435.8, + "y": 75.6 + }, + "noseLeftAlarTop": { + "x": 428.3, + "y": 89.7 + }, + "noseRightAlarTop": { + "x": 442.2, + "y": 87.0 + }, + "noseLeftAlarOutTip": { + "x": 424.3, + "y": 96.4 + }, + "noseRightAlarOutTip": { + "x": 446.6, + "y": 92.5 + }, + "upperLipTop": { + "x": 437.6, + "y": 105.9 + }, + "upperLipBottom": { + "x": 437.6, + "y": 108.2 + }, + "underLipTop": { + "x": 436.8, + "y": 111.4 + }, + "underLipBottom": { + "x": 437.3, + "y": 114.5 + } + }, + "faceAttributes": { + "glasses": "sunglasses", + "headPose": { + "roll": 2.1, + "yaw": 3, + "pitch": 1.6 + }, + "occlusion": { + "foreheadOccluded": false, + "eyeOccluded": false, + "mouthOccluded": false + }, + "accessories": [ + { + "type": "headwear", + "confidence": 0.99 + }, + { + "type": "glasses", + "confidence": 1.0 + }, + { + "type": "mask", + "confidence": 0.87 + } + ], + "blur": { + "blurLevel": "medium", + "value": 0.51 + }, + "exposure": { + "exposureLevel": "goodExposure", + "value": 0.55 + }, + "noise": { + "noiseLevel": "low", + "value": 0.12 + }, + "qualityForRecognition": "high" + } + } + ] + } + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/DetectFromUrl.json b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/DetectFromUrl.json new file mode 100644 index 000000000000..b45a4a6d5687 --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/DetectFromUrl.json @@ -0,0 +1,183 @@ +{ + "title": "Detect with Image URL", + "operationId": "FaceDetectionOperations_DetectFromUrl", + "parameters": { + "apiVersion": "v1.3-preview.1", + 
"returnFaceId": true, + "returnFaceLandmarks": true, + "returnFaceAttributes": "glasses,headPose,occlusion,accessories,blur,exposure,noise,qualityForRecognition", + "recognitionModel": "recognition_03", + "returnRecognitionModel": true, + "detectionModel": "detection_01", + "faceIdTimeToLive": 60, + "body": { + "url": "https://microsoft.com/example.jpg" + } + }, + "responses": { + "200": { + "body": [ + { + "faceId": "c5c24a82-6845-4031-9d5d-978df9175426", + "recognitionModel": "recognition_03", + "faceRectangle": { + "width": 78, + "height": 78, + "left": 394, + "top": 54 + }, + "faceLandmarks": { + "pupilLeft": { + "x": 412.7, + "y": 78.4 + }, + "pupilRight": { + "x": 446.8, + "y": 74.2 + }, + "noseTip": { + "x": 437.7, + "y": 92.4 + }, + "mouthLeft": { + "x": 417.8, + "y": 114.4 + }, + "mouthRight": { + "x": 451.3, + "y": 109.3 + }, + "eyebrowLeftOuter": { + "x": 397.9, + "y": 78.5 + }, + "eyebrowLeftInner": { + "x": 425.4, + "y": 70.5 + }, + "eyeLeftOuter": { + "x": 406.7, + "y": 80.6 + }, + "eyeLeftTop": { + "x": 412.2, + "y": 76.2 + }, + "eyeLeftBottom": { + "x": 413.0, + "y": 80.1 + }, + "eyeLeftInner": { + "x": 418.9, + "y": 78.0 + }, + "eyebrowRightInner": { + "x": 4.8, + "y": 69.7 + }, + "eyebrowRightOuter": { + "x": 5.5, + "y": 68.5 + }, + "eyeRightInner": { + "x": 441.5, + "y": 75.0 + }, + "eyeRightTop": { + "x": 446.4, + "y": 71.7 + }, + "eyeRightBottom": { + "x": 447.0, + "y": 75.3 + }, + "eyeRightOuter": { + "x": 451.7, + "y": 73.4 + }, + "noseRootLeft": { + "x": 428.0, + "y": 77.1 + }, + "noseRootRight": { + "x": 435.8, + "y": 75.6 + }, + "noseLeftAlarTop": { + "x": 428.3, + "y": 89.7 + }, + "noseRightAlarTop": { + "x": 442.2, + "y": 87.0 + }, + "noseLeftAlarOutTip": { + "x": 424.3, + "y": 96.4 + }, + "noseRightAlarOutTip": { + "x": 446.6, + "y": 92.5 + }, + "upperLipTop": { + "x": 437.6, + "y": 105.9 + }, + "upperLipBottom": { + "x": 437.6, + "y": 108.2 + }, + "underLipTop": { + "x": 436.8, + "y": 111.4 + }, + "underLipBottom": { + "x": 437.3, + 
"y": 114.5 + } + }, + "faceAttributes": { + "glasses": "sunglasses", + "headPose": { + "roll": 2.1, + "yaw": 3, + "pitch": 1.6 + }, + "occlusion": { + "foreheadOccluded": false, + "eyeOccluded": false, + "mouthOccluded": false + }, + "accessories": [ + { + "type": "headwear", + "confidence": 0.99 + }, + { + "type": "glasses", + "confidence": 1.0 + }, + { + "type": "mask", + "confidence": 0.87 + } + ], + "blur": { + "blurLevel": "medium", + "value": 0.51 + }, + "exposure": { + "exposureLevel": "goodExposure", + "value": 0.55 + }, + "noise": { + "noiseLevel": "low", + "value": 0.12 + }, + "qualityForRecognition": "high" + } + } + ] + } + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/FaceListOperations_AddFaceListFaceFromStream.json b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/FaceListOperations_AddFaceListFaceFromStream.json new file mode 100644 index 000000000000..30dd931e0466 --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/FaceListOperations_AddFaceListFaceFromStream.json @@ -0,0 +1,19 @@ +{ + "title": "Add Face to FaceList", + "operationId": "FaceListOperations_AddFaceListFace", + "parameters": { + "apiVersion": "v1.3-preview.1", + "faceListId": "your_face_list_id", + "targetFace": "10,10,100,100", + "detectionModel": "detection_01", + "userData": "your_user_data", + "imageContent": "" + }, + "responses": { + "200": { + "body": { + "persistedFaceId": "43897a75-8d6f-42cf-885e-74832febb055" + } + } + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/FaceListOperations_AddFaceListFaceFromUrl.json b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/FaceListOperations_AddFaceListFaceFromUrl.json new file mode 100644 index 000000000000..a3b526782972 --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/FaceListOperations_AddFaceListFaceFromUrl.json @@ -0,0 +1,21 @@ +{ + "title": "Add Face to 
FaceList from Url", + "operationId": "FaceListOperations_AddFaceListFaceFromUrl", + "parameters": { + "apiVersion": "v1.3-preview.1", + "faceListId": "your_face_list_id", + "targetFace": "10,10,100,100", + "detectionModel": "detection_01", + "userData": "your_user_data", + "body": { + "url": "https://microsoft.com/example.jpg" + } + }, + "responses": { + "200": { + "body": { + "persistedFaceId": "43897a75-8d6f-42cf-885e-74832febb055" + } + } + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/FaceListOperations_AddLargeFaceListFaceFromStream.json b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/FaceListOperations_AddLargeFaceListFaceFromStream.json new file mode 100644 index 000000000000..60c02274964f --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/FaceListOperations_AddLargeFaceListFaceFromStream.json @@ -0,0 +1,19 @@ +{ + "title": "Add Face to LargeFaceList", + "operationId": "FaceListOperations_AddLargeFaceListFace", + "parameters": { + "apiVersion": "v1.3-preview.1", + "largeFaceListId": "your_large_face_list_id", + "targetFace": "10,10,100,100", + "detectionModel": "detection_01", + "userData": "your_user_data", + "imageContent": "" + }, + "responses": { + "200": { + "body": { + "persistedFaceId": "43897a75-8d6f-42cf-885e-74832febb055" + } + } + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/FaceListOperations_AddLargeFaceListFaceFromUrl.json b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/FaceListOperations_AddLargeFaceListFaceFromUrl.json new file mode 100644 index 000000000000..a39e9e95dc20 --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/FaceListOperations_AddLargeFaceListFaceFromUrl.json @@ -0,0 +1,21 @@ +{ + "title": "Add Face to LargeFaceList from Url", + "operationId": "FaceListOperations_AddLargeFaceListFaceFromUrl", + "parameters": { + "apiVersion": "v1.3-preview.1", + 
"largeFaceListId": "your_large_face_list_id", + "targetFace": "10,10,100,100", + "detectionModel": "detection_01", + "userData": "your_user_data", + "body": { + "url": "https://microsoft.com/example.jpg" + } + }, + "responses": { + "200": { + "body": { + "persistedFaceId": "43897a75-8d6f-42cf-885e-74832febb055" + } + } + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/FaceListOperations_CreateFaceList.json b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/FaceListOperations_CreateFaceList.json new file mode 100644 index 000000000000..26a8f77ce462 --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/FaceListOperations_CreateFaceList.json @@ -0,0 +1,16 @@ +{ + "title": "Create FaceList", + "operationId": "FaceListOperations_CreateFaceList", + "parameters": { + "apiVersion": "v1.3-preview.1", + "faceListId": "your_face_list_id", + "body": { + "name": "your_face_list_name", + "userData": "your_user_data", + "recognitionModel": "recognition_01" + } + }, + "responses": { + "200": {} + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/FaceListOperations_CreateLargeFaceList.json b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/FaceListOperations_CreateLargeFaceList.json new file mode 100644 index 000000000000..8f1a5c572ddd --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/FaceListOperations_CreateLargeFaceList.json @@ -0,0 +1,16 @@ +{ + "title": "Create LargeFaceList", + "operationId": "FaceListOperations_CreateLargeFaceList", + "parameters": { + "apiVersion": "v1.3-preview.1", + "largeFaceListId": "your_large_face_list_id", + "body": { + "name": "your_large_face_list_name", + "userData": "your_user_data", + "recognitionModel": "recognition_01" + } + }, + "responses": { + "200": {} + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/FaceListOperations_DeleteFaceList.json 
b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/FaceListOperations_DeleteFaceList.json new file mode 100644 index 000000000000..9da1fd3098f1 --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/FaceListOperations_DeleteFaceList.json @@ -0,0 +1,11 @@ +{ + "title": "Delete FaceList", + "operationId": "FaceListOperations_DeleteFaceList", + "parameters": { + "apiVersion": "v1.3-preview.1", + "faceListId": "your_face_list_id" + }, + "responses": { + "200": {} + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/FaceListOperations_DeleteFaceListFace.json b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/FaceListOperations_DeleteFaceListFace.json new file mode 100644 index 000000000000..00d207310be0 --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/FaceListOperations_DeleteFaceListFace.json @@ -0,0 +1,12 @@ +{ + "title": "Delete Face from FaceList", + "operationId": "FaceListOperations_DeleteFaceListFace", + "parameters": { + "apiVersion": "v1.3-preview.1", + "faceListId": "your_face_list_id", + "persistedFaceId": "43897a75-8d6f-42cf-885e-74832febb055" + }, + "responses": { + "200": {} + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/FaceListOperations_DeleteLargeFaceList.json b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/FaceListOperations_DeleteLargeFaceList.json new file mode 100644 index 000000000000..60b3618d2319 --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/FaceListOperations_DeleteLargeFaceList.json @@ -0,0 +1,11 @@ +{ + "title": "Delete LargeFaceList", + "operationId": "FaceListOperations_DeleteLargeFaceList", + "parameters": { + "apiVersion": "v1.3-preview.1", + "largeFaceListId": "your_large_face_list_id" + }, + "responses": { + "200": {} + } +} diff --git 
a/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/FaceListOperations_DeleteLargeFaceListFace.json b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/FaceListOperations_DeleteLargeFaceListFace.json new file mode 100644 index 000000000000..cf773a7d3f0e --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/FaceListOperations_DeleteLargeFaceListFace.json @@ -0,0 +1,12 @@ +{ + "title": "Delete Face From LargeFaceList", + "operationId": "FaceListOperations_DeleteLargeFaceListFace", + "parameters": { + "apiVersion": "v1.3-preview.1", + "largeFaceListId": "your_large_face_list_id", + "persistedFaceId": "43897a75-8d6f-42cf-885e-74832febb055" + }, + "responses": { + "200": {} + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/FaceListOperations_GetFaceList.json b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/FaceListOperations_GetFaceList.json new file mode 100644 index 000000000000..177c4756f9e5 --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/FaceListOperations_GetFaceList.json @@ -0,0 +1,19 @@ +{ + "title": "Get FaceList", + "operationId": "FaceListOperations_GetFaceList", + "parameters": { + "apiVersion": "v1.3-preview.1", + "faceListId": "your_face_list_id", + "returnRecognitionModel": true + }, + "responses": { + "200": { + "body": { + "name": "your_face_list_name", + "userData": "your_user_data", + "recognitionModel": "recognition_01", + "faceListId": "your_face_list_id" + } + } + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/FaceListOperations_GetFaceLists.json b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/FaceListOperations_GetFaceLists.json new file mode 100644 index 000000000000..336e510315a8 --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/FaceListOperations_GetFaceLists.json @@ -0,0 +1,20 @@ +{ + "title": "Get 
FaceLists", + "operationId": "FaceListOperations_GetFaceLists", + "parameters": { + "apiVersion": "v1.3-preview.1", + "returnRecognitionModel": true + }, + "responses": { + "200": { + "body": [ + { + "name": "your_face_list_name", + "userData": "your_user_data", + "recognitionModel": "recognition_01", + "faceListId": "your_face_list_id" + } + ] + } + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/FaceListOperations_GetLargeFaceList.json b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/FaceListOperations_GetLargeFaceList.json new file mode 100644 index 000000000000..ee7562801402 --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/FaceListOperations_GetLargeFaceList.json @@ -0,0 +1,19 @@ +{ + "title": "Get LargeFaceList", + "operationId": "FaceListOperations_GetLargeFaceList", + "parameters": { + "apiVersion": "v1.3-preview.1", + "largeFaceListId": "your_large_face_list_id", + "returnRecognitionModel": true + }, + "responses": { + "200": { + "body": { + "name": "your_large_face_list_name", + "userData": "your_user_data", + "recognitionModel": "recognition_01", + "largeFaceListId": "your_large_face_list_id" + } + } + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/FaceListOperations_GetLargeFaceListFace.json b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/FaceListOperations_GetLargeFaceListFace.json new file mode 100644 index 000000000000..46943a5f2ffe --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/FaceListOperations_GetLargeFaceListFace.json @@ -0,0 +1,17 @@ +{ + "title": "Get Face from LargeFaceList", + "operationId": "FaceListOperations_GetLargeFaceListFace", + "parameters": { + "apiVersion": "v1.3-preview.1", + "largeFaceListId": "your_large_face_list_id", + "persistedFaceId": "43897a75-8d6f-42cf-885e-74832febb055" + }, + "responses": { + "200": { + "body": { + "persistedFaceId": 
"43897a75-8d6f-42cf-885e-74832febb055", + "userData": "your_user_data" + } + } + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/FaceListOperations_GetLargeFaceListFaces.json b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/FaceListOperations_GetLargeFaceListFaces.json new file mode 100644 index 000000000000..7330d90aa137 --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/FaceListOperations_GetLargeFaceListFaces.json @@ -0,0 +1,20 @@ +{ + "title": "Get Faces from LargeFaceList", + "operationId": "FaceListOperations_GetLargeFaceListFaces", + "parameters": { + "apiVersion": "v1.3-preview.1", + "largeFaceListId": "your_large_face_list_id", + "start": "00000000-0000-0000-0000-000000000000", + "top": 20 + }, + "responses": { + "200": { + "body": [ + { + "persistedFaceId": "43897a75-8d6f-42cf-885e-74832febb055", + "userData": "your_user_data" + } + ] + } + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/FaceListOperations_GetLargeFaceListTrainingStatus.json b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/FaceListOperations_GetLargeFaceListTrainingStatus.json new file mode 100644 index 000000000000..7aa0970b3e13 --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/FaceListOperations_GetLargeFaceListTrainingStatus.json @@ -0,0 +1,19 @@ +{ + "title": "Get Training Status of LargeFaceList", + "operationId": "FaceListOperations_GetLargeFaceListTrainingStatus", + "parameters": { + "apiVersion": "v1.3-preview.1", + "largeFaceListId": "your_large_face_list_id" + }, + "responses": { + "200": { + "body": { + "status": "notStarted", + "createdDateTime": "2024-03-05T11:07:58.371Z", + "lastActionDateTime": "2024-03-05T11:07:58.371Z", + "lastSuccessfulTrainingDateTime": "2024-03-05T11:07:58.371Z", + "message": null + } + } + } +} diff --git 
a/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/FaceListOperations_GetLargeFaceLists.json b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/FaceListOperations_GetLargeFaceLists.json new file mode 100644 index 000000000000..e7e8978da228 --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/FaceListOperations_GetLargeFaceLists.json @@ -0,0 +1,22 @@ +{ + "title": "Get LargeFaceLists", + "operationId": "FaceListOperations_GetLargeFaceLists", + "parameters": { + "apiVersion": "v1.3-preview.1", + "start": "my_list_id", + "top": 20, + "returnRecognitionModel": true + }, + "responses": { + "200": { + "body": [ + { + "name": "your_large_face_list_name", + "userData": "your_user_data", + "recognitionModel": "recognition_01", + "largeFaceListId": "your_large_face_list_id" + } + ] + } + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/FaceListOperations_TrainLargeFaceList.json b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/FaceListOperations_TrainLargeFaceList.json new file mode 100644 index 000000000000..1c7de71dfd8b --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/FaceListOperations_TrainLargeFaceList.json @@ -0,0 +1,15 @@ +{ + "title": "Train LargeFaceList", + "operationId": "FaceListOperations_TrainLargeFaceList", + "parameters": { + "apiVersion": "v1.3-preview.1", + "largeFaceListId": "your_large_face_list_id" + }, + "responses": { + "202": { + "headers": { + "operation-Location": "https://contoso.com/operationstatus" + } + } + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/FaceListOperations_UpdateFaceList.json b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/FaceListOperations_UpdateFaceList.json new file mode 100644 index 000000000000..521bd9d54298 --- /dev/null +++ 
b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/FaceListOperations_UpdateFaceList.json @@ -0,0 +1,15 @@ +{ + "title": "Update FaceList", + "operationId": "FaceListOperations_UpdateFaceList", + "parameters": { + "apiVersion": "v1.3-preview.1", + "faceListId": "your_face_list_id", + "body": { + "name": "your_face_list_name", + "userData": "your_user_data" + } + }, + "responses": { + "200": {} + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/FaceListOperations_UpdateLargeFaceList.json b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/FaceListOperations_UpdateLargeFaceList.json new file mode 100644 index 000000000000..1f87bd146bc8 --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/FaceListOperations_UpdateLargeFaceList.json @@ -0,0 +1,15 @@ +{ + "title": "Update LargeFaceList", + "operationId": "FaceListOperations_UpdateLargeFaceList", + "parameters": { + "apiVersion": "v1.3-preview.1", + "largeFaceListId": "your_large_face_list_id", + "body": { + "name": "your_large_face_list_name", + "userData": "your_user_data" + } + }, + "responses": { + "200": {} + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/FaceListOperations_UpdateLargeFaceListFace.json b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/FaceListOperations_UpdateLargeFaceListFace.json new file mode 100644 index 000000000000..bdb1cf8f47bf --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/FaceListOperations_UpdateLargeFaceListFace.json @@ -0,0 +1,15 @@ +{ + "title": "Update Face in LargeFaceList", + "operationId": "FaceListOperations_UpdateLargeFaceListFace", + "parameters": { + "apiVersion": "v1.3-preview.1", + "largeFaceListId": "your_large_face_list_id", + "persistedFaceId": "43897a75-8d6f-42cf-885e-74832febb055", + "body": { + "userData": "your_user_data" + } + }, + "responses": { + "200": {} + } +} diff --git 
a/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/FaceRecognitionOperations_FindSimilar.json b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/FaceRecognitionOperations_FindSimilar.json new file mode 100644 index 000000000000..3dcb8c7e67d5 --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/FaceRecognitionOperations_FindSimilar.json @@ -0,0 +1,26 @@ +{ + "title": "Find Similar among Face IDs", + "operationId": "FaceRecognitionOperations_FindSimilar", + "parameters": { + "apiVersion": "v1.3-preview.1", + "body": { + "faceId": "c5c24a82-6845-4031-9d5d-978df9175426", + "maxNumOfCandidatesReturned": 3, + "mode": "matchPerson", + "faceIds": [ + "015839fb-fbd9-4f79-ace9-7675fc2f1dd9", + "be386ab3-af91-4104-9e6d-4dae4c9fddb7" + ] + } + }, + "responses": { + "200": { + "body": [ + { + "confidence": 0.9, + "faceId": "015839fb-fbd9-4f79-ace9-7675fc2f1dd9" + } + ] + } + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/FaceRecognitionOperations_FindSimilarFromFaceList.json b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/FaceRecognitionOperations_FindSimilarFromFaceList.json new file mode 100644 index 000000000000..871b3e0c56b1 --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/FaceRecognitionOperations_FindSimilarFromFaceList.json @@ -0,0 +1,23 @@ +{ + "title": "Find Similar from FaceList", + "operationId": "FaceRecognitionOperations_FindSimilarFromFaceList", + "parameters": { + "apiVersion": "v1.3-preview.1", + "body": { + "faceId": "c5c24a82-6845-4031-9d5d-978df9175426", + "maxNumOfCandidatesReturned": 3, + "mode": "matchPerson", + "faceListId": "your_face_list_id" + } + }, + "responses": { + "200": { + "body": [ + { + "confidence": 0.8, + "persistedFaceId": "015839fb-fbd9-4f79-ace9-7675fc2f1dd9" + } + ] + } + } +} diff --git 
a/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/FaceRecognitionOperations_FindSimilarFromLargeFaceList.json b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/FaceRecognitionOperations_FindSimilarFromLargeFaceList.json new file mode 100644 index 000000000000..f4ba3d86f3ec --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/FaceRecognitionOperations_FindSimilarFromLargeFaceList.json @@ -0,0 +1,23 @@ +{ + "title": "Find Similar from LargeFaceList", + "operationId": "FaceRecognitionOperations_FindSimilarFromLargeFaceList", + "parameters": { + "apiVersion": "v1.3-preview.1", + "body": { + "faceId": "c5c24a82-6845-4031-9d5d-978df9175426", + "maxNumOfCandidatesReturned": 3, + "mode": "matchPerson", + "largeFaceListId": "your_large_face_list_id" + } + }, + "responses": { + "200": { + "body": [ + { + "confidence": 0.8, + "persistedFaceId": "015839fb-fbd9-4f79-ace9-7675fc2f1dd9" + } + ] + } + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/FaceRecognitionOperations_Group.json b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/FaceRecognitionOperations_Group.json new file mode 100644 index 000000000000..96eac4adec94 --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/FaceRecognitionOperations_Group.json @@ -0,0 +1,41 @@ +{ + "title": "Group Face IDs", + "operationId": "FaceRecognitionOperations_Group", + "parameters": { + "apiVersion": "v1.3-preview.1", + "body": { + "faceIds": [ + "c5c24a82-6845-4031-9d5d-978df9175426", + "015839fb-fbd9-4f79-ace9-7675fc2f1dd9", + "65d083d4-9447-47d1-af30-b626144bf0fb", + "fce92aed-d578-4d2e-8114-068f8af4492e", + "30ea1073-cc9e-4652-b1e3-d08fb7b95315", + "be386ab3-af91-4104-9e6d-4dae4c9fddb7", + "fbd2a038-dbff-452c-8e79-2ee81b1aa84e", + "b64d5e15-8257-4af2-b20a-5a750f8940e7" + ] + } + }, + "responses": { + "200": { + "body": { + "groups": [ + [ + "c5c24a82-6845-4031-9d5d-978df9175426", + 
"015839fb-fbd9-4f79-ace9-7675fc2f1dd9", + "fce92aed-d578-4d2e-8114-068f8af4492e", + "b64d5e15-8257-4af2-b20a-5a750f8940e7" + ], + [ + "65d083d4-9447-47d1-af30-b626144bf0fb", + "30ea1073-cc9e-4652-b1e3-d08fb7b95315" + ] + ], + "messyGroup": [ + "be386ab3-af91-4104-9e6d-4dae4c9fddb7", + "fbd2a038-dbff-452c-8e79-2ee81b1aa84e" + ] + } + } + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/FaceRecognitionOperations_IdentifyFromDynamicPersonGroup.json b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/FaceRecognitionOperations_IdentifyFromDynamicPersonGroup.json new file mode 100644 index 000000000000..658d283b0a18 --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/FaceRecognitionOperations_IdentifyFromDynamicPersonGroup.json @@ -0,0 +1,30 @@ +{ + "title": "Identify from DynamicPersonGroup", + "operationId": "FaceRecognitionOperations_IdentifyFromDynamicPersonGroup", + "parameters": { + "apiVersion": "v1.3-preview.1", + "body": { + "faceIds": [ + "c5c24a82-6845-4031-9d5d-978df9175426" + ], + "dynamicPersonGroupId": "your_dynamic_person_group_id", + "maxNumOfCandidatesReturned": 9, + "confidenceThreshold": 0.7 + } + }, + "responses": { + "200": { + "body": [ + { + "faceId": "c5c24a82-6845-4031-9d5d-978df9175426", + "candidates": [ + { + "personId": "85c0c630-c9c9-40f8-8a4e-f9ae4f926ea5", + "confidence": 0.8 + } + ] + } + ] + } + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/FaceRecognitionOperations_IdentifyFromLargePersonGroup.json b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/FaceRecognitionOperations_IdentifyFromLargePersonGroup.json new file mode 100644 index 000000000000..1733b5003bcf --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/FaceRecognitionOperations_IdentifyFromLargePersonGroup.json @@ -0,0 +1,30 @@ +{ + "title": "Identify from LargePersonGroup", + "operationId": 
"FaceRecognitionOperations_IdentifyFromLargePersonGroup", + "parameters": { + "apiVersion": "v1.3-preview.1", + "body": { + "faceIds": [ + "c5c24a82-6845-4031-9d5d-978df9175426" + ], + "largePersonGroupId": "your_large_person_group_id", + "maxNumOfCandidatesReturned": 9, + "confidenceThreshold": 0.7 + } + }, + "responses": { + "200": { + "body": [ + { + "faceId": "c5c24a82-6845-4031-9d5d-978df9175426", + "candidates": [ + { + "personId": "85c0c630-c9c9-40f8-8a4e-f9ae4f926ea5", + "confidence": 0.8 + } + ] + } + ] + } + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/FaceRecognitionOperations_IdentifyFromPersonDirectory.json b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/FaceRecognitionOperations_IdentifyFromPersonDirectory.json new file mode 100644 index 000000000000..ecdf4a2f339c --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/FaceRecognitionOperations_IdentifyFromPersonDirectory.json @@ -0,0 +1,32 @@ +{ + "title": "Identify from PersonDirectory", + "operationId": "FaceRecognitionOperations_IdentifyFromPersonDirectory", + "parameters": { + "apiVersion": "v1.3-preview.1", + "body": { + "faceIds": [ + "c5c24a82-6845-4031-9d5d-978df9175426" + ], + "personIds": [ + "85c0c630-c9c9-40f8-8a4e-f9ae4f926ea5" + ], + "maxNumOfCandidatesReturned": 9, + "confidenceThreshold": 0.7 + } + }, + "responses": { + "200": { + "body": [ + { + "faceId": "c5c24a82-6845-4031-9d5d-978df9175426", + "candidates": [ + { + "personId": "85c0c630-c9c9-40f8-8a4e-f9ae4f926ea5", + "confidence": 0.8 + } + ] + } + ] + } + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/FaceRecognitionOperations_IdentifyFromPersonGroup.json b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/FaceRecognitionOperations_IdentifyFromPersonGroup.json new file mode 100644 index 000000000000..7d0cd9a3cecc --- /dev/null +++ 
b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/FaceRecognitionOperations_IdentifyFromPersonGroup.json @@ -0,0 +1,30 @@ +{ + "title": "Identify from PersonGroup", + "operationId": "FaceRecognitionOperations_IdentifyFromPersonGroup", + "parameters": { + "apiVersion": "v1.3-preview.1", + "body": { + "faceIds": [ + "c5c24a82-6845-4031-9d5d-978df9175426" + ], + "personGroupId": "your_person_group_id", + "maxNumOfCandidatesReturned": 9, + "confidenceThreshold": 0.7 + } + }, + "responses": { + "200": { + "body": [ + { + "faceId": "c5c24a82-6845-4031-9d5d-978df9175426", + "candidates": [ + { + "personId": "85c0c630-c9c9-40f8-8a4e-f9ae4f926ea5", + "confidence": 0.8 + } + ] + } + ] + } + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/FaceRecognitionOperations_VerifyFaceToFace.json b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/FaceRecognitionOperations_VerifyFaceToFace.json new file mode 100644 index 000000000000..c1860d8c7e3c --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/FaceRecognitionOperations_VerifyFaceToFace.json @@ -0,0 +1,19 @@ +{ + "title": "Verify Face to Face", + "operationId": "FaceRecognitionOperations_VerifyFaceToFace", + "parameters": { + "apiVersion": "v1.3-preview.1", + "body": { + "faceId1": "c5c24a82-6845-4031-9d5d-978df9175426", + "faceId2": "3aa87e30-b380-48eb-ad9e-1aa54fc52bd3" + } + }, + "responses": { + "200": { + "body": { + "isIdentical": true, + "confidence": 0.8 + } + } + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/FaceRecognitionOperations_VerifyFromLargePersonGroup.json b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/FaceRecognitionOperations_VerifyFromLargePersonGroup.json new file mode 100644 index 000000000000..cf6c5bfeca7c --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/FaceRecognitionOperations_VerifyFromLargePersonGroup.json @@ -0,0 
+1,20 @@ +{ + "title": "Verify from LargePersonGroup", + "operationId": "FaceRecognitionOperations_VerifyFromLargePersonGroup", + "parameters": { + "apiVersion": "v1.3-preview.1", + "body": { + "faceId": "c5c24a82-6845-4031-9d5d-978df9175426", + "personId": "815df99c-598f-4926-930a-a734b3fd651c", + "largePersonGroupId": "your_large_person_group" + } + }, + "responses": { + "200": { + "body": { + "isIdentical": true, + "confidence": 0.8 + } + } + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/FaceRecognitionOperations_VerifyFromPersonDirectory.json b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/FaceRecognitionOperations_VerifyFromPersonDirectory.json new file mode 100644 index 000000000000..eb37fb3c2f21 --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/FaceRecognitionOperations_VerifyFromPersonDirectory.json @@ -0,0 +1,19 @@ +{ + "title": "Verify from PersonDirectory", + "operationId": "FaceRecognitionOperations_VerifyFromPersonDirectory", + "parameters": { + "apiVersion": "v1.3-preview.1", + "body": { + "faceId": "c5c24a82-6845-4031-9d5d-978df9175426", + "personId": "815df99c-598f-4926-930a-a734b3fd651c" + } + }, + "responses": { + "200": { + "body": { + "isIdentical": true, + "confidence": 0.8 + } + } + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/FaceRecognitionOperations_VerifyFromPersonGroup.json b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/FaceRecognitionOperations_VerifyFromPersonGroup.json new file mode 100644 index 000000000000..1591fa7396ef --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/FaceRecognitionOperations_VerifyFromPersonGroup.json @@ -0,0 +1,20 @@ +{ + "title": "Verify from PersonGroup", + "operationId": "FaceRecognitionOperations_VerifyFromPersonGroup", + "parameters": { + "apiVersion": "v1.3-preview.1", + "body": { + "faceId": 
"c5c24a82-6845-4031-9d5d-978df9175426", + "personId": "815df99c-598f-4926-930a-a734b3fd651c", + "personGroupId": "your_person_group" + } + }, + "responses": { + "200": { + "body": { + "isIdentical": true, + "confidence": 0.8 + } + } + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/LivenessSessionOperations_CreateLivenessSession.json b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/LivenessSessionOperations_CreateLivenessSession.json new file mode 100644 index 000000000000..77529b46ef8a --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/LivenessSessionOperations_CreateLivenessSession.json @@ -0,0 +1,32 @@ +{ + "title": "Create Liveness Session", + "operationId": "LivenessSessionOperations_CreateLivenessSession", + "parameters": { + "apiVersion": "v1.3-preview.1", + "body": { + "livenessOperationMode": "PassiveActive", + "deviceCorrelationIdSetInClient": false, + "deviceCorrelationId": "your_device_correlation_id", + "userCorrelationIdSetInClient": false, + "userCorrelationId": "your_user_correlation_id", + "authTokenTimeToLiveInSeconds": 60, + "numberOfClientAttemptsAllowed": 1, + "expectedClientIpAddress": "1.2.3.4" + } + }, + "responses": { + "200": { + "body": { + "sessionId": "b12e033e-bda7-4b83-a211-e721c661f30e", + "authToken": "eyJhbGciOiJFUzI1NiIsIm", + "status": "NotStarted", + "modelVersion": "2024-11-15", + "isAbuseMonitoringEnabled": true, + "expectedClientIpAddress": "1.2.3.4", + "results": { + "attempts": [] + } + } + } + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/LivenessSessionOperations_CreateLivenessWithVerifySession.json b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/LivenessSessionOperations_CreateLivenessWithVerifySession.json new file mode 100644 index 000000000000..958236a457b3 --- /dev/null +++ 
b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/LivenessSessionOperations_CreateLivenessWithVerifySession.json @@ -0,0 +1,42 @@ +{ + "title": "Create LivenessWithVerify Session", + "operationId": "LivenessSessionOperations_CreateLivenessWithVerifySession", + "parameters": { + "apiVersion": "v1.3-preview.1", + "livenessOperationMode": "PassiveActive", + "deviceCorrelationIdSetInClient": false, + "deviceCorrelationId": "your_device_correlation_id", + "userCorrelationIdSetInClient": false, + "userCorrelationId": "your_user_correlation_id", + "authTokenTimeToLiveInSeconds": 60, + "numberOfClientAttemptsAllowed": 1, + "expectedClientIpAddress": "1.2.3.4", + "verifyImage": "" + }, + "responses": { + "200": { + "body": { + "sessionId": "b12e033e-bda7-4b83-a211-e721c661f30e", + "authToken": "eyJhbGciOiJFUzI1NiIsIm", + "status": "NotStarted", + "modelVersion": "2024-11-15", + "isAbuseMonitoringEnabled": true, + "results": { + "attempts": [], + "verifyReferences": [ + { + "referenceType": "image", + "faceRectangle": { + "top": 316, + "left": 131, + "width": 498, + "height": 677 + }, + "qualityForRecognition": "high" + } + ] + } + } + } + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/LivenessSessionOperations_DeleteLivenessSession.json b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/LivenessSessionOperations_DeleteLivenessSession.json new file mode 100644 index 000000000000..929aa7ced311 --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/LivenessSessionOperations_DeleteLivenessSession.json @@ -0,0 +1,11 @@ +{ + "title": "Delete Liveness Session", + "operationId": "LivenessSessionOperations_DeleteLivenessSession", + "parameters": { + "apiVersion": "v1.3-preview.1", + "sessionId": "b12e033e-bda7-4b83-a211-e721c661f30e" + }, + "responses": { + "204": {} + } +} diff --git 
a/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/LivenessSessionOperations_DeleteLivenessWithVerifySession.json b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/LivenessSessionOperations_DeleteLivenessWithVerifySession.json new file mode 100644 index 000000000000..d86b75b1fbfc --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/LivenessSessionOperations_DeleteLivenessWithVerifySession.json @@ -0,0 +1,11 @@ +{ + "title": "Delete LivenessWithVerify Session", + "operationId": "LivenessSessionOperations_DeleteLivenessWithVerifySession", + "parameters": { + "apiVersion": "v1.3-preview.1", + "sessionId": "b12e033e-bda7-4b83-a211-e721c661f30e" + }, + "responses": { + "204": {} + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/LivenessSessionOperations_GetClientAssetsAccessToken.json b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/LivenessSessionOperations_GetClientAssetsAccessToken.json new file mode 100644 index 000000000000..14dd93beae62 --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/LivenessSessionOperations_GetClientAssetsAccessToken.json @@ -0,0 +1,16 @@ +{ + "title": "Get LivenessSessionOperations Settings ClientAssetsAccessToken", + "operationId": "LivenessSessionOperations_GetClientAssetsAccessToken", + "parameters": { + "apiVersion": "v1.3-preview.1" + }, + "responses": { + "200": { + "body": { + "expiry": "2025-07-03T15:30:00.000Z", + "accessToken": "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9", + "base64AccessToken": "ZXlKaGJHY2lPaUpJVXpJMU5pSXNJblI1=" + } + } + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/LivenessSessionOperations_GetLivenessSessionResult.json b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/LivenessSessionOperations_GetLivenessSessionResult.json new file mode 100644 index 000000000000..f6042d5ee73c --- /dev/null +++ 
b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/LivenessSessionOperations_GetLivenessSessionResult.json @@ -0,0 +1,80 @@ +{ + "title": "Get LivenessSession Result", + "operationId": "LivenessSessionOperations_GetLivenessSessionResult", + "parameters": { + "apiVersion": "v1.3-preview.1", + "sessionId": "b12e033e-bda7-4b83-a211-e721c661f30e" + }, + "responses": { + "200": { + "body": { + "sessionId": "b12e033e-bda7-4b83-a211-e721c661f30e", + "authToken": "eyJhbGciOiJFUzI1NiIsIm", + "status": "NotStarted", + "modelVersion": "2024-11-15", + "isAbuseMonitoringEnabled": true, + "results": { + "attempts": [ + { + "attemptId": 2, + "attemptStatus": "Succeeded", + "result": { + "livenessDecision": "realface", + "targets": { + "color": { + "faceRectangle": { + "top": 669, + "left": 203, + "width": 646, + "height": 724 + } + } + }, + "digest": "B0A803BB7B26F3C8F29CD36030F8E63ED3FAF955FEEF8E01C88AB8FD89CCF761", + "sessionImageId": "Ae3PVWlXAmVAnXgkAFt1QSjGUWONKzWiSr2iPh9p9G4I" + }, + "clientInformation": [ + { + "ip": "73.21.34.122" + } + ], + "abuseMonitoringResult": { + "isAbuseDetected": true, + "otherFlaggedSessions": [ + { + "attemptId": 1, + "sessionId": "b12e033e-bda7-4b83-a211-e721c661f30e", + "sessionImageId": "Ae3PVWlXAmVAnXgkAFt1QSjGUWONKzWiSr2iPh9p9G4I" + } + ] + } + }, + { + "attemptId": 1, + "attemptStatus": "Failed", + "error": { + "code": "FaceWithMaskDetected", + "message": "Mask detected on face image.", + "targets": { + "color": { + "faceRectangle": { + "top": 669, + "left": 203, + "width": 646, + "height": 724 + } + } + } + }, + "clientInformation": [ + { + "ip": "73.21.34.122" + } + ] + } + ] + } + } + } + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/LivenessSessionOperations_GetLivenessWithVerifySessionResult.json b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/LivenessSessionOperations_GetLivenessWithVerifySessionResult.json new file mode 100644 index 000000000000..8b8bb97f9359 
--- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/LivenessSessionOperations_GetLivenessWithVerifySessionResult.json @@ -0,0 +1,98 @@ +{ + "title": "Get LivenessWithVerify Session Result", + "operationId": "LivenessSessionOperations_GetLivenessWithVerifySessionResult", + "parameters": { + "apiVersion": "v1.3-preview.1", + "sessionId": "b12e033e-bda7-4b83-a211-e721c661f30e" + }, + "responses": { + "200": { + "body": { + "sessionId": "b12e033e-bda7-4b83-a211-e721c661f30e", + "authToken": "eyJhbGciOiJFUzI1NiIsIm", + "status": "NotStarted", + "modelVersion": "2024-11-15", + "isAbuseMonitoringEnabled": true, + "expectedClientIpAddress": "1.2.3.4", + "results": { + "attempts": [ + { + "attemptId": 2, + "attemptStatus": "Succeeded", + "result": { + "livenessDecision": "realface", + "targets": { + "color": { + "faceRectangle": { + "top": 669, + "left": 203, + "width": 646, + "height": 724 + } + } + }, + "verifyResult": { + "matchConfidence": 0.08871888, + "isIdentical": false + }, + "digest": "B0A803BB7B26F3C8F29CD36030F8E63ED3FAF955FEEF8E01C88AB8FD89CCF761", + "sessionImageId": "Ae3PVWlXAmVAnXgkAFt1QSjGUWONKzWiSr2iPh9p9G4I", + "verifyImageHash": "43B7D8E8769533C3290DBD37A84D821B2C28CB4381DF9C6784DBC4AAF7E45018" + }, + "clientInformation": [ + { + "ip": "73.21.34.122" + } + ], + "abuseMonitoringResult": { + "isAbuseDetected": true, + "otherFlaggedSessions": [ + { + "attemptId": 1, + "sessionId": "b12e033e-bda7-4b83-a211-e721c661f30e", + "sessionImageId": "Ae3PVWlXAmVAnXgkAFt1QSjGUWONKzWiSr2iPh9p9G4I" + } + ] + } + }, + { + "attemptId": 1, + "attemptStatus": "Failed", + "error": { + "code": "FaceWithMaskDetected", + "message": "Mask detected on face image.", + "targets": { + "color": { + "faceRectangle": { + "top": 669, + "left": 203, + "width": 646, + "height": 724 + } + } + } + }, + "clientInformation": [ + { + "ip": "73.21.34.122" + } + ] + } + ], + "verifyReferences": [ + { + "referenceType": "image", + "faceRectangle": { + "top": 316, 
+ "left": 131, + "width": 498, + "height": 677 + }, + "qualityForRecognition": "high" + } + ] + } + } + } + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/LivenessSessionOperations_GetSessionImage.json b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/LivenessSessionOperations_GetSessionImage.json new file mode 100644 index 000000000000..577b4a7b8ccc --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/LivenessSessionOperations_GetSessionImage.json @@ -0,0 +1,13 @@ +{ + "title": "Get Session Image", + "operationId": "LivenessSessionOperations_GetSessionImage", + "parameters": { + "apiVersion": "v1.3-preview.1", + "sessionImageId": "3d035d35-2e01-4ed4-8935-577afde9caaa" + }, + "responses": { + "200": { + "body": "" + } + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/LivenessSessionOperations_GetSettings.json b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/LivenessSessionOperations_GetSettings.json new file mode 100644 index 000000000000..0ad339fbe901 --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/LivenessSessionOperations_GetSettings.json @@ -0,0 +1,16 @@ +{ + "title": "Get LivenessSessionOperations Settings", + "operationId": "LivenessSessionOperations_GetSettings", + "parameters": { + "apiVersion": "v1.3-preview.1" + }, + "responses": { + "200": { + "body": { + "livenessAbuseMonitoring": { + "enabled": true + } + } + } + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/LivenessSessionOperations_PatchSettings.json b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/LivenessSessionOperations_PatchSettings.json new file mode 100644 index 000000000000..6264247b6d9e --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/LivenessSessionOperations_PatchSettings.json @@ -0,0 +1,21 @@ +{ + "title": "Patch 
LivenessSessionOperations Settings", + "operationId": "LivenessSessionOperations_PatchSettings", + "parameters": { + "apiVersion": "v1.3-preview.1", + "body": { + "livenessAbuseMonitoring": { + "enabled": true + } + } + }, + "responses": { + "200": { + "body": { + "livenessAbuseMonitoring": { + "enabled": true + } + } + } + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/PersonGroupOperations_AddLargePersonGroupPersonFaceFromStream.json b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/PersonGroupOperations_AddLargePersonGroupPersonFaceFromStream.json new file mode 100644 index 000000000000..e5b5233766b8 --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/PersonGroupOperations_AddLargePersonGroupPersonFaceFromStream.json @@ -0,0 +1,20 @@ +{ + "title": "Add Face in LargePersonGroup Person", + "operationId": "PersonGroupOperations_AddLargePersonGroupPersonFace", + "parameters": { + "apiVersion": "v1.3-preview.1", + "largePersonGroupId": "your_large_person_group_id", + "personId": "25985303-c537-4467-b41d-bdb45cd95ca1", + "targetFace": "10,10,100,100", + "detectionModel": "detection_01", + "userData": "your_user_data", + "imageContent": "" + }, + "responses": { + "200": { + "body": { + "persistedFaceId": "43897a75-8d6f-42cf-885e-74832febb055" + } + } + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/PersonGroupOperations_AddLargePersonGroupPersonFaceFromUrl.json b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/PersonGroupOperations_AddLargePersonGroupPersonFaceFromUrl.json new file mode 100644 index 000000000000..5010cc3524fc --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/PersonGroupOperations_AddLargePersonGroupPersonFaceFromUrl.json @@ -0,0 +1,22 @@ +{ + "title": "Add Face in LargePersonGroup Person from Url", + "operationId": "PersonGroupOperations_AddLargePersonGroupPersonFaceFromUrl", + 
"parameters": { + "apiVersion": "v1.3-preview.1", + "largePersonGroupId": "your_large_person_group_id", + "personId": "25985303-c537-4467-b41d-bdb45cd95ca1", + "targetFace": "10,10,100,100", + "detectionModel": "detection_01", + "userData": "your_user_data", + "body": { + "url": "https://microsoft.com/example.jpg" + } + }, + "responses": { + "200": { + "body": { + "persistedFaceId": "43897a75-8d6f-42cf-885e-74832febb055" + } + } + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/PersonGroupOperations_AddPersonGroupPersonFaceFromStream.json b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/PersonGroupOperations_AddPersonGroupPersonFaceFromStream.json new file mode 100644 index 000000000000..94a27103d0f2 --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/PersonGroupOperations_AddPersonGroupPersonFaceFromStream.json @@ -0,0 +1,20 @@ +{ + "title": "Add Face to PersonGroup Person", + "operationId": "PersonGroupOperations_AddPersonGroupPersonFace", + "parameters": { + "apiVersion": "v1.3-preview.1", + "personGroupId": "your_person_group_id", + "personId": "25985303-c537-4467-b41d-bdb45cd95ca1", + "targetFace": "10,10,100,100", + "detectionModel": "detection_01", + "userData": "your_user_data", + "imageContent": "" + }, + "responses": { + "200": { + "body": { + "persistedFaceId": "43897a75-8d6f-42cf-885e-74832febb055" + } + } + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/PersonGroupOperations_AddPersonGroupPersonFaceFromUrl.json b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/PersonGroupOperations_AddPersonGroupPersonFaceFromUrl.json new file mode 100644 index 000000000000..3d0961bad0f7 --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/PersonGroupOperations_AddPersonGroupPersonFaceFromUrl.json @@ -0,0 +1,22 @@ +{ + "title": "Add Face to PersonGroupPerson from Url", + "operationId": 
"PersonGroupOperations_AddPersonGroupPersonFaceFromUrl", + "parameters": { + "apiVersion": "v1.3-preview.1", + "personGroupId": "your_person_group_id", + "personId": "25985303-c537-4467-b41d-bdb45cd95ca1", + "targetFace": "10,10,100,100", + "detectionModel": "detection_01", + "userData": "your_user_data", + "body": { + "url": "https://microsoft.com/example.jpg" + } + }, + "responses": { + "200": { + "body": { + "persistedFaceId": "43897a75-8d6f-42cf-885e-74832febb055" + } + } + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/PersonGroupOperations_CreateLargePersonGroup.json b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/PersonGroupOperations_CreateLargePersonGroup.json new file mode 100644 index 000000000000..ef5b10d2063b --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/PersonGroupOperations_CreateLargePersonGroup.json @@ -0,0 +1,16 @@ +{ + "title": "Create LargePersonGroup", + "operationId": "PersonGroupOperations_CreateLargePersonGroup", + "parameters": { + "apiVersion": "v1.3-preview.1", + "largePersonGroupId": "your_large_person_group_id", + "body": { + "name": "your_large_person_group_name", + "userData": "your_user_data", + "recognitionModel": "recognition_01" + } + }, + "responses": { + "200": {} + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/PersonGroupOperations_CreateLargePersonGroupPerson.json b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/PersonGroupOperations_CreateLargePersonGroupPerson.json new file mode 100644 index 000000000000..7dd4680cf354 --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/PersonGroupOperations_CreateLargePersonGroupPerson.json @@ -0,0 +1,19 @@ +{ + "title": "Create Person in LargePersonGroup", + "operationId": "PersonGroupOperations_CreateLargePersonGroupPerson", + "parameters": { + "apiVersion": "v1.3-preview.1", + "largePersonGroupId": 
"your_large_person_group_id", + "body": { + "name": "your_large_person_group_person_name", + "userData": "your_user_data" + } + }, + "responses": { + "200": { + "body": { + "personId": "25985303-c537-4467-b41d-bdb45cd95ca1" + } + } + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/PersonGroupOperations_CreatePersonGroup.json b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/PersonGroupOperations_CreatePersonGroup.json new file mode 100644 index 000000000000..b532487c5cd1 --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/PersonGroupOperations_CreatePersonGroup.json @@ -0,0 +1,16 @@ +{ + "title": "Create PersonGroup", + "operationId": "PersonGroupOperations_CreatePersonGroup", + "parameters": { + "apiVersion": "v1.3-preview.1", + "personGroupId": "your_person_group_id", + "body": { + "name": "your_person_group_name", + "userData": "your_user_data", + "recognitionModel": "recognition_01" + } + }, + "responses": { + "200": {} + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/PersonGroupOperations_CreatePersonGroupPerson.json b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/PersonGroupOperations_CreatePersonGroupPerson.json new file mode 100644 index 000000000000..cc335edb11aa --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/PersonGroupOperations_CreatePersonGroupPerson.json @@ -0,0 +1,19 @@ +{ + "title": "Create Person in PersonGroup", + "operationId": "PersonGroupOperations_CreatePersonGroupPerson", + "parameters": { + "apiVersion": "v1.3-preview.1", + "personGroupId": "your_person_group_id", + "body": { + "name": "your_person_group_person_name", + "userData": "your_user_data" + } + }, + "responses": { + "200": { + "body": { + "personId": "25985303-c537-4467-b41d-bdb45cd95ca1" + } + } + } +} diff --git 
a/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/PersonGroupOperations_DeleteLargePersonGroup.json b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/PersonGroupOperations_DeleteLargePersonGroup.json new file mode 100644 index 000000000000..16a49c6b756c --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/PersonGroupOperations_DeleteLargePersonGroup.json @@ -0,0 +1,11 @@ +{ + "title": "Delete LargePersonGroup", + "operationId": "PersonGroupOperations_DeleteLargePersonGroup", + "parameters": { + "apiVersion": "v1.3-preview.1", + "largePersonGroupId": "your_large_person_group_id" + }, + "responses": { + "200": {} + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/PersonGroupOperations_DeleteLargePersonGroupPerson.json b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/PersonGroupOperations_DeleteLargePersonGroupPerson.json new file mode 100644 index 000000000000..65f099335b57 --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/PersonGroupOperations_DeleteLargePersonGroupPerson.json @@ -0,0 +1,12 @@ +{ + "title": "Delete Person from LargePersonGroup", + "operationId": "PersonGroupOperations_DeleteLargePersonGroupPerson", + "parameters": { + "apiVersion": "v1.3-preview.1", + "largePersonGroupId": "your_large_person_group_id", + "personId": "25985303-c537-4467-b41d-bdb45cd95ca1" + }, + "responses": { + "200": {} + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/PersonGroupOperations_DeleteLargePersonGroupPersonFace.json b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/PersonGroupOperations_DeleteLargePersonGroupPersonFace.json new file mode 100644 index 000000000000..6f9439d4afb0 --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/PersonGroupOperations_DeleteLargePersonGroupPersonFace.json @@ -0,0 +1,13 @@ +{ + "title": "Delete Face from 
LargePersonGroup Person", + "operationId": "PersonGroupOperations_DeleteLargePersonGroupPersonFace", + "parameters": { + "apiVersion": "v1.3-preview.1", + "largePersonGroupId": "your_large_person_group_id", + "personId": "25985303-c537-4467-b41d-bdb45cd95ca1", + "persistedFaceId": "43897a75-8d6f-42cf-885e-74832febb055" + }, + "responses": { + "200": {} + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/PersonGroupOperations_DeletePersonGroup.json b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/PersonGroupOperations_DeletePersonGroup.json new file mode 100644 index 000000000000..4ff1d5c78777 --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/PersonGroupOperations_DeletePersonGroup.json @@ -0,0 +1,11 @@ +{ + "title": "Delete PersonGroup", + "operationId": "PersonGroupOperations_DeletePersonGroup", + "parameters": { + "apiVersion": "v1.3-preview.1", + "personGroupId": "your_person_group_id" + }, + "responses": { + "200": {} + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/PersonGroupOperations_DeletePersonGroupPerson.json b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/PersonGroupOperations_DeletePersonGroupPerson.json new file mode 100644 index 000000000000..2a13f7abf443 --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/PersonGroupOperations_DeletePersonGroupPerson.json @@ -0,0 +1,12 @@ +{ + "title": "Delete Person from PersonGroup", + "operationId": "PersonGroupOperations_DeletePersonGroupPerson", + "parameters": { + "apiVersion": "v1.3-preview.1", + "personGroupId": "your_person_group_id", + "personId": "25985303-c537-4467-b41d-bdb45cd95ca1" + }, + "responses": { + "200": {} + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/PersonGroupOperations_DeletePersonGroupPersonFace.json 
b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/PersonGroupOperations_DeletePersonGroupPersonFace.json new file mode 100644 index 000000000000..c17b75b9eccb --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/PersonGroupOperations_DeletePersonGroupPersonFace.json @@ -0,0 +1,13 @@ +{ + "title": "Delete Face from PersonGroup Person", + "operationId": "PersonGroupOperations_DeletePersonGroupPersonFace", + "parameters": { + "apiVersion": "v1.3-preview.1", + "personGroupId": "your_person_group_id", + "personId": "25985303-c537-4467-b41d-bdb45cd95ca1", + "persistedFaceId": "43897a75-8d6f-42cf-885e-74832febb055" + }, + "responses": { + "200": {} + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/PersonGroupOperations_GetLargePersonGroup.json b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/PersonGroupOperations_GetLargePersonGroup.json new file mode 100644 index 000000000000..8fd27f06b3a5 --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/PersonGroupOperations_GetLargePersonGroup.json @@ -0,0 +1,19 @@ +{ + "title": "Get LargePersonGroup", + "operationId": "PersonGroupOperations_GetLargePersonGroup", + "parameters": { + "apiVersion": "v1.3-preview.1", + "largePersonGroupId": "your_large_person_group_id", + "returnRecognitionModel": true + }, + "responses": { + "200": { + "body": { + "name": "your_large_person_group_name", + "userData": "your_user_data", + "recognitionModel": "recognition_01", + "largePersonGroupId": "your_large_person_group_id" + } + } + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/PersonGroupOperations_GetLargePersonGroupPerson.json b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/PersonGroupOperations_GetLargePersonGroupPerson.json new file mode 100644 index 000000000000..ae17b5113282 --- /dev/null +++ 
b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/PersonGroupOperations_GetLargePersonGroupPerson.json @@ -0,0 +1,21 @@ +{ + "title": "Get Person from LargePersonGroup", + "operationId": "PersonGroupOperations_GetLargePersonGroupPerson", + "parameters": { + "apiVersion": "v1.3-preview.1", + "largePersonGroupId": "your_large_person_group_id", + "personId": "25985303-c537-4467-b41d-bdb45cd95ca1" + }, + "responses": { + "200": { + "body": { + "personId": "25985303-c537-4467-b41d-bdb45cd95ca1", + "name": "your_large_person_group_person_name", + "userData": "your_user_data", + "persistedFaceIds": [ + "43897a75-8d6f-42cf-885e-74832febb055" + ] + } + } + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/PersonGroupOperations_GetLargePersonGroupPersonFace.json b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/PersonGroupOperations_GetLargePersonGroupPersonFace.json new file mode 100644 index 000000000000..562db1c19045 --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/PersonGroupOperations_GetLargePersonGroupPersonFace.json @@ -0,0 +1,18 @@ +{ + "title": "Get Face from LargePersonGroup Person", + "operationId": "PersonGroupOperations_GetLargePersonGroupPersonFace", + "parameters": { + "apiVersion": "v1.3-preview.1", + "largePersonGroupId": "your_large_person_group_id", + "personId": "25985303-c537-4467-b41d-bdb45cd95ca1", + "persistedFaceId": "43897a75-8d6f-42cf-885e-74832febb055" + }, + "responses": { + "200": { + "body": { + "persistedFaceId": "43897a75-8d6f-42cf-885e-74832febb055", + "userData": "your_user_data" + } + } + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/PersonGroupOperations_GetLargePersonGroupPersons.json b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/PersonGroupOperations_GetLargePersonGroupPersons.json new file mode 100644 index 000000000000..9c8262286f16 --- /dev/null +++ 
b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/PersonGroupOperations_GetLargePersonGroupPersons.json @@ -0,0 +1,24 @@ +{ + "title": "Get Persons from LargePersonGroup", + "operationId": "PersonGroupOperations_GetLargePersonGroupPersons", + "parameters": { + "apiVersion": "v1.3-preview.1", + "largePersonGroupId": "your_large_person_group_id", + "start": "00000000-0000-0000-0000-000000000000", + "top": 20 + }, + "responses": { + "200": { + "body": [ + { + "personId": "25985303-c537-4467-b41d-bdb45cd95ca1", + "name": "your_large_person_group_person_name", + "userData": "your_user_data", + "persistedFaceIds": [ + "43897a75-8d6f-42cf-885e-74832febb055" + ] + } + ] + } + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/PersonGroupOperations_GetLargePersonGroupTrainingStatus.json b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/PersonGroupOperations_GetLargePersonGroupTrainingStatus.json new file mode 100644 index 000000000000..f43d627feec0 --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/PersonGroupOperations_GetLargePersonGroupTrainingStatus.json @@ -0,0 +1,19 @@ +{ + "title": "Get Training Status of LargePersonGroup", + "operationId": "PersonGroupOperations_GetLargePersonGroupTrainingStatus", + "parameters": { + "apiVersion": "v1.3-preview.1", + "largePersonGroupId": "your_large_person_group_id" + }, + "responses": { + "200": { + "body": { + "status": "notStarted", + "createdDateTime": "2024-03-05T11:07:58.371Z", + "lastActionDateTime": "2024-03-05T11:07:58.371Z", + "lastSuccessfulTrainingDateTime": "2024-03-05T11:07:58.371Z", + "message": null + } + } + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/PersonGroupOperations_GetLargePersonGroups.json b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/PersonGroupOperations_GetLargePersonGroups.json new file mode 100644 index 000000000000..7dd496aa32e2 --- /dev/null 
+++ b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/PersonGroupOperations_GetLargePersonGroups.json @@ -0,0 +1,22 @@ +{ + "title": "Get LargePersonGroups", + "operationId": "PersonGroupOperations_GetLargePersonGroups", + "parameters": { + "apiVersion": "v1.3-preview.1", + "start": "00000000-0000-0000-0000-000000000000", + "top": 20, + "returnRecognitionModel": true + }, + "responses": { + "200": { + "body": [ + { + "name": "your_large_person_group_name", + "userData": "your_user_data", + "recognitionModel": "recognition_01", + "largePersonGroupId": "your_large_person_group_id" + } + ] + } + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/PersonGroupOperations_GetPersonGroup.json b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/PersonGroupOperations_GetPersonGroup.json new file mode 100644 index 000000000000..4100a7353fd5 --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/PersonGroupOperations_GetPersonGroup.json @@ -0,0 +1,19 @@ +{ + "title": "Get PersonGroup", + "operationId": "PersonGroupOperations_GetPersonGroup", + "parameters": { + "apiVersion": "v1.3-preview.1", + "personGroupId": "your_person_group_id", + "returnRecognitionModel": true + }, + "responses": { + "200": { + "body": { + "name": "your_person_group_name", + "userData": "your_user_data", + "recognitionModel": "recognition_01", + "personGroupId": "your_person_group_id" + } + } + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/PersonGroupOperations_GetPersonGroupPerson.json b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/PersonGroupOperations_GetPersonGroupPerson.json new file mode 100644 index 000000000000..6f994263f961 --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/PersonGroupOperations_GetPersonGroupPerson.json @@ -0,0 +1,21 @@ +{ + "title": "Get Person from PersonGroup", + "operationId": 
"PersonGroupOperations_GetPersonGroupPerson", + "parameters": { + "apiVersion": "v1.3-preview.1", + "personGroupId": "your_person_group_id", + "personId": "25985303-c537-4467-b41d-bdb45cd95ca1" + }, + "responses": { + "200": { + "body": { + "personId": "25985303-c537-4467-b41d-bdb45cd95ca1", + "name": "your_person_group_person_name", + "userData": "your_user_data", + "persistedFaceIds": [ + "43897a75-8d6f-42cf-885e-74832febb055" + ] + } + } + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/PersonGroupOperations_GetPersonGroupPersonFace.json b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/PersonGroupOperations_GetPersonGroupPersonFace.json new file mode 100644 index 000000000000..ec3e2bce1e57 --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/PersonGroupOperations_GetPersonGroupPersonFace.json @@ -0,0 +1,18 @@ +{ + "title": "Get Face form PersonGroup Person", + "operationId": "PersonGroupOperations_GetPersonGroupPersonFace", + "parameters": { + "apiVersion": "v1.3-preview.1", + "personGroupId": "your_person_group_id", + "personId": "25985303-c537-4467-b41d-bdb45cd95ca1", + "persistedFaceId": "43897a75-8d6f-42cf-885e-74832febb055" + }, + "responses": { + "200": { + "body": { + "persistedFaceId": "43897a75-8d6f-42cf-885e-74832febb055", + "userData": "your_user_data" + } + } + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/PersonGroupOperations_GetPersonGroupPersons.json b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/PersonGroupOperations_GetPersonGroupPersons.json new file mode 100644 index 000000000000..424721fd7a3d --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/PersonGroupOperations_GetPersonGroupPersons.json @@ -0,0 +1,24 @@ +{ + "title": "Get Persons from PersonGroup", + "operationId": "PersonGroupOperations_GetPersonGroupPersons", + "parameters": { + "apiVersion": "v1.3-preview.1", + 
"personGroupId": "your_person_group_id", + "start": "00000000-0000-0000-0000-000000000000", + "top": 20 + }, + "responses": { + "200": { + "body": [ + { + "personId": "25985303-c537-4467-b41d-bdb45cd95ca1", + "name": "your_person_group_person_name", + "userData": "your_user_data", + "persistedFaceIds": [ + "43897a75-8d6f-42cf-885e-74832febb055" + ] + } + ] + } + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/PersonGroupOperations_GetPersonGroupTrainingStatus.json b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/PersonGroupOperations_GetPersonGroupTrainingStatus.json new file mode 100644 index 000000000000..ab2ef36a910c --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/PersonGroupOperations_GetPersonGroupTrainingStatus.json @@ -0,0 +1,19 @@ +{ + "title": "Get Training Status of PersonGroup", + "operationId": "PersonGroupOperations_GetPersonGroupTrainingStatus", + "parameters": { + "apiVersion": "v1.3-preview.1", + "personGroupId": "your_person_group_id" + }, + "responses": { + "200": { + "body": { + "status": "notStarted", + "createdDateTime": "2024-03-05T11:07:58.371Z", + "lastActionDateTime": "2024-03-05T11:07:58.371Z", + "lastSuccessfulTrainingDateTime": "2024-03-05T11:07:58.371Z", + "message": null + } + } + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/PersonGroupOperations_GetPersonGroups.json b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/PersonGroupOperations_GetPersonGroups.json new file mode 100644 index 000000000000..a92fb21ec63e --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/PersonGroupOperations_GetPersonGroups.json @@ -0,0 +1,22 @@ +{ + "title": "Get PersonGroups", + "operationId": "PersonGroupOperations_GetPersonGroups", + "parameters": { + "apiVersion": "v1.3-preview.1", + "start": "00000000-0000-0000-0000-000000000000", + "top": 20, + "returnRecognitionModel": true + 
}, + "responses": { + "200": { + "body": [ + { + "name": "your_person_group_name", + "userData": "your_user_data", + "recognitionModel": "recognition_01", + "personGroupId": "your_person_group_id" + } + ] + } + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/PersonGroupOperations_TrainLargePersonGroup.json b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/PersonGroupOperations_TrainLargePersonGroup.json new file mode 100644 index 000000000000..a7aba2f271d1 --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/PersonGroupOperations_TrainLargePersonGroup.json @@ -0,0 +1,15 @@ +{ + "title": "Train LargePersonGroup", + "operationId": "PersonGroupOperations_TrainLargePersonGroup", + "parameters": { + "apiVersion": "v1.3-preview.1", + "largePersonGroupId": "your_large_person_group_id" + }, + "responses": { + "202": { + "headers": { + "operation-Location": "https://contoso.com/operationstatus" + } + } + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/PersonGroupOperations_TrainPersonGroup.json b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/PersonGroupOperations_TrainPersonGroup.json new file mode 100644 index 000000000000..7174943ec9f6 --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/PersonGroupOperations_TrainPersonGroup.json @@ -0,0 +1,15 @@ +{ + "title": "Train PersonGroup", + "operationId": "PersonGroupOperations_TrainPersonGroup", + "parameters": { + "apiVersion": "v1.3-preview.1", + "personGroupId": "your_person_group_id" + }, + "responses": { + "202": { + "headers": { + "operation-Location": "https://contoso.com/operationstatus" + } + } + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/PersonGroupOperations_UpdateLargePersonGroup.json b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/PersonGroupOperations_UpdateLargePersonGroup.json new file 
mode 100644 index 000000000000..6bce2e96a32b --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/PersonGroupOperations_UpdateLargePersonGroup.json @@ -0,0 +1,15 @@ +{ + "title": "Update LargePersonGroup", + "operationId": "PersonGroupOperations_UpdateLargePersonGroup", + "parameters": { + "apiVersion": "v1.3-preview.1", + "largePersonGroupId": "your_large_person_group_id", + "body": { + "name": "your_large_person_group_name", + "userData": "your_user_data" + } + }, + "responses": { + "200": {} + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/PersonGroupOperations_UpdateLargePersonGroupPerson.json b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/PersonGroupOperations_UpdateLargePersonGroupPerson.json new file mode 100644 index 000000000000..f50e6d43cb65 --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/PersonGroupOperations_UpdateLargePersonGroupPerson.json @@ -0,0 +1,16 @@ +{ + "title": "Update Person in LargePersonGroup", + "operationId": "PersonGroupOperations_UpdateLargePersonGroupPerson", + "parameters": { + "apiVersion": "v1.3-preview.1", + "largePersonGroupId": "your_large_person_group_id", + "personId": "25985303-c537-4467-b41d-bdb45cd95ca1", + "body": { + "name": "your_large_person_group_person_name", + "userData": "your_user_data" + } + }, + "responses": { + "200": {} + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/PersonGroupOperations_UpdateLargePersonGroupPersonFace.json b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/PersonGroupOperations_UpdateLargePersonGroupPersonFace.json new file mode 100644 index 000000000000..3688f758ce99 --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/PersonGroupOperations_UpdateLargePersonGroupPersonFace.json @@ -0,0 +1,16 @@ +{ + "title": "Update Face in LargePersonGroup Person", + "operationId": 
"PersonGroupOperations_UpdateLargePersonGroupPersonFace", + "parameters": { + "apiVersion": "v1.3-preview.1", + "largePersonGroupId": "your_large_person_group_id", + "personId": "25985303-c537-4467-b41d-bdb45cd95ca1", + "persistedFaceId": "43897a75-8d6f-42cf-885e-74832febb055", + "body": { + "userData": "your_user_data" + } + }, + "responses": { + "200": {} + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/PersonGroupOperations_UpdatePersonGroup.json b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/PersonGroupOperations_UpdatePersonGroup.json new file mode 100644 index 000000000000..a909f776c4a0 --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/PersonGroupOperations_UpdatePersonGroup.json @@ -0,0 +1,15 @@ +{ + "title": "Update PersonGroup", + "operationId": "PersonGroupOperations_UpdatePersonGroup", + "parameters": { + "apiVersion": "v1.3-preview.1", + "personGroupId": "your_person_group_id", + "body": { + "name": "your_person_group_name", + "userData": "your_user_data" + } + }, + "responses": { + "200": {} + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/PersonGroupOperations_UpdatePersonGroupPerson.json b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/PersonGroupOperations_UpdatePersonGroupPerson.json new file mode 100644 index 000000000000..fc3597e8186f --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/PersonGroupOperations_UpdatePersonGroupPerson.json @@ -0,0 +1,16 @@ +{ + "title": "Update PersonGroup Person", + "operationId": "PersonGroupOperations_UpdatePersonGroupPerson", + "parameters": { + "apiVersion": "v1.3-preview.1", + "personGroupId": "your_person_group_id", + "personId": "25985303-c537-4467-b41d-bdb45cd95ca1", + "body": { + "name": "your_person_group_person_name", + "userData": "your_user_data" + } + }, + "responses": { + "200": {} + } +} diff --git 
a/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/PersonGroupOperations_UpdatePersonGroupPersonFace.json b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/PersonGroupOperations_UpdatePersonGroupPersonFace.json new file mode 100644 index 000000000000..2c6eccce0140 --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.3-preview.1/examples/PersonGroupOperations_UpdatePersonGroupPersonFace.json @@ -0,0 +1,16 @@ +{ + "title": "Update Face in PersonGroup Person", + "operationId": "PersonGroupOperations_UpdatePersonGroupPersonFace", + "parameters": { + "apiVersion": "v1.3-preview.1", + "personGroupId": "your_person_group_id", + "personId": "25985303-c537-4467-b41d-bdb45cd95ca1", + "persistedFaceId": "43897a75-8d6f-42cf-885e-74832febb055", + "body": { + "userData": "your_user_data" + } + }, + "responses": { + "200": {} + } +} diff --git a/specification/ai/data-plane/Face/readme.md b/specification/ai/data-plane/Face/readme.md index 769de06b7617..8eb4f48dd24a 100644 --- a/specification/ai/data-plane/Face/readme.md +++ b/specification/ai/data-plane/Face/readme.md @@ -4,11 +4,11 @@ Configuration for generating Face SDK. -The current release is `v1.2`. +The current release is `v1.3-preview.1`. ``` yaml -tag: v1.2 +tag: v1.3-preview.1 add-credentials: true openapi-type: data-plane ``` @@ -73,3 +73,14 @@ suppressions: from: Face.json reason: Use anonymous parameter to provide interface with flatten parameters ``` + +### Release v1.3-preview.1 +These settings apply only when `--tag=v1.3-preview.1` is specified on the command line. 
+``` yaml $(tag) == 'v1.3-preview.1' +input-file: + - preview/v1.3-preview.1/Face.json +suppressions: + - code: AvoidAnonymousParameter + from: Face.json + reason: Use anonymous parameter to provide interface with flatten parameters +``` \ No newline at end of file diff --git a/specification/ai/data-plane/Face/stable/v1.2/Face.json b/specification/ai/data-plane/Face/stable/v1.2/Face.json index 3f03411b409a..009aee824cf5 100644 --- a/specification/ai/data-plane/Face/stable/v1.2/Face.json +++ b/specification/ai/data-plane/Face/stable/v1.2/Face.json @@ -349,7 +349,7 @@ "post": { "operationId": "LivenessSessionOperations_CreateLivenessSession", "summary": "Create a new detect liveness session.", - "description": "A session is best for client device scenarios where developers want to authorize a client device to perform only a liveness detection without granting full access to their resource. Created sessions have a limited life span and only authorize clients to perform the desired action before access is expired.\n\nPermissions includes...\n>\n*\n * Ability to call /detectLiveness/singleModal for up to 3 retries.\n * A token lifetime of 10 minutes.\n\n> [!NOTE]\n> Client access can be revoked by deleting the session using the Delete Liveness Session operation. To retrieve a result, use the Get Liveness Session. To audit the individual requests that a client has made to your resource, use the List Liveness Session Audit Entries.", + "description": "A session is best for client device scenarios where developers want to authorize a client device to perform only a liveness detection without granting full access to their resource. Created sessions have a limited life span and only authorize clients to perform the desired action before access is expired.\n\nPermissions includes...\n>\n*\n * A token lifetime of 10 minutes.\n\n> [!NOTE]\n> Client access can be revoked by deleting the session using the Delete Liveness Session operation. 
To retrieve a result, use the Get Liveness Session. To audit the individual requests that a client has made to your resource, use the List Liveness Session Audit Entries.", "parameters": [ { "name": "body", @@ -468,7 +468,7 @@ "post": { "operationId": "LivenessSessionOperations_CreateLivenessWithVerifySession", "summary": "Create a new liveness session with verify. Provide the verify image during session creation.", - "description": "A session is best for client device scenarios where developers want to authorize a client device to perform only a liveness detection without granting full access to their resource. Created sessions have a limited life span and only authorize clients to perform the desired action before access is expired.\n\nPermissions includes...\n>\n*\n * Ability to call /detectLivenessWithVerify/singleModal for up to 3 retries.\n * A token lifetime of 10 minutes.\n\n> [!NOTE]\n>\n> *\n> * Client access can be revoked by deleting the session using the Delete Liveness With Verify Session operation.\n> * To retrieve a result, use the Get Liveness With Verify Session.\n> * To audit the individual requests that a client has made to your resource, use the List Liveness With Verify Session Audit Entries.", + "description": "A session is best for client device scenarios where developers want to authorize a client device to perform only a liveness detection without granting full access to their resource. 
Created sessions have a limited life span and only authorize clients to perform the desired action before access is expired.\n\nPermissions includes...\n>\n*\n * A token lifetime of 10 minutes.\n\n> [!NOTE]\n>\n> *\n> * Client access can be revoked by deleting the session using the Delete Liveness With Verify Session operation.\n> * To retrieve a result, use the Get Liveness With Verify Session.\n> * To audit the individual requests that a client has made to your resource, use the List Liveness With Verify Session Audit Entries.", "consumes": [ "multipart/form-data" ],