From dc610b20bfe0cd7ba400fbcad4a6b833d4b8d91e Mon Sep 17 00:00:00 2001 From: Jiayuan Shi Date: Fri, 16 Oct 2020 14:54:00 -0700 Subject: [PATCH 1/8] v3.1-preview.2 --- .../preview/v3.2-preview/Ocr.json | 500 ++++++++++++++++++ .../examples/SuccessfulGetReadResult.json | 385 ++++++++++++++ .../examples/SuccessfulReadWithStream.json | 19 + .../examples/SuccessfulReadWithUrl.json | 21 + 4 files changed, 925 insertions(+) create mode 100644 specification/cognitiveservices/data-plane/ComputerVision/preview/v3.2-preview/Ocr.json create mode 100644 specification/cognitiveservices/data-plane/ComputerVision/preview/v3.2-preview/examples/SuccessfulGetReadResult.json create mode 100644 specification/cognitiveservices/data-plane/ComputerVision/preview/v3.2-preview/examples/SuccessfulReadWithStream.json create mode 100644 specification/cognitiveservices/data-plane/ComputerVision/preview/v3.2-preview/examples/SuccessfulReadWithUrl.json diff --git a/specification/cognitiveservices/data-plane/ComputerVision/preview/v3.2-preview/Ocr.json b/specification/cognitiveservices/data-plane/ComputerVision/preview/v3.2-preview/Ocr.json new file mode 100644 index 000000000000..2cd2d43ed37c --- /dev/null +++ b/specification/cognitiveservices/data-plane/ComputerVision/preview/v3.2-preview/Ocr.json @@ -0,0 +1,500 @@ +{ + "swagger": "2.0", + "info": { + "version": "3.1-preview.2", + "title": "Computer Vision Client", + "description": "The Computer Vision API provides state-of-the-art algorithms to process images and return information. For example, it can be used to determine if an image contains mature content, or it can be used to find all the faces in an image. It also has other features like estimating dominant and accent colors, categorizing the content of images, and describing an image with complete English sentences. Additionally, it can also intelligently generate images thumbnails for displaying large images effectively." + }, + "securityDefinitions": { + "apim_key": { + "type": "apiKey", + "name": "Ocp-Apim-Subscription-Key", + "in": "header" + } + }, + "security": [ + { + "apim_key": [] + } + ], + "x-ms-parameterized-host": { + "hostTemplate": "{Endpoint}", + "useSchemePrefix": false, + "parameters": [ + { + "$ref": "#/parameters/Endpoint" + } + ] + }, + "host": "westcentralus.api.cognitive.microsoft.com", + "basePath": "/vision/v3.1-preview.2", + "schemes": [ + "https" + ], + "paths": { + "/read/analyze": { + "post": { + "description": "Use this interface to get the result of a Read operation, employing the state-of-the-art Optical Character Recognition (OCR) algorithms optimized for text-heavy documents. When you use the Read interface, the response contains a field called 'Operation-Location'. The 'Operation-Location' field contains the URL that you must use for your 'GetReadResult' operation to access OCR results.​", + "operationId": "Read", + "parameters": [ + { + "$ref": "#/parameters/OcrDetectionLanguage" + }, + { + "$ref": "#/parameters/ImageUrl" + }, + { + "$ref": "#/parameters/Pages" + } + ], + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "responses": { + "202": { + "description": "The service has accepted the request and will start processing later.", + "headers": { + "Operation-Location": { + "description": "URL to query for status of the operation. The operation ID will expire in 48 hours. 
", + "type": "string" + } + } + }, + "default": { + "description": "Error response.", + "schema": { + "$ref": "#/definitions/ComputerVisionError" + } + } + }, + "x-ms-examples": { + "Successful Analyze request": { + "$ref": "./examples/SuccessfulReadWithUrl.json" + } + } + } + }, + "/read/analyzeResults/{operationId}": { + "get": { + "description": "This interface is used for getting OCR results of Read operation. The URL to this interface should be retrieved from 'Operation-Location' field returned from Read interface.", + "operationId": "GetReadResult", + "parameters": [ + { + "name": "operationId", + "in": "path", + "description": "Id of read operation returned in the response of the 'Read' interface.", + "required": true, + "type": "string", + "format": "uuid" + } + ], + "produces": [ + "application/json" + ], + "responses": { + "200": { + "description": "Returns the read operation status.", + "schema": { + "$ref": "#/definitions/ReadOperationResult" + } + }, + "default": { + "description": "Error response.", + "schema": { + "$ref": "#/definitions/ComputerVisionError" + } + } + }, + "x-ms-examples": { + "Successful Get Read Result request": { + "$ref": "./examples/SuccessfulGetReadResult.json" + } + } + } + } + }, + "x-ms-paths": { + "/read/analyze?overload=stream": { + "post": { + "description": "Use this interface to get the result of a Read operation, employing the state-of-the-art Optical Character Recognition (OCR) algorithms optimized for text-heavy documents. When you use the Read interface, the response contains a field called 'Operation-Location'. The 'Operation-Location' field contains the URL that you must use for your 'GetReadResult' operation to access OCR results.​", + "operationId": "ReadInStream", + "parameters": [ + { + "$ref": "#/parameters/OcrDetectionLanguage" + }, + { + "$ref": "#/parameters/ImageStream" + }, + { + "$ref": "#/parameters/Pages" + } + ], + "consumes": [ + "application/octet-stream" + ], + "produces": [ + "application/json" + ], + "responses": { + "202": { + "description": "The service has accepted the request and will start processing later.", + "headers": { + "Operation-Location": { + "description": "URL to query for status of the operation. The operation ID will expire in 48 hours. 
", + "type": "string" + } + } + }, + "default": { + "description": "Error response.", + "schema": { + "$ref": "#/definitions/ComputerVisionError" + } + } + }, + "x-ms-examples": { + "Successful Analyze request": { + "$ref": "./examples/SuccessfulReadWithStream.json" + } + } + } + } + }, + "definitions": { + "ReadOperationResult": { + "type": "object", + "description": "OCR result of the read operation.", + "properties": { + "status": { + "description": "Status of the read operation.", + "$ref": "#/definitions/OperationStatus" + }, + "createdDateTime": { + "type": "string", + "description": "Get UTC date time the batch operation was submitted.", + "x-nullable": false + }, + "lastUpdatedDateTime": { + "type": "string", + "description": "Get last updated UTC date time of this batch operation.", + "x-nullable": false + }, + "analyzeResult": { + "description": "Analyze batch operation result.", + "type": "object", + "$ref": "#/definitions/analyzeResults" + } + } + }, + "OperationStatus": { + "type": "string", + "description": "Status code of the text operation.", + "enum": [ + "notStarted", + "running", + "failed", + "succeeded" + ], + "x-ms-enum": { + "name": "OperationStatusCodes", + "modelAsString": false + }, + "x-nullable": false + }, + "ReadResult": { + "description": "Text extracted from a page in the input document.", + "type": "object", + "required": [ + "page", + "angle", + "width", + "height", + "unit", + "lines" + ], + "properties": { + "page": { + "description": "The 1-based page number of the recognition result.", + "type": "integer" + }, + "language": { + "description": "The BCP-47 language code of the recognized text page.", + "type": "string" + }, + "angle": { + "description": "The orientation of the image in degrees in the clockwise direction. Range between [-180, 180).", + "type": "number" + }, + "width": { + "description": "The width of the image in pixels or the PDF in inches.", + "type": "number" + }, + "height": { + "description": "The height of the image in pixels or the PDF in inches.", + "type": "number" + }, + "unit": { + "description": "The unit used in the Width, Height and BoundingBox. For images, the unit is 'pixel'. For PDF, the unit is 'inch'.", + "type": "string", + "enum": [ + "pixel", + "inch" + ], + "x-ms-enum": { + "name": "TextRecognitionResultDimensionUnit", + "modelAsString": false + }, + "x-nullable": false + }, + "lines": { + "description": "A list of recognized text lines.", + "type": "array", + "items": { + "$ref": "#/definitions/Line" + } + } + } + }, + "analyzeResults": { + "description": "Analyze batch operation result.", + "type": "object", + "required": [ + "version", + "readResults" + ], + "properties": { + "version": { + "description": "Version of schema used for this result.", + "type": "string" + }, + "readResults": { + "description": "Text extracted from the input.", + "type": "array", + "items": { + "$ref": "#/definitions/ReadResult" + } + } + } + }, + "Line": { + "description": "An object representing a recognized text line.", + "type": "object", + "required": [ + "boundingBox", + "text", + "words" + ], + "properties": { + "language": { + "description": "The BCP-47 language code of the recognized text line. 
Only provided where the language of the line differs from the page's.", + "type": "string" + }, + "boundingBox": { + "description": "Bounding box of a recognized line.", + "$ref": "#/definitions/BoundingBox" + }, + "appearance": { + "description": "Style and styleConfidence of the text line.", + "type": "object", + "$ref": "#/definitions/Appearance" + }, + "text": { + "description": "The text content of the line.", + "type": "string" + }, + "words": { + "description": "List of words in the text line.", + "type": "array", + "items": { + "$ref": "#/definitions/Word" + } + } + } + }, + "Word": { + "description": "An object representing a recognized word.", + "type": "object", + "required": [ + "boundingBox", + "text", + "confidence" + ], + "properties": { + "boundingBox": { + "description": "Bounding box of a recognized word.", + "$ref": "#/definitions/BoundingBox" + }, + "text": { + "description": "The text content of the word.", + "type": "string" + }, + "confidence": { + "description": "Qualitative confidence measure.", + "type": "number", + "format": "float" + } + } + }, + "BoundingBox": { + "description": "Quadrangle bounding box, with coordinates in original image. The eight numbers represent the four points (x-coordinate, y-coordinate from the left-top corner of the image) of the detected rectangle from the left-top corner in the clockwise direction. For images, coordinates are in pixels. For PDF, coordinates are in inches.", + "type": "array", + "items": { + "type": "number", + "x-nullable": false + } + }, + "Appearance": { + "description": "An object representing the style and styleConfidence.", + "type": "object", + "required": [ + "style", + "styleConfidence" + ], + "properties": { + "style": { + "description": "The text line style.", + "type": "string", + "enum": [ + "handwriting", + "print" + ] + }, + "styleConfidence": { + "description": "The confidence of text line style.", + "type": "number", + "format": "float" + } + } + }, + "ComputerVisionError": { + "description": "Details about the API request error.", + "required": [ + "code", + "message" + ], + "type": "object", + "properties": { + "code": { + "description": "The error code.", + "enum": [ + "InvalidImageFormat", + "UnsupportedMediaType", + "InvalidImageUrl", + "NotSupportedFeature", + "NotSupportedImage", + "Timeout", + "InternalServerError", + "InvalidImageSize", + "BadArgument", + "DetectFaceError", + "NotSupportedLanguage", + "InvalidThumbnailSize", + "InvalidDetails", + "InvalidModel", + "CancelledRequest", + "NotSupportedVisualFeature", + "FailedToProcess", + "Unspecified", + "StorageException", + "InvalidPageRange" + ], + "x-ms-enum": { + "name": "ComputerVisionErrorCodes", + "modelAsString": true + } + }, + "message": { + "description": "A message explaining the error reported by the service.", + "type": "string" + }, + "requestId": { + "description": "A unique request identifier.", + "type": "string" + } + } + }, + "ImageUrl": { + "type": "object", + "required": [ + "url" + ], + "properties": { + "url": { + "description": "Publicly reachable URL of an image.", + "type": "string" + } + } + } + }, + "parameters": { + "OcrDetectionLanguage": { + "name": "language", + "in": "query", + "description": "The BCP-47 language code of the text in the document. Currently, only English ('en'), Dutch ('nl'), French ('fr'), German ('de'), Italian ('it'), Portuguese ('pt'), and Spanish ('es') are supported. 
Read supports auto language identification and multi-language documents, so only provide a language code if you would like to force the documented to be processed as that specific language.", + "required": false, + "default": "en", + "x-ms-parameter-location": "method", + "type": "string", + "x-ms-enum": { + "name": "OcrDetectionLanguage", + "modelAsString": true + }, + "enum": [ + "en", + "es", + "fr", + "de", + "it", + "nl", + "pt" + ] + }, + "ImageUrl": { + "name": "ImageUrl", + "in": "body", + "required": true, + "x-ms-parameter-location": "method", + "x-ms-client-flatten": true, + "description": "A JSON document with a URL pointing to the image that is to be analyzed.", + "schema": { + "$ref": "#/definitions/ImageUrl" + } + }, + "ImageStream": { + "name": "Image", + "in": "body", + "required": true, + "x-ms-parameter-location": "method", + "description": "An image stream.", + "schema": { + "type": "object", + "format": "file" + } + }, + "Endpoint": { + "name": "Endpoint", + "description": "Supported Cognitive Services endpoints.", + "x-ms-parameter-location": "client", + "required": true, + "type": "string", + "in": "path", + "x-ms-skip-url-encoding": true + }, + "Pages": { + "name": "Pages", + "in": "query", + "description": "Custom page numbers for multi-page documents(PDF/TIFF), input the number of the pages you want to get OCR result. For a range of pages, use a hyphen. Separate each page or range with a comma or space.", + "required": false, + "x-ms-parameter-location": "method", + "type": "array", + "items": { + "type": "string", + "pattern": "(^[0-9]+-[0-9]+$)|(^[0-9]+$)" + }, + "collectionFormat": "csv" + } + } +} diff --git a/specification/cognitiveservices/data-plane/ComputerVision/preview/v3.2-preview/examples/SuccessfulGetReadResult.json b/specification/cognitiveservices/data-plane/ComputerVision/preview/v3.2-preview/examples/SuccessfulGetReadResult.json new file mode 100644 index 000000000000..a6ad36931547 --- /dev/null +++ b/specification/cognitiveservices/data-plane/ComputerVision/preview/v3.2-preview/examples/SuccessfulGetReadResult.json @@ -0,0 +1,385 @@ +{ + "parameters": { + "Endpoint": "{Endpoint}", + "Ocp-Apim-Subscription-Key": "{API key}", + "operationId": "e56ffa6e-1ee4-4042-bc07-993db706c95f" + }, + "responses": { + "200": { + "headers": {}, + "body": { + "status": "succeeded", + "createdDateTime": "2019-10-03T14:32:04.236Z", + "lastUpdatedDateTime": "2019-10-03T14:38:14.852Z", + "analyzeResult": { + "version": "v3.1", + "readResults": [ + { + "page": 1, + "language": "en", + "angle": 49.59, + "width": 600, + "height": 400, + "unit": "pixel", + "lines": [ + { + "boundingBox": [ + 202, + 618, + 2047, + 643, + 2046, + 840, + 200, + 813 + ], + "appearance": { + "style": "print", + "styleConfidence": 0.995 + }, + "text": "Our greatest glory is not", + "words": [ + { + "boundingBox": [ + 204, + 627, + 481, + 628, + 481, + 830, + 204, + 829 + ], + "text": "Our", + "confidence": 0.164 + }, + { + "boundingBox": [ + 519, + 628, + 1057, + 630, + 1057, + 832, + 518, + 830 + ], + "text": "greatest", + "confidence": 0.164 + }, + { + "boundingBox": [ + 1114, + 630, + 1549, + 631, + 1548, + 833, + 1114, + 832 + ], + "text": "glory", + "confidence": 0.164 + }, + { + "boundingBox": [ + 1586, + 631, + 1785, + 632, + 1784, + 834, + 1586, + 833 + ], + "text": "is", + "confidence": 0.164 + }, + { + "boundingBox": [ + 1822, + 632, + 2115, + 633, + 2115, + 835, + 1822, + 834 + ], + "text": "not", + "confidence": 0.164 + } + ] + }, + { + "boundingBox": [ + 420, + 1273, + 2954, + 
1250, + 2958, + 1488, + 422, + 1511 + ], + "appearance": { + "style": "handwriting", + "styleConfidence": 0.985 + }, + "text": "but in rising every time we fall", + "words": [ + { + "boundingBox": [ + 423, + 1269, + 634, + 1268, + 635, + 1507, + 424, + 1508 + ], + "text": "but", + "confidence": 0.164 + }, + { + "boundingBox": [ + 667, + 1268, + 808, + 1268, + 809, + 1506, + 668, + 1507 + ], + "text": "in", + "confidence": 0.164 + }, + { + "boundingBox": [ + 874, + 1267, + 1289, + 1265, + 1290, + 1504, + 875, + 1506 + ], + "text": "rising", + "confidence": 0.164 + }, + { + "boundingBox": [ + 1331, + 1265, + 1771, + 1263, + 1772, + 1502, + 1332, + 1504 + ], + "text": "every", + "confidence": 0.164 + }, + { + "boundingBox": [ + 1812, + 1263, + 2178, + 1261, + 2179, + 1500, + 1813, + 1502 + ], + "text": "time", + "confidence": 0.164 + }, + { + "boundingBox": [ + 2219, + 1261, + 2510, + 1260, + 2511, + 1498, + 2220, + 1500 + ], + "text": "we", + "confidence": 0.164 + }, + { + "boundingBox": [ + 2551, + 1260, + 3016, + 1258, + 3017, + 1496, + 2552, + 1498 + ], + "text": "fall", + "confidence": 0.164 + } + ] + }, + { + "language": "es", + "boundingBox": [ + 1612, + 903, + 2744, + 935, + 2738, + 1139, + 1607, + 1107 + ], + "appearance": { + "style": "print", + "styleConfidence": 0.995 + }, + "text": "Viva la vida", + "words": [ + { + "boundingBox": [ + 323, + 454, + 416, + 449, + 418, + 494, + 325, + 501 + ], + "text": "Viva", + "confidence": 0.164 + }, + { + "boundingBox": [ + 92, + 550, + 429, + 541, + 430, + 591, + 94, + 600 + ], + "text": "la", + "confidence": 0.164 + }, + { + "boundingBox": [ + 58, + 466, + 268, + 458, + 270, + 505, + 161, + 512 + ], + "text": "vida", + "confidence": 0.164 + } + ] + } + ] + }, + { + "page": 2, + "language": "en", + "angle": 1.32, + "width": 600, + "height": 400, + "unit": "pixel", + "lines": [ + { + "boundingBox": [ + 1612, + 903, + 2744, + 935, + 2738, + 1139, + 1607, + 1107 + ], + "appearance": { + "style": "handwriting", + "styleConfidence": 0.855 + }, + "text": "in never failing ,", + "words": [ + { + "boundingBox": [ + 1611, + 934, + 1707, + 933, + 1708, + 1147, + 1613, + 1147 + ], + "text": "in", + "confidence": 0.164 + }, + { + "boundingBox": [ + 1753, + 933, + 2132, + 930, + 2133, + 1144, + 1754, + 1146 + ], + "text": "never", + "confidence": 0.999 + }, + { + "boundingBox": [ + 2162, + 930, + 2673, + 927, + 2674, + 1140, + 2164, + 1144 + ], + "text": "failing", + "confidence": 0.164 + }, + { + "boundingBox": [ + 2703, + 926, + 2788, + 926, + 2790, + 1139, + 2705, + 1140 + ], + "text": ",", + "confidence": 0.164 + } + ] + } + ] + } + ] + } + } + } + } +} diff --git a/specification/cognitiveservices/data-plane/ComputerVision/preview/v3.2-preview/examples/SuccessfulReadWithStream.json b/specification/cognitiveservices/data-plane/ComputerVision/preview/v3.2-preview/examples/SuccessfulReadWithStream.json new file mode 100644 index 000000000000..dc0a4f1675a1 --- /dev/null +++ b/specification/cognitiveservices/data-plane/ComputerVision/preview/v3.2-preview/examples/SuccessfulReadWithStream.json @@ -0,0 +1,19 @@ +{ + "parameters": { + "Endpoint": "{Endpoint}", + "Ocp-Apim-Subscription-Key": "{API key}", + "language": "en", + "Image": "{binary}", + "Pages": [ + "2", + "3" + ] + }, + "responses": { + "202": { + "header": { + "location": "https://{domain}/vision/v3.1/read/e56ffa6e-1ee4-4042-bc07-993db706c95f" + } + } + } +} diff --git a/specification/cognitiveservices/data-plane/ComputerVision/preview/v3.2-preview/examples/SuccessfulReadWithUrl.json 
b/specification/cognitiveservices/data-plane/ComputerVision/preview/v3.2-preview/examples/SuccessfulReadWithUrl.json new file mode 100644 index 000000000000..a8d69fc50abe --- /dev/null +++ b/specification/cognitiveservices/data-plane/ComputerVision/preview/v3.2-preview/examples/SuccessfulReadWithUrl.json @@ -0,0 +1,21 @@ +{ + "parameters": { + "Endpoint": "{Endpoint}", + "Ocp-Apim-Subscription-Key": "{API key}", + "language": "en", + "ImageUrl": { + "url": "{url}" + }, + "Pages": [ + "2", + "3" + ] + }, + "responses": { + "202": { + "header": { + "Operation-Location": "https://{domain}/vision/v3.1/read/e56ffa6e-1ee4-4042-bc07-993db706c95f" + } + } + } +} From 2533f20037406396742d80afd66338903430d3fe Mon Sep 17 00:00:00 2001 From: Jiayuan Shi Date: Fri, 16 Oct 2020 15:12:32 -0700 Subject: [PATCH 2/8] v3.2-preview change appearance format --- .../preview/v3.2-preview/Ocr.json | 28 ++++++++++++++----- .../examples/SuccessfulGetReadResult.json | 24 ++++++++++------ 2 files changed, 37 insertions(+), 15 deletions(-) diff --git a/specification/cognitiveservices/data-plane/ComputerVision/preview/v3.2-preview/Ocr.json b/specification/cognitiveservices/data-plane/ComputerVision/preview/v3.2-preview/Ocr.json index 2cd2d43ed37c..95ae51c269a3 100644 --- a/specification/cognitiveservices/data-plane/ComputerVision/preview/v3.2-preview/Ocr.json +++ b/specification/cognitiveservices/data-plane/ComputerVision/preview/v3.2-preview/Ocr.json @@ -297,7 +297,7 @@ "$ref": "#/definitions/BoundingBox" }, "appearance": { - "description": "Style and styleConfidence of the text line.", + "description": "Appearance of the text line.", "type": "object", "$ref": "#/definitions/Appearance" }, @@ -347,22 +347,36 @@ } }, "Appearance": { - "description": "An object representing the style and styleConfidence.", + "description": "An object representing the appearance of the text line.", "type": "object", "required": [ - "style", - "styleConfidence" + "style" ], "properties": { "style": { - "description": "The text line style.", + "description": "An object representing the Style of the text line.", + "type": "object", + "$ref": "#/definitions/Style" + } + } + }, + "Style": { + "description": "An object representing the style of the text line.", + "type": "object", + "required": [ + "name", + "confidence" + ], + "properties": { + "name": { + "description": "The text line style name.", "type": "string", "enum": [ "handwriting", - "print" + "other" ] }, - "styleConfidence": { + "confidence": { "description": "The confidence of text line style.", "type": "number", "format": "float" diff --git a/specification/cognitiveservices/data-plane/ComputerVision/preview/v3.2-preview/examples/SuccessfulGetReadResult.json b/specification/cognitiveservices/data-plane/ComputerVision/preview/v3.2-preview/examples/SuccessfulGetReadResult.json index a6ad36931547..8e0f357195db 100644 --- a/specification/cognitiveservices/data-plane/ComputerVision/preview/v3.2-preview/examples/SuccessfulGetReadResult.json +++ b/specification/cognitiveservices/data-plane/ComputerVision/preview/v3.2-preview/examples/SuccessfulGetReadResult.json @@ -34,8 +34,10 @@ 813 ], "appearance": { - "style": "print", - "styleConfidence": 0.995 + "style": { + "name" : "other", + "confidence": 0.995 + } }, "text": "Our greatest glory is not", "words": [ @@ -123,8 +125,10 @@ 1511 ], "appearance": { - "style": "handwriting", - "styleConfidence": 0.985 + "style": { + "name" : "handwriting", + "confidence": 0.985 + } }, "text": "but in rising every time we fall", "words": [ @@ -241,8 
+245,10 @@ 1107 ], "appearance": { - "style": "print", - "styleConfidence": 0.995 + "style": { + "name" : "other", + "confidence": 0.995 + } }, "text": "Viva la vida", "words": [ @@ -312,8 +318,10 @@ 1107 ], "appearance": { - "style": "handwriting", - "styleConfidence": 0.855 + "style": { + "name" : "handwriting", + "confidence": 0.855 + } }, "text": "in never failing ,", "words": [ From e3a6aa9a160cfe0c2b44ef82b9cf982489e026d0 Mon Sep 17 00:00:00 2001 From: Jiayuan Shi Date: Mon, 19 Oct 2020 16:51:19 -0700 Subject: [PATCH 3/8] change to solve comments --- .../ComputerVision/preview/v3.2-preview/Ocr.json | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/specification/cognitiveservices/data-plane/ComputerVision/preview/v3.2-preview/Ocr.json b/specification/cognitiveservices/data-plane/ComputerVision/preview/v3.2-preview/Ocr.json index 95ae51c269a3..e767ea6a9c24 100644 --- a/specification/cognitiveservices/data-plane/ComputerVision/preview/v3.2-preview/Ocr.json +++ b/specification/cognitiveservices/data-plane/ComputerVision/preview/v3.2-preview/Ocr.json @@ -354,7 +354,7 @@ ], "properties": { "style": { - "description": "An object representing the Style of the text line.", + "description": "An object representing the style of the text line.", "type": "object", "$ref": "#/definitions/Style" } @@ -369,8 +369,12 @@ ], "properties": { "name": { - "description": "The text line style name.", + "description": "The text line style name, inlcuding handwriting and other.", "type": "string", + "x-ms-enum": { + "name": "name", + "modelAsString": true + }, "enum": [ "handwriting", "other" From e4cf9b9ad7945224f295ed036e9d51bc4a32560c Mon Sep 17 00:00:00 2001 From: Jiayuan Shi Date: Mon, 19 Oct 2020 16:54:45 -0700 Subject: [PATCH 4/8] typo --- .../data-plane/ComputerVision/preview/v3.2-preview/Ocr.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/specification/cognitiveservices/data-plane/ComputerVision/preview/v3.2-preview/Ocr.json b/specification/cognitiveservices/data-plane/ComputerVision/preview/v3.2-preview/Ocr.json index e767ea6a9c24..f26f1d50864b 100644 --- a/specification/cognitiveservices/data-plane/ComputerVision/preview/v3.2-preview/Ocr.json +++ b/specification/cognitiveservices/data-plane/ComputerVision/preview/v3.2-preview/Ocr.json @@ -369,7 +369,7 @@ ], "properties": { "name": { - "description": "The text line style name, inlcuding handwriting and other.", + "description": "The text line style name, including handwriting and other.", "type": "string", "x-ms-enum": { "name": "name", From 6cceca7b09b9e16c92057f89e218e2800bc46666 Mon Sep 17 00:00:00 2001 From: Jiayuan Shi Date: Mon, 19 Oct 2020 17:07:26 -0700 Subject: [PATCH 5/8] Prettier --- .../v3.2-preview/examples/SuccessfulGetReadResult.json | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/specification/cognitiveservices/data-plane/ComputerVision/preview/v3.2-preview/examples/SuccessfulGetReadResult.json b/specification/cognitiveservices/data-plane/ComputerVision/preview/v3.2-preview/examples/SuccessfulGetReadResult.json index 8e0f357195db..201858e1d966 100644 --- a/specification/cognitiveservices/data-plane/ComputerVision/preview/v3.2-preview/examples/SuccessfulGetReadResult.json +++ b/specification/cognitiveservices/data-plane/ComputerVision/preview/v3.2-preview/examples/SuccessfulGetReadResult.json @@ -35,7 +35,7 @@ ], "appearance": { "style": { - "name" : "other", + "name": "other", "confidence": 0.995 } }, @@ -126,7 +126,7 @@ ], "appearance": { "style": { - "name" : 
"handwriting", + "name": "handwriting", "confidence": 0.985 } }, @@ -246,7 +246,7 @@ ], "appearance": { "style": { - "name" : "other", + "name": "other", "confidence": 0.995 } }, @@ -319,7 +319,7 @@ ], "appearance": { "style": { - "name" : "handwriting", + "name": "handwriting", "confidence": 0.855 } }, From 72ce593575dd0050a8c752a7ee63b7724f4c1665 Mon Sep 17 00:00:00 2001 From: Jiayuan Shi Date: Mon, 19 Oct 2020 17:24:00 -0700 Subject: [PATCH 6/8] update readme --- .../data-plane/ComputerVision/readme.md | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/specification/cognitiveservices/data-plane/ComputerVision/readme.md b/specification/cognitiveservices/data-plane/ComputerVision/readme.md index aa285d970a63..5a34539ce377 100644 --- a/specification/cognitiveservices/data-plane/ComputerVision/readme.md +++ b/specification/cognitiveservices/data-plane/ComputerVision/readme.md @@ -66,6 +66,14 @@ input-file: - stable/v3.1/Ocr.json ``` +### Release 3.2-preview +These settings apply only when `--tag=release_3_2_preview` is specified on the command line. + +``` yaml $(tag) == 'release_3_2_preview' +input-file: + - preview/v3.2-preview/Ocr.json +``` + ## Swagger to SDK This section describes what SDK should be generated by the automatic system. @@ -184,6 +192,7 @@ input-file: - $(this-folder)/stable/v3.0/ComputerVision.json - $(this-folder)/stable/v3.0/Ocr.json - $(this-folder)/preview/v3.1-preview.2/Ocr.json + - $(this-folder)/preview/v3.2-preview/Ocr.json ``` From 942970b09f6c6eb4b02b46f6b03931ade731064c Mon Sep 17 00:00:00 2001 From: Jiayuan Shi Date: Mon, 19 Oct 2020 22:44:42 -0700 Subject: [PATCH 7/8] update version and solve comment --- .../ComputerVision/preview/v3.2-preview/Ocr.json | 10 +++++----- .../v3.2-preview/examples/SuccessfulGetReadResult.json | 2 +- .../examples/SuccessfulReadWithStream.json | 2 +- .../v3.2-preview/examples/SuccessfulReadWithUrl.json | 2 +- 4 files changed, 8 insertions(+), 8 deletions(-) diff --git a/specification/cognitiveservices/data-plane/ComputerVision/preview/v3.2-preview/Ocr.json b/specification/cognitiveservices/data-plane/ComputerVision/preview/v3.2-preview/Ocr.json index f26f1d50864b..7afaaf88f1fc 100644 --- a/specification/cognitiveservices/data-plane/ComputerVision/preview/v3.2-preview/Ocr.json +++ b/specification/cognitiveservices/data-plane/ComputerVision/preview/v3.2-preview/Ocr.json @@ -1,7 +1,7 @@ { "swagger": "2.0", "info": { - "version": "3.1-preview.2", + "version": "3.2-preview", "title": "Computer Vision Client", "description": "The Computer Vision API provides state-of-the-art algorithms to process images and return information. For example, it can be used to determine if an image contains mature content, or it can be used to find all the faces in an image. It also has other features like estimating dominant and accent colors, categorizing the content of images, and describing an image with complete English sentences. Additionally, it can also intelligently generate images thumbnails for displaying large images effectively." 
}, @@ -27,7 +27,7 @@ ] }, "host": "westcentralus.api.cognitive.microsoft.com", - "basePath": "/vision/v3.1-preview.2", + "basePath": "/vision/v3.2-preview", "schemes": [ "https" ], @@ -372,12 +372,12 @@ "description": "The text line style name, including handwriting and other.", "type": "string", "x-ms-enum": { - "name": "name", + "name": "TextStyle", "modelAsString": true }, "enum": [ - "handwriting", - "other" + "other", + "handwriting" ] }, "confidence": { diff --git a/specification/cognitiveservices/data-plane/ComputerVision/preview/v3.2-preview/examples/SuccessfulGetReadResult.json b/specification/cognitiveservices/data-plane/ComputerVision/preview/v3.2-preview/examples/SuccessfulGetReadResult.json index 201858e1d966..1cd69c0ce9a9 100644 --- a/specification/cognitiveservices/data-plane/ComputerVision/preview/v3.2-preview/examples/SuccessfulGetReadResult.json +++ b/specification/cognitiveservices/data-plane/ComputerVision/preview/v3.2-preview/examples/SuccessfulGetReadResult.json @@ -12,7 +12,7 @@ "createdDateTime": "2019-10-03T14:32:04.236Z", "lastUpdatedDateTime": "2019-10-03T14:38:14.852Z", "analyzeResult": { - "version": "v3.1", + "version": "v3.2", "readResults": [ { "page": 1, diff --git a/specification/cognitiveservices/data-plane/ComputerVision/preview/v3.2-preview/examples/SuccessfulReadWithStream.json b/specification/cognitiveservices/data-plane/ComputerVision/preview/v3.2-preview/examples/SuccessfulReadWithStream.json index dc0a4f1675a1..f10e496a1170 100644 --- a/specification/cognitiveservices/data-plane/ComputerVision/preview/v3.2-preview/examples/SuccessfulReadWithStream.json +++ b/specification/cognitiveservices/data-plane/ComputerVision/preview/v3.2-preview/examples/SuccessfulReadWithStream.json @@ -12,7 +12,7 @@ "responses": { "202": { "header": { - "location": "https://{domain}/vision/v3.1/read/e56ffa6e-1ee4-4042-bc07-993db706c95f" + "location": "https://{domain}/vision/v3.2/read/e56ffa6e-1ee4-4042-bc07-993db706c95f" } } } diff --git a/specification/cognitiveservices/data-plane/ComputerVision/preview/v3.2-preview/examples/SuccessfulReadWithUrl.json b/specification/cognitiveservices/data-plane/ComputerVision/preview/v3.2-preview/examples/SuccessfulReadWithUrl.json index a8d69fc50abe..cca2904b541d 100644 --- a/specification/cognitiveservices/data-plane/ComputerVision/preview/v3.2-preview/examples/SuccessfulReadWithUrl.json +++ b/specification/cognitiveservices/data-plane/ComputerVision/preview/v3.2-preview/examples/SuccessfulReadWithUrl.json @@ -14,7 +14,7 @@ "responses": { "202": { "header": { - "Operation-Location": "https://{domain}/vision/v3.1/read/e56ffa6e-1ee4-4042-bc07-993db706c95f" + "Operation-Location": "https://{domain}/vision/v3.2/read/e56ffa6e-1ee4-4042-bc07-993db706c95f" } } } From 08db06a8635d963c1bf928fc178d6e6ecc67a873 Mon Sep 17 00:00:00 2001 From: Jiayuan Shi Date: Wed, 21 Oct 2020 16:58:28 -0700 Subject: [PATCH 8/8] change version to v3.2-preview.2 --- .../preview/{v3.2-preview => v3.2-preview.2}/Ocr.json | 2 +- .../examples/SuccessfulGetReadResult.json | 0 .../examples/SuccessfulReadWithStream.json | 0 .../examples/SuccessfulReadWithUrl.json | 0 .../data-plane/ComputerVision/readme.md | 10 +++++----- 5 files changed, 6 insertions(+), 6 deletions(-) rename specification/cognitiveservices/data-plane/ComputerVision/preview/{v3.2-preview => v3.2-preview.2}/Ocr.json (99%) rename specification/cognitiveservices/data-plane/ComputerVision/preview/{v3.2-preview => v3.2-preview.2}/examples/SuccessfulGetReadResult.json (100%) rename 
specification/cognitiveservices/data-plane/ComputerVision/preview/{v3.2-preview => v3.2-preview.2}/examples/SuccessfulReadWithStream.json (100%) rename specification/cognitiveservices/data-plane/ComputerVision/preview/{v3.2-preview => v3.2-preview.2}/examples/SuccessfulReadWithUrl.json (100%) diff --git a/specification/cognitiveservices/data-plane/ComputerVision/preview/v3.2-preview/Ocr.json b/specification/cognitiveservices/data-plane/ComputerVision/preview/v3.2-preview.2/Ocr.json similarity index 99% rename from specification/cognitiveservices/data-plane/ComputerVision/preview/v3.2-preview/Ocr.json rename to specification/cognitiveservices/data-plane/ComputerVision/preview/v3.2-preview.2/Ocr.json index 7afaaf88f1fc..cb4c7b682f31 100644 --- a/specification/cognitiveservices/data-plane/ComputerVision/preview/v3.2-preview/Ocr.json +++ b/specification/cognitiveservices/data-plane/ComputerVision/preview/v3.2-preview.2/Ocr.json @@ -1,7 +1,7 @@ { "swagger": "2.0", "info": { - "version": "3.2-preview", + "version": "3.2-preview.2", "title": "Computer Vision Client", "description": "The Computer Vision API provides state-of-the-art algorithms to process images and return information. For example, it can be used to determine if an image contains mature content, or it can be used to find all the faces in an image. It also has other features like estimating dominant and accent colors, categorizing the content of images, and describing an image with complete English sentences. Additionally, it can also intelligently generate images thumbnails for displaying large images effectively." }, diff --git a/specification/cognitiveservices/data-plane/ComputerVision/preview/v3.2-preview/examples/SuccessfulGetReadResult.json b/specification/cognitiveservices/data-plane/ComputerVision/preview/v3.2-preview.2/examples/SuccessfulGetReadResult.json similarity index 100% rename from specification/cognitiveservices/data-plane/ComputerVision/preview/v3.2-preview/examples/SuccessfulGetReadResult.json rename to specification/cognitiveservices/data-plane/ComputerVision/preview/v3.2-preview.2/examples/SuccessfulGetReadResult.json diff --git a/specification/cognitiveservices/data-plane/ComputerVision/preview/v3.2-preview/examples/SuccessfulReadWithStream.json b/specification/cognitiveservices/data-plane/ComputerVision/preview/v3.2-preview.2/examples/SuccessfulReadWithStream.json similarity index 100% rename from specification/cognitiveservices/data-plane/ComputerVision/preview/v3.2-preview/examples/SuccessfulReadWithStream.json rename to specification/cognitiveservices/data-plane/ComputerVision/preview/v3.2-preview.2/examples/SuccessfulReadWithStream.json diff --git a/specification/cognitiveservices/data-plane/ComputerVision/preview/v3.2-preview/examples/SuccessfulReadWithUrl.json b/specification/cognitiveservices/data-plane/ComputerVision/preview/v3.2-preview.2/examples/SuccessfulReadWithUrl.json similarity index 100% rename from specification/cognitiveservices/data-plane/ComputerVision/preview/v3.2-preview/examples/SuccessfulReadWithUrl.json rename to specification/cognitiveservices/data-plane/ComputerVision/preview/v3.2-preview.2/examples/SuccessfulReadWithUrl.json diff --git a/specification/cognitiveservices/data-plane/ComputerVision/readme.md b/specification/cognitiveservices/data-plane/ComputerVision/readme.md index 5a34539ce377..5dd5359153e8 100644 --- a/specification/cognitiveservices/data-plane/ComputerVision/readme.md +++ b/specification/cognitiveservices/data-plane/ComputerVision/readme.md @@ -66,12 +66,12 @@ 
input-file: - stable/v3.1/Ocr.json ``` -### Release 3.2-preview -These settings apply only when `--tag=release_3_2_preview` is specified on the command line. +### Release 3.2-preview.2 +These settings apply only when `--tag=release_3_2_preview_2` is specified on the command line. -``` yaml $(tag) == 'release_3_2_preview' +``` yaml $(tag) == 'release_3_2_preview_2' input-file: - - preview/v3.2-preview/Ocr.json + - preview/v3.2-preview.2/Ocr.json ``` ## Swagger to SDK @@ -192,7 +192,7 @@ input-file: - $(this-folder)/stable/v3.0/ComputerVision.json - $(this-folder)/stable/v3.0/Ocr.json - $(this-folder)/preview/v3.1-preview.2/Ocr.json - - $(this-folder)/preview/v3.2-preview/Ocr.json + - $(this-folder)/preview/v3.2-preview.2/Ocr.json ```
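
---

The patches above only define the Swagger contract. For orientation, the sketch below shows how a client might exercise the asynchronous Read flow this spec describes: POST the image reference, read the `Operation-Location` header from the 202 response, then poll `GetReadResult` until the operation status settles. It is not part of the patch series; the endpoint, key, and image URL are placeholders, the request path simply mirrors the `basePath` declared in this Ocr.json (which may differ in other releases), and `requests` is used only as a convenient HTTP client.

```python
import time
import requests

# Assumptions (not part of the patch): endpoint, key, and image URL are
# placeholders you must supply.
ENDPOINT = "https://<your-resource>.cognitiveservices.azure.com"
KEY = "<subscription-key>"
IMAGE_URL = "https://example.com/sample.jpg"

headers = {"Ocp-Apim-Subscription-Key": KEY, "Content-Type": "application/json"}

# 1) Submit the Read request. A 202 response carries the Operation-Location
#    header that must be polled for the OCR result (the spec notes the
#    operation ID expires after 48 hours).
submit = requests.post(
    f"{ENDPOINT}/vision/v3.2-preview/read/analyze",
    headers=headers,
    json={"url": IMAGE_URL},
)
submit.raise_for_status()
operation_url = submit.headers["Operation-Location"]

# 2) Poll GetReadResult until the operation leaves the notStarted/running
#    states defined by the OperationStatus enum in this spec.
while True:
    poll = requests.get(operation_url, headers={"Ocp-Apim-Subscription-Key": KEY})
    poll.raise_for_status()
    result = poll.json()
    if result["status"] in ("succeeded", "failed"):
        break
    time.sleep(1)

# 3) Walk the result shape defined in the spec: readResults -> lines -> words,
#    including the nested appearance.style object introduced in this series.
if result["status"] == "succeeded":
    for page in result["analyzeResult"]["readResults"]:
        for line in page["lines"]:
            style = line.get("appearance", {}).get("style", {})
            print(f'p{page["page"]} [{style.get("name", "?")}] {line["text"]}')
```

The polling loop keys off the `OperationStatus` values (`notStarted`, `running`, `failed`, `succeeded`) from the spec; real client code would add a timeout and back-off rather than polling once per second indefinitely.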