diff --git a/common/config/rush/pnpm-lock.yaml b/common/config/rush/pnpm-lock.yaml index e2ac4a887b9b..f5b2050900fe 100644 --- a/common/config/rush/pnpm-lock.yaml +++ b/common/config/rush/pnpm-lock.yaml @@ -12139,17 +12139,19 @@ packages: dev: false file:projects/ai-document-intelligence.tgz: - resolution: {integrity: sha512-rK979Zu3i9HdmjhkPAgXOzd0NeXbFrY3YZVRjpjV2wogivrRPGkZZzbNRYCJ7jJbjdFmyOW0xaVddvmfvbqYuw==, tarball: file:projects/ai-document-intelligence.tgz} + resolution: {integrity: sha512-uPM0Lu/r/28Rr2zleL5cUX1WpRz9DYs6c83L2DszZfRgwLfZTqRXtAGX13CWg1fNPhq4SacSC4GCna22zKc+wg==, tarball: file:projects/ai-document-intelligence.tgz} name: '@rush-temp/ai-document-intelligence' version: 0.0.0 dependencies: + '@microsoft/api-extractor': 7.47.11(@types/node@18.19.64) '@types/node': 18.19.64 '@vitest/browser': 2.1.4(@types/node@18.19.64)(playwright@1.48.2)(typescript@5.6.3)(vitest@2.1.4) '@vitest/coverage-istanbul': 2.1.4(vitest@2.1.4) dotenv: 16.4.5 - eslint: 9.14.0 + eslint: 8.57.1 playwright: 1.48.2 prettier: 3.3.3 + tshy: 2.0.1 tslib: 2.8.1 typescript: 5.6.3 vitest: 2.1.4(@types/node@18.19.64)(@vitest/browser@2.1.4) @@ -12158,7 +12160,6 @@ packages: - '@vitest/ui' - bufferutil - happy-dom - - jiti - jsdom - less - lightningcss @@ -15416,7 +15417,7 @@ packages: dev: false file:projects/arm-hybridcompute.tgz: - resolution: {integrity: sha512-+DX7eIMt7WAbn6AD+QZpjVyoMuXdNpGH452FuoUH2UvFhOBtlZD3B5y8bwW/oE8d3E/u2eZ3B4N2cAFV32bgbw==, tarball: file:projects/arm-hybridcompute.tgz} + resolution: {integrity: sha512-IxFMCCeH0D2P6u+4Vwi9ROrWBKeO0DJEfs2jxyfJnrGNMrRKAWfplx0QVZY6ETK5Thjlm3myTMffxO2BT5PBiA==, tarball: file:projects/arm-hybridcompute.tgz} name: '@rush-temp/arm-hybridcompute' version: 0.0.0 dependencies: @@ -19240,7 +19241,7 @@ packages: dev: false file:projects/communication-job-router.tgz: - resolution: {integrity: sha512-xPt7oEFIEnDcZHXkEsgPlaOBwbjSF6dQq4uDtXsp/yeslQgiTdGNnO5bUMfuqpTTiiwSsXCyCKEy2FmmrcRHLw==, tarball: file:projects/communication-job-router.tgz} + resolution: {integrity: sha512-2asWr03USbfs287ufZkj8sgX94e8AIkX8BU9mVvBxqPp+5NwM8F2I2OeKa/ZRb9z7dcawliux8IwyB2kNGIEAg==, tarball: file:projects/communication-job-router.tgz} name: '@rush-temp/communication-job-router' version: 0.0.0 dependencies: diff --git a/sdk/documentintelligence/ai-document-intelligence-rest/CHANGELOG.md b/sdk/documentintelligence/ai-document-intelligence-rest/CHANGELOG.md index 0773a05e6281..55c1638a0294 100644 --- a/sdk/documentintelligence/ai-document-intelligence-rest/CHANGELOG.md +++ b/sdk/documentintelligence/ai-document-intelligence-rest/CHANGELOG.md @@ -1,154 +1,7 @@ # Release History - -## 1.0.0-beta.3 (2024-08-20) + +## 1.0.0 (2024-11-16) ### Features Added -- Added support for the Analyze Batch Documents API with the long-running operations, `/documentModels/{modelId}:analyzeBatch`. -- Added support for method `/documentModels/{modelId}/analyzeResults/{resultId}/pdf`. -- Added support for method `/documentModels/{modelId}/analyzeResults/{resultId}/figures/{figureId}`. -- Added support for the analysis method to specify other kinds of output through `AnalyzeDocumentFromStreamBodyParam#output` param : can be `pdf` and `figures`. -- Added property `id` to `Output` model. -- Added support for the Copy Classifier API with method `/documentClassifiers:authorizeCopy`. -- Added method `/documentClassifiers/{classifierId}:copyTo`. -- Added new enum type to `DocumentBuildMode`: `generative`. -- Added property `warnings` to `AnalyzeResultOutput ` model. 
-- Added properties `classifierId`, `split`, and `trainingHours` to `DocumentModelDetailsOutput` model.
-- Added properties `confidenceThreshold`, `features`, `maxDocumentsToAnalyze`, `modelId`, and `queryFields` to `DocumentTypeDetailsOutput` model.
-- Added properties `allowOverwrite` and `maxTrainingHours` to `BuildDocumentModelRequest` model.
-- Added parameter `pages` to `ClassifyDocument` overloads.
-- Added properties `classifierId`, `docTypes`, and `split` to `ComposeDocumentModelRequest`.
-- Added property `allowOverwrite` to `BuildDocumentClassifierRequest`.
-- Added property `getOperationId()` method to the pollers.
-
-### Other Changes
-
-- The `@azure-rest/ai-document-intelligence` Rest Level Client Library, now targets the Azure AI Document Intelligence service API version `2024-07-31-preview`. Support for `2024-02-29-preview` has been removed.
-- Removed support for extracting lists from analyzed documents:
-  - Removed models `DocumentListOutput` and `DocumentListItemOutput`.
-  - Removed property `lists` from `AnalyzeResultOutput`.
-- Changes to the Compose Document API:
-  - Removed model `ComponentDocumentModelDetails`.
-  - Removed property `componentModels` from `ComposeDocumentModelRequest`.
-  - `ComposeDocumentModelRequest` now requires a map of `DocumentTypeDetails` and a classifier ID.
-- Removed model `QuotaDetailsOutput`.
-- Removed property `customNeuralDocumentModelBuilds` from `ResourceDetailsOutput`.
-- Removed required property `fieldSchema` and from `DocumentTypeDetailsOuput`.
-- `DocumentFieldType` is now a required property of `DocumentFieldSchema`.
-
-## 1.0.0-beta.2 (2024-03-06)
-
-### Features Added
-
-- Support for "retry-after" header has been added for long-running operations, including `/documentModels/{modelId}:analyze`, `/documentModels:build`, and `/documentClassifiers:build`.
-- `BuildDocumentClassifierContent` now includes a new property, `baseClassifierId`.
-- `DocumentClassifierDetailsOutput` now includes a new property, `baseClassifierId`.
-- A new property, `warnings`, has been added to `DocumentModelDetailsOutput` and `DocumentClassifierDetailsOutput`, represents an array of objects with `code`, `message`, and `target`.
-- `DocumentFieldOutput` now includes a new property, `valueSelectionGroup`.
-- A new member, `"completed"`, has been added to `OperationDetails#status`.
-
-### Breaking Changes
-
-- The `@azure-rest/ai-document-intelligence` Rest Level Client Library, previously known as Form Recognizer, now targets the Azure AI Document Intelligence service API version `"2024-02-29-preview"`. Please note that support for `2023-10-31-preview` has been discontinued.
-
-## 1.0.0-beta.1 (2023-11-16)
-
-### Features Added
-
-This marks the first preview of `@azure-rest/ai-document-intelligence` Rest Level Client Library for the Azure AI Document Intelligence service (formerly known as Form Recognizer), targeting service API version `"2023-10-31-preview"`.
-
-_**Note: Form Recognizer has been rebranded to Document Intelligence.**_
-
-- Updates all REST API operation paths from `{endpoint}/formrecognizer` to `{endpoint}/documentintelligence`. SDK would handle this change automatically, users would not have to do additional work to support this.
-- `@azure-rest/ai-document-intelligence` is the new package, replacing `@azure/ai-form-recognizer` package. The new package supports a Rest Level Client, which is part of the new generation of Azure SDKs to simplify the development experience.
The new package is not compatible with the previous `@azure/ai-form-recognizer` package without necessary changes to your code. -- **Breaking Changes (with the `@azure/ai-form-recognizer` SDK)** - API shapes have been designed from scratch to support the new Rest Level Client for the Document Intelligence service. Please refer to the [Readme](https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/documentintelligence/ai-document-intelligence-rest/README.md) and [Samples](https://github.com/Azure/azure-sdk-for-js/tree/main/sdk/documentintelligence/ai-document-intelligence-rest/samples) for more understanding. - -### `"2023-10-31-preview"` Service API version - -The new `"2023-10-31-preview"` service version comes with some new features and a few breaking changes when compared to the API versions supported by the `@azure/ai-form-recognizer` library. - -**New Features** - -- **Markdown content format** - - Supports output with Markdown content format along with the default plain _text_. For now, this is only supported for "prebuilt-layout". Markdown content format is deemed a more friendly format for LLM consumption in a chat or automation use scenario. - - Service follows the GFM spec ([GitHub Flavored Markdown](https://github.github.com/gfm/)) for the Markdown format. Also introduces a new _contentFormat_ property with value "text" or "markdown" to indicate the result content format. - - ```ts - import DocumentIntelligence from "@azure-rest/ai-document-intelligence"; - const client = DocumentIntelligence(process.env["DOCUMENT_INTELLIGENCE_ENDPOINT"], { - key: process.env["DOCUMENT_INTELLIGENCE_API_KEY"], - }); - - const initialResponse = await client - .path("/documentModels/{modelId}:analyze", "prebuilt-layout") - .post({ - contentType: "application/json", - body: { - urlSource: - "https://raw.githubusercontent.com/Azure/azure-sdk-for-js/6704eff082aaaf2d97c1371a28461f512f8d748a/sdk/formrecognizer/ai-form-recognizer/assets/forms/Invoice_1.pdf", - }, - queryParameters: { outputContentFormat: "markdown" }, // <-- new query parameter - }); - ``` - -- **Query Fields** - - When this feature flag is specified, the service will further extract the values of the fields specified via the queryFields query parameter to supplement any existing fields defined by the model as fallback. - - ```ts - await client.path("/documentModels/{modelId}:analyze", "prebuilt-layout").post({ - contentType: "application/json", - body: { urlSource: "..." }, - queryParameters: { - features: ["queryFields"], - queryFields: ["NumberOfGuests", "StoreNumber"], - }, // <-- new query parameter - }); - ``` - -- **Split Options** - - In the previous API versions supported by the older `@azure/ai-form-recognizer` library, document splitting and classification operation (`"/documentClassifiers/{classifierId}:analyze"`) always tried to split the input file into multiple documents. - - To enable a wider set of scenarios, service introduces a "split" query parameter with the new "2023-10-31-preview" service version. The following values are supported: - - - `split: "auto"` - - Let service determine where to split. - - - `split: "none"` - - The entire file is treated as a single document. No splitting is performed. - - - `split: "perPage"` - - Each page is treated as a separate document. Each empty page is kept as its own document. - -**Breaking Changes** - -- **prebuilt-receipt** - Currency related fields have been updated. Currency symbol ("$") and code ("USD") are returned along with the amount as shown below. 
- - ```json - { - "content": "$123.45", - "confidence": 0.995, - "type": "currency", - "valueCurrency": { - "amount": 123.45, - "currencySymbol": "$", - "currencyCode": "USD" - }, - ... - } - ``` - -**Retirements/Deprecations** - -- `"prebuilt-businessCard"` model is retired. -- `"prebuilt-document"` model is retired, this model is essentially `"prebuilt-layout"` with `features: ["keyValuePairs"]` specified. _(This is only supported as an optional feature for "prebuilt-layout" and "prebuilt-invoice".)_ - -If you wish to still use these models, please rely on the older `@azure/ai-form-recognizer` library through the older service API versions. - -If you were using the old `@azure/ai-form-recognizer` package, please refer [MIGRATION_GUIDE.MD](https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/documentintelligence/ai-document-intelligence-rest/MIGRATION-FR_v4-DI_v1.md) for more details. +The package of @azure-rest/ai-document-intelligence is using our next generation design principles. To learn more, please refer to our documentation [Quick Start](https://aka.ms/azsdk/js/mgmt/quickstart). diff --git a/sdk/documentintelligence/ai-document-intelligence-rest/MIGRATION-FR_v4-DI_v1.md b/sdk/documentintelligence/ai-document-intelligence-rest/MIGRATION-FR_v4-DI_v1.md deleted file mode 100644 index 2e4a5e8784e5..000000000000 --- a/sdk/documentintelligence/ai-document-intelligence-rest/MIGRATION-FR_v4-DI_v1.md +++ /dev/null @@ -1,315 +0,0 @@ -# Migrating from `@azure/ai-form-recognizer` Version 4.0.0 to `@azure-rest/ai-document-intelligence` Version 1.0.0-beta.1 - -In this first preview of `@azure-rest/ai-document-intelligence` Rest Level Client Library, this package introduces a new design for the Azure AI Document Intelligence service (formerly known as Form Recognizer), targeting service API version `"2023-10-31-preview"`. - -To leverage features of the newest Azure AI Document Intelligence service API (version "2023-10-31-preview" and newer), the new SDK is required, and application code must be changed to use the new client. Similarly, the new major version 4 of the client library cannot be used to communicate with versions 2.1 of the service API. To summarize: - -- Version 3 of the `@azure/ai-form-recognizer` package _only_ supports Form Recognizer service API version 2.1, and will not receive support for newer (date-based) versions of Form Recognizer. -- Version 4 of the `@azure/ai-form-recognizer` package supports service API version "2022-08-31" of Form Recognizer. - -_**Note: Form Recognizer has been rebranded to Document Intelligence.**_ - -- Version 1 of the `@azure-rest/ai-document-intelligence` package will receive support for newer (date-based) versions of Azure AI Document Intelligence. - -This document provides instructions for updating your application code to the new `@azure-rest/ai-document-intelligence` SDK client library. In this document, the examples provided use TypeScript to provide type information, but all runtime behavior changes naturally apply to plain JavaScript as well. - -## Partial Migration (Side-by-Side) - -To avoid migrating an application all at once, `@azure-rest/ai-document-intelligence` Version 1.0.0-beta.1 may be installed alongside `@azure/ai-form-recognizer` Version 4.0.0. 
For instance, add the following to the `dependencies` field of `package.json`: - -```javascript -{ - ..., - "dependencies": { - ..., - "@azure/ai-form-recognizer": "^4.0.0", - "@azure-rest/ai-document-intelligence": "1.0.0-beta.1" - } -} -``` - -Then, the two packages may be used side-by-side, and an application may be migrated partially or over time: - -```javascript -import { DocumentAnalysisClient } from "@azure/ai-form-recognizer"; -import DocumentIntelligence from "@azure-rest/ai-document-intelligence"; -``` - -## Understanding the New Package - -In the new `@azure-rest/ai-document-intelligence` package, several types and method signatures have been introduced which would feel newer owing to the redesign as a Rest Level Client package. - -- `@azure/ai-form-recognizer` package offered `DocumentAnalysisClient` and `DocumentModelAdministrationClient` classes. -- The new `@azure-rest/ai-document-intelligence` package offers a `createClient` method that allows creating a an instance of `DocumentIntelligenceClient`, which would be the equivalent of the two classes offered through `@azure/ai-form-recognizer` package. - -## Migrating from `DocumentAnalysisClient` to `DocumentIntelligence` - -The previous `DocumentAnalysisClient` class offered - -```ts -beginAnalyzeDocument(modelId, document: FormRecognizerRequestBody) -beginAnalyzeDocumentFromUrl(modelId, documentUrl) -beginClassifyDocument(classifierId, document: FormRecognizerRequestBody) -beginClassifyDocumentFromUrl(classifierId, documentUrl) -``` - -methods to analyze/classify documents. - -Equivalently, the new `DocumentIntelligence` offers the following instead through the routes: - -```ts -path("/documentModels/{modelId}:analyze", "").post({ - contentType: "application/json", - body: { urlSource: "..." }, // or { base64Source: "..." } -}); - -path("/documentClassifiers/{classifierId}:analyze", "").post({ - contentType: "application/json", - body: { urlSource: "..." }, // or { base64Source: "..." } -}); -``` - -### URL Inputs - -As in the previous "@azure/ai-form-recognizer" v4.0.0 SDK, to provide a publicly-accessible URL as an input to an analysis operation, use the `beginAnalyzeDocumentFromUrl` method, which treats the data passed to it as a file's contents. In the new `@azure-rest/ai-document-intelligence` package, the equivalent is to use the `path` method to call the `analyze` route and pass the URL as body (shown below). 
- -Previous ("@azure/ai-form-recognizer" v4.0.0): - -```typescript -import { AzureKeyCredential, DocumentAnalysisClient } from "@azure/ai-form-recognizer"; - -const client = new DocumentAnalysisClient(process.env.FORM_RECOGNIZER_ENDPOINT, new AzureKeyCredential(process.env.FORM_RECOGNIZER_API_KEY); ); -const url = "https://raw.githubusercontent.com/Azure/azure-sdk-for-js/main/sdk/formrecognizer/ai-form-recognizer/assets/identityDocument/license.jpg"; - -const poller = await client.beginAnalyzeDocumentFromUrl("", url); -``` - -Current ("@azure-rest/ai-document-intelligence" v1.0.0-beta.1): - -```typescript -import DocumentIntelligence from "@azure-rest/ai-document-intelligence"; - -const client = DocumentIntelligence(process.env["DOCUMENT_INTELLIGENCE_ENDPOINT"], { - key: process.env["DOCUMENT_INTELLIGENCE_API_KEY"], -}); -const url = - "https://raw.githubusercontent.com/Azure/azure-sdk-for-js/main/sdk/formrecognizer/ai-form-recognizer/assets/identityDocument/license.jpg"; - -const initialResponse = await client.path("/documentModels/{modelId}:analyze", "").post({ - contentType: "application/json", - body: { urlSource: url }, -}); - -if (isUnexpected(initialResponse)) { - throw initialResponse.body.error; -} -const poller = await getLongRunningPoller(client, initialResponse); -``` - -### Base64 Inputs - -As in the previous "@azure/ai-form-recognizer" v4.0.0 SDK, to provide a document input to an analysis operation, use the `beginAnalyzeDocument` method, which treats the data passed to it as a file's contents. -In the new `@azure-rest/ai-document-intelligence` package, the equivalent is to use the `path` method to call the `analyze` route and pass the base64-encoded contents (string) as body (shown below). - -_Note: Subject to change in the future previews of this package for better._ - -Previous ("@azure/ai-form-recognizer" v4.0.0): - -```ts -const { DocumentAnalysisClient, AzureKeyCredential } = require("@azure/ai-form-recognizer"); -const fs = require("fs"); - -const client = new DocumentAnalysisClient(endpoint, new AzureKeyCredential(apiKey)); -const path = ""; -const readStream = fs.createReadStream(path); -const poller = await client.beginAnalyzeDocument(modelId, readStream); -``` - -Current ("@azure-rest/ai-document-intelligence" v1.0.0-beta.1): - -```ts -import DocumentIntelligence from "@azure-rest/ai-document-intelligence"; - -const client = DocumentIntelligence(endpoint, { key: apiKey }); -const filePath = ""; -const base64Source = fs.readFileSync(filePath, { encoding: "base64" }); -const initialResponse = await client.path("/documentModels/{modelId}:analyze", "").post({ - contentType: "application/json", - body: { base64Source }, -}); - -if (isUnexpected(initialResponse)) { - throw initialResponse.body.error; -} - -const poller = getLongRunningPoller(client, initialResponse, { ...testPollingOptions }); -``` - -## Example with polling - -```ts -const endpoint = ""; -const apiKey = ""; -const path = ""; // pdf/jpeg/png/tiff formats - -const readStream = fs.createReadStream(path); - -const client = new DocumentAnalysisClient(endpoint, new AzureKeyCredential(apiKey)); -const poller = await client.beginAnalyzeDocument("prebuilt-layout", readStream); -const { pages, tables } = await poller.pollUntilDone(); -``` - -```ts -const client = DocumentIntelligence( - process.env["DOCUMENT_INTELLIGENCE_ENDPOINT"] || "", - { key: process.env["DOCUMENT_INTELLIGENCE_API_KEY"] || "" } -); - -const base64Source = fs.readFileSync(filePath, { encoding: "base64" }); -const initialResponse = await 
client - .path("/documentModels/{modelId}:analyze", "prebuilt-layout") - .post({ - contentType: "application/json", - body: { base64Source }, - }); - -if (isUnexpected(initialResponse)) { - throw initialResponse.body.error; -} - -const poller = await getLongRunningPoller(client, initialResponse); -const analyzeResult = ((await poller.pollUntilDone().body) as AnalyzeResultOperationOutput) - .analyzeResult; - -const pages = analyzeResult?.pages; -const tables = analyzeResult?.tables; -``` - -## Migrating from `DocumentModelAdministrationClient` to `DocumentIntelligence` - -The `DocumentModelAdministrationClient` class, was used for all model management operations (creating, reading, listing, and deleting models). The new `DocumentIntelligence` offers the following instead through the routes: - -```ts -beginBuildDocumentClassifier(classifierId, [docTypeSources]) -beginBuildDocumentModel(modelId, containerUrl, buildMode: "template") -beginBuildDocumentModel(modelId, contentSource, buildMode: "template") -beginComposeDocumentModel(modelId, [componentModelIds]) -beginCopyModelTo(sourceModelId, authorization) -deleteDocumentClassifier(classifierId) -deleteDocumentModel(modelId) -getCopyAuthorization(destinationModelId) -getDocumentClassifier(classifierId) -getDocumentModel(modelId) -getOperation(operationId) -getResourceDetails() -listDocumentClassifiers() -listDocumentModels() -listOperations() -``` - -Equivalently, the new `DocumentIntelligence` offers the following instead through the routes: - -```ts -path("/documentClassifiers:build").post({ - body: { - classifierId: "", - docTypes: { - foo: { - azureBlobSource: { - containerUrl: "", - }, - }, - }, - }, -}); -path("/documentModels:build").post({ - body: { - buildMode: "template", - modelId: modelName, - azureBlobSource: { - containerUrl: "", - }, - }, -}); -path("/documentModels:compose").post({ - body: { - componentModels: componentModelIds, - modelId, - }, -}); -path("/documentClassifiers/{classifierId}", classifierId).delete(); -path("/documentModels/{modelId}", modelId).delete(); -path("/documentModels/{modelId}:copyTo", sourceModel.modelId).post({ - body: targetAuth.body, -}); -path("/documentClassifiers/{classifierId}", classifierId).get(); -path("/documentModels/{modelId}", model.modelId).get(); -path("/operations/{operationId}", "").get(); -path("/info").get(); -path("/documentClassifiers").get(); -path("/documentModels").get(); -path("/operations").get(); -``` - -_**All the methods presented above do support options bags to configure the settings**_ - -## Features Added in the new service API version "2023-10-31-preview" - -### Markdown content format - -Supports output with Markdown content format along with the default plain _text_. For now, this is only supported for "prebuilt-layout". Markdown content format is deemed a more friendly format for LLM consumption in a chat or automation use scenario. - -Service follows the GFM spec ([GitHub Flavored Markdown](https://github.github.com/gfm/)) for the Markdown format. Also introduces a new _contentFormat_ property with value "text" or "markdown" to indicate the result content format. 
- -```ts -import DocumentIntelligence from "@azure-rest/ai-document-intelligence"; -const client = DocumentIntelligence(process.env["DOCUMENT_INTELLIGENCE_ENDPOINT"], { - key: process.env["DOCUMENT_INTELLIGENCE_API_KEY"], -}); - -const initialResponse = await client - .path("/documentModels/{modelId}:analyze", "prebuilt-layout") - .post({ - contentType: "application/json", - body: { - urlSource: - "https://raw.githubusercontent.com/Azure/azure-sdk-for-js/6704eff082aaaf2d97c1371a28461f512f8d748a/sdk/formrecognizer/ai-form-recognizer/assets/forms/Invoice_1.pdf", - }, - queryParameters: { outputContentFormat: "markdown" }, // <-- new query parameter - }); -``` - -### Query Fields - -When this feature flag is specified, the service will further extract the values of the fields specified via the queryFields query parameter to supplement any existing fields defined by the model as fallback. - -```ts -await client.path("/documentModels/{modelId}:analyze", "prebuilt-layout").post({ - contentType: "application/json", - body: { urlSource: "..." }, - queryParameters: { - features: ["queryFields"], - queryFields: ["NumberOfGuests", "StoreNumber"], - }, // <-- new query parameter -}); -``` - -### Split Options - -In the previous API versions supported by the older `@azure/ai-form-recognizer` library, document splitting and classification operation (`"/documentClassifiers/{classifierId}:analyze"`) always tried to split the input file into multiple documents. - -To enable a wider set of scenarios, service introduces a "split" query parameter with the new "2023-10-31-preview" service version. The following values are supported: - -- `split: "auto"` - - Let service determine where to split. - -- `split: "none"` - - The entire file is treated as a single document. No splitting is performed. - -- `split: "perPage"` - - Each page is treated as a separate document. Each empty page is kept as its own document. - -Checkout our [samples](https://github.com/Azure/azure-sdk-for-js/tree/main/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta) for more examples. diff --git a/sdk/documentintelligence/ai-document-intelligence-rest/README.md b/sdk/documentintelligence/ai-document-intelligence-rest/README.md index 7f6e954883a7..49a977778c18 100644 --- a/sdk/documentintelligence/ai-document-intelligence-rest/README.md +++ b/sdk/documentintelligence/ai-document-intelligence-rest/README.md @@ -1,41 +1,13 @@ -# Azure DocumentIntelligence (formerly FormRecognizer) REST client library for JavaScript +# Azure DocumentIntelligence(formerlyFormRecognizer) REST client library for JavaScript Extracts content, layout, and structured data from documents. **Please rely heavily on our [REST client docs](https://github.com/Azure/azure-sdk-for-js/blob/main/documentation/rest-clients.md) to use this library** -> NOTE: Form Recognizer has been rebranded to Document Intelligence. Please check the [Migration Guide from `@azure/ai-form-recognizer` to `@azure-rest/ai-document-intelligence`](https://github.com/Azure/azure-sdk-for-js/tree/main/sdk/documentintelligence/ai-document-intelligence-rest/MIGRATION-FR_v4-DI_v1.md). 
-
 Key links:
-- [Source code](https://github.com/Azure/azure-sdk-for-js/tree/main/sdk/documentintelligence/ai-document-intelligence-rest)
 - [Package (NPM)](https://www.npmjs.com/package/@azure-rest/ai-document-intelligence)
-- [API reference documentation](https://docs.microsoft.com/javascript/api/@azure-rest/ai-document-intelligence?view=azure-node-preview)
-- [Samples](https://github.com/Azure/azure-sdk-for-js/tree/main/sdk/documentintelligence/ai-document-intelligence-rest/samples)
-- [Changelog](https://github.com/Azure/azure-sdk-for-js/tree/main/sdk/documentintelligence/ai-document-intelligence-rest/CHANGELOG.md)
-- [Migration Guide from Form Recognizer](https://github.com/Azure/azure-sdk-for-js/tree/main/sdk/documentintelligence/ai-document-intelligence-rest/MIGRATION-FR_v4-DI_v1.md)
-
-> This version of the client library defaults to the `"2024-07-31-preview"` version of the service.
-
-This table shows the relationship between SDK versions and supported API versions of the service:
-
-| SDK version | Supported API version of service |
-| ------------ | -------------------------------- |
-| 1.0.0-beta.3 | 2024-07-31-preview |
-| 1.0.0-beta.2 | 2024-02-29-preview |
-| 1.0.0-beta.1 | 2023-10-31-preview |
-
-> Please rely on the older `@azure/ai-form-recognizer` library through the older service API versions for retired models, such as `"prebuilt-businessCard"` and `"prebuilt-document"`. For more information, see [Changelog](https://github.com/Azure/azure-sdk-for-js/tree/main/sdk/documentintelligence/ai-document-intelligence-rest/CHANGELOG.md).
-
-The below table describes the relationship of each client and its supported API version(s):
-
-| Service API version | Supported clients | Package |
-| ------------------- | ------------------------------------------------------------ | ------------------------------------------------------------- |
-| 2024-07-31-preview | DocumentIntelligenceClient | `@azure-rest/ai-document-intelligence` version `1.0.0-beta.3` |
-| 2024-02-29-preview | DocumentIntelligenceClient | `@azure-rest/ai-document-intelligence` version `1.0.0-beta.2` |
-| 2023-10-31-preview | DocumentIntelligenceClient | `@azure-rest/ai-document-intelligence` version `1.0.0-beta.1` |
-| 2023-07-31 | DocumentAnalysisClient and DocumentModelAdministrationClient | `@azure/ai-form-recognizer` version `^5.0.0` |
-| 2022-08-01 | DocumentAnalysisClient and DocumentModelAdministrationClient | `@azure/ai-form-recognizer` version `^4.0.0` |
+- [API reference documentation](https://docs.microsoft.com/javascript/api/@azure-rest/ai-document-intelligence)
 
 ## Getting started
 
 ### Currently supported environments
 - [LTS versions of Node.js](https://github.com/nodejs/release#release-schedule)
 - Latest versions of Safari, Chrome, Edge, and Firefox. See our [support policy](https://github.com/Azure/azure-sdk-for-js/blob/main/SUPPORT.md) for more details.
 ### Prerequisites
 - You must have an [Azure subscription](https://azure.microsoft.com/free/) and a [Cognitive Services or Form Recognizer resource](https://docs.microsoft.com/azure/cognitive-services/cognitive-services-apis-create-account?tabs=multiservice%2Cwindows) to use this package.
 ### Install the `@azure-rest/ai-document-intelligence` package
 Install the Azure DocumentIntelligence REST client REST client library for JavaScript with `npm`:
 ```bash
 npm install @azure-rest/ai-document-intelligence
 ```
 ### Create and authenticate a `DocumentIntelligenceClient`
 To use an [Azure Active Directory (AAD) token credential](https://github.com/Azu provide an instance of the desired credential type obtained from the [@azure/identity](https://github.com/Azure/azure-sdk-for-js/tree/main/sdk/identity/identity#credentials) library.
-To authenticate with AAD, you must first `npm` install [`@azure/identity`](https://www.npmjs.com/package/@azure/identity)
+To authenticate with AAD, you must first `npm` install [`@azure/identity`](https://www.npmjs.com/package/@azure/identity)
 After setup, you can choose which type of [credential](https://github.com/Azure/azure-sdk-for-js/tree/main/sdk/identity/identity#credentials) from `@azure/identity` to use. As an example, [DefaultAzureCredential](https://github.com/Azure/azure-sdk-for-js/tree/main/sdk/identity/identity#defaultazurecredential)
@@ -70,229 +42,6 @@ can be used to authenticate the client.
Set the values of the client ID, tenant ID, and client secret of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, AZURE_CLIENT_SECRET -### Using a Token Credential - -```ts -import DocumentIntelligence from "@azure-rest/ai-document-intelligence"; - -const client = DocumentIntelligence( - process.env["DOCUMENT_INTELLIGENCE_ENDPOINT"], - new DefaultAzureCredential(), -); -``` - -### Using an API KEY - -```ts -import DocumentIntelligence from "@azure-rest/ai-document-intelligence"; - -const client = DocumentIntelligence(process.env["DOCUMENT_INTELLIGENCE_ENDPOINT"], { - key: process.env["DOCUMENT_INTELLIGENCE_API_KEY"], -}); -``` - -## Document Models - -### Analyze prebuilt-layout (urlSource) - -```ts -const initialResponse = await client - .path("/documentModels/{modelId}:analyze", "prebuilt-layout") - .post({ - contentType: "application/json", - body: { - urlSource: - "https://raw.githubusercontent.com/Azure/azure-sdk-for-js/6704eff082aaaf2d97c1371a28461f512f8d748a/sdk/formrecognizer/ai-form-recognizer/assets/forms/Invoice_1.pdf", - }, - queryParameters: { locale: "en-IN" }, - }); -``` - -### Analyze prebuilt-layout (base64Source) - -```ts -import fs from "fs"; -import path from "path"; - -const filePath = path.join(ASSET_PATH, "forms", "Invoice_1.pdf"); -const base64Source = fs.readFileSync(filePath, { encoding: "base64" }); -const initialResponse = await client - .path("/documentModels/{modelId}:analyze", "prebuilt-layout") - .post({ - contentType: "application/json", - body: { - base64Source, - }, - queryParameters: { locale: "en-IN" }, - }); -``` - -Continue creating the poller from initial response - -```ts -import { - getLongRunningPoller, - AnalyzeResultOperationOutput, - isUnexpected, -} from "@azure-rest/ai-document-intelligence"; - -if (isUnexpected(initialResponse)) { - throw initialResponse.body.error; -} -const poller = await getLongRunningPoller(client, initialResponse); -const result = (await poller.pollUntilDone()).body as AnalyzeResultOperationOutput; -console.log(result); -// { -// status: 'succeeded', -// createdDateTime: '2023-11-10T13:31:31Z', -// lastUpdatedDateTime: '2023-11-10T13:31:34Z', -// analyzeResult: { -// apiVersion: '2023-10-31-preview', -// . -// . -// . -// contentFormat: 'text' -// } -// } -``` - -### Markdown content format - -Supports output with Markdown content format along with the default plain _text_. For now, this is only supported for "prebuilt-layout". Markdown content format is deemed a more friendly format for LLM consumption in a chat or automation use scenario. - -Service follows the GFM spec ([GitHub Flavored Markdown](https://github.github.com/gfm/)) for the Markdown format. Also introduces a new _contentFormat_ property with value "text" or "markdown" to indicate the result content format. 
- -```ts -import DocumentIntelligence from "@azure-rest/ai-document-intelligence"; -const client = DocumentIntelligence(process.env["DOCUMENT_INTELLIGENCE_ENDPOINT"], { - key: process.env["DOCUMENT_INTELLIGENCE_API_KEY"], -}); - -const initialResponse = await client - .path("/documentModels/{modelId}:analyze", "prebuilt-layout") - .post({ - contentType: "application/json", - body: { - urlSource: - "https://raw.githubusercontent.com/Azure/azure-sdk-for-js/6704eff082aaaf2d97c1371a28461f512f8d748a/sdk/formrecognizer/ai-form-recognizer/assets/forms/Invoice_1.pdf", - }, - queryParameters: { outputContentFormat: "markdown" }, // <-- new query parameter - }); -``` - -### Query Fields - -When this feature flag is specified, the service will further extract the values of the fields specified via the queryFields query parameter to supplement any existing fields defined by the model as fallback. - -```ts -await client.path("/documentModels/{modelId}:analyze", "prebuilt-layout").post({ - contentType: "application/json", - body: { urlSource: "..." }, - queryParameters: { - features: ["queryFields"], - queryFields: ["NumberOfGuests", "StoreNumber"], - }, // <-- new query parameter -}); -``` - -### Split Options - -In the previous API versions supported by the older `@azure/ai-form-recognizer` library, document splitting and classification operation (`"/documentClassifiers/{classifierId}:analyze"`) always tried to split the input file into multiple documents. - -To enable a wider set of scenarios, service introduces a "split" query parameter with the new "2023-10-31-preview" service version. The following values are supported: - -- `split: "auto"` - - Let service determine where to split. - -- `split: "none"` - - The entire file is treated as a single document. No splitting is performed. - -- `split: "perPage"` - - Each page is treated as a separate document. Each empty page is kept as its own document. - -## Document Classifiers #Build - -```ts -import { - DocumentClassifierBuildOperationDetailsOutput, - getLongRunningPoller, - isUnexpected, -} from "@azure-rest/ai-document-intelligence"; - -const containerSasUrl = (): string => - process.env["DOCUMENT_INTELLIGENCE_TRAINING_CONTAINER_SAS_URL"]; -const initialResponse = await client.path("/documentClassifiers:build").post({ - body: { - classifierId: `customClassifier${getRandomNumber()}`, - description: "Custom classifier description", - docTypes: { - foo: { - azureBlobSource: { - containerUrl: containerSasUrl(), - }, - }, - bar: { - azureBlobSource: { - containerUrl: containerSasUrl(), - }, - }, - }, - }, -}); - -if (isUnexpected(initialResponse)) { - throw initialResponse.body.error; -} -const poller = await getLongRunningPoller(client, initialResponse); -const response = (await poller.pollUntilDone()) - .body as DocumentClassifierBuildOperationDetailsOutput; -console.log(response); -// { -// operationId: '31466834048_f3ee629e-73fb-48ab-993b-1d55d73ca460', -// kind: 'documentClassifierBuild', -// status: 'succeeded', -// . -// . -// result: { -// classifierId: 'customClassifier10978', -// createdDateTime: '2023-11-09T12:45:56Z', -// . -// . 
-// description: 'Custom classifier description' -// }, -// apiVersion: '2023-10-31-preview' -// } -``` - -## Get Info - -```ts -const response = await client.path("/info").get(); -if (isUnexpected(response)) { - throw response.body.error; -} -console.log(response.body.customDocumentModels.limit); -// 20000 -``` - -## List Document Models - -```ts -import { paginate } from "@azure-rest/ai-document-intelligence"; -const response = await client.path("/documentModels").get(); -if (isUnexpected(response)) { - throw response.body.error; -} - -const modelsInAccount: string[] = []; -for await (const model of paginate(client, response)) { - console.log(model.modelId); -} -``` - ## Troubleshooting ### Logging diff --git a/sdk/documentintelligence/ai-document-intelligence-rest/api-extractor.json b/sdk/documentintelligence/ai-document-intelligence-rest/api-extractor.json index 6a70a0a1f069..00758c3a187a 100644 --- a/sdk/documentintelligence/ai-document-intelligence-rest/api-extractor.json +++ b/sdk/documentintelligence/ai-document-intelligence-rest/api-extractor.json @@ -1,18 +1,31 @@ { "$schema": "https://developer.microsoft.com/json-schemas/api-extractor/v7/api-extractor.schema.json", "mainEntryPointFilePath": "./dist/esm/index.d.ts", - "docModel": { "enabled": true }, - "apiReport": { "enabled": true, "reportFolder": "./review" }, + "docModel": { + "enabled": true + }, + "apiReport": { + "enabled": true, + "reportFolder": "./review" + }, "dtsRollup": { "enabled": true, "untrimmedFilePath": "", "publicTrimmedFilePath": "./types/ai-document-intelligence.d.ts" }, "messages": { - "tsdocMessageReporting": { "default": { "logLevel": "none" } }, + "tsdocMessageReporting": { + "default": { + "logLevel": "none" + } + }, "extractorMessageReporting": { - "ae-missing-release-tag": { "logLevel": "none" }, - "ae-unresolved-link": { "logLevel": "none" } + "ae-missing-release-tag": { + "logLevel": "none" + }, + "ae-unresolved-link": { + "logLevel": "none" + } } } -} +} \ No newline at end of file diff --git a/sdk/documentintelligence/ai-document-intelligence-rest/assets.json b/sdk/documentintelligence/ai-document-intelligence-rest/assets.json deleted file mode 100644 index ff651b11c442..000000000000 --- a/sdk/documentintelligence/ai-document-intelligence-rest/assets.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "AssetsRepo": "Azure/azure-sdk-assets", - "AssetsRepoPrefixPath": "js", - "TagPrefix": "js/documentintelligence/ai-document-intelligence-rest", - "Tag": "js/documentintelligence/ai-document-intelligence-rest_c52f06c699" -} diff --git a/sdk/documentintelligence/ai-document-intelligence-rest/assets/Acord_27.pdf b/sdk/documentintelligence/ai-document-intelligence-rest/assets/Acord_27.pdf deleted file mode 100644 index 4e0b5b42d884..000000000000 Binary files a/sdk/documentintelligence/ai-document-intelligence-rest/assets/Acord_27.pdf and /dev/null differ diff --git a/sdk/documentintelligence/ai-document-intelligence-rest/assets/businessCard/business-card-english.jpg b/sdk/documentintelligence/ai-document-intelligence-rest/assets/businessCard/business-card-english.jpg deleted file mode 100644 index 7c2bef76ed3e..000000000000 Binary files a/sdk/documentintelligence/ai-document-intelligence-rest/assets/businessCard/business-card-english.jpg and /dev/null differ diff --git a/sdk/documentintelligence/ai-document-intelligence-rest/assets/forms/Form_1.jpg b/sdk/documentintelligence/ai-document-intelligence-rest/assets/forms/Form_1.jpg deleted file mode 100644 index 29cae664f1b8..000000000000 Binary files 
a/sdk/documentintelligence/ai-document-intelligence-rest/assets/forms/Form_1.jpg and /dev/null differ diff --git a/sdk/documentintelligence/ai-document-intelligence-rest/assets/forms/Invoice_1.pdf b/sdk/documentintelligence/ai-document-intelligence-rest/assets/forms/Invoice_1.pdf deleted file mode 100644 index 5ffff2960d74..000000000000 Binary files a/sdk/documentintelligence/ai-document-intelligence-rest/assets/forms/Invoice_1.pdf and /dev/null differ diff --git a/sdk/documentintelligence/ai-document-intelligence-rest/assets/forms/Invoice_1.tiff b/sdk/documentintelligence/ai-document-intelligence-rest/assets/forms/Invoice_1.tiff deleted file mode 100644 index 224fb82205b0..000000000000 Binary files a/sdk/documentintelligence/ai-document-intelligence-rest/assets/forms/Invoice_1.tiff and /dev/null differ diff --git a/sdk/documentintelligence/ai-document-intelligence-rest/assets/forms/selection_mark_form.pdf b/sdk/documentintelligence/ai-document-intelligence-rest/assets/forms/selection_mark_form.pdf deleted file mode 100644 index 0721647fa52b..000000000000 Binary files a/sdk/documentintelligence/ai-document-intelligence-rest/assets/forms/selection_mark_form.pdf and /dev/null differ diff --git a/sdk/documentintelligence/ai-document-intelligence-rest/assets/healthInsuranceCard/insurance.png b/sdk/documentintelligence/ai-document-intelligence-rest/assets/healthInsuranceCard/insurance.png deleted file mode 100644 index 676c8a231d1e..000000000000 Binary files a/sdk/documentintelligence/ai-document-intelligence-rest/assets/healthInsuranceCard/insurance.png and /dev/null differ diff --git a/sdk/documentintelligence/ai-document-intelligence-rest/assets/identityDocument/license.png b/sdk/documentintelligence/ai-document-intelligence-rest/assets/identityDocument/license.png deleted file mode 100644 index 661312305a45..000000000000 Binary files a/sdk/documentintelligence/ai-document-intelligence-rest/assets/identityDocument/license.png and /dev/null differ diff --git a/sdk/documentintelligence/ai-document-intelligence-rest/assets/invoice/Invoice_1.pdf b/sdk/documentintelligence/ai-document-intelligence-rest/assets/invoice/Invoice_1.pdf deleted file mode 100644 index 5ffff2960d74..000000000000 Binary files a/sdk/documentintelligence/ai-document-intelligence-rest/assets/invoice/Invoice_1.pdf and /dev/null differ diff --git a/sdk/documentintelligence/ai-document-intelligence-rest/assets/invoice/sample_invoice.jpg b/sdk/documentintelligence/ai-document-intelligence-rest/assets/invoice/sample_invoice.jpg deleted file mode 100644 index 6f8796469d78..000000000000 Binary files a/sdk/documentintelligence/ai-document-intelligence-rest/assets/invoice/sample_invoice.jpg and /dev/null differ diff --git a/sdk/documentintelligence/ai-document-intelligence-rest/assets/layout-pageobject.pdf b/sdk/documentintelligence/ai-document-intelligence-rest/assets/layout-pageobject.pdf deleted file mode 100644 index 1115199236c8..000000000000 Binary files a/sdk/documentintelligence/ai-document-intelligence-rest/assets/layout-pageobject.pdf and /dev/null differ diff --git a/sdk/documentintelligence/ai-document-intelligence-rest/assets/receipt/contoso-allinone.jpg b/sdk/documentintelligence/ai-document-intelligence-rest/assets/receipt/contoso-allinone.jpg deleted file mode 100644 index 1aaad34387ec..000000000000 Binary files a/sdk/documentintelligence/ai-document-intelligence-rest/assets/receipt/contoso-allinone.jpg and /dev/null differ diff --git 
a/sdk/documentintelligence/ai-document-intelligence-rest/assets/receipt/contoso-receipt.png b/sdk/documentintelligence/ai-document-intelligence-rest/assets/receipt/contoso-receipt.png deleted file mode 100644 index 1f9fcbf60a04..000000000000 Binary files a/sdk/documentintelligence/ai-document-intelligence-rest/assets/receipt/contoso-receipt.png and /dev/null differ diff --git a/sdk/documentintelligence/ai-document-intelligence-rest/assets/receipt/multipage_invoice1.pdf b/sdk/documentintelligence/ai-document-intelligence-rest/assets/receipt/multipage_invoice1.pdf deleted file mode 100644 index 5ac1edf588c9..000000000000 Binary files a/sdk/documentintelligence/ai-document-intelligence-rest/assets/receipt/multipage_invoice1.pdf and /dev/null differ diff --git a/sdk/documentintelligence/ai-document-intelligence-rest/assets/vaccinationCard/vaccination.jpg b/sdk/documentintelligence/ai-document-intelligence-rest/assets/vaccinationCard/vaccination.jpg deleted file mode 100644 index 4b5f7d10dedc..000000000000 Binary files a/sdk/documentintelligence/ai-document-intelligence-rest/assets/vaccinationCard/vaccination.jpg and /dev/null differ diff --git a/sdk/documentintelligence/ai-document-intelligence-rest/assets/w2/w2-multiple.png b/sdk/documentintelligence/ai-document-intelligence-rest/assets/w2/w2-multiple.png deleted file mode 100644 index df2af007a02c..000000000000 Binary files a/sdk/documentintelligence/ai-document-intelligence-rest/assets/w2/w2-multiple.png and /dev/null differ diff --git a/sdk/documentintelligence/ai-document-intelligence-rest/assets/w2/w2-single.png b/sdk/documentintelligence/ai-document-intelligence-rest/assets/w2/w2-single.png deleted file mode 100644 index 3d282db03149..000000000000 Binary files a/sdk/documentintelligence/ai-document-intelligence-rest/assets/w2/w2-single.png and /dev/null differ diff --git a/sdk/documentintelligence/ai-document-intelligence-rest/eslint.config.mjs b/sdk/documentintelligence/ai-document-intelligence-rest/eslint.config.mjs index 68c02604a4c1..113bdc3eaf5f 100644 --- a/sdk/documentintelligence/ai-document-intelligence-rest/eslint.config.mjs +++ b/sdk/documentintelligence/ai-document-intelligence-rest/eslint.config.mjs @@ -8,10 +8,10 @@ export default [ "@azure/azure-sdk/ts-apiextractor-json-types": "warn", "@azure/azure-sdk/ts-package-json-types": "warn", "@azure/azure-sdk/ts-package-json-engine-is-present": "warn", - "tsdoc/syntax": "warn", "@azure/azure-sdk/ts-package-json-module": "off", "@azure/azure-sdk/ts-package-json-files-required": "off", "@azure/azure-sdk/ts-package-json-main-is-cjs": "off", - }, - }, + "tsdoc/syntax": "warn" + } + } ]; diff --git a/sdk/documentintelligence/ai-document-intelligence-rest/karma.conf.js b/sdk/documentintelligence/ai-document-intelligence-rest/karma.conf.js deleted file mode 100644 index 4fdf26c79ac0..000000000000 --- a/sdk/documentintelligence/ai-document-intelligence-rest/karma.conf.js +++ /dev/null @@ -1,133 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT License. - -// https://github.com/karma-runner/karma-chrome-launcher -process.env.CHROME_BIN = require("puppeteer").executablePath(); -require("dotenv").config(); -const { relativeRecordingsPath } = require("@azure-tools/test-recorder"); -process.env.RECORDINGS_RELATIVE_PATH = relativeRecordingsPath(); - -module.exports = function (config) { - config.set({ - // base path that will be used to resolve all patterns (eg. 
files, exclude) - basePath: "./", - - // frameworks to use - // available frameworks: https://npmjs.org/browse/keyword/karma-adapter - frameworks: ["source-map-support", "mocha"], - - plugins: [ - "karma-mocha", - "karma-mocha-reporter", - "karma-chrome-launcher", - "karma-firefox-launcher", - "karma-env-preprocessor", - "karma-coverage", - "karma-sourcemap-loader", - "karma-junit-reporter", - "karma-source-map-support", - ], - - // list of files / patterns to load in the browser - files: [ - "dist-test/index.browser.js", - { - pattern: "dist-test/index.browser.js.map", - type: "html", - included: false, - served: true, - }, - ], - - // list of files / patterns to exclude - exclude: [], - - // preprocess matching files before serving them to the browser - // available preprocessors: https://npmjs.org/browse/keyword/karma-preprocessor - preprocessors: { - "**/*.js": ["sourcemap", "env"], - // IMPORTANT: COMMENT following line if you want to debug in your browsers!! - // Preprocess source file to calculate code coverage, however this will make source file unreadable - // "dist-test/index.js": ["coverage"] - }, - - envPreprocessor: [ - "TEST_MODE", - "ENDPOINT", - "AZURE_CLIENT_SECRET", - "AZURE_CLIENT_ID", - "AZURE_TENANT_ID", - "SUBSCRIPTION_ID", - "RECORDINGS_RELATIVE_PATH", - ], - - // test results reporter to use - // possible values: 'dots', 'progress' - // available reporters: https://npmjs.org/browse/keyword/karma-reporter - reporters: ["mocha", "coverage", "junit"], - - coverageReporter: { - // specify a common output directory - dir: "coverage-browser/", - reporters: [ - { type: "json", subdir: ".", file: "coverage.json" }, - { type: "lcovonly", subdir: ".", file: "lcov.info" }, - { type: "html", subdir: "html" }, - { type: "cobertura", subdir: ".", file: "cobertura-coverage.xml" }, - ], - }, - - junitReporter: { - outputDir: "", // results will be saved as $outputDir/$browserName.xml - outputFile: "test-results.browser.xml", // if included, results will be saved as $outputDir/$browserName/$outputFile - suite: "", // suite will become the package name attribute in xml testsuite element - useBrowserName: false, // add browser name to report and classes names - nameFormatter: undefined, // function (browser, result) to customize the name attribute in xml testcase element - classNameFormatter: undefined, // function (browser, result) to customize the classname attribute in xml testcase element - properties: {}, // key value pair of properties to add to the section of the report - }, - - // web server port - port: 9876, - - // enable / disable colors in the output (reporters and logs) - colors: true, - - // level of logging - // possible values: config.LOG_DISABLE || config.LOG_ERROR || config.LOG_WARN || config.LOG_INFO || config.LOG_DEBUG - logLevel: config.LOG_INFO, - - // enable / disable watching file and executing tests whenever any file changes - autoWatch: false, - - // --no-sandbox allows our tests to run in Linux without having to change the system. - // --disable-web-security allows us to authenticate from the browser without having to write tests using interactive auth, which would be far more complex. 
- browsers: ["ChromeHeadlessNoSandbox"], - customLaunchers: { - ChromeHeadlessNoSandbox: { - base: "ChromeHeadless", - flags: ["--no-sandbox", "--disable-web-security"], - }, - }, - - // Continuous Integration mode - // if true, Karma captures browsers, runs the tests and exits - singleRun: false, - - // Concurrency level - // how many browser should be started simultaneous - concurrency: 1, - - browserNoActivityTimeout: 60000000, - browserDisconnectTimeout: 10000, - browserDisconnectTolerance: 3, - - client: { - mocha: { - // change Karma's debug.html to the mocha web reporter - reporter: "html", - timeout: "600000", - }, - }, - }); -}; diff --git a/sdk/documentintelligence/ai-document-intelligence-rest/package.json b/sdk/documentintelligence/ai-document-intelligence-rest/package.json index c65e11e165ed..ac4bf667835d 100644 --- a/sdk/documentintelligence/ai-document-intelligence-rest/package.json +++ b/sdk/documentintelligence/ai-document-intelligence-rest/package.json @@ -1,6 +1,6 @@ { "name": "@azure-rest/ai-document-intelligence", - "version": "1.0.0-beta.3", + "version": "1.0.0", "description": "Document Intelligence Rest Client", "engines": { "node": ">=18.0.0" @@ -55,55 +55,54 @@ ] }, "dependencies": { - "@azure-rest/core-client": "^2.1.0", - "@azure/abort-controller": "^2.1.2", + "@azure-rest/core-client": "^2.3.1", "@azure/core-auth": "^1.6.0", - "@azure/core-lro": "^3.0.0", - "@azure/core-paging": "^1.5.0", "@azure/core-rest-pipeline": "^1.5.0", "@azure/logger": "^1.0.0", - "tslib": "^2.6.2" + "tslib": "^2.6.2", + "@azure/core-lro": "^3.1.0", + "@azure/abort-controller": "^2.1.2" }, "devDependencies": { - "@azure-tools/test-credential": "^2.0.0", - "@azure-tools/test-recorder": "^4.0.0", - "@azure/core-util": "^1.0.0", - "@azure/dev-tool": "^1.0.0", - "@azure/eslint-plugin-azure-sdk": "^3.0.0", - "@azure/identity": "^4.2.1", + "dotenv": "^16.0.0", + "@microsoft/api-extractor": "^7.40.3", "@types/node": "^18.0.0", + "eslint": "^8.55.0", + "typescript": "~5.6.2", + "tshy": "^2.0.0", + "@azure/identity": "^4.2.1", "@vitest/browser": "^2.0.5", "@vitest/coverage-istanbul": "^2.0.5", - "dotenv": "^16.0.0", - "eslint": "^9.9.0", "playwright": "^1.41.2", - "prettier": "^3.2.5", - "typescript": "~5.6.2", - "vitest": "^2.0.5" + "vitest": "^2.0.5", + "@azure-tools/test-credential": "^2.0.0", + "@azure-tools/test-recorder": "^4.0.0", + "@azure/dev-tool": "^1.0.0", + "@azure/eslint-plugin-azure-sdk": "^3.0.0" }, "scripts": { - "build": "npm run clean && dev-tool run build-package && dev-tool run vendored mkdirp ./review && dev-tool run extract-api", - "build:samples": "dev-tool samples publish --force", - "build:test": "npm run clean && dev-tool run build-package && dev-tool run build-test", - "check-format": "dev-tool run vendored prettier --list-different --config ../../../.prettierrc.json --ignore-path ../../../.prettierignore \"src/**/*.{ts,cts,mts}\" \"test/**/*.{ts,cts,mts}\" \"*.{js,cjs,mjs,json}\"", "clean": "dev-tool run vendored rimraf --glob dist dist-browser dist-esm test-dist temp types *.tgz *.log", - "execute:samples": "dev-tool samples run samples-dev", "extract-api": "dev-tool run vendored rimraf review && dev-tool run vendored mkdirp ./review && dev-tool run extract-api", - "format": "dev-tool run vendored prettier --write --config ../../../.prettierrc.json --ignore-path ../../../.prettierignore \"src/**/*.{ts,cts,mts}\" \"test/**/*.{ts,cts,mts}\" \"*.{js,cjs,mjs,json}\"", - "generate:client": "echo skipped", + "pack": "npm pack 2>&1", + "lint": "eslint package.json 
api-extractor.json src test", + "lint:fix": "eslint package.json api-extractor.json src test --fix --fix-type [problem,suggestion]", + "unit-test": "npm run unit-test:node && npm run unit-test:browser", + "unit-test:browser": "npm run build:test && dev-tool run test:vitest --browser", + "unit-test:node": "dev-tool run test:vitest", "integration-test": "npm run integration-test:node && npm run integration-test:browser", "integration-test:browser": "echo skipped", "integration-test:node": "echo skipped", - "lint": "eslint package.json api-extractor.json src test", - "lint:fix": "eslint package.json api-extractor.json src test --fix --fix-type [problem,suggestion]", - "minify": "dev-tool run vendored uglifyjs -c -m --comments --source-map \"content='./dist/index.js.map'\" -o ./dist/index.min.js ./dist/index.js", - "pack": "npm pack 2>&1", - "test": "npm run clean && dev-tool run build-package && npm run unit-test:node && dev-tool run bundle && npm run unit-test:browser && npm run integration-test", + "build:samples": "echo skipped", + "check-format": "dev-tool run vendored prettier --list-different --config ../../../.prettierrc.json --ignore-path ../../../.prettierignore \"src/**/*.{ts,cts,mts}\" \"test/**/*.{ts,cts,mts}\" \"*.{js,cjs,mjs,json}\" ", + "execute:samples": "echo skipped", + "format": "dev-tool run vendored prettier --write --config ../../../.prettierrc.json --ignore-path ../../../.prettierignore \"src/**/*.{ts,cts,mts}\" \"test/**/*.{ts,cts,mts}\" \"*.{js,cjs,mjs,json}\" ", + "generate:client": "echo skipped", "test:browser": "npm run clean && npm run build:test && npm run unit-test:browser && npm run integration-test:browser", + "minify": "dev-tool run vendored uglifyjs -c -m --comments --source-map \"content='./dist/index.js.map'\" -o ./dist/index.min.js ./dist/index.js", + "build:test": "npm run clean && dev-tool run build-package && dev-tool run build-test", + "build": "npm run clean && dev-tool run build-package && dev-tool run vendored mkdirp ./review && dev-tool run extract-api", "test:node": "npm run clean && dev-tool run build-package && npm run unit-test:node && npm run integration-test:node", - "unit-test": "npm run unit-test:node && npm run unit-test:browser", - "unit-test:browser": "echo skipped", - "unit-test:node": "dev-tool run test:vitest", + "test": "npm run clean && dev-tool run build-package && npm run unit-test:node && dev-tool run bundle && npm run unit-test:browser && npm run integration-test", "update-snippets": "echo skipped" }, "exports": { @@ -128,15 +127,5 @@ } }, "main": "./dist/commonjs/index.js", - "types": "./dist/commonjs/index.d.ts", - "module": "./dist/esm/index.js", - "//sampleConfiguration": { - "productName": "Azure Document Intelligence Rest Client", - "productSlugs": [ - "azure", - "document-intelligence", - "ai-document-intelligence" - ], - "disableDocsMs": true - } -} + "types": "./dist/commonjs/index.d.ts" +} \ No newline at end of file diff --git a/sdk/documentintelligence/ai-document-intelligence-rest/review/ai-document-intelligence.api.md b/sdk/documentintelligence/ai-document-intelligence-rest/review/ai-document-intelligence.api.md index 385a79d2ddbe..7fb822684648 100644 --- a/sdk/documentintelligence/ai-document-intelligence-rest/review/ai-document-intelligence.api.md +++ b/sdk/documentintelligence/ai-document-intelligence-rest/review/ai-document-intelligence.api.md @@ -4,22 +4,20 @@ ```ts -import type { AbortSignalLike } from '@azure/abort-controller'; -import type { CancelOnProgress } from '@azure/core-lro'; -import type { Client 
} from '@azure-rest/core-client'; -import type { ClientOptions } from '@azure-rest/core-client'; -import type { CreateHttpPollerOptions } from '@azure/core-lro'; -import type { HttpResponse } from '@azure-rest/core-client'; -import type { KeyCredential } from '@azure/core-auth'; -import type { OperationState } from '@azure/core-lro'; -import type { Paged } from '@azure/core-paging'; -import type { PagedAsyncIterableIterator } from '@azure/core-paging'; -import type { PathUncheckedResponse } from '@azure-rest/core-client'; -import type { RawHttpHeaders } from '@azure/core-rest-pipeline'; -import type { RawHttpHeadersInput } from '@azure/core-rest-pipeline'; -import type { RequestParameters } from '@azure-rest/core-client'; -import type { StreamableMethod } from '@azure-rest/core-client'; -import type { TokenCredential } from '@azure/core-auth'; +import { AbortSignalLike } from '@azure/abort-controller'; +import { CancelOnProgress } from '@azure/core-lro'; +import { Client } from '@azure-rest/core-client'; +import { ClientOptions } from '@azure-rest/core-client'; +import { CreateHttpPollerOptions } from '@azure/core-lro'; +import { HttpResponse } from '@azure-rest/core-client'; +import { KeyCredential } from '@azure/core-auth'; +import { OperationState } from '@azure/core-lro'; +import { PathUncheckedResponse } from '@azure-rest/core-client'; +import { RawHttpHeaders } from '@azure/core-rest-pipeline'; +import { RawHttpHeadersInput } from '@azure/core-rest-pipeline'; +import { RequestParameters } from '@azure-rest/core-client'; +import { StreamableMethod } from '@azure-rest/core-client'; +import { TokenCredential } from '@azure/core-auth'; // @public export interface AddressValueOutput { @@ -67,7 +65,7 @@ export interface AnalyzeBatchDocumentsBodyParam { // @public (undocumented) export interface AnalyzeBatchDocumentsDefaultResponse extends HttpResponse { // (undocumented) - body: ErrorResponseOutput; + body: DocumentIntelligenceErrorResponseOutput; // (undocumented) status: string; } @@ -97,7 +95,7 @@ export interface AnalyzeBatchDocumentsQueryParamProperties { features?: DocumentAnalysisFeature[]; locale?: string; output?: AnalyzeOutputOption[]; - outputContentFormat?: ContentFormat; + outputContentFormat?: DocumentContentFormat; pages?: string; queryFields?: string[]; stringIndexType?: StringIndexType; @@ -114,30 +112,40 @@ export interface AnalyzeBatchDocumentsRequest { // @public export interface AnalyzeBatchOperationDetailOutput { - error?: ErrorModelOutput; + error?: DocumentIntelligenceErrorOutput; resultUrl?: string; sourceUrl: string; - status: OperationStatusOutput; + status: DocumentIntelligenceOperationStatusOutput; } // @public -export interface AnalyzeBatchResultOperationOutput { +export interface AnalyzeBatchOperationOutput { createdDateTime: string; - error?: ErrorModelOutput; + error?: DocumentIntelligenceErrorOutput; lastUpdatedDateTime: string; percentCompleted?: number; result?: AnalyzeBatchResultOutput; - status: OperationStatusOutput; + resultId?: string; + status: DocumentIntelligenceOperationStatusOutput; } // @public export interface AnalyzeBatchResultOutput { - details: Array; + details?: Array; failedCount: number; skippedCount: number; succeededCount: number; } +// @public +export interface AnalyzedDocumentOutput { + boundingRegions?: Array; + confidence: number; + docType: string; + fields?: Record; + spans: Array; +} + // @public (undocumented) export interface AnalyzeDocument202Headers { // (undocumented) @@ -161,7 +169,7 @@ export interface 
AnalyzeDocumentBodyParam { // @public (undocumented) export interface AnalyzeDocumentDefaultResponse extends HttpResponse { // (undocumented) - body: ErrorResponseOutput; + body: DocumentIntelligenceErrorResponseOutput; // (undocumented) status: string; } @@ -195,7 +203,7 @@ export interface AnalyzeDocumentFromStreamBodyParam { // @public (undocumented) export interface AnalyzeDocumentFromStreamDefaultResponse extends HttpResponse { // (undocumented) - body: ErrorResponseOutput; + body: DocumentIntelligenceErrorResponseOutput; // (undocumented) status: string; } @@ -225,7 +233,7 @@ export interface AnalyzeDocumentFromStreamQueryParamProperties { features?: DocumentAnalysisFeature[]; locale?: string; output?: AnalyzeOutputOption[]; - outputContentFormat?: ContentFormat; + outputContentFormat?: DocumentContentFormat; pages?: string; queryFields?: string[]; stringIndexType?: StringIndexType; @@ -256,7 +264,7 @@ export interface AnalyzeDocumentQueryParamProperties { features?: DocumentAnalysisFeature[]; locale?: string; output?: AnalyzeOutputOption[]; - outputContentFormat?: ContentFormat; + outputContentFormat?: DocumentContentFormat; pages?: string; queryFields?: string[]; stringIndexType?: StringIndexType; @@ -269,23 +277,23 @@ export interface AnalyzeDocumentRequest { } // @public -export type AnalyzeOutputOption = string; - -// @public -export interface AnalyzeResultOperationOutput { +export interface AnalyzeOperationOutput { analyzeResult?: AnalyzeResultOutput; createdDateTime: string; - error?: ErrorModelOutput; + error?: DocumentIntelligenceErrorOutput; lastUpdatedDateTime: string; - status: OperationStatusOutput; + status: DocumentIntelligenceOperationStatusOutput; } +// @public +export type AnalyzeOutputOption = string; + // @public export interface AnalyzeResultOutput { apiVersion: string; content: string; - contentFormat?: ContentFormatOutput; - documents?: Array; + contentFormat?: DocumentContentFormatOutput; + documents?: Array; figures?: Array; keyValuePairs?: Array; languages?: Array; @@ -296,7 +304,7 @@ export interface AnalyzeResultOutput { stringIndexType: StringIndexTypeOutput; styles?: Array; tables?: Array; - warnings?: Array; + warnings?: Array; } // @public (undocumented) @@ -320,7 +328,7 @@ export interface AuthorizeClassifierCopyBodyParam { // @public (undocumented) export interface AuthorizeClassifierCopyDefaultResponse extends HttpResponse { // (undocumented) - body: ErrorResponseOutput; + body: DocumentIntelligenceErrorResponseOutput; // (undocumented) status: string; } @@ -350,7 +358,7 @@ export interface AuthorizeModelCopy { // @public export interface AuthorizeModelCopy200Response extends HttpResponse { // (undocumented) - body: CopyAuthorizationOutput; + body: ModelCopyAuthorizationOutput; // (undocumented) status: "200"; } @@ -363,7 +371,7 @@ export interface AuthorizeModelCopyBodyParam { // @public (undocumented) export interface AuthorizeModelCopyDefaultResponse extends HttpResponse { // (undocumented) - body: ErrorResponseOutput; + body: DocumentIntelligenceErrorResponseOutput; // (undocumented) status: string; } @@ -429,7 +437,7 @@ export interface BuildClassifierBodyParam { // @public (undocumented) export interface BuildClassifierDefaultResponse extends HttpResponse { // (undocumented) - body: ErrorResponseOutput; + body: DocumentIntelligenceErrorResponseOutput; // (undocumented) status: string; } @@ -492,7 +500,7 @@ export interface BuildModelBodyParam { // @public (undocumented) export interface BuildModelDefaultResponse extends HttpResponse { // 
(undocumented) - body: ErrorResponseOutput; + body: DocumentIntelligenceErrorResponseOutput; // (undocumented) status: string; } @@ -563,7 +571,7 @@ export interface ClassifyDocumentBodyParam { // @public (undocumented) export interface ClassifyDocumentDefaultResponse extends HttpResponse { // (undocumented) - body: ErrorResponseOutput; + body: DocumentIntelligenceErrorResponseOutput; // (undocumented) status: string; } @@ -597,7 +605,7 @@ export interface ClassifyDocumentFromStreamBodyParam { // @public (undocumented) export interface ClassifyDocumentFromStreamDefaultResponse extends HttpResponse { // (undocumented) - body: ErrorResponseOutput; + body: DocumentIntelligenceErrorResponseOutput; // (undocumented) status: string; } @@ -700,7 +708,7 @@ export interface ComposeModelBodyParam { // @public (undocumented) export interface ComposeModelDefaultResponse extends HttpResponse { // (undocumented) - body: ErrorResponseOutput; + body: DocumentIntelligenceErrorResponseOutput; // (undocumented) status: string; } @@ -714,38 +722,12 @@ export interface ComposeModelLogicalResponse extends HttpResponse { // @public (undocumented) export type ComposeModelParameters = ComposeModelBodyParam & RequestParameters; -// @public -export type ContentFormat = string; - -// @public -export type ContentFormatOutput = string; - // @public export type ContentSourceKind = string; // @public export type ContentSourceKindOutput = string; -// @public -export interface CopyAuthorization { - accessToken: string; - expirationDateTime: Date | string; - targetModelId: string; - targetModelLocation: string; - targetResourceId: string; - targetResourceRegion: string; -} - -// @public -export interface CopyAuthorizationOutput { - accessToken: string; - expirationDateTime: string; - targetModelId: string; - targetModelLocation: string; - targetResourceId: string; - targetResourceRegion: string; -} - // @public (undocumented) export interface CopyClassifierTo { post(options: CopyClassifierToParameters): StreamableMethod; @@ -774,7 +756,7 @@ export interface CopyClassifierToBodyParam { // @public (undocumented) export interface CopyClassifierToDefaultResponse extends HttpResponse { // (undocumented) - body: ErrorResponseOutput; + body: DocumentIntelligenceErrorResponseOutput; // (undocumented) status: string; } @@ -810,13 +792,13 @@ export interface CopyModelTo202Response extends HttpResponse { // @public (undocumented) export interface CopyModelToBodyParam { - body: CopyAuthorization; + body: ModelCopyAuthorization; } // @public (undocumented) export interface CopyModelToDefaultResponse extends HttpResponse { // (undocumented) - body: ErrorResponseOutput; + body: DocumentIntelligenceErrorResponseOutput; // (undocumented) status: string; } @@ -847,6 +829,40 @@ export interface CustomDocumentModelsDetailsOutput { limit: number; } +// @public +export interface DeleteAnalyzeBatchResult204Response extends HttpResponse { + // (undocumented) + status: "204"; +} + +// @public (undocumented) +export interface DeleteAnalyzeBatchResultDefaultResponse extends HttpResponse { + // (undocumented) + body: DocumentIntelligenceErrorResponseOutput; + // (undocumented) + status: string; +} + +// @public (undocumented) +export type DeleteAnalyzeBatchResultParameters = RequestParameters; + +// @public +export interface DeleteAnalyzeResult204Response extends HttpResponse { + // (undocumented) + status: "204"; +} + +// @public (undocumented) +export interface DeleteAnalyzeResultDefaultResponse extends HttpResponse { + // (undocumented) + body: 
DocumentIntelligenceErrorResponseOutput; + // (undocumented) + status: string; +} + +// @public (undocumented) +export type DeleteAnalyzeResultParameters = RequestParameters; + // @public (undocumented) export interface DeleteClassifier204Headers { "x-ms-client-request-id"?: string; @@ -863,7 +879,7 @@ export interface DeleteClassifier204Response extends HttpResponse { // @public (undocumented) export interface DeleteClassifierDefaultResponse extends HttpResponse { // (undocumented) - body: ErrorResponseOutput; + body: DocumentIntelligenceErrorResponseOutput; // (undocumented) status: string; } @@ -898,7 +914,7 @@ export interface DeleteModel204Response extends HttpResponse { // @public (undocumented) export interface DeleteModelDefaultResponse extends HttpResponse { // (undocumented) - body: ErrorResponseOutput; + body: DocumentIntelligenceErrorResponseOutput; // (undocumented) status: string; } @@ -950,13 +966,13 @@ export interface DocumentCaptionOutput { } // @public -export interface DocumentClassifierBuildOperationDetailsOutput extends OperationDetailsOutputParent { +export interface DocumentClassifierBuildOperationDetailsOutput extends DocumentIntelligenceOperationDetailsOutputParent { kind: "documentClassifierBuild"; result?: DocumentClassifierDetailsOutput; } // @public -export interface DocumentClassifierCopyToOperationDetailsOutput extends OperationDetailsOutputParent { +export interface DocumentClassifierCopyToOperationDetailsOutput extends DocumentIntelligenceOperationDetailsOutputParent { kind: "documentClassifierCopyTo"; result?: DocumentClassifierDetailsOutput; } @@ -970,9 +986,16 @@ export interface DocumentClassifierDetailsOutput { description?: string; docTypes: Record; expirationDateTime?: string; - warnings?: Array; + readonly modifiedDateTime?: string; + warnings?: Array; } +// @public +export type DocumentContentFormat = string; + +// @public +export type DocumentContentFormatOutput = string; + // @public export interface DocumentFieldOutput { boundingRegions?: Array; @@ -1031,6 +1054,12 @@ export interface DocumentFigureOutput { spans: Array; } +// @public +export type DocumentFontStyleOutput = string; + +// @public +export type DocumentFontWeightOutput = string; + // @public export interface DocumentFootnoteOutput { boundingRegions?: Array; @@ -1061,6 +1090,60 @@ export interface DocumentIntelligenceClientOptions extends ClientOptions { apiVersion?: string; } +// @public +export interface DocumentIntelligenceErrorOutput { + code: string; + details?: Array; + innererror?: DocumentIntelligenceInnerErrorOutput; + message: string; + target?: string; +} + +// @public +export interface DocumentIntelligenceErrorResponseOutput { + error: DocumentIntelligenceErrorOutput; +} + +// @public +export interface DocumentIntelligenceInnerErrorOutput { + code?: string; + innererror?: DocumentIntelligenceInnerErrorOutput; + message?: string; +} + +// @public +export type DocumentIntelligenceOperationDetailsOutput = DocumentIntelligenceOperationDetailsOutputParent | DocumentModelBuildOperationDetailsOutput | DocumentModelComposeOperationDetailsOutput | DocumentModelCopyToOperationDetailsOutput | DocumentClassifierCopyToOperationDetailsOutput | DocumentClassifierBuildOperationDetailsOutput; + +// @public +export interface DocumentIntelligenceOperationDetailsOutputParent { + apiVersion?: string; + createdDateTime: string; + error?: DocumentIntelligenceErrorOutput; + // (undocumented) + kind: OperationKindOutput; + lastUpdatedDateTime: string; + operationId: string; + percentCompleted?: 
number; + resourceLocation: string; + status: DocumentIntelligenceOperationStatusOutput; + tags?: Record; +} + +// @public +export type DocumentIntelligenceOperationStatusOutput = string; + +// @public +export interface DocumentIntelligenceResourceDetailsOutput { + customDocumentModels: CustomDocumentModelsDetailsOutput; +} + +// @public +export interface DocumentIntelligenceWarningOutput { + code: string; + message: string; + target?: string; +} + // @public export interface DocumentKeyValueElementOutput { boundingRegions?: Array; @@ -1090,19 +1173,19 @@ export interface DocumentLineOutput { } // @public -export interface DocumentModelBuildOperationDetailsOutput extends OperationDetailsOutputParent { +export interface DocumentModelBuildOperationDetailsOutput extends DocumentIntelligenceOperationDetailsOutputParent { kind: "documentModelBuild"; result?: DocumentModelDetailsOutput; } // @public -export interface DocumentModelComposeOperationDetailsOutput extends OperationDetailsOutputParent { +export interface DocumentModelComposeOperationDetailsOutput extends DocumentIntelligenceOperationDetailsOutputParent { kind: "documentModelCompose"; result?: DocumentModelDetailsOutput; } // @public -export interface DocumentModelCopyToOperationDetailsOutput extends OperationDetailsOutputParent { +export interface DocumentModelCopyToOperationDetailsOutput extends DocumentIntelligenceOperationDetailsOutputParent { kind: "documentModelCopyTo"; result?: DocumentModelDetailsOutput; } @@ -1119,19 +1202,11 @@ export interface DocumentModelDetailsOutput { readonly docTypes?: Record; readonly expirationDateTime?: string; modelId: string; + readonly modifiedDateTime?: string; split?: SplitModeOutput; tags?: Record; readonly trainingHours?: number; - readonly warnings?: Array; -} - -// @public -export interface DocumentOutput { - boundingRegions?: Array; - confidence: number; - docType: string; - fields?: Record; - spans: Array; + readonly warnings?: Array; } // @public @@ -1188,8 +1263,8 @@ export interface DocumentStyleOutput { backgroundColor?: string; color?: string; confidence: number; - fontStyle?: FontStyleOutput; - fontWeight?: FontWeightOutput; + fontStyle?: DocumentFontStyleOutput; + fontWeight?: DocumentFontWeightOutput; isHandwritten?: boolean; similarFontFamily?: string; spans: Array; @@ -1256,35 +1331,16 @@ export interface DocumentWordOutput { span: DocumentSpanOutput; } -// @public -export interface ErrorModelOutput { - code: string; - details?: Array; - innererror?: InnerErrorOutput; - message: string; - target?: string; -} - -// @public -export interface ErrorResponseOutput { - error: ErrorModelOutput; -} - -// @public -export type FontStyleOutput = string; - -// @public -export type FontWeightOutput = string; - // @public (undocumented) export interface GetAnalyzeBatchResult { + delete(options?: DeleteAnalyzeBatchResultParameters): StreamableMethod; get(options?: GetAnalyzeBatchResultParameters): StreamableMethod; } // @public export interface GetAnalyzeBatchResult200Response extends HttpResponse { // (undocumented) - body: AnalyzeBatchResultOperationOutput; + body: AnalyzeBatchOperationOutput; // (undocumented) status: "200"; } @@ -1292,7 +1348,7 @@ export interface GetAnalyzeBatchResult200Response extends HttpResponse { // @public (undocumented) export interface GetAnalyzeBatchResultDefaultResponse extends HttpResponse { // (undocumented) - body: ErrorResponseOutput; + body: DocumentIntelligenceErrorResponseOutput; // (undocumented) status: string; } @@ -1302,13 +1358,14 @@ export type 
GetAnalyzeBatchResultParameters = RequestParameters; // @public (undocumented) export interface GetAnalyzeResult { + delete(options?: DeleteAnalyzeResultParameters): StreamableMethod; get(options?: GetAnalyzeResultParameters): StreamableMethod; } // @public export interface GetAnalyzeResult200Response extends HttpResponse { // (undocumented) - body: AnalyzeResultOperationOutput; + body: AnalyzeOperationOutput; // (undocumented) status: "200"; } @@ -1316,7 +1373,7 @@ export interface GetAnalyzeResult200Response extends HttpResponse { // @public (undocumented) export interface GetAnalyzeResultDefaultResponse extends HttpResponse { // (undocumented) - body: ErrorResponseOutput; + body: DocumentIntelligenceErrorResponseOutput; // (undocumented) status: string; } @@ -1343,7 +1400,7 @@ export interface GetAnalyzeResultFigure200Response extends HttpResponse { // @public (undocumented) export interface GetAnalyzeResultFigureDefaultResponse extends HttpResponse { // (undocumented) - body: ErrorResponseOutput; + body: DocumentIntelligenceErrorResponseOutput; // (undocumented) status: string; } @@ -1376,7 +1433,7 @@ export interface GetAnalyzeResultPdf200Response extends HttpResponse { // @public (undocumented) export interface GetAnalyzeResultPdfDefaultResponse extends HttpResponse { // (undocumented) - body: ErrorResponseOutput; + body: DocumentIntelligenceErrorResponseOutput; // (undocumented) status: string; } @@ -1411,7 +1468,7 @@ export interface GetClassifier200Response extends HttpResponse { // @public (undocumented) export interface GetClassifierDefaultResponse extends HttpResponse { // (undocumented) - body: ErrorResponseOutput; + body: DocumentIntelligenceErrorResponseOutput; // (undocumented) status: string; } @@ -1438,7 +1495,7 @@ export interface GetClassifyResult { // @public export interface GetClassifyResult200Response extends HttpResponse { // (undocumented) - body: AnalyzeResultOperationOutput; + body: AnalyzeOperationOutput; // (undocumented) status: "200"; } @@ -1446,7 +1503,7 @@ export interface GetClassifyResult200Response extends HttpResponse { // @public (undocumented) export interface GetClassifyResultDefaultResponse extends HttpResponse { // (undocumented) - body: ErrorResponseOutput; + body: DocumentIntelligenceErrorResponseOutput; // (undocumented) status: string; } @@ -1472,7 +1529,7 @@ export interface GetDocumentClassifierBuildOperation200Response extends HttpResp // @public (undocumented) export interface GetDocumentClassifierBuildOperationDefaultResponse extends HttpResponse { // (undocumented) - body: ErrorResponseOutput; + body: DocumentIntelligenceErrorResponseOutput; // (undocumented) status: string; } @@ -1509,7 +1566,7 @@ export interface GetDocumentClassifierCopyToOperation200Response extends HttpRes // @public (undocumented) export interface GetDocumentClassifierCopyToOperationDefaultResponse extends HttpResponse { // (undocumented) - body: ErrorResponseOutput; + body: DocumentIntelligenceErrorResponseOutput; // (undocumented) status: string; } @@ -1556,7 +1613,7 @@ export interface GetDocumentModelBuildOperation200Response extends HttpResponse // @public (undocumented) export interface GetDocumentModelBuildOperationDefaultResponse extends HttpResponse { // (undocumented) - body: ErrorResponseOutput; + body: DocumentIntelligenceErrorResponseOutput; // (undocumented) status: string; } @@ -1593,7 +1650,7 @@ export interface GetDocumentModelComposeOperation200Response extends HttpRespons // @public (undocumented) export interface 
GetDocumentModelComposeOperationDefaultResponse extends HttpResponse { // (undocumented) - body: ErrorResponseOutput; + body: DocumentIntelligenceErrorResponseOutput; // (undocumented) status: string; } @@ -1630,7 +1687,7 @@ export interface GetDocumentModelCopyToOperation200Response extends HttpResponse // @public (undocumented) export interface GetDocumentModelCopyToOperationDefaultResponse extends HttpResponse { // (undocumented) - body: ErrorResponseOutput; + body: DocumentIntelligenceErrorResponseOutput; // (undocumented) status: string; } @@ -1697,7 +1754,7 @@ export interface GetModel200Response extends HttpResponse { // @public (undocumented) export interface GetModelDefaultResponse extends HttpResponse { // (undocumented) - body: ErrorResponseOutput; + body: DocumentIntelligenceErrorResponseOutput; // (undocumented) status: string; } @@ -1724,7 +1781,7 @@ export interface GetOperation200Headers { // @public export interface GetOperation200Response extends HttpResponse { // (undocumented) - body: OperationDetailsOutput; + body: DocumentIntelligenceOperationDetailsOutput; // (undocumented) headers: RawHttpHeaders & GetOperation200Headers; // (undocumented) @@ -1734,7 +1791,7 @@ export interface GetOperation200Response extends HttpResponse { // @public (undocumented) export interface GetOperationDefaultResponse extends HttpResponse { // (undocumented) - body: ErrorResponseOutput; + body: DocumentIntelligenceErrorResponseOutput; // (undocumented) status: string; } @@ -1754,41 +1811,34 @@ export interface GetOperationHeaders { export type GetOperationParameters = GetOperationHeaderParam & RequestParameters; // @public -export type GetPage = (pageLink: string, maxPageSize?: number) => Promise<{ +export type GetPage = (pageLink: string) => Promise<{ page: TPage; nextPageLink?: string; }>; // @public (undocumented) -export interface GetResourceInfo { - get(options?: GetResourceInfoParameters): StreamableMethod; +export interface GetResourceDetails { + get(options?: GetResourceDetailsParameters): StreamableMethod; } // @public -export interface GetResourceInfo200Response extends HttpResponse { +export interface GetResourceDetails200Response extends HttpResponse { // (undocumented) - body: ResourceDetailsOutput; + body: DocumentIntelligenceResourceDetailsOutput; // (undocumented) status: "200"; } // @public (undocumented) -export interface GetResourceInfoDefaultResponse extends HttpResponse { +export interface GetResourceDetailsDefaultResponse extends HttpResponse { // (undocumented) - body: ErrorResponseOutput; + body: DocumentIntelligenceErrorResponseOutput; // (undocumented) status: string; } // @public (undocumented) -export type GetResourceInfoParameters = RequestParameters; - -// @public -export interface InnerErrorOutput { - code?: string; - innererror?: InnerErrorOutput; - message?: string; -} +export type GetResourceDetailsParameters = RequestParameters; // @public (undocumented) export function isUnexpected(response: ListOperations200Response | ListOperationsDefaultResponse): response is ListOperationsDefaultResponse; @@ -1812,11 +1862,14 @@ export function isUnexpected(response: GetDocumentClassifierBuildOperation200Res export function isUnexpected(response: GetOperation200Response | GetOperationDefaultResponse): response is GetOperationDefaultResponse; // @public (undocumented) -export function isUnexpected(response: GetResourceInfo200Response | GetResourceInfoDefaultResponse): response is GetResourceInfoDefaultResponse; +export function isUnexpected(response: 
GetResourceDetails200Response | GetResourceDetailsDefaultResponse): response is GetResourceDetailsDefaultResponse; // @public (undocumented) export function isUnexpected(response: GetAnalyzeResult200Response | GetAnalyzeResultDefaultResponse): response is GetAnalyzeResultDefaultResponse; +// @public (undocumented) +export function isUnexpected(response: DeleteAnalyzeResult204Response | DeleteAnalyzeResultDefaultResponse): response is DeleteAnalyzeResultDefaultResponse; + // @public (undocumented) export function isUnexpected(response: GetAnalyzeResultPdf200Response | GetAnalyzeResultPdfDefaultResponse): response is GetAnalyzeResultPdfDefaultResponse; @@ -1832,9 +1885,15 @@ export function isUnexpected(response: AnalyzeDocument202Response | AnalyzeDocum // @public (undocumented) export function isUnexpected(response: GetAnalyzeBatchResult200Response | GetAnalyzeBatchResultDefaultResponse): response is GetAnalyzeBatchResultDefaultResponse; +// @public (undocumented) +export function isUnexpected(response: DeleteAnalyzeBatchResult204Response | DeleteAnalyzeBatchResultDefaultResponse): response is DeleteAnalyzeBatchResultDefaultResponse; + // @public (undocumented) export function isUnexpected(response: AnalyzeBatchDocuments202Response | AnalyzeBatchDocumentsLogicalResponse | AnalyzeBatchDocumentsDefaultResponse): response is AnalyzeBatchDocumentsDefaultResponse; +// @public (undocumented) +export function isUnexpected(response: ListAnalyzeBatchResults200Response | ListAnalyzeBatchResultsDefaultResponse): response is ListAnalyzeBatchResultsDefaultResponse; + // @public (undocumented) export function isUnexpected(response: GetModel200Response | GetModelDefaultResponse): response is GetModelDefaultResponse; @@ -1886,6 +1945,30 @@ export function isUnexpected(response: CopyClassifierTo202Response | CopyClassif // @public export type LengthUnitOutput = string; +// @public (undocumented) +export interface ListAnalyzeBatchResults { + get(options?: ListAnalyzeBatchResultsParameters): StreamableMethod; +} + +// @public +export interface ListAnalyzeBatchResults200Response extends HttpResponse { + // (undocumented) + body: PagedAnalyzeBatchOperationOutput; + // (undocumented) + status: "200"; +} + +// @public (undocumented) +export interface ListAnalyzeBatchResultsDefaultResponse extends HttpResponse { + // (undocumented) + body: DocumentIntelligenceErrorResponseOutput; + // (undocumented) + status: string; +} + +// @public (undocumented) +export type ListAnalyzeBatchResultsParameters = RequestParameters; + // @public (undocumented) export interface ListClassifiers { get(options?: ListClassifiersParameters): StreamableMethod; @@ -1909,7 +1992,7 @@ export interface ListClassifiers200Response extends HttpResponse { // @public (undocumented) export interface ListClassifiersDefaultResponse extends HttpResponse { // (undocumented) - body: ErrorResponseOutput; + body: DocumentIntelligenceErrorResponseOutput; // (undocumented) status: string; } @@ -1951,7 +2034,7 @@ export interface ListModels200Response extends HttpResponse { // @public (undocumented) export interface ListModelsDefaultResponse extends HttpResponse { // (undocumented) - body: ErrorResponseOutput; + body: DocumentIntelligenceErrorResponseOutput; // (undocumented) status: string; } @@ -1983,7 +2066,7 @@ export interface ListOperations200Headers { // @public export interface ListOperations200Response extends HttpResponse { // (undocumented) - body: PagedOperationDetailsOutput; + body: PagedDocumentIntelligenceOperationDetailsOutput; // 
(undocumented) headers: RawHttpHeaders & ListOperations200Headers; // (undocumented) @@ -1993,7 +2076,7 @@ export interface ListOperations200Response extends HttpResponse { // @public (undocumented) export interface ListOperationsDefaultResponse extends HttpResponse { // (undocumented) - body: ErrorResponseOutput; + body: DocumentIntelligenceErrorResponseOutput; // (undocumented) status: string; } @@ -2013,37 +2096,63 @@ export interface ListOperationsHeaders { export type ListOperationsParameters = ListOperationsHeaderParam & RequestParameters; // @public -export type OperationDetailsOutput = OperationDetailsOutputParent | DocumentModelBuildOperationDetailsOutput | DocumentModelComposeOperationDetailsOutput | DocumentModelCopyToOperationDetailsOutput | DocumentClassifierCopyToOperationDetailsOutput | DocumentClassifierBuildOperationDetailsOutput; +export interface ModelCopyAuthorization { + accessToken: string; + expirationDateTime: Date | string; + targetModelId: string; + targetModelLocation: string; + targetResourceId: string; + targetResourceRegion: string; +} // @public -export interface OperationDetailsOutputParent { - apiVersion?: string; - createdDateTime: string; - error?: ErrorModelOutput; - // (undocumented) - kind: OperationKindOutput; - lastUpdatedDateTime: string; - operationId: string; - percentCompleted?: number; - resourceLocation: string; - status: OperationStatusOutput; - tags?: Record; +export interface ModelCopyAuthorizationOutput { + accessToken: string; + expirationDateTime: string; + targetModelId: string; + targetModelLocation: string; + targetResourceId: string; + targetResourceRegion: string; } // @public export type OperationKindOutput = string; // @public -export type OperationStatusOutput = string; +export interface PagedAnalyzeBatchOperationOutput { + nextLink?: string; + value: Array; +} // @public -export type PagedDocumentClassifierDetailsOutput = Paged; +export interface PagedAsyncIterableIterator { + [Symbol.asyncIterator](): PagedAsyncIterableIterator; + byPage: (settings?: TPageSettings) => AsyncIterableIterator; + next(): Promise>; +} // @public -export type PagedDocumentModelDetailsOutput = Paged; +export interface PagedDocumentClassifierDetailsOutput { + nextLink?: string; + value: Array; +} + +// @public +export interface PagedDocumentIntelligenceOperationDetailsOutput { + nextLink?: string; + value: Array; +} // @public -export type PagedOperationDetailsOutput = Paged; +export interface PagedDocumentModelDetailsOutput { + nextLink?: string; + value: Array; +} + +// @public +export interface PageSettings { + continuationToken?: string; +} // @public export function paginate(client: Client, initialResponse: TResponse, options?: PagingOptions): PagedAsyncIterableIterator>; @@ -2063,22 +2172,18 @@ export interface PagingOptions { // @public export type ParagraphRoleOutput = string; -// @public -export interface ResourceDetailsOutput { - customDocumentModels: CustomDocumentModelsDetailsOutput; -} - // @public (undocumented) export interface Routes { (path: "/operations"): ListOperations; (path: "/operations/{operationId}", operationId: string): GetDocumentModelBuildOperation; - (path: "/info"): GetResourceInfo; + (path: "/info"): GetResourceDetails; (path: "/documentModels/{modelId}/analyzeResults/{resultId}", modelId: string, resultId: string): GetAnalyzeResult; (path: "/documentModels/{modelId}/analyzeResults/{resultId}/pdf", modelId: string, resultId: string): GetAnalyzeResultPdf; (path: 
"/documentModels/{modelId}/analyzeResults/{resultId}/figures/{figureId}", modelId: string, resultId: string, figureId: string): GetAnalyzeResultFigure; (path: "/documentModels/{modelId}:analyze", modelId: string): AnalyzeDocumentFromStream; (path: "/documentModels/{modelId}/analyzeBatchResults/{resultId}", modelId: string, resultId: string): GetAnalyzeBatchResult; (path: "/documentModels/{modelId}:analyzeBatch", modelId: string): AnalyzeBatchDocuments; + (path: "/documentModels/{modelId}/analyzeBatchResults", modelId: string): ListAnalyzeBatchResults; (path: "/documentModels/{modelId}", modelId: string): GetModel; (path: "/documentModels:build"): BuildModel; (path: "/documentModels:compose"): ComposeModel; @@ -2096,7 +2201,6 @@ export interface Routes { // @public export interface SimplePollerLike, TResult> { - getOperationId(): string; getOperationState(): TState; getResult(): TResult | undefined; isDone(): boolean; @@ -2129,13 +2233,6 @@ export type StringIndexType = string; // @public export type StringIndexTypeOutput = string; -// @public -export interface WarningOutput { - code: string; - message: string; - target?: string; -} - // (No @packageDocumentation comment for this package) ``` diff --git a/sdk/documentintelligence/ai-document-intelligence-rest/sample.env b/sdk/documentintelligence/ai-document-intelligence-rest/sample.env index 4edbb15d213c..508439fc7d62 100644 --- a/sdk/documentintelligence/ai-document-intelligence-rest/sample.env +++ b/sdk/documentintelligence/ai-document-intelligence-rest/sample.env @@ -1,8 +1 @@ -# Used in most samples. Retrieve these values from a Cognitive Services instance -# in the Azure Portal. -DOCUMENT_INTELLIGENCE_ENDPOINT="https://.cognitiveservies.azure.com/" -DOCUMENT_INTELLIGENCE_API_KEY="
" - -# Our tests assume that TEST_MODE is "playback" by default. You can -# change it to "record" to generate new recordings, or "live" to bypass the recorder entirely. -# TEST_MODE=playback +# Feel free to add your own environment variables. \ No newline at end of file diff --git a/sdk/documentintelligence/ai-document-intelligence-rest/samples-dev/analyzeDocumentByModelId.ts b/sdk/documentintelligence/ai-document-intelligence-rest/samples-dev/analyzeDocumentByModelId.ts deleted file mode 100644 index ac70505c598a..000000000000 --- a/sdk/documentintelligence/ai-document-intelligence-rest/samples-dev/analyzeDocumentByModelId.ts +++ /dev/null @@ -1,57 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT License. - -/** - * This sample shows how to analyze a document using a model with a given ID. The model ID may refer to any model, - * whether custom, prebuilt, composed, etc. - * - * @summary analyze a document using a model by ID - */ - -import DocumentIntelligence, { AnalyzeResultOperationOutput, getLongRunningPoller, isUnexpected } from "@azure-rest/ai-document-intelligence"; - -import * as dotenv from "dotenv"; -dotenv.config(); - -async function main() { - const client = DocumentIntelligence( - process.env["DOCUMENT_INTELLIGENCE_ENDPOINT"] || "", - { key: process.env["DOCUMENT_INTELLIGENCE_API_KEY"] || "" }) - const modelId = process.env.DOCUMENT_INTELLIGENCE_CUSTOM_MODEL_ID || "";// "prebuilt-layout"; - - const initialResponse = await client - .path("/documentModels/{modelId}:analyze", modelId) - .post({ - contentType: "application/json", - body: { - urlSource: "https://raw.githubusercontent.com/Azure/azure-sdk-for-js/main/sdk/formrecognizer/ai-form-recognizer/assets/receipt/contoso-receipt.png", - }, - queryParameters: { locale: "en-IN" }, - }); - if (isUnexpected(initialResponse)) { - throw initialResponse.body.error; - } - const poller = await getLongRunningPoller(client, initialResponse); - const analyzeResult = ( - (await (poller).pollUntilDone()).body as AnalyzeResultOperationOutput - ).analyzeResult; - - const documents = analyzeResult?.documents; - - const document = documents && documents[0]; - if (!document) { - throw new Error("Expected at least one document in the result."); - } - - console.log( - "Extracted document:", - document.docType, - `(confidence: ${document.confidence || ""})` - ); - console.log("Fields:", document.fields); -} - -main().catch((error) => { - console.error("An error occurred:", error); - process.exit(1); -}); diff --git a/sdk/documentintelligence/ai-document-intelligence-rest/samples-dev/analyzeIdentityDocument.ts b/sdk/documentintelligence/ai-document-intelligence-rest/samples-dev/analyzeIdentityDocument.ts deleted file mode 100644 index 37df16bc9b66..000000000000 --- a/sdk/documentintelligence/ai-document-intelligence-rest/samples-dev/analyzeIdentityDocument.ts +++ /dev/null @@ -1,74 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT License. - -/** - * This sample shows how to extract elements of an identity document (such as a driver license or passport) from a URL - * to a file using the prebuilt identity document model. - * - * The prebuilt identity document model can return several fields. 
For a detailed list of the fields supported by the - * identity document model, see the `IdentityDocument` type in the documentation, or refer to the following link: - * - * https://aka.ms/azsdk/documentitelligence/iddocumentfieldschema - * - * @summary extract data from an identity document - * @azsdk-skip-javascript - */ - -import DocumentIntelligence, { AnalyzeResultOperationOutput, getLongRunningPoller, isUnexpected } from "@azure-rest/ai-document-intelligence"; - -import * as dotenv from "dotenv"; -dotenv.config(); - -async function main() { - const client = DocumentIntelligence( - process.env["DOCUMENT_INTELLIGENCE_ENDPOINT"] || "", - { key: process.env["DOCUMENT_INTELLIGENCE_API_KEY"] || "" }) - - const initialResponse = await client - .path("/documentModels/{modelId}:analyze", "prebuilt-idDocument") - .post({ - contentType: "application/json", - body: { - // The Document Intelligence service will access the following URL to a driver license image and extract data from it - urlSource: - "https://raw.githubusercontent.com/Azure/azure-sdk-for-js/main/sdk/formrecognizer/ai-form-recognizer/assets/identityDocument/license.png", - }, - queryParameters: { locale: "en-IN" }, - }); - if (isUnexpected(initialResponse)) { - throw initialResponse.body.error; - } - const poller = await getLongRunningPoller(client, initialResponse); - const analyzeResult = ( - (await (poller).pollUntilDone()).body as AnalyzeResultOperationOutput - ).analyzeResult; - - const documents = analyzeResult?.documents; - - const document = documents && documents[0]; - - // Use of PrebuiltModels.Receipt above (rather than the raw model ID), adds strong typing of the model's output - if (document) { - // The identity document model has multiple document types, so we need to know which document type was actually - // extracted. - if (document.docType === "idDocument.driverLicense") { - // For the sake of the example, we'll only show a few of the fields that are produced. - console.log("Extracted a Driver License:"); - console.log(document.fields) - } else if (document.docType === "idDocument.passport") { - console.log("Extracted a Passport:"); - console.log(document.fields) - } else { - // The only reason this would happen is if the client library's schema for the prebuilt identity document model is - // out of date, and a new document type has been introduced. - console.error("Unknown document type in result:", document); - } - } else { - throw new Error("Expected at least one receipt in the result."); - } -} - -main().catch((error) => { - console.error("An error occurred:", error); - process.exit(1); -}); diff --git a/sdk/documentintelligence/ai-document-intelligence-rest/samples-dev/analyzeInvoice.ts b/sdk/documentintelligence/ai-document-intelligence-rest/samples-dev/analyzeInvoice.ts deleted file mode 100644 index ed2ce320051d..000000000000 --- a/sdk/documentintelligence/ai-document-intelligence-rest/samples-dev/analyzeInvoice.ts +++ /dev/null @@ -1,63 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT License. - -/** - * This sample shows how to extract elements of an invoice from a URL to a file using the prebuilt invoice model. - * - * The prebuilt invoice model can return several fields. 
For a detailed list of the fields supported by the invoice - * model, see the `Invoice` type in the documentation, or refer to the following link: - * - * https://aka.ms/azsdk/documentitelligence/invoicefieldschema - * - * @summary extract data from an invoice document - * @azsdk-skip-javascript - */ - -import DocumentIntelligence, { AnalyzeResultOperationOutput, getLongRunningPoller, isUnexpected } from "@azure-rest/ai-document-intelligence"; - -import * as dotenv from "dotenv"; -dotenv.config(); - -async function main() { - - const client = DocumentIntelligence( - process.env["DOCUMENT_INTELLIGENCE_ENDPOINT"] || "", - { key: process.env["DOCUMENT_INTELLIGENCE_API_KEY"] || "" }) - - const initialResponse = await client - .path("/documentModels/{modelId}:analyze", "prebuilt-invoice") - .post({ - contentType: "application/json", - body: { - // The Document Intelligence service will access the following URL to an invoice image and extract data from it - urlSource: "https://raw.githubusercontent.com/Azure/azure-sdk-for-js/main/sdk/formrecognizer/ai-form-recognizer/assets/invoice/sample_invoice.jpg", - } - }); - if (isUnexpected(initialResponse)) { - throw initialResponse.body.error; - } - const poller = await getLongRunningPoller(client, initialResponse); - const analyzeResult = ( - (await (poller).pollUntilDone()).body as AnalyzeResultOperationOutput - ).analyzeResult; - - const documents = analyzeResult?.documents; - - const document = documents && documents[0]; - if (!document) { - throw new Error("Expected at least one document in the result."); - } - - - // Use of PrebuiltModels.Receipt above (rather than the raw model ID), adds strong typing of the model's output - if (document) { - console.log(document.fields); - } else { - throw new Error("Expected at least one receipt in the result."); - } -} - -main().catch((error) => { - console.error("An error occurred:", error); - process.exit(1); -}); diff --git a/sdk/documentintelligence/ai-document-intelligence-rest/samples-dev/analyzeReceipt.ts b/sdk/documentintelligence/ai-document-intelligence-rest/samples-dev/analyzeReceipt.ts deleted file mode 100644 index 8f0eaeae8bc7..000000000000 --- a/sdk/documentintelligence/ai-document-intelligence-rest/samples-dev/analyzeReceipt.ts +++ /dev/null @@ -1,60 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT License. - -/** - * This sample shows how to extract elements of a receipt from a URL to a file using the prebuilt receipt model. - * - * The prebuilt receipt model can return several fields. 
For a detailed list of the fields supported by the receipt - * model, see the `Receipt` type in the documentation, or refer to the following link: - * - * https://aka.ms/azsdk/documentitelligence/receiptfieldschema - * - * @summary extract data from a receipt document - * @azsdk-skip-javascript - */ - -import DocumentIntelligence, { AnalyzeResultOperationOutput, getLongRunningPoller, isUnexpected } from "@azure-rest/ai-document-intelligence"; - -import * as dotenv from "dotenv"; -dotenv.config(); - -async function main() { - const client = DocumentIntelligence( - process.env["DOCUMENT_INTELLIGENCE_ENDPOINT"] || "", - { key: process.env["DOCUMENT_INTELLIGENCE_API_KEY"] || "" }) - - const initialResponse = await client - .path("/documentModels/{modelId}:analyze", "prebuilt-receipt") - .post({ - contentType: "application/json", - body: { - // The Document Intelligence service will access the following URL to a receipt image and extract data from it - urlSource: "https://raw.githubusercontent.com/Azure/azure-sdk-for-js/main/sdk/formrecognizer/ai-form-recognizer/assets/receipt/contoso-receipt.png", - } - }); - if (isUnexpected(initialResponse)) { - throw initialResponse.body.error; - } - const poller = await getLongRunningPoller(client, initialResponse); - const analyzeResult = ( - (await (poller).pollUntilDone()).body as AnalyzeResultOperationOutput - ).analyzeResult; - - const documents = analyzeResult?.documents; - - const document = documents && documents[0]; - - - - // Use of PrebuiltModels.Receipt above (rather than the raw model ID), as it adds strong typing of the model's output - if (document) { - console.log(document.fields); - } else { - throw new Error("Expected at least one receipt in the result."); - } -} - -main().catch((error) => { - console.error("An error occurred:", error); - process.exit(1); -}); diff --git a/sdk/documentintelligence/ai-document-intelligence-rest/samples-dev/analyzeReceiptByModelId.ts b/sdk/documentintelligence/ai-document-intelligence-rest/samples-dev/analyzeReceiptByModelId.ts deleted file mode 100644 index c8fc574766e9..000000000000 --- a/sdk/documentintelligence/ai-document-intelligence-rest/samples-dev/analyzeReceiptByModelId.ts +++ /dev/null @@ -1,59 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT License. - -/** - * This sample shows how to extract elements of a receipt from a URL to a file using the prebuilt receipt model. Rather - * than using the `PrebuiltModels.Receipt` document model, this sample shows the use of the prebuilt model by ID, - * resulting in a weaker type that exactly mirrors the model's field schema at runtime. - * - * The prebuilt receipt model can return several fields. 
For a detailed list of the fields supported by the - * receipt model, see the `Receipt` type in the documentation, or refer to the following link: - * - * https://aka.ms/azsdk/documentitelligence/receiptfieldschema - * - * @summary use the "prebuilt-receipt" model ID to extract data from a receipt document (weakly-typed) - */ - -import DocumentIntelligence, { AnalyzeResultOperationOutput, getLongRunningPoller, isUnexpected } from "@azure-rest/ai-document-intelligence"; - -import * as dotenv from "dotenv"; -dotenv.config(); - -async function main() { - const client = DocumentIntelligence( - process.env["DOCUMENT_INTELLIGENCE_ENDPOINT"] || "", - { key: process.env["DOCUMENT_INTELLIGENCE_API_KEY"] || "" }) - - const initialResponse = await client - .path("/documentModels/{modelId}:analyze", "prebuilt-receipt") - .post({ - contentType: "application/json", - body: { - // The Document Intelligence service will access the following URL to a receipt image and extract data from it - urlSource: "https://raw.githubusercontent.com/Azure/azure-sdk-for-js/main/sdk/formrecognizer/ai-form-recognizer/assets/receipt/contoso-receipt.png", - } - }); - if (isUnexpected(initialResponse)) { - throw initialResponse.body.error; - } - const poller = await getLongRunningPoller(client, initialResponse); - - poller.onProgress((state) => console.log("Operation:", state.result, state.status)); - const analyzeResult = ( - (await (poller).pollUntilDone()).body as AnalyzeResultOperationOutput - ).analyzeResult; - - const documents = analyzeResult?.documents; - - const result = documents && documents[0]; - if (result) { - console.log(result.fields); - } else { - throw new Error("Expected at least one receipt in the result."); - } -} - -main().catch((error) => { - console.error("An error occurred:", error); - process.exit(1); -}); diff --git a/sdk/documentintelligence/ai-document-intelligence-rest/samples-dev/analyzeW2TaxForm.ts b/sdk/documentintelligence/ai-document-intelligence-rest/samples-dev/analyzeW2TaxForm.ts deleted file mode 100644 index 261e0eb6011d..000000000000 --- a/sdk/documentintelligence/ai-document-intelligence-rest/samples-dev/analyzeW2TaxForm.ts +++ /dev/null @@ -1,64 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT License. - -/** - * This sample shows how to extract elements of a United States W2 tax form from a file using the prebuilt US W2 model. - * - * The prebuilt W2 model can return several fields. 
For a detailed list of the fields supported by the model, see the - * `TaxUsW2` type in the documentation, or refer to the following link: - * - * https://aka.ms/azsdk/documentitelligence/taxusw2fieldschema - * - * @summary extract data from a United States W2 tax document - * @azsdk-skip-javascript - */ - -import DocumentIntelligence, { AnalyzeResultOperationOutput, getLongRunningPoller, isUnexpected } from "@azure-rest/ai-document-intelligence"; -import fs from "fs"; -import path from "path"; - -import * as dotenv from "dotenv"; -dotenv.config(); - -async function main() { - const client = DocumentIntelligence( - process.env["DOCUMENT_INTELLIGENCE_ENDPOINT"] || "", - { key: process.env["DOCUMENT_INTELLIGENCE_API_KEY"] || "" }) - - const filePath = fs.readFileSync(path.join(".", "assets", "w2", "w2-single.png")); - - const base64Source = fs.readFileSync(filePath, { encoding: "base64" }); - - const initialResponse = await client - .path("/documentModels/{modelId}:analyze", "prebuilt-tax.us.w2") - .post({ - contentType: "application/json", - body: { - base64Source, - }, - }); - - if (isUnexpected(initialResponse)) { - throw initialResponse.body.error; - } - - const poller = await getLongRunningPoller(client, initialResponse); - const analyzeResult = ( - (await (poller).pollUntilDone()).body as AnalyzeResultOperationOutput - ).analyzeResult; - - const documents = analyzeResult?.documents; - const document = documents?.[0]; - - if (document) { - console.log("Extracted W2 tax form:"); - console.log(document.fields); - } else { - throw new Error("Expected at least one document in the result."); - } -} - -main().catch((error) => { - console.error(error); - process.exit(1); -}); diff --git a/sdk/documentintelligence/ai-document-intelligence-rest/samples-dev/buildClassifier.ts b/sdk/documentintelligence/ai-document-intelligence-rest/samples-dev/buildClassifier.ts deleted file mode 100644 index 9358f1110f00..000000000000 --- a/sdk/documentintelligence/ai-document-intelligence-rest/samples-dev/buildClassifier.ts +++ /dev/null @@ -1,80 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT License. - -/** - * This sample shows how to programmatically build a custom classifier. - * - * The Document Intelligence service expects the training data to be organized and labeled according to a particular - * convention and stored in an Azure Storage container. 
For more information about creating a training data set, please - * see the information at the following link to the service's documentation: - * - * https://aka.ms/azsdk/documentitelligence/buildclassifiermodel - * - * @summary build a classifier from a training data set - */ - -import DocumentIntelligence, { DocumentClassifierBuildOperationDetailsOutput, getLongRunningPoller, isUnexpected } from "@azure-rest/ai-document-intelligence"; - -import * as dotenv from "dotenv"; -dotenv.config(); - -async function main() { - const client = DocumentIntelligence( - process.env["DOCUMENT_INTELLIGENCE_ENDPOINT"] || "", - { key: process.env["DOCUMENT_INTELLIGENCE_API_KEY"] || "" }) - - const random = Date.now().toString(); - const modelId = - (process.env.CUSTOM_CLASSIFIER_ID || "") + random.substring(random.length - 6); - - const trainingDataSasUrl1 = - process.env.CUSTOM_CLASSIFIER_TRAINING_DATA_SAS_URL_1 || ""; - - const trainingDataSasUrl2 = - process.env.CUSTOM_CLASSIFIER_TRAINING_DATA_SAS_URL_2 || ""; - - const initialResponse = await client.path("/documentClassifiers:build").post({ - body: { - classifierId: modelId, - description: "Custom classifier description", - docTypes: { - foo: { - azureBlobSource: { - containerUrl: trainingDataSasUrl1, - }, - }, - bar: { - azureBlobSource: { - containerUrl: trainingDataSasUrl2, - }, - }, - }, - } - }); - - if (isUnexpected(initialResponse)) { - throw initialResponse.body.error; - } - const poller = await getLongRunningPoller(client, initialResponse); - const classifier = ( - (await (poller).pollUntilDone()).body as DocumentClassifierBuildOperationDetailsOutput - ).result; - if (!classifier) { - throw new Error("Expected a DocumentClassifierDetailsOutput response."); - } - - - console.log("Classifier ID:", classifier.classifierId); - console.log("Description:", classifier.description); - console.log("Created:", classifier.createdDateTime); - - console.log("Document Types:"); - for (const [docType, details] of Object.entries(classifier.docTypes)) { - console.log(`- Name: "${docType}", source: ${JSON.stringify(details, null, 2)}`); - } -} - -main().catch((error) => { - console.error("An error occurred:", error); - process.exit(1); -}); diff --git a/sdk/documentintelligence/ai-document-intelligence-rest/samples-dev/buildModel.ts b/sdk/documentintelligence/ai-document-intelligence-rest/samples-dev/buildModel.ts deleted file mode 100644 index a50eaa0e4a3e..000000000000 --- a/sdk/documentintelligence/ai-document-intelligence-rest/samples-dev/buildModel.ts +++ /dev/null @@ -1,79 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT License. - -/** - * This sample shows how to programmatically build a model with a single document type using a training data set. - * - * The Document Intelligence service expects the training data to be organized and labeled according to a particular - * convention and stored in an Azure Storage container. 
For more information about creating a training data set, please - * see the information at the following link to the service's documentation: - * - * https://aka.ms/azsdk/documentitelligence/buildtrainingset - * - * @summary build a model with a single document type from a training data set - */ - -import DocumentIntelligence, { DocumentModelBuildOperationDetailsOutput, getLongRunningPoller, isUnexpected } from "@azure-rest/ai-document-intelligence"; - -import * as dotenv from "dotenv"; -dotenv.config(); - -async function main() { - const client = DocumentIntelligence( - process.env["DOCUMENT_INTELLIGENCE_ENDPOINT"] || "", - { key: process.env["DOCUMENT_INTELLIGENCE_API_KEY"] || "" }) - const random = Date.now().toString(); - const modelId = - (process.env.CUSTOM_MODEL_ID || "") + random.substring(random.length - 6); - const trainingDataSasUrl = - process.env.CUSTOM_MODEL_TRAINING_DATA_SAS_URL || ""; - - const initialResponse = await client.path("/documentModels:build").post({ - body: { - buildMode: "template", - modelId, - azureBlobSource: { - containerUrl: trainingDataSasUrl - }, - }, - }); - - if (isUnexpected(initialResponse)) { - throw initialResponse.body.error; - } - const poller = getLongRunningPoller(client, initialResponse); - const model = ( - (await (await poller).pollUntilDone()).body as DocumentModelBuildOperationDetailsOutput - ).result; - if (!model) { - throw new Error("Expected a DocumentModelDetailsOutput response."); - } - - console.log("Model ID:", model.modelId); - console.log("Description:", model.description); - console.log("Created:", model.createdDateTime); - - // A model may contain several document types, which describe the possible object structures of fields extracted using - // this model - - console.log("Document Types:"); - for (const [docType, { description, fieldSchema: schema }] of Object.entries( - model.docTypes || {} - )) { - console.log(`- Name: "${docType}"`); - console.log(` Description: "${description}"`); - - // For simplicity, this example will only show top-level field names - console.log(" Fields:"); - - for (const [fieldName, fieldSchema] of Object.entries(schema)) { - console.log(` - "${fieldName}" (${fieldSchema.type})`); - console.log(` ${fieldSchema.description || ""}`); - } - } -} - -main().catch((error) => { - console.error("An error occurred:", error); - process.exit(1); -}); diff --git a/sdk/documentintelligence/ai-document-intelligence-rest/samples-dev/classifyDocument.ts b/sdk/documentintelligence/ai-document-intelligence-rest/samples-dev/classifyDocument.ts deleted file mode 100644 index ad6ba315f0e3..000000000000 --- a/sdk/documentintelligence/ai-document-intelligence-rest/samples-dev/classifyDocument.ts +++ /dev/null @@ -1,55 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT License. - -/** - * This sample shows how to use a custom classifier to get the document type (class) of a document. 
- * - * @summary use a custom classifier to classify a document - */ - -import DocumentIntelligence, { AnalyzeResultOperationOutput, getLongRunningPoller, isUnexpected } from "@azure-rest/ai-document-intelligence"; - -import * as dotenv from "dotenv"; -dotenv.config(); - -async function main() { - const client = DocumentIntelligence( - process.env["DOCUMENT_INTELLIGENCE_ENDPOINT"] || "", - { key: process.env["DOCUMENT_INTELLIGENCE_API_KEY"] || "" }) - const documentUrl = - "https://raw.githubusercontent.com/Azure/azure-sdk-for-js/main/sdk/formrecognizer/ai-form-recognizer/assets/invoice/Invoice_1.pdf"; - - const classifierId = process.env.CUSTOM_CLASSIFIER_ID ?? ""; - const initialResponse = await client - .path("/documentClassifiers/{classifierId}:analyze", classifierId) - .post({ - contentType: "application/json", - body: { - urlSource: documentUrl, - }, - }); - - if (isUnexpected(initialResponse)) { - throw initialResponse.body.error; - } - - const poller = await getLongRunningPoller(client, initialResponse); - const analyzeResult = ( - (await (poller).pollUntilDone()).body as AnalyzeResultOperationOutput - ).analyzeResult; - - if (analyzeResult?.documents === undefined || analyzeResult.documents.length === 0) { - throw new Error("Failed to extract any documents."); - } - - for (const document of analyzeResult.documents) { - console.log( - `Extracted a document with type '${document.docType}' on page ${document.boundingRegions?.[0].pageNumber} (confidence: ${document.confidence})` - ); - } -} - -main().catch((error) => { - console.error("An error occurred:", error); - process.exit(1); -}); diff --git a/sdk/documentintelligence/ai-document-intelligence-rest/samples-dev/composeModel.ts b/sdk/documentintelligence/ai-document-intelligence-rest/samples-dev/composeModel.ts deleted file mode 100644 index d16fca04a898..000000000000 --- a/sdk/documentintelligence/ai-document-intelligence-rest/samples-dev/composeModel.ts +++ /dev/null @@ -1,120 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT License. - -/** - * This sample demonstrates how to create a composed model from several individual labeled models. - * - * We build all of the component models used in the composition operation and then finally create the composed model. - * The resulting composed model will have all of the document types of its component submodels. When used for analysis, - * it will first classify the input as belonging to one of the document types.zzs - * - * @summary create a composed model from several individual labeled models - * @azsdk-weight 60 - */ - -import DocumentIntelligence, { DocumentModelBuildOperationDetailsOutput, DocumentModelComposeOperationDetailsOutput, getLongRunningPoller, isUnexpected } from "@azure-rest/ai-document-intelligence"; - -import * as dotenv from "dotenv"; -dotenv.config(); - -async function main() { - const client = DocumentIntelligence( - process.env["DOCUMENT_INTELLIGENCE_ENDPOINT"] || "", - { key: process.env["DOCUMENT_INTELLIGENCE_API_KEY"] || "" }) - - // This object will hold the SAS-encoded URLs to containers that hold - // different types of purchase order documents and their labels. 
- const purchaseOrderSasUrls = { - supplies: - process.env["PURCHASE_ORDER_SUPPLIES_SAS_URL"] || - "", - equipment: - process.env["PURCHASE_ORDER_EQUIPMENT_SAS_URL"] || - "", - furniture: - process.env["PURCHASE_ORDER_FURNITURE_SAS_URL"] || - "", - cleaningSupplies: - process.env["PURCHASE_ORDER_CLEANING_SUPPLIES_SAS_URL"] || - "", - }; - - // We'll put the last few digits of the current timestamp into the model IDs, just to make sure they're unique. - const random = Date.now().toString(); - - const modelIds = await Promise.all( - Object.entries(purchaseOrderSasUrls) - .map(async ([kind, sasUrl]) => { - const modelId = kind + "ComponentModel" + random.substring(random.length - 6); - const initialResponse = await client.path("/documentModels:build").post({ - body: { - buildMode: "template", - modelId: modelId, - azureBlobSource: { - containerUrl: sasUrl, - }, - }, - }); - if (isUnexpected(initialResponse)) { - throw initialResponse.body.error; - } - const poller = await getLongRunningPoller(client, initialResponse); - const model = ( - (await (poller).pollUntilDone()).body as DocumentModelBuildOperationDetailsOutput - ).result!; - - return model; - }) - .map(async (model) => { return { modelId: (await model).modelId } }) - ); - - // Finally, create the composed model. - - const composedModelId = "purchaseOrders" + random.substring(random.length - 6); - - const initialResponse = await client.path("/documentModels:compose").post({ - body: { - description: "A composed model that classifies purchase order documents and extracts data from them.", - componentModels: modelIds, - modelId: composedModelId, - - }, - }); - - if (isUnexpected(initialResponse)) { - throw initialResponse.body.error; - } - const poller = await getLongRunningPoller(client, initialResponse); - - const composedModel = ( - (await (poller).pollUntilDone()).body as DocumentModelComposeOperationDetailsOutput - ).result!; - - - console.log("Model ID:", composedModel.modelId); - console.log("Description:", composedModel.description); - console.log("Created:", composedModel.createdDateTime); - - // The composed model should have a document type for each one of the individually built models that are composed into - // this larger model. - - console.log("Document Types:"); - for (const [docType, { description, fieldSchema: schema }] of Object.entries( - composedModel.docTypes || {} - )) { - console.log(`- Name: "${docType}"`); - console.log(` Description: "${description}"`); - - // For simplicity, this example will only show top-level field names - console.log(" Fields:"); - - for (const [fieldName, fieldSchema] of Object.entries(schema)) { - console.log(` - "${fieldName}" (${fieldSchema.type})`); - console.log(` ${fieldSchema.description || ""}`); - } - } -} - -main().catch((err) => { - console.error("The sample encountered an error:", err); -}); diff --git a/sdk/documentintelligence/ai-document-intelligence-rest/samples-dev/copyModel.ts b/sdk/documentintelligence/ai-document-intelligence-rest/samples-dev/copyModel.ts deleted file mode 100644 index 2da44e98c499..000000000000 --- a/sdk/documentintelligence/ai-document-intelligence-rest/samples-dev/copyModel.ts +++ /dev/null @@ -1,66 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT License. - -/** - * This sample shows how to copy a model from one resource to another. The model is created with a new model ID (and - * optionally a new description) in the destination resource, but will have the same document types as the source model. 
- * - * @summary copy a model from one resource to another - */ - -import DocumentIntelligence, { DocumentModelCopyToOperationDetailsOutput, getLongRunningPoller, isUnexpected } from "@azure-rest/ai-document-intelligence"; - -import * as dotenv from "dotenv"; -dotenv.config(); - -async function main() { - - const random = Date.now().toString(); - const destinationModelId = - (process.env.CUSTOM_MODEL_ID || "") + random.substring(random.length - 6); - - // The authorization must be created by the destination resource. - // const destinationClient = new DocumentModelAdministrationClient(endpoint, credential); - const destinationClient = DocumentIntelligence( - process.env["DOCUMENT_INTELLIGENCE_ENDPOINT"] || "", - { key: process.env["DOCUMENT_INTELLIGENCE_API_KEY"] || "" }) - // const authorization = await destinationClient.getCopyAuthorization(destinationModelId); - const targetAuth = await destinationClient.path("/documentModels:authorizeCopy").post({ - body: { - modelId: destinationModelId, - }, - }); - if (isUnexpected(targetAuth)) { - throw targetAuth.body.error; - } - const sourceEndpoint = process.env.DOCUMENT_INTELLIGENCE_SOURCE_ENDPOINT || ""; - const sourceModelId = process.env.COPY_SOURCE_MODEL_ID || ""; - - // Then, the source resource can initiate the copy operation. - const sourceClient = DocumentIntelligence( - sourceEndpoint, - { key: process.env.DOCUMENT_INTELLIGENCE_SOURCE_API_KEY || "" }) - - const copyInitResponse = await sourceClient - .path("/documentModels/{modelId}:copyTo", sourceModelId) - .post({ - body: targetAuth.body, - }); - - if (isUnexpected(copyInitResponse)) { - throw copyInitResponse.body.error; - } - const copyPoller = getLongRunningPoller(sourceClient, copyInitResponse); - const model = ( - (await (await copyPoller).pollUntilDone()).body as DocumentModelCopyToOperationDetailsOutput - ).result!; - - console.log("Model ID:", model.modelId); - console.log("Description:", model.description); - console.log("Created:", model.createdDateTime); -} - -main().catch((error) => { - console.error("An error occurred:", error); - process.exit(1); -}); diff --git a/sdk/documentintelligence/ai-document-intelligence-rest/samples-dev/extractLayout.ts b/sdk/documentintelligence/ai-document-intelligence-rest/samples-dev/extractLayout.ts deleted file mode 100644 index 6b94c2622512..000000000000 --- a/sdk/documentintelligence/ai-document-intelligence-rest/samples-dev/extractLayout.ts +++ /dev/null @@ -1,81 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT License. - -/** - * This sample shows how to extract only the basic layout information from a document using the `beginExtractLayout` - * method. Layout information consists of the arrangement of basic OCR elements, such as pages (including their contents - * such as lines, words, and selection marks), tables, and text font styles. 
- * - * @summary use the prebuilt layout model to extract basic document elements only - * @azsdk-skip-javascript - */ - -import DocumentIntelligence, { AnalyzeResultOperationOutput, getLongRunningPoller, isUnexpected } from "@azure-rest/ai-document-intelligence"; - - -import * as dotenv from "dotenv"; -dotenv.config(); - -async function main() { - const client = DocumentIntelligence( - process.env["DOCUMENT_INTELLIGENCE_ENDPOINT"] || "", - { key: process.env["DOCUMENT_INTELLIGENCE_API_KEY"] || "" }) - - const initialResponse = await client - .path("/documentModels/{modelId}:analyze", "prebuilt-layout") - .post({ - contentType: "application/json", - body: { - urlSource: "https://raw.githubusercontent.com/Azure/azure-sdk-for-js/main/sdk/formrecognizer/ai-form-recognizer/assets/forms/Invoice_1.pdf", - } - }); - - if (isUnexpected(initialResponse)) { - throw initialResponse.body.error; - } - - const poller = getLongRunningPoller(client, initialResponse); - const analyzeResult = ( - (await (await poller).pollUntilDone()).body as AnalyzeResultOperationOutput - ).analyzeResult; - - const pages = analyzeResult?.pages; - const tables = analyzeResult?.tables; - - if (!pages || pages.length <= 0) { - console.log("No pages were extracted from the document."); - } else { - console.log("Pages:"); - for (const page of pages) { - console.log("- Page", page.pageNumber, `(unit: ${page.unit})`); - console.log(` ${page.width}x${page.height}, angle: ${page.angle}`); - console.log( - ` ${page.lines && page.lines.length} lines, ${page.words && page.words.length} words` - ); - - if (page.lines && page.lines.length > 0) { - console.log(" Lines:"); - - for (const line of page.lines) { - console.log(` - "${line.content}"`); - } - } - } - } - - if (!tables || tables.length <= 0) { - console.log("No tables were extracted from the document."); - } else { - console.log("Tables:"); - for (const table of tables) { - console.log( - `- Extracted table: ${table.columnCount} columns, ${table.rowCount} rows (${table.cells.length} cells)` - ); - } - } -} - -main().catch((error) => { - console.error("An error occurred:", error); - process.exit(1); -}); diff --git a/sdk/documentintelligence/ai-document-intelligence-rest/samples-dev/getClassifier.ts b/sdk/documentintelligence/ai-document-intelligence-rest/samples-dev/getClassifier.ts deleted file mode 100644 index 1a6352dba334..000000000000 --- a/sdk/documentintelligence/ai-document-intelligence-rest/samples-dev/getClassifier.ts +++ /dev/null @@ -1,41 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT License. - -/** - * This sample shows how to get the details of a custom classifier by its ID, including information about the document - * types that the classifier supports. - * - * @summary get information about a classifier by its ID - */ - -import DocumentIntelligence, { isUnexpected } from "@azure-rest/ai-document-intelligence"; - -import * as dotenv from "dotenv"; -dotenv.config(); - -async function main() { - const client = DocumentIntelligence( - process.env["DOCUMENT_INTELLIGENCE_ENDPOINT"] || "", - { key: process.env["DOCUMENT_INTELLIGENCE_API_KEY"] || "" }) - - const classifierId = process.env.CUSTOM_CLASSIFIER_ID ?? 
""; - const classifier = await client.path("/documentClassifiers/{classifierId}", classifierId).get(); - - if (isUnexpected(classifier)) { - throw classifier.body.error; - } - console.log("ID", classifier.body.classifierId); - console.log("Created:", classifier.body.createdDateTime); - console.log("Description: ", classifier.body.description || ""); - - console.log("Document Types:"); - for (const [docType, details] of Object.entries(classifier.body.docTypes || {})) { - // We can also programmatically access a schema of the fields. - console.log(`- Name "${docType}", source: ${JSON.stringify(details, null, 2)}`); - } -} - -main().catch((error) => { - console.error("An error occurred:", error); - process.exit(1); -}); diff --git a/sdk/documentintelligence/ai-document-intelligence-rest/samples-dev/getInfo.ts b/sdk/documentintelligence/ai-document-intelligence-rest/samples-dev/getInfo.ts deleted file mode 100644 index b5644db43610..000000000000 --- a/sdk/documentintelligence/ai-document-intelligence-rest/samples-dev/getInfo.ts +++ /dev/null @@ -1,32 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT License. - -/** - * This sample shows how to programmatically retrieve metadata about the number of custom models in the Form Recognizer - * resource and the limit of custom models that the resource will allow to be created. - * - * @summary get information about the count and limit of custom models in the resource - */ - -import DocumentIntelligence, { isUnexpected } from "@azure-rest/ai-document-intelligence"; - -import * as dotenv from "dotenv"; -dotenv.config(); - -async function main() { - const client = DocumentIntelligence( - process.env["DOCUMENT_INTELLIGENCE_ENDPOINT"] || "", - { key: process.env["DOCUMENT_INTELLIGENCE_API_KEY"] || "" }) - const info = await client.path("/info").get(); - if (isUnexpected(info)) { - throw info.body.error; - } - console.log( - `Custom document models: ${info.body.customDocumentModels.count} of ${info.body.customDocumentModels.limit}` - ); -} - -main().catch((error) => { - console.error("An error occurred:", error); - process.exit(1); -}); diff --git a/sdk/documentintelligence/ai-document-intelligence-rest/samples-dev/getModel.ts b/sdk/documentintelligence/ai-document-intelligence-rest/samples-dev/getModel.ts deleted file mode 100644 index 1ca43511e8c0..000000000000 --- a/sdk/documentintelligence/ai-document-intelligence-rest/samples-dev/getModel.ts +++ /dev/null @@ -1,45 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT License. - -/** - * This sample shows how to get the full information about a custom model by its model ID, including information about - * the document types in the model and their field schemas. - * - * @summary get information about a model by its ID - */ - -import DocumentIntelligence, { isUnexpected } from "@azure-rest/ai-document-intelligence"; - -import * as dotenv from "dotenv"; -dotenv.config(); - -async function main() { - const client = DocumentIntelligence( - process.env["DOCUMENT_INTELLIGENCE_ENDPOINT"] || "", - { key: process.env["DOCUMENT_INTELLIGENCE_API_KEY"] || "" }) - - // The model ID to query. This can be any model ID, not just a custom model, so for example - // the following sample uses `"prebuilt-idDocument"`, but you can change it to any model ID - // you'd like to inspect. 
- const modelId = "prebuilt-idDocument"; - const model = await client.path("/documentModels/{modelId}", modelId).get(); - - if (isUnexpected(model)) { - throw model.body.error; - } - - console.log("ID", model.body.modelId); - console.log("Created:", model.body.createdDateTime); - console.log("Description: ", model.body.description || ""); - - console.log("Document Types:"); - for (const [docType, { fieldSchema }] of Object.entries(model.body.docTypes || {})) { - // We can also programmatically access a schema of the fields. - console.log("-", docType, JSON.stringify(fieldSchema, undefined, 2)); - } -} - -main().catch((error) => { - console.error("An error occurred:", error); - process.exit(1); -}); diff --git a/sdk/documentintelligence/ai-document-intelligence-rest/samples-dev/listModels.ts b/sdk/documentintelligence/ai-document-intelligence-rest/samples-dev/listModels.ts deleted file mode 100644 index eb081a1ce61a..000000000000 --- a/sdk/documentintelligence/ai-document-intelligence-rest/samples-dev/listModels.ts +++ /dev/null @@ -1,49 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT License. - -/** - * This sample shows how to iterate over the models in a resource. This will include both custom and prebuilt models. - * - * @summary iterate over the models in a resource - */ - -import DocumentIntelligence, { isUnexpected, paginate } from "@azure-rest/ai-document-intelligence"; - -import * as dotenv from "dotenv"; -dotenv.config(); - -async function main() { - const client = DocumentIntelligence( - process.env["DOCUMENT_INTELLIGENCE_ENDPOINT"] || "", - { key: process.env["DOCUMENT_INTELLIGENCE_API_KEY"] || "" }) - - - const response = await client.path("/documentModels").get(); - if (isUnexpected(response)) { - throw response.body.error; - } - - for await (const model of paginate(client, response)) { - console.log("- ID", model.modelId); - console.log(" Created:", model.createdDateTime); - console.log(" Description: ", model.description || ""); - - // The model summary does not include `docTypes`, so we must additionally call `getModel` to retrieve them - const detailedModel = (await client.path("/documentModels/{modelId}", model.modelId).get()); - - if (isUnexpected(detailedModel)) { - throw detailedModel.body.error; - } - const docTypes = detailedModel.body.docTypes; - - console.log(" Document Types:"); - for (const docType of Object.keys(docTypes || {})) { - console.log(" -", docType); - } - } -} - -main().catch((error) => { - console.error("An error occurred:", error); - process.exit(1); -}); diff --git a/sdk/documentintelligence/ai-document-intelligence-rest/samples-dev/readDocument.ts b/sdk/documentintelligence/ai-document-intelligence-rest/samples-dev/readDocument.ts deleted file mode 100644 index 440c4e16040c..000000000000 --- a/sdk/documentintelligence/ai-document-intelligence-rest/samples-dev/readDocument.ts +++ /dev/null @@ -1,93 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT License. - -/** - * This sample shows how to extract the text content of a document using the "prebuilt-read" model. 
- * - * @summary use the prebuilt "read" model to extract information about the text content of a document - */ - -import DocumentIntelligence, { AnalyzeResultOperationOutput, getLongRunningPoller, isUnexpected } from "@azure-rest/ai-document-intelligence"; - -import * as dotenv from "dotenv"; -dotenv.config(); - -async function main() { - const client = DocumentIntelligence( - process.env["DOCUMENT_INTELLIGENCE_ENDPOINT"] || "", - { key: process.env["DOCUMENT_INTELLIGENCE_API_KEY"] || "" }) - - const initialResponse = await client - .path("/documentModels/{modelId}:analyze", "prebuilt-read") - .post({ - contentType: "application/json", - body: { - urlSource: "https://raw.githubusercontent.com/Azure/azure-sdk-for-js/main/sdk/formrecognizer/ai-form-recognizer/assets/forms/Invoice_1.pdf", - }, - queryParameters: { features: ["barcodes"] }, - }); - - if (isUnexpected(initialResponse)) { - throw initialResponse.body.error; - } - const poller = await getLongRunningPoller(client, initialResponse); - const analyzeResult = ( - (await (poller).pollUntilDone()).body as AnalyzeResultOperationOutput - ).analyzeResult; - - - // The "prebuilt-read" model (`beginReadDocument` method) only extracts information about the textual content of the - // document, such as page text elements and information about the language of the text. - const pages = analyzeResult?.pages; - const languages = analyzeResult?.languages; - const styles = analyzeResult?.styles; - - - if (!pages || pages.length <= 0) { - console.log("No pages were extracted from the document."); - } else { - console.log("Pages:"); - for (const page of pages) { - console.log("- Page", page.pageNumber, `(unit: ${page.unit})`); - console.log(` ${page.width}x${page.height}, angle: ${page.angle}`); - console.log( - ` ${page.lines && page.lines.length} lines, ${page.words && page.words.length} words` - ); - - if (page.lines && page.lines.length > 0) { - console.log(" Lines:"); - - for (const line of page.lines) { - console.log(` - "${line.content}"`); - } - } - } - } - - if (!languages || languages.length <= 0) { - console.log("No language spans were extracted from the document."); - } else { - console.log("Languages:"); - for (const languageEntry of languages) { - console.log( - `- Found language: ${languageEntry.locale} (confidence: ${languageEntry.confidence})` - ); - } - } - - if (!styles || styles.length <= 0) { - console.log("No text styles were extracted from the document."); - } else { - console.log("Styles:"); - for (const style of styles) { - console.log( - `- Handwritten: ${style.isHandwritten ? "yes" : "no"} (confidence=${style.confidence})` - ); - } - } -} - -main().catch((error) => { - console.error("An error occurred:", error); - process.exit(1); -}); diff --git a/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/javascript/README.md b/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/javascript/README.md deleted file mode 100644 index aac9eaa2d9c5..000000000000 --- a/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/javascript/README.md +++ /dev/null @@ -1,72 +0,0 @@ -# Azure Document Intelligence Rest Client client library samples for JavaScript (Beta) - -These sample programs show how to use the JavaScript client libraries for Azure Document Intelligence Rest Client in some common scenarios. 
- -| **File Name** | **Description** | -| ------------------------------------------------------- | ------------------------------------------------------------------------------------------ | -| [composeModel.js][composemodel] | create a composed model from several individual labeled models | -| [analyzeDocumentByModelId.js][analyzedocumentbymodelid] | analyze a document using a model by ID | -| [analyzeReceiptByModelId.js][analyzereceiptbymodelid] | use the "prebuilt-receipt" model ID to extract data from a receipt document (weakly-typed) | -| [buildClassifier.js][buildclassifier] | build a classifier from a training data set | -| [buildModel.js][buildmodel] | build a model with a single document type from a training data set | -| [classifyDocument.js][classifydocument] | use a custom classifier to classify a document | -| [copyModel.js][copymodel] | copy a model from one resource to another | -| [getClassifier.js][getclassifier] | get information about a classifier by its ID | -| [getInfo.js][getinfo] | get information about the count and limit of custom models in the resource | -| [getModel.js][getmodel] | get information about a model by its ID | -| [listModels.js][listmodels] | iterate over the models in a resource | -| [readDocument.js][readdocument] | use the prebuilt "read" model to extract information about the text content of a document | - -## Prerequisites - -The sample programs are compatible with [LTS versions of Node.js](https://github.com/nodejs/release#release-schedule). - -You need [an Azure subscription][freesub] to run these sample programs. - -Samples retrieve credentials to access the service endpoint from environment variables. Alternatively, edit the source code to include the appropriate credentials. See each individual sample for details on which environment variables/credentials it requires to function. - -Adapting the samples to run in the browser may require some additional consideration. For details, please see the [package README][package]. - -## Setup - -To run the samples using the published version of the package: - -1. Install the dependencies using `npm`: - -```bash -npm install -``` - -2. Edit the file `sample.env`, adding the correct credentials to access the Azure service and run the samples. Then rename the file from `sample.env` to just `.env`. The sample programs will read this file automatically. - -3. Run whichever samples you like (note that some samples may require additional setup, see the table above): - -```bash -node composeModel.js -``` - -Alternatively, run a single sample with the correct environment variables set (setting up the `.env` file is not required if you do this), for example (cross-platform): - -```bash -npx dev-tool run vendored cross-env DOCUMENT_INTELLIGENCE_ENDPOINT="" DOCUMENT_INTELLIGENCE_API_KEY="" PURCHASE_ORDER_SUPPLIES_SAS_URL="" PURCHASE_ORDER_EQUIPMENT_SAS_URL="" PURCHASE_ORDER_FURNITURE_SAS_URL="" PURCHASE_ORDER_CLEANING_SUPPLIES_SAS_URL="" node composeModel.js -``` - -## Next Steps - -Take a look at our [API Documentation][apiref] for more information about the APIs that are available in the clients. 
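For orientation, the samples listed above all reduce to the same few calls: create the REST client from the endpoint and key, send the request with `client.path(...)`, check the response with `isUnexpected`, and drive long-running operations with `getLongRunningPoller`. The condensed sketch below (written in TypeScript; the JavaScript samples in this directory express the same calls with `require`) assumes the `DOCUMENT_INTELLIGENCE_ENDPOINT` and `DOCUMENT_INTELLIGENCE_API_KEY` environment variables from `sample.env` are set, and is only illustrative rather than a replacement for any individual sample:

```ts
import DocumentIntelligence, {
  getLongRunningPoller,
  isUnexpected,
} from "@azure-rest/ai-document-intelligence";
import * as dotenv from "dotenv";
dotenv.config();

async function main(): Promise<void> {
  // Create the REST client from the endpoint and key, as every sample in this directory does.
  const client = DocumentIntelligence(process.env["DOCUMENT_INTELLIGENCE_ENDPOINT"] || "", {
    key: process.env["DOCUMENT_INTELLIGENCE_API_KEY"] || "",
  });

  // Start an analysis operation (here against the prebuilt layout model) ...
  const initialResponse = await client
    .path("/documentModels/{modelId}:analyze", "prebuilt-layout")
    .post({
      contentType: "application/json",
      body: {
        urlSource:
          "https://raw.githubusercontent.com/Azure/azure-sdk-for-js/main/sdk/formrecognizer/ai-form-recognizer/assets/forms/Invoice_1.pdf",
      },
    });

  // ... fail fast on error responses ...
  if (isUnexpected(initialResponse)) {
    throw initialResponse.body.error;
  }

  // ... and poll the long-running operation until the analysis result is available.
  const poller = await getLongRunningPoller(client, initialResponse);
  const result = (await poller.pollUntilDone()).body;
  console.log(JSON.stringify(result, undefined, 2));
}

main().catch((error) => {
  console.error("An error occurred:", error);
  process.exit(1);
});
```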
- -[composemodel]: https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/javascript/composeModel.js -[analyzedocumentbymodelid]: https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/javascript/analyzeDocumentByModelId.js -[analyzereceiptbymodelid]: https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/javascript/analyzeReceiptByModelId.js -[buildclassifier]: https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/javascript/buildClassifier.js -[buildmodel]: https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/javascript/buildModel.js -[classifydocument]: https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/javascript/classifyDocument.js -[copymodel]: https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/javascript/copyModel.js -[getclassifier]: https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/javascript/getClassifier.js -[getinfo]: https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/javascript/getInfo.js -[getmodel]: https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/javascript/getModel.js -[listmodels]: https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/javascript/listModels.js -[readdocument]: https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/javascript/readDocument.js -[apiref]: https://docs.microsoft.com/javascript/api/@azure-rest/ai-document-intelligence -[freesub]: https://azure.microsoft.com/free/ -[package]: https://github.com/Azure/azure-sdk-for-js/tree/main/sdk/documentintelligence/ai-document-intelligence-rest/README.md diff --git a/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/javascript/analyzeDocumentByModelId.js b/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/javascript/analyzeDocumentByModelId.js deleted file mode 100644 index 098e789db469..000000000000 --- a/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/javascript/analyzeDocumentByModelId.js +++ /dev/null @@ -1,55 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT License. - -/** - * This sample shows how to analyze a document using a model with a given ID. The model ID may refer to any model, - * whether custom, prebuilt, composed, etc. 
- * - * @summary analyze a document using a model by ID - */ - -const DocumentIntelligence = require("@azure-rest/ai-document-intelligence").default, - { getLongRunningPoller, isUnexpected } = require("@azure-rest/ai-document-intelligence"); - -require("dotenv").config(); - -async function main() { - const client = DocumentIntelligence( - process.env["DOCUMENT_INTELLIGENCE_ENDPOINT"] || "", - { key: process.env["DOCUMENT_INTELLIGENCE_API_KEY"] || "" }, - ); - const modelId = process.env.DOCUMENT_INTELLIGENCE_CUSTOM_MODEL_ID || ""; // "prebuilt-layout"; - - const initialResponse = await client.path("/documentModels/{modelId}:analyze", modelId).post({ - contentType: "application/json", - body: { - urlSource: - "https://raw.githubusercontent.com/Azure/azure-sdk-for-js/main/sdk/formrecognizer/ai-form-recognizer/assets/receipt/contoso-receipt.png", - }, - queryParameters: { locale: "en-IN" }, - }); - if (isUnexpected(initialResponse)) { - throw initialResponse.body.error; - } - const poller = await getLongRunningPoller(client, initialResponse); - const analyzeResult = (await poller.pollUntilDone()).body.analyzeResult; - - const documents = analyzeResult?.documents; - - const document = documents && documents[0]; - if (!document) { - throw new Error("Expected at least one document in the result."); - } - - console.log( - "Extracted document:", - document.docType, - `(confidence: ${document.confidence || ""})`, - ); - console.log("Fields:", document.fields); -} - -main().catch((error) => { - console.error("An error occurred:", error); - process.exit(1); -}); diff --git a/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/javascript/analyzeReceiptByModelId.js b/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/javascript/analyzeReceiptByModelId.js deleted file mode 100644 index 9264a61f4e1a..000000000000 --- a/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/javascript/analyzeReceiptByModelId.js +++ /dev/null @@ -1,59 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT License. - -/** - * This sample shows how to extract elements of a receipt from a URL to a file using the prebuilt receipt model. Rather - * than using the `PrebuiltModels.Receipt` document model, this sample shows the use of the prebuilt model by ID, - * resulting in a weaker type that exactly mirrors the model's field schema at runtime. - * - * The prebuilt receipt model can return several fields. 
For a detailed list of the fields supported by the - * receipt model, see the `Receipt` type in the documentation, or refer to the following link: - * - * https://aka.ms/azsdk/documentitelligence/receiptfieldschema - * - * @summary use the "prebuilt-receipt" model ID to extract data from a receipt document (weakly-typed) - */ - -const DocumentIntelligence = require("@azure-rest/ai-document-intelligence").default, - { getLongRunningPoller, isUnexpected } = require("@azure-rest/ai-document-intelligence"); - -require("dotenv").config(); - -async function main() { - const client = DocumentIntelligence( - process.env["DOCUMENT_INTELLIGENCE_ENDPOINT"] || "", - { key: process.env["DOCUMENT_INTELLIGENCE_API_KEY"] || "" }, - ); - - const initialResponse = await client - .path("/documentModels/{modelId}:analyze", "prebuilt-receipt") - .post({ - contentType: "application/json", - body: { - // The Document Intelligence service will access the following URL to a receipt image and extract data from it - urlSource: - "https://raw.githubusercontent.com/Azure/azure-sdk-for-js/main/sdk/formrecognizer/ai-form-recognizer/assets/receipt/contoso-receipt.png", - }, - }); - if (isUnexpected(initialResponse)) { - throw initialResponse.body.error; - } - const poller = await getLongRunningPoller(client, initialResponse); - - poller.onProgress((state) => console.log("Operation:", state.result, state.status)); - const analyzeResult = (await poller.pollUntilDone()).body.analyzeResult; - - const documents = analyzeResult?.documents; - - const result = documents && documents[0]; - if (result) { - console.log(result.fields); - } else { - throw new Error("Expected at least one receipt in the result."); - } -} - -main().catch((error) => { - console.error("An error occurred:", error); - process.exit(1); -}); diff --git a/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/javascript/buildClassifier.js b/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/javascript/buildClassifier.js deleted file mode 100644 index ed481b8012aa..000000000000 --- a/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/javascript/buildClassifier.js +++ /dev/null @@ -1,78 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT License. - -/** - * This sample shows how to programmatically build a custom classifier. - * - * The Document Intelligence service expects the training data to be organized and labeled according to a particular - * convention and stored in an Azure Storage container. 
For more information about creating a training data set, please - * see the information at the following link to the service's documentation: - * - * https://aka.ms/azsdk/documentitelligence/buildclassifiermodel - * - * @summary build a classifier from a training data set - */ - -const DocumentIntelligence = require("@azure-rest/ai-document-intelligence").default, - { getLongRunningPoller, isUnexpected } = require("@azure-rest/ai-document-intelligence"); - -require("dotenv").config(); - -async function main() { - const client = DocumentIntelligence( - process.env["DOCUMENT_INTELLIGENCE_ENDPOINT"] || "", - { key: process.env["DOCUMENT_INTELLIGENCE_API_KEY"] || "" }, - ); - - const random = Date.now().toString(); - const modelId = - (process.env.CUSTOM_CLASSIFIER_ID || "") + random.substring(random.length - 6); - - const trainingDataSasUrl1 = - process.env.CUSTOM_CLASSIFIER_TRAINING_DATA_SAS_URL_1 || ""; - - const trainingDataSasUrl2 = - process.env.CUSTOM_CLASSIFIER_TRAINING_DATA_SAS_URL_2 || ""; - - const initialResponse = await client.path("/documentClassifiers:build").post({ - body: { - classifierId: modelId, - description: "Custom classifier description", - docTypes: { - foo: { - azureBlobSource: { - containerUrl: trainingDataSasUrl1, - }, - }, - bar: { - azureBlobSource: { - containerUrl: trainingDataSasUrl2, - }, - }, - }, - }, - }); - - if (isUnexpected(initialResponse)) { - throw initialResponse.body.error; - } - const poller = await getLongRunningPoller(client, initialResponse); - const classifier = (await poller.pollUntilDone()).body.result; - if (!classifier) { - throw new Error("Expected a DocumentClassifierDetailsOutput response."); - } - - console.log("Classifier ID:", classifier.classifierId); - console.log("Description:", classifier.description); - console.log("Created:", classifier.createdDateTime); - - console.log("Document Types:"); - for (const [docType, details] of Object.entries(classifier.docTypes)) { - console.log(`- Name: "${docType}", source: ${JSON.stringify(details, null, 2)}`); - } -} - -main().catch((error) => { - console.error("An error occurred:", error); - process.exit(1); -}); diff --git a/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/javascript/buildModel.js b/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/javascript/buildModel.js deleted file mode 100644 index bbc03c07d574..000000000000 --- a/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/javascript/buildModel.js +++ /dev/null @@ -1,78 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT License. - -/** - * This sample shows how to programmatically build a model with a single document type using a training data set. - * - * The Document Intelligence service expects the training data to be organized and labeled according to a particular - * convention and stored in an Azure Storage container. 
For more information about creating a training data set, please - * see the information at the following link to the service's documentation: - * - * https://aka.ms/azsdk/documentitelligence/buildtrainingset - * - * @summary build a model with a single document type from a training data set - */ - -const DocumentIntelligence = require("@azure-rest/ai-document-intelligence").default, - { getLongRunningPoller, isUnexpected } = require("@azure-rest/ai-document-intelligence"); - -require("dotenv").config(); - -async function main() { - const client = DocumentIntelligence( - process.env["DOCUMENT_INTELLIGENCE_ENDPOINT"] || "", - { key: process.env["DOCUMENT_INTELLIGENCE_API_KEY"] || "" }, - ); - const random = Date.now().toString(); - const modelId = - (process.env.CUSTOM_MODEL_ID || "") + random.substring(random.length - 6); - const trainingDataSasUrl = - process.env.CUSTOM_MODEL_TRAINING_DATA_SAS_URL || ""; - - const initialResponse = await client.path("/documentModels:build").post({ - body: { - buildMode: "template", - modelId, - azureBlobSource: { - containerUrl: trainingDataSasUrl, - }, - }, - }); - - if (isUnexpected(initialResponse)) { - throw initialResponse.body.error; - } - const poller = getLongRunningPoller(client, initialResponse); - const model = (await (await poller).pollUntilDone()).body.result; - if (!model) { - throw new Error("Expected a DocumentModelDetailsOutput response."); - } - - console.log("Model ID:", model.modelId); - console.log("Description:", model.description); - console.log("Created:", model.createdDateTime); - - // A model may contain several document types, which describe the possible object structures of fields extracted using - // this model - - console.log("Document Types:"); - for (const [docType, { description, fieldSchema: schema }] of Object.entries( - model.docTypes || {}, - )) { - console.log(`- Name: "${docType}"`); - console.log(` Description: "${description}"`); - - // For simplicity, this example will only show top-level field names - console.log(" Fields:"); - - for (const [fieldName, fieldSchema] of Object.entries(schema)) { - console.log(` - "${fieldName}" (${fieldSchema.type})`); - console.log(` ${fieldSchema.description || ""}`); - } - } -} - -main().catch((error) => { - console.error("An error occurred:", error); - process.exit(1); -}); diff --git a/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/javascript/classifyDocument.js b/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/javascript/classifyDocument.js deleted file mode 100644 index 07f3b4365304..000000000000 --- a/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/javascript/classifyDocument.js +++ /dev/null @@ -1,54 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT License. - -/** - * This sample shows how to use a custom classifier to get the document type (class) of a document. 
- * - * @summary use a custom classifier to classify a document - */ - -const DocumentIntelligence = require("@azure-rest/ai-document-intelligence").default, - { getLongRunningPoller, isUnexpected } = require("@azure-rest/ai-document-intelligence"); - -require("dotenv").config(); - -async function main() { - const client = DocumentIntelligence( - process.env["DOCUMENT_INTELLIGENCE_ENDPOINT"] || "", - { key: process.env["DOCUMENT_INTELLIGENCE_API_KEY"] || "" }, - ); - const documentUrl = - "https://raw.githubusercontent.com/Azure/azure-sdk-for-js/main/sdk/formrecognizer/ai-form-recognizer/assets/invoice/Invoice_1.pdf"; - - const classifierId = process.env.CUSTOM_CLASSIFIER_ID ?? ""; - const initialResponse = await client - .path("/documentClassifiers/{classifierId}:analyze", classifierId) - .post({ - contentType: "application/json", - body: { - urlSource: documentUrl, - }, - }); - - if (isUnexpected(initialResponse)) { - throw initialResponse.body.error; - } - - const poller = await getLongRunningPoller(client, initialResponse); - const analyzeResult = (await poller.pollUntilDone()).body.analyzeResult; - - if (analyzeResult?.documents === undefined || analyzeResult.documents.length === 0) { - throw new Error("Failed to extract any documents."); - } - - for (const document of analyzeResult.documents) { - console.log( - `Extracted a document with type '${document.docType}' on page ${document.boundingRegions?.[0].pageNumber} (confidence: ${document.confidence})`, - ); - } -} - -main().catch((error) => { - console.error("An error occurred:", error); - process.exit(1); -}); diff --git a/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/javascript/composeModel.js b/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/javascript/composeModel.js deleted file mode 100644 index 59f97ac3dd1d..000000000000 --- a/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/javascript/composeModel.js +++ /dev/null @@ -1,117 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT License. - -/** - * This sample demonstrates how to create a composed model from several individual labeled models. - * - * We build all of the component models used in the composition operation and then finally create the composed model. - * The resulting composed model will have all of the document types of its component submodels. When used for analysis, - * it will first classify the input as belonging to one of the document types.zzs - * - * @summary create a composed model from several individual labeled models - */ - -const DocumentIntelligence = require("@azure-rest/ai-document-intelligence").default, - { getLongRunningPoller, isUnexpected } = require("@azure-rest/ai-document-intelligence"); - -require("dotenv").config(); - -async function main() { - const client = DocumentIntelligence( - process.env["DOCUMENT_INTELLIGENCE_ENDPOINT"] || "", - { key: process.env["DOCUMENT_INTELLIGENCE_API_KEY"] || "" }, - ); - - // This object will hold the SAS-encoded URLs to containers that hold - // different types of purchase order documents and their labels. 
- const purchaseOrderSasUrls = { - supplies: - process.env["PURCHASE_ORDER_SUPPLIES_SAS_URL"] || - "", - equipment: - process.env["PURCHASE_ORDER_EQUIPMENT_SAS_URL"] || - "", - furniture: - process.env["PURCHASE_ORDER_FURNITURE_SAS_URL"] || - "", - cleaningSupplies: - process.env["PURCHASE_ORDER_CLEANING_SUPPLIES_SAS_URL"] || - "", - }; - - // We'll put the last few digits of the current timestamp into the model IDs, just to make sure they're unique. - const random = Date.now().toString(); - - const modelIds = await Promise.all( - Object.entries(purchaseOrderSasUrls) - .map(async ([kind, sasUrl]) => { - const modelId = kind + "ComponentModel" + random.substring(random.length - 6); - const initialResponse = await client.path("/documentModels:build").post({ - body: { - buildMode: "template", - modelId: modelId, - azureBlobSource: { - containerUrl: sasUrl, - }, - }, - }); - if (isUnexpected(initialResponse)) { - throw initialResponse.body.error; - } - const poller = await getLongRunningPoller(client, initialResponse); - const model = (await poller.pollUntilDone()).body.result; - - return model; - }) - .map(async (model) => { - return { modelId: (await model).modelId }; - }), - ); - - // Finally, create the composed model. - - const composedModelId = "purchaseOrders" + random.substring(random.length - 6); - - const initialResponse = await client.path("/documentModels:compose").post({ - body: { - description: - "A composed model that classifies purchase order documents and extracts data from them.", - componentModels: modelIds, - modelId: composedModelId, - }, - }); - - if (isUnexpected(initialResponse)) { - throw initialResponse.body.error; - } - const poller = await getLongRunningPoller(client, initialResponse); - - const composedModel = (await poller.pollUntilDone()).body.result; - - console.log("Model ID:", composedModel.modelId); - console.log("Description:", composedModel.description); - console.log("Created:", composedModel.createdDateTime); - - // The composed model should have a document type for each one of the individually built models that are composed into - // this larger model. - - console.log("Document Types:"); - for (const [docType, { description, fieldSchema: schema }] of Object.entries( - composedModel.docTypes || {}, - )) { - console.log(`- Name: "${docType}"`); - console.log(` Description: "${description}"`); - - // For simplicity, this example will only show top-level field names - console.log(" Fields:"); - - for (const [fieldName, fieldSchema] of Object.entries(schema)) { - console.log(` - "${fieldName}" (${fieldSchema.type})`); - console.log(` ${fieldSchema.description || ""}`); - } - } -} - -main().catch((err) => { - console.error("The sample encountered an error:", err); -}); diff --git a/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/javascript/copyModel.js b/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/javascript/copyModel.js deleted file mode 100644 index e729fa10de76..000000000000 --- a/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/javascript/copyModel.js +++ /dev/null @@ -1,64 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT License. - -/** - * This sample shows how to copy a model from one resource to another. The model is created with a new model ID (and - * optionally a new description) in the destination resource, but will have the same document types as the source model. 
- * - * @summary copy a model from one resource to another - */ - -const DocumentIntelligence = require("@azure-rest/ai-document-intelligence").default, - { getLongRunningPoller, isUnexpected } = require("@azure-rest/ai-document-intelligence"); - -require("dotenv").config(); - -async function main() { - const random = Date.now().toString(); - const destinationModelId = - (process.env.CUSTOM_MODEL_ID || "") + random.substring(random.length - 6); - - // The authorization must be created by the destination resource. - // const destinationClient = new DocumentModelAdministrationClient(endpoint, credential); - const destinationClient = DocumentIntelligence( - process.env["DOCUMENT_INTELLIGENCE_ENDPOINT"] || "", - { key: process.env["DOCUMENT_INTELLIGENCE_API_KEY"] || "" }, - ); - // const authorization = await destinationClient.getCopyAuthorization(destinationModelId); - const targetAuth = await destinationClient.path("/documentModels:authorizeCopy").post({ - body: { - modelId: destinationModelId, - }, - }); - if (isUnexpected(targetAuth)) { - throw targetAuth.body.error; - } - const sourceEndpoint = process.env.DOCUMENT_INTELLIGENCE_SOURCE_ENDPOINT || ""; - const sourceModelId = process.env.COPY_SOURCE_MODEL_ID || ""; - - // Then, the source resource can initiate the copy operation. - const sourceClient = DocumentIntelligence(sourceEndpoint, { - key: process.env.DOCUMENT_INTELLIGENCE_SOURCE_API_KEY || "", - }); - - const copyInitResponse = await sourceClient - .path("/documentModels/{modelId}:copyTo", sourceModelId) - .post({ - body: targetAuth.body, - }); - - if (isUnexpected(copyInitResponse)) { - throw copyInitResponse.body.error; - } - const copyPoller = getLongRunningPoller(sourceClient, copyInitResponse); - const model = (await (await copyPoller).pollUntilDone()).body.result; - - console.log("Model ID:", model.modelId); - console.log("Description:", model.description); - console.log("Created:", model.createdDateTime); -} - -main().catch((error) => { - console.error("An error occurred:", error); - process.exit(1); -}); diff --git a/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/javascript/getClassifier.js b/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/javascript/getClassifier.js deleted file mode 100644 index 0a72ff6d40bf..000000000000 --- a/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/javascript/getClassifier.js +++ /dev/null @@ -1,42 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT License. - -/** - * This sample shows how to get the details of a custom classifier by its ID, including information about the document - * types that the classifier supports. - * - * @summary get information about a classifier by its ID - */ - -const DocumentIntelligence = require("@azure-rest/ai-document-intelligence").default, - { isUnexpected } = require("@azure-rest/ai-document-intelligence"); - -require("dotenv").config(); - -async function main() { - const client = DocumentIntelligence( - process.env["DOCUMENT_INTELLIGENCE_ENDPOINT"] || "", - { key: process.env["DOCUMENT_INTELLIGENCE_API_KEY"] || "" }, - ); - - const classifierId = process.env.CUSTOM_CLASSIFIER_ID ?? 
""; - const classifier = await client.path("/documentClassifiers/{classifierId}", classifierId).get(); - - if (isUnexpected(classifier)) { - throw classifier.body.error; - } - console.log("ID", classifier.body.classifierId); - console.log("Created:", classifier.body.createdDateTime); - console.log("Description: ", classifier.body.description || ""); - - console.log("Document Types:"); - for (const [docType, details] of Object.entries(classifier.body.docTypes || {})) { - // We can also programmatically access a schema of the fields. - console.log(`- Name "${docType}", source: ${JSON.stringify(details, null, 2)}`); - } -} - -main().catch((error) => { - console.error("An error occurred:", error); - process.exit(1); -}); diff --git a/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/javascript/getInfo.js b/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/javascript/getInfo.js deleted file mode 100644 index 290adf4bc8d8..000000000000 --- a/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/javascript/getInfo.js +++ /dev/null @@ -1,33 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT License. - -/** - * This sample shows how to programmatically retrieve metadata about the number of custom models in the Form Recognizer - * resource and the limit of custom models that the resource will allow to be created. - * - * @summary get information about the count and limit of custom models in the resource - */ - -const DocumentIntelligence = require("@azure-rest/ai-document-intelligence").default, - { isUnexpected } = require("@azure-rest/ai-document-intelligence"); - -require("dotenv").config(); - -async function main() { - const client = DocumentIntelligence( - process.env["DOCUMENT_INTELLIGENCE_ENDPOINT"] || "", - { key: process.env["DOCUMENT_INTELLIGENCE_API_KEY"] || "" }, - ); - const info = await client.path("/info").get(); - if (isUnexpected(info)) { - throw info.body.error; - } - console.log( - `Custom document models: ${info.body.customDocumentModels.count} of ${info.body.customDocumentModels.limit}`, - ); -} - -main().catch((error) => { - console.error("An error occurred:", error); - process.exit(1); -}); diff --git a/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/javascript/getModel.js b/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/javascript/getModel.js deleted file mode 100644 index 0a2182b4dace..000000000000 --- a/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/javascript/getModel.js +++ /dev/null @@ -1,46 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT License. - -/** - * This sample shows how to get the full information about a custom model by its model ID, including information about - * the document types in the model and their field schemas. - * - * @summary get information about a model by its ID - */ - -const DocumentIntelligence = require("@azure-rest/ai-document-intelligence").default, - { isUnexpected } = require("@azure-rest/ai-document-intelligence"); - -require("dotenv").config(); - -async function main() { - const client = DocumentIntelligence( - process.env["DOCUMENT_INTELLIGENCE_ENDPOINT"] || "", - { key: process.env["DOCUMENT_INTELLIGENCE_API_KEY"] || "" }, - ); - - // The model ID to query. This can be any model ID, not just a custom model, so for example - // the following sample uses `"prebuilt-idDocument"`, but you can change it to any model ID - // you'd like to inspect. 
- const modelId = "prebuilt-idDocument"; - const model = await client.path("/documentModels/{modelId}", modelId).get(); - - if (isUnexpected(model)) { - throw model.body.error; - } - - console.log("ID", model.body.modelId); - console.log("Created:", model.body.createdDateTime); - console.log("Description: ", model.body.description || ""); - - console.log("Document Types:"); - for (const [docType, { fieldSchema }] of Object.entries(model.body.docTypes || {})) { - // We can also programmatically access a schema of the fields. - console.log("-", docType, JSON.stringify(fieldSchema, undefined, 2)); - } -} - -main().catch((error) => { - console.error("An error occurred:", error); - process.exit(1); -}); diff --git a/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/javascript/listModels.js b/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/javascript/listModels.js deleted file mode 100644 index 0c257a445214..000000000000 --- a/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/javascript/listModels.js +++ /dev/null @@ -1,49 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT License. - -/** - * This sample shows how to iterate over the models in a resource. This will include both custom and prebuilt models. - * - * @summary iterate over the models in a resource - */ - -const DocumentIntelligence = require("@azure-rest/ai-document-intelligence").default, - { isUnexpected, paginate } = require("@azure-rest/ai-document-intelligence"); - -require("dotenv").config(); - -async function main() { - const client = DocumentIntelligence( - process.env["DOCUMENT_INTELLIGENCE_ENDPOINT"] || "", - { key: process.env["DOCUMENT_INTELLIGENCE_API_KEY"] || "" }, - ); - - const response = await client.path("/documentModels").get(); - if (isUnexpected(response)) { - throw response.body.error; - } - - for await (const model of paginate(client, response)) { - console.log("- ID", model.modelId); - console.log(" Created:", model.createdDateTime); - console.log(" Description: ", model.description || ""); - - // The model summary does not include `docTypes`, so we must additionally call `getModel` to retrieve them - const detailedModel = await client.path("/documentModels/{modelId}", model.modelId).get(); - - if (isUnexpected(detailedModel)) { - throw detailedModel.body.error; - } - const docTypes = detailedModel.body.docTypes; - - console.log(" Document Types:"); - for (const docType of Object.keys(docTypes || {})) { - console.log(" -", docType); - } - } -} - -main().catch((error) => { - console.error("An error occurred:", error); - process.exit(1); -}); diff --git a/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/javascript/package.json b/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/javascript/package.json deleted file mode 100644 index c89c769c0df8..000000000000 --- a/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/javascript/package.json +++ /dev/null @@ -1,32 +0,0 @@ -{ - "name": "@azure-samples/ai-document-intelligence-js-beta", - "private": true, - "version": "1.0.0", - "description": "Azure Document Intelligence Rest Client client library samples for JavaScript (Beta)", - "engines": { - "node": ">=18.0.0" - }, - "repository": { - "type": "git", - "url": "git+https://github.com/Azure/azure-sdk-for-js.git", - "directory": "sdk/documentintelligence/ai-document-intelligence-rest" - }, - "keywords": [ - "node", - "azure", - "cloud", - "typescript", - "browser", - 
"isomorphic" - ], - "author": "Microsoft Corporation", - "license": "MIT", - "bugs": { - "url": "https://github.com/Azure/azure-sdk-for-js/issues" - }, - "homepage": "https://github.com/Azure/azure-sdk-for-js/tree/main/sdk/documentintelligence/ai-document-intelligence-rest", - "dependencies": { - "@azure-rest/ai-document-intelligence": "next", - "dotenv": "latest" - } -} diff --git a/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/javascript/readDocument.js b/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/javascript/readDocument.js deleted file mode 100644 index 5b300c136e3b..000000000000 --- a/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/javascript/readDocument.js +++ /dev/null @@ -1,91 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT License. - -/** - * This sample shows how to extract the text content of a document using the "prebuilt-read" model. - * - * @summary use the prebuilt "read" model to extract information about the text content of a document - */ - -const DocumentIntelligence = require("@azure-rest/ai-document-intelligence").default, - { getLongRunningPoller, isUnexpected } = require("@azure-rest/ai-document-intelligence"); - -require("dotenv").config(); - -async function main() { - const client = DocumentIntelligence( - process.env["DOCUMENT_INTELLIGENCE_ENDPOINT"] || "", - { key: process.env["DOCUMENT_INTELLIGENCE_API_KEY"] || "" }, - ); - - const initialResponse = await client - .path("/documentModels/{modelId}:analyze", "prebuilt-read") - .post({ - contentType: "application/json", - body: { - urlSource: - "https://raw.githubusercontent.com/Azure/azure-sdk-for-js/main/sdk/formrecognizer/ai-form-recognizer/assets/forms/Invoice_1.pdf", - }, - queryParameters: { features: ["barcodes"] }, - }); - - if (isUnexpected(initialResponse)) { - throw initialResponse.body.error; - } - const poller = await getLongRunningPoller(client, initialResponse); - const analyzeResult = (await poller.pollUntilDone()).body.analyzeResult; - - // The "prebuilt-read" model (`beginReadDocument` method) only extracts information about the textual content of the - // document, such as page text elements and information about the language of the text. - const pages = analyzeResult?.pages; - const languages = analyzeResult?.languages; - const styles = analyzeResult?.styles; - - if (!pages || pages.length <= 0) { - console.log("No pages were extracted from the document."); - } else { - console.log("Pages:"); - for (const page of pages) { - console.log("- Page", page.pageNumber, `(unit: ${page.unit})`); - console.log(` ${page.width}x${page.height}, angle: ${page.angle}`); - console.log( - ` ${page.lines && page.lines.length} lines, ${page.words && page.words.length} words`, - ); - - if (page.lines && page.lines.length > 0) { - console.log(" Lines:"); - - for (const line of page.lines) { - console.log(` - "${line.content}"`); - } - } - } - } - - if (!languages || languages.length <= 0) { - console.log("No language spans were extracted from the document."); - } else { - console.log("Languages:"); - for (const languageEntry of languages) { - console.log( - `- Found language: ${languageEntry.locale} (confidence: ${languageEntry.confidence})`, - ); - } - } - - if (!styles || styles.length <= 0) { - console.log("No text styles were extracted from the document."); - } else { - console.log("Styles:"); - for (const style of styles) { - console.log( - `- Handwritten: ${style.isHandwritten ? 
"yes" : "no"} (confidence=${style.confidence})`, - ); - } - } -} - -main().catch((error) => { - console.error("An error occurred:", error); - process.exit(1); -}); diff --git a/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/javascript/sample.env b/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/javascript/sample.env deleted file mode 100644 index 4edbb15d213c..000000000000 --- a/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/javascript/sample.env +++ /dev/null @@ -1,8 +0,0 @@ -# Used in most samples. Retrieve these values from a Cognitive Services instance -# in the Azure Portal. -DOCUMENT_INTELLIGENCE_ENDPOINT="https://.cognitiveservies.azure.com/" -DOCUMENT_INTELLIGENCE_API_KEY="" - -# Our tests assume that TEST_MODE is "playback" by default. You can -# change it to "record" to generate new recordings, or "live" to bypass the recorder entirely. -# TEST_MODE=playback diff --git a/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/typescript/README.md b/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/typescript/README.md deleted file mode 100644 index a1204d02ebde..000000000000 --- a/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/typescript/README.md +++ /dev/null @@ -1,95 +0,0 @@ -# Azure Document Intelligence Rest Client client library samples for TypeScript (Beta) - -These sample programs show how to use the TypeScript client libraries for Azure Document Intelligence Rest Client in some common scenarios. - -| **File Name** | **Description** | -| ------------------------------------------------------- | ------------------------------------------------------------------------------------------ | -| [composeModel.ts][composemodel] | create a composed model from several individual labeled models | -| [analyzeDocumentByModelId.ts][analyzedocumentbymodelid] | analyze a document using a model by ID | -| [analyzeIdentityDocument.ts][analyzeidentitydocument] | extract data from an identity document | -| [analyzeInvoice.ts][analyzeinvoice] | extract data from an invoice document | -| [analyzeReceipt.ts][analyzereceipt] | extract data from a receipt document | -| [analyzeReceiptByModelId.ts][analyzereceiptbymodelid] | use the "prebuilt-receipt" model ID to extract data from a receipt document (weakly-typed) | -| [analyzeW2TaxForm.ts][analyzew2taxform] | extract data from a United States W2 tax document | -| [buildClassifier.ts][buildclassifier] | build a classifier from a training data set | -| [buildModel.ts][buildmodel] | build a model with a single document type from a training data set | -| [classifyDocument.ts][classifydocument] | use a custom classifier to classify a document | -| [copyModel.ts][copymodel] | copy a model from one resource to another | -| [extractLayout.ts][extractlayout] | use the prebuilt layout model to extract basic document elements only | -| [getClassifier.ts][getclassifier] | get information about a classifier by its ID | -| [getInfo.ts][getinfo] | get information about the count and limit of custom models in the resource | -| [getModel.ts][getmodel] | get information about a model by its ID | -| [listModels.ts][listmodels] | iterate over the models in a resource | -| [readDocument.ts][readdocument] | use the prebuilt "read" model to extract information about the text content of a document | - -## Prerequisites - -The sample programs are compatible with [LTS versions of Node.js](https://github.com/nodejs/release#release-schedule). 
- -Before running the samples in Node, they must be compiled to JavaScript using the TypeScript compiler. For more information on TypeScript, see the [TypeScript documentation][typescript]. Install the TypeScript compiler using: - -```bash -npm install -g typescript -``` - -You need [an Azure subscription][freesub] to run these sample programs. - -Samples retrieve credentials to access the service endpoint from environment variables. Alternatively, edit the source code to include the appropriate credentials. See each individual sample for details on which environment variables/credentials it requires to function. - -Adapting the samples to run in the browser may require some additional consideration. For details, please see the [package README][package]. - -## Setup - -To run the samples using the published version of the package: - -1. Install the dependencies using `npm`: - -```bash -npm install -``` - -2. Compile the samples: - -```bash -npm run build -``` - -3. Edit the file `sample.env`, adding the correct credentials to access the Azure service and run the samples. Then rename the file from `sample.env` to just `.env`. The sample programs will read this file automatically. - -4. Run whichever samples you like (note that some samples may require additional setup, see the table above): - -```bash -node dist/composeModel.js -``` - -Alternatively, run a single sample with the correct environment variables set (setting up the `.env` file is not required if you do this), for example (cross-platform): - -```bash -npx dev-tool run vendored cross-env DOCUMENT_INTELLIGENCE_ENDPOINT="" DOCUMENT_INTELLIGENCE_API_KEY="" PURCHASE_ORDER_SUPPLIES_SAS_URL="" PURCHASE_ORDER_EQUIPMENT_SAS_URL="" PURCHASE_ORDER_FURNITURE_SAS_URL="" PURCHASE_ORDER_CLEANING_SUPPLIES_SAS_URL="" node dist/composeModel.js -``` - -## Next Steps - -Take a look at our [API Documentation][apiref] for more information about the APIs that are available in the clients. 
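
Every sample listed above follows the same request-and-poll pattern against the REST client. The condensed sketch below is not one of the files in this folder; it simply restates that shared pattern for quick reference. It assumes `DOCUMENT_INTELLIGENCE_ENDPOINT` and `DOCUMENT_INTELLIGENCE_API_KEY` are set in your environment and uses `prebuilt-read` as a stand-in for any model ID:

```ts
import DocumentIntelligence, {
  AnalyzeResultOperationOutput,
  getLongRunningPoller,
  isUnexpected,
} from "@azure-rest/ai-document-intelligence";

import * as dotenv from "dotenv";
dotenv.config();

async function main(): Promise<void> {
  // Create the REST client from the endpoint and key, exactly as the samples above do.
  const client = DocumentIntelligence(process.env["DOCUMENT_INTELLIGENCE_ENDPOINT"] || "", {
    key: process.env["DOCUMENT_INTELLIGENCE_API_KEY"] || "",
  });

  // Start a long-running analyze operation; any other model ID can be substituted for "prebuilt-read".
  const initialResponse = await client
    .path("/documentModels/{modelId}:analyze", "prebuilt-read")
    .post({
      contentType: "application/json",
      body: {
        urlSource:
          "https://raw.githubusercontent.com/Azure/azure-sdk-for-js/main/sdk/formrecognizer/ai-form-recognizer/assets/forms/Invoice_1.pdf",
      },
    });

  if (isUnexpected(initialResponse)) {
    throw initialResponse.body.error;
  }

  // Poll the operation to completion and read the analyze result from the final response body.
  const poller = await getLongRunningPoller(client, initialResponse);
  const analyzeResult = ((await poller.pollUntilDone()).body as AnalyzeResultOperationOutput)
    .analyzeResult;

  console.log(`Analyzed ${analyzeResult?.pages?.length ?? 0} page(s).`);
}

main().catch((error) => {
  console.error("An error occurred:", error);
  process.exit(1);
});
```

The model management samples (build, compose, copy, classify) follow the same shape: post to the corresponding `/documentModels:*` or `/documentClassifiers:*` path and poll the returned operation until it completes.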
- -[composemodel]: https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/typescript/src/composeModel.ts -[analyzedocumentbymodelid]: https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/typescript/src/analyzeDocumentByModelId.ts -[analyzeidentitydocument]: https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/typescript/src/analyzeIdentityDocument.ts -[analyzeinvoice]: https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/typescript/src/analyzeInvoice.ts -[analyzereceipt]: https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/typescript/src/analyzeReceipt.ts -[analyzereceiptbymodelid]: https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/typescript/src/analyzeReceiptByModelId.ts -[analyzew2taxform]: https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/typescript/src/analyzeW2TaxForm.ts -[buildclassifier]: https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/typescript/src/buildClassifier.ts -[buildmodel]: https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/typescript/src/buildModel.ts -[classifydocument]: https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/typescript/src/classifyDocument.ts -[copymodel]: https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/typescript/src/copyModel.ts -[extractlayout]: https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/typescript/src/extractLayout.ts -[getclassifier]: https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/typescript/src/getClassifier.ts -[getinfo]: https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/typescript/src/getInfo.ts -[getmodel]: https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/typescript/src/getModel.ts -[listmodels]: https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/typescript/src/listModels.ts -[readdocument]: https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/typescript/src/readDocument.ts -[apiref]: https://docs.microsoft.com/javascript/api/@azure-rest/ai-document-intelligence -[freesub]: https://azure.microsoft.com/free/ -[package]: https://github.com/Azure/azure-sdk-for-js/tree/main/sdk/documentintelligence/ai-document-intelligence-rest/README.md -[typescript]: https://www.typescriptlang.org/docs/home.html diff --git a/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/typescript/package.json b/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/typescript/package.json deleted file mode 100644 index 
67c2ae765085..000000000000 --- a/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/typescript/package.json +++ /dev/null @@ -1,41 +0,0 @@ -{ - "name": "@azure-samples/ai-document-intelligence-ts-beta", - "private": true, - "version": "1.0.0", - "description": "Azure Document Intelligence Rest Client client library samples for TypeScript (Beta)", - "engines": { - "node": ">=18.0.0" - }, - "scripts": { - "build": "tsc", - "prebuild": "rimraf dist/" - }, - "repository": { - "type": "git", - "url": "git+https://github.com/Azure/azure-sdk-for-js.git", - "directory": "sdk/documentintelligence/ai-document-intelligence-rest" - }, - "keywords": [ - "node", - "azure", - "cloud", - "typescript", - "browser", - "isomorphic" - ], - "author": "Microsoft Corporation", - "license": "MIT", - "bugs": { - "url": "https://github.com/Azure/azure-sdk-for-js/issues" - }, - "homepage": "https://github.com/Azure/azure-sdk-for-js/tree/main/sdk/documentintelligence/ai-document-intelligence-rest", - "dependencies": { - "@azure-rest/ai-document-intelligence": "next", - "dotenv": "latest" - }, - "devDependencies": { - "@types/node": "^18.0.0", - "typescript": "~5.6.2", - "rimraf": "latest" - } -} diff --git a/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/typescript/sample.env b/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/typescript/sample.env deleted file mode 100644 index 4edbb15d213c..000000000000 --- a/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/typescript/sample.env +++ /dev/null @@ -1,8 +0,0 @@ -# Used in most samples. Retrieve these values from a Cognitive Services instance -# in the Azure Portal. -DOCUMENT_INTELLIGENCE_ENDPOINT="https://.cognitiveservies.azure.com/" -DOCUMENT_INTELLIGENCE_API_KEY="" - -# Our tests assume that TEST_MODE is "playback" by default. You can -# change it to "record" to generate new recordings, or "live" to bypass the recorder entirely. -# TEST_MODE=playback diff --git a/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/typescript/src/analyzeDocumentByModelId.ts b/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/typescript/src/analyzeDocumentByModelId.ts deleted file mode 100644 index ac70505c598a..000000000000 --- a/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/typescript/src/analyzeDocumentByModelId.ts +++ /dev/null @@ -1,57 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT License. - -/** - * This sample shows how to analyze a document using a model with a given ID. The model ID may refer to any model, - * whether custom, prebuilt, composed, etc. 
- * - * @summary analyze a document using a model by ID - */ - -import DocumentIntelligence, { AnalyzeResultOperationOutput, getLongRunningPoller, isUnexpected } from "@azure-rest/ai-document-intelligence"; - -import * as dotenv from "dotenv"; -dotenv.config(); - -async function main() { - const client = DocumentIntelligence( - process.env["DOCUMENT_INTELLIGENCE_ENDPOINT"] || "", - { key: process.env["DOCUMENT_INTELLIGENCE_API_KEY"] || "" }) - const modelId = process.env.DOCUMENT_INTELLIGENCE_CUSTOM_MODEL_ID || "";// "prebuilt-layout"; - - const initialResponse = await client - .path("/documentModels/{modelId}:analyze", modelId) - .post({ - contentType: "application/json", - body: { - urlSource: "https://raw.githubusercontent.com/Azure/azure-sdk-for-js/main/sdk/formrecognizer/ai-form-recognizer/assets/receipt/contoso-receipt.png", - }, - queryParameters: { locale: "en-IN" }, - }); - if (isUnexpected(initialResponse)) { - throw initialResponse.body.error; - } - const poller = await getLongRunningPoller(client, initialResponse); - const analyzeResult = ( - (await (poller).pollUntilDone()).body as AnalyzeResultOperationOutput - ).analyzeResult; - - const documents = analyzeResult?.documents; - - const document = documents && documents[0]; - if (!document) { - throw new Error("Expected at least one document in the result."); - } - - console.log( - "Extracted document:", - document.docType, - `(confidence: ${document.confidence || ""})` - ); - console.log("Fields:", document.fields); -} - -main().catch((error) => { - console.error("An error occurred:", error); - process.exit(1); -}); diff --git a/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/typescript/src/analyzeIdentityDocument.ts b/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/typescript/src/analyzeIdentityDocument.ts deleted file mode 100644 index 02c576dd08aa..000000000000 --- a/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/typescript/src/analyzeIdentityDocument.ts +++ /dev/null @@ -1,73 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT License. - -/** - * This sample shows how to extract elements of an identity document (such as a driver license or passport) from a URL - * to a file using the prebuilt identity document model. - * - * The prebuilt identity document model can return several fields. 
For a detailed list of the fields supported by the - * identity document model, see the `IdentityDocument` type in the documentation, or refer to the following link: - * - * https://aka.ms/azsdk/documentitelligence/iddocumentfieldschema - * - * @summary extract data from an identity document - */ - -import DocumentIntelligence, { AnalyzeResultOperationOutput, getLongRunningPoller, isUnexpected } from "@azure-rest/ai-document-intelligence"; - -import * as dotenv from "dotenv"; -dotenv.config(); - -async function main() { - const client = DocumentIntelligence( - process.env["DOCUMENT_INTELLIGENCE_ENDPOINT"] || "", - { key: process.env["DOCUMENT_INTELLIGENCE_API_KEY"] || "" }) - - const initialResponse = await client - .path("/documentModels/{modelId}:analyze", "prebuilt-idDocument") - .post({ - contentType: "application/json", - body: { - // The Document Intelligence service will access the following URL to a driver license image and extract data from it - urlSource: - "https://raw.githubusercontent.com/Azure/azure-sdk-for-js/main/sdk/formrecognizer/ai-form-recognizer/assets/identityDocument/license.png", - }, - queryParameters: { locale: "en-IN" }, - }); - if (isUnexpected(initialResponse)) { - throw initialResponse.body.error; - } - const poller = await getLongRunningPoller(client, initialResponse); - const analyzeResult = ( - (await (poller).pollUntilDone()).body as AnalyzeResultOperationOutput - ).analyzeResult; - - const documents = analyzeResult?.documents; - - const document = documents && documents[0]; - - // Use of PrebuiltModels.Receipt above (rather than the raw model ID), adds strong typing of the model's output - if (document) { - // The identity document model has multiple document types, so we need to know which document type was actually - // extracted. - if (document.docType === "idDocument.driverLicense") { - // For the sake of the example, we'll only show a few of the fields that are produced. - console.log("Extracted a Driver License:"); - console.log(document.fields) - } else if (document.docType === "idDocument.passport") { - console.log("Extracted a Passport:"); - console.log(document.fields) - } else { - // The only reason this would happen is if the client library's schema for the prebuilt identity document model is - // out of date, and a new document type has been introduced. - console.error("Unknown document type in result:", document); - } - } else { - throw new Error("Expected at least one receipt in the result."); - } -} - -main().catch((error) => { - console.error("An error occurred:", error); - process.exit(1); -}); diff --git a/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/typescript/src/analyzeInvoice.ts b/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/typescript/src/analyzeInvoice.ts deleted file mode 100644 index 314b3777e4c3..000000000000 --- a/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/typescript/src/analyzeInvoice.ts +++ /dev/null @@ -1,62 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT License. - -/** - * This sample shows how to extract elements of an invoice from a URL to a file using the prebuilt invoice model. - * - * The prebuilt invoice model can return several fields. 
For a detailed list of the fields supported by the invoice - * model, see the `Invoice` type in the documentation, or refer to the following link: - * - * https://aka.ms/azsdk/documentitelligence/invoicefieldschema - * - * @summary extract data from an invoice document - */ - -import DocumentIntelligence, { AnalyzeResultOperationOutput, getLongRunningPoller, isUnexpected } from "@azure-rest/ai-document-intelligence"; - -import * as dotenv from "dotenv"; -dotenv.config(); - -async function main() { - - const client = DocumentIntelligence( - process.env["DOCUMENT_INTELLIGENCE_ENDPOINT"] || "", - { key: process.env["DOCUMENT_INTELLIGENCE_API_KEY"] || "" }) - - const initialResponse = await client - .path("/documentModels/{modelId}:analyze", "prebuilt-invoice") - .post({ - contentType: "application/json", - body: { - // The Document Intelligence service will access the following URL to an invoice image and extract data from it - urlSource: "https://raw.githubusercontent.com/Azure/azure-sdk-for-js/main/sdk/formrecognizer/ai-form-recognizer/assets/invoice/sample_invoice.jpg", - } - }); - if (isUnexpected(initialResponse)) { - throw initialResponse.body.error; - } - const poller = await getLongRunningPoller(client, initialResponse); - const analyzeResult = ( - (await (poller).pollUntilDone()).body as AnalyzeResultOperationOutput - ).analyzeResult; - - const documents = analyzeResult?.documents; - - const document = documents && documents[0]; - if (!document) { - throw new Error("Expected at least one document in the result."); - } - - - // Use of PrebuiltModels.Receipt above (rather than the raw model ID), adds strong typing of the model's output - if (document) { - console.log(document.fields); - } else { - throw new Error("Expected at least one receipt in the result."); - } -} - -main().catch((error) => { - console.error("An error occurred:", error); - process.exit(1); -}); diff --git a/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/typescript/src/analyzeReceipt.ts b/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/typescript/src/analyzeReceipt.ts deleted file mode 100644 index 41cb9a422796..000000000000 --- a/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/typescript/src/analyzeReceipt.ts +++ /dev/null @@ -1,59 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT License. - -/** - * This sample shows how to extract elements of a receipt from a URL to a file using the prebuilt receipt model. - * - * The prebuilt receipt model can return several fields. 
For a detailed list of the fields supported by the receipt - * model, see the `Receipt` type in the documentation, or refer to the following link: - * - * https://aka.ms/azsdk/documentitelligence/receiptfieldschema - * - * @summary extract data from a receipt document - */ - -import DocumentIntelligence, { AnalyzeResultOperationOutput, getLongRunningPoller, isUnexpected } from "@azure-rest/ai-document-intelligence"; - -import * as dotenv from "dotenv"; -dotenv.config(); - -async function main() { - const client = DocumentIntelligence( - process.env["DOCUMENT_INTELLIGENCE_ENDPOINT"] || "", - { key: process.env["DOCUMENT_INTELLIGENCE_API_KEY"] || "" }) - - const initialResponse = await client - .path("/documentModels/{modelId}:analyze", "prebuilt-receipt") - .post({ - contentType: "application/json", - body: { - // The Document Intelligence service will access the following URL to a receipt image and extract data from it - urlSource: "https://raw.githubusercontent.com/Azure/azure-sdk-for-js/main/sdk/formrecognizer/ai-form-recognizer/assets/receipt/contoso-receipt.png", - } - }); - if (isUnexpected(initialResponse)) { - throw initialResponse.body.error; - } - const poller = await getLongRunningPoller(client, initialResponse); - const analyzeResult = ( - (await (poller).pollUntilDone()).body as AnalyzeResultOperationOutput - ).analyzeResult; - - const documents = analyzeResult?.documents; - - const document = documents && documents[0]; - - - - // Use of PrebuiltModels.Receipt above (rather than the raw model ID), as it adds strong typing of the model's output - if (document) { - console.log(document.fields); - } else { - throw new Error("Expected at least one receipt in the result."); - } -} - -main().catch((error) => { - console.error("An error occurred:", error); - process.exit(1); -}); diff --git a/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/typescript/src/analyzeReceiptByModelId.ts b/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/typescript/src/analyzeReceiptByModelId.ts deleted file mode 100644 index c8fc574766e9..000000000000 --- a/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/typescript/src/analyzeReceiptByModelId.ts +++ /dev/null @@ -1,59 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT License. - -/** - * This sample shows how to extract elements of a receipt from a URL to a file using the prebuilt receipt model. Rather - * than using the `PrebuiltModels.Receipt` document model, this sample shows the use of the prebuilt model by ID, - * resulting in a weaker type that exactly mirrors the model's field schema at runtime. - * - * The prebuilt receipt model can return several fields. 
For a detailed list of the fields supported by the - * receipt model, see the `Receipt` type in the documentation, or refer to the following link: - * - * https://aka.ms/azsdk/documentitelligence/receiptfieldschema - * - * @summary use the "prebuilt-receipt" model ID to extract data from a receipt document (weakly-typed) - */ - -import DocumentIntelligence, { AnalyzeResultOperationOutput, getLongRunningPoller, isUnexpected } from "@azure-rest/ai-document-intelligence"; - -import * as dotenv from "dotenv"; -dotenv.config(); - -async function main() { - const client = DocumentIntelligence( - process.env["DOCUMENT_INTELLIGENCE_ENDPOINT"] || "", - { key: process.env["DOCUMENT_INTELLIGENCE_API_KEY"] || "" }) - - const initialResponse = await client - .path("/documentModels/{modelId}:analyze", "prebuilt-receipt") - .post({ - contentType: "application/json", - body: { - // The Document Intelligence service will access the following URL to a receipt image and extract data from it - urlSource: "https://raw.githubusercontent.com/Azure/azure-sdk-for-js/main/sdk/formrecognizer/ai-form-recognizer/assets/receipt/contoso-receipt.png", - } - }); - if (isUnexpected(initialResponse)) { - throw initialResponse.body.error; - } - const poller = await getLongRunningPoller(client, initialResponse); - - poller.onProgress((state) => console.log("Operation:", state.result, state.status)); - const analyzeResult = ( - (await (poller).pollUntilDone()).body as AnalyzeResultOperationOutput - ).analyzeResult; - - const documents = analyzeResult?.documents; - - const result = documents && documents[0]; - if (result) { - console.log(result.fields); - } else { - throw new Error("Expected at least one receipt in the result."); - } -} - -main().catch((error) => { - console.error("An error occurred:", error); - process.exit(1); -}); diff --git a/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/typescript/src/analyzeW2TaxForm.ts b/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/typescript/src/analyzeW2TaxForm.ts deleted file mode 100644 index c2d1e7f65e47..000000000000 --- a/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/typescript/src/analyzeW2TaxForm.ts +++ /dev/null @@ -1,63 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT License. - -/** - * This sample shows how to extract elements of a United States W2 tax form from a file using the prebuilt US W2 model. - * - * The prebuilt W2 model can return several fields. 
For a detailed list of the fields supported by the model, see the - * `TaxUsW2` type in the documentation, or refer to the following link: - * - * https://aka.ms/azsdk/documentitelligence/taxusw2fieldschema - * - * @summary extract data from a United States W2 tax document - */ - -import DocumentIntelligence, { AnalyzeResultOperationOutput, getLongRunningPoller, isUnexpected } from "@azure-rest/ai-document-intelligence"; -import fs from "fs"; -import path from "path"; - -import * as dotenv from "dotenv"; -dotenv.config(); - -async function main() { - const client = DocumentIntelligence( - process.env["DOCUMENT_INTELLIGENCE_ENDPOINT"] || "", - { key: process.env["DOCUMENT_INTELLIGENCE_API_KEY"] || "" }) - - const filePath = fs.readFileSync(path.join(".", "assets", "w2", "w2-single.png")); - - const base64Source = fs.readFileSync(filePath, { encoding: "base64" }); - - const initialResponse = await client - .path("/documentModels/{modelId}:analyze", "prebuilt-tax.us.w2") - .post({ - contentType: "application/json", - body: { - base64Source, - }, - }); - - if (isUnexpected(initialResponse)) { - throw initialResponse.body.error; - } - - const poller = await getLongRunningPoller(client, initialResponse); - const analyzeResult = ( - (await (poller).pollUntilDone()).body as AnalyzeResultOperationOutput - ).analyzeResult; - - const documents = analyzeResult?.documents; - const document = documents?.[0]; - - if (document) { - console.log("Extracted W2 tax form:"); - console.log(document.fields); - } else { - throw new Error("Expected at least one document in the result."); - } -} - -main().catch((error) => { - console.error(error); - process.exit(1); -}); diff --git a/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/typescript/src/buildClassifier.ts b/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/typescript/src/buildClassifier.ts deleted file mode 100644 index 9358f1110f00..000000000000 --- a/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/typescript/src/buildClassifier.ts +++ /dev/null @@ -1,80 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT License. - -/** - * This sample shows how to programmatically build a custom classifier. - * - * The Document Intelligence service expects the training data to be organized and labeled according to a particular - * convention and stored in an Azure Storage container. 
For more information about creating a training data set, please - * see the information at the following link to the service's documentation: - * - * https://aka.ms/azsdk/documentitelligence/buildclassifiermodel - * - * @summary build a classifier from a training data set - */ - -import DocumentIntelligence, { DocumentClassifierBuildOperationDetailsOutput, getLongRunningPoller, isUnexpected } from "@azure-rest/ai-document-intelligence"; - -import * as dotenv from "dotenv"; -dotenv.config(); - -async function main() { - const client = DocumentIntelligence( - process.env["DOCUMENT_INTELLIGENCE_ENDPOINT"] || "", - { key: process.env["DOCUMENT_INTELLIGENCE_API_KEY"] || "" }) - - const random = Date.now().toString(); - const modelId = - (process.env.CUSTOM_CLASSIFIER_ID || "") + random.substring(random.length - 6); - - const trainingDataSasUrl1 = - process.env.CUSTOM_CLASSIFIER_TRAINING_DATA_SAS_URL_1 || ""; - - const trainingDataSasUrl2 = - process.env.CUSTOM_CLASSIFIER_TRAINING_DATA_SAS_URL_2 || ""; - - const initialResponse = await client.path("/documentClassifiers:build").post({ - body: { - classifierId: modelId, - description: "Custom classifier description", - docTypes: { - foo: { - azureBlobSource: { - containerUrl: trainingDataSasUrl1, - }, - }, - bar: { - azureBlobSource: { - containerUrl: trainingDataSasUrl2, - }, - }, - }, - } - }); - - if (isUnexpected(initialResponse)) { - throw initialResponse.body.error; - } - const poller = await getLongRunningPoller(client, initialResponse); - const classifier = ( - (await (poller).pollUntilDone()).body as DocumentClassifierBuildOperationDetailsOutput - ).result; - if (!classifier) { - throw new Error("Expected a DocumentClassifierDetailsOutput response."); - } - - - console.log("Classifier ID:", classifier.classifierId); - console.log("Description:", classifier.description); - console.log("Created:", classifier.createdDateTime); - - console.log("Document Types:"); - for (const [docType, details] of Object.entries(classifier.docTypes)) { - console.log(`- Name: "${docType}", source: ${JSON.stringify(details, null, 2)}`); - } -} - -main().catch((error) => { - console.error("An error occurred:", error); - process.exit(1); -}); diff --git a/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/typescript/src/buildModel.ts b/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/typescript/src/buildModel.ts deleted file mode 100644 index a50eaa0e4a3e..000000000000 --- a/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/typescript/src/buildModel.ts +++ /dev/null @@ -1,79 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT License. - -/** - * This sample shows how to programmatically build a model with a single document type using a training data set. - * - * The Document Intelligence service expects the training data to be organized and labeled according to a particular - * convention and stored in an Azure Storage container. 
For more information about creating a training data set, please - * see the information at the following link to the service's documentation: - * - * https://aka.ms/azsdk/documentitelligence/buildtrainingset - * - * @summary build a model with a single document type from a training data set - */ - -import DocumentIntelligence, { DocumentModelBuildOperationDetailsOutput, getLongRunningPoller, isUnexpected } from "@azure-rest/ai-document-intelligence"; - -import * as dotenv from "dotenv"; -dotenv.config(); - -async function main() { - const client = DocumentIntelligence( - process.env["DOCUMENT_INTELLIGENCE_ENDPOINT"] || "", - { key: process.env["DOCUMENT_INTELLIGENCE_API_KEY"] || "" }) - const random = Date.now().toString(); - const modelId = - (process.env.CUSTOM_MODEL_ID || "") + random.substring(random.length - 6); - const trainingDataSasUrl = - process.env.CUSTOM_MODEL_TRAINING_DATA_SAS_URL || ""; - - const initialResponse = await client.path("/documentModels:build").post({ - body: { - buildMode: "template", - modelId, - azureBlobSource: { - containerUrl: trainingDataSasUrl - }, - }, - }); - - if (isUnexpected(initialResponse)) { - throw initialResponse.body.error; - } - const poller = getLongRunningPoller(client, initialResponse); - const model = ( - (await (await poller).pollUntilDone()).body as DocumentModelBuildOperationDetailsOutput - ).result; - if (!model) { - throw new Error("Expected a DocumentModelDetailsOutput response."); - } - - console.log("Model ID:", model.modelId); - console.log("Description:", model.description); - console.log("Created:", model.createdDateTime); - - // A model may contain several document types, which describe the possible object structures of fields extracted using - // this model - - console.log("Document Types:"); - for (const [docType, { description, fieldSchema: schema }] of Object.entries( - model.docTypes || {} - )) { - console.log(`- Name: "${docType}"`); - console.log(` Description: "${description}"`); - - // For simplicity, this example will only show top-level field names - console.log(" Fields:"); - - for (const [fieldName, fieldSchema] of Object.entries(schema)) { - console.log(` - "${fieldName}" (${fieldSchema.type})`); - console.log(` ${fieldSchema.description || ""}`); - } - } -} - -main().catch((error) => { - console.error("An error occurred:", error); - process.exit(1); -}); diff --git a/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/typescript/src/classifyDocument.ts b/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/typescript/src/classifyDocument.ts deleted file mode 100644 index ad6ba315f0e3..000000000000 --- a/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/typescript/src/classifyDocument.ts +++ /dev/null @@ -1,55 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT License. - -/** - * This sample shows how to use a custom classifier to get the document type (class) of a document. 
- * - * @summary use a custom classifier to classify a document - */ - -import DocumentIntelligence, { AnalyzeResultOperationOutput, getLongRunningPoller, isUnexpected } from "@azure-rest/ai-document-intelligence"; - -import * as dotenv from "dotenv"; -dotenv.config(); - -async function main() { - const client = DocumentIntelligence( - process.env["DOCUMENT_INTELLIGENCE_ENDPOINT"] || "", - { key: process.env["DOCUMENT_INTELLIGENCE_API_KEY"] || "" }) - const documentUrl = - "https://raw.githubusercontent.com/Azure/azure-sdk-for-js/main/sdk/formrecognizer/ai-form-recognizer/assets/invoice/Invoice_1.pdf"; - - const classifierId = process.env.CUSTOM_CLASSIFIER_ID ?? ""; - const initialResponse = await client - .path("/documentClassifiers/{classifierId}:analyze", classifierId) - .post({ - contentType: "application/json", - body: { - urlSource: documentUrl, - }, - }); - - if (isUnexpected(initialResponse)) { - throw initialResponse.body.error; - } - - const poller = await getLongRunningPoller(client, initialResponse); - const analyzeResult = ( - (await (poller).pollUntilDone()).body as AnalyzeResultOperationOutput - ).analyzeResult; - - if (analyzeResult?.documents === undefined || analyzeResult.documents.length === 0) { - throw new Error("Failed to extract any documents."); - } - - for (const document of analyzeResult.documents) { - console.log( - `Extracted a document with type '${document.docType}' on page ${document.boundingRegions?.[0].pageNumber} (confidence: ${document.confidence})` - ); - } -} - -main().catch((error) => { - console.error("An error occurred:", error); - process.exit(1); -}); diff --git a/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/typescript/src/composeModel.ts b/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/typescript/src/composeModel.ts deleted file mode 100644 index 5e1044f9cf49..000000000000 --- a/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/typescript/src/composeModel.ts +++ /dev/null @@ -1,119 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT License. - -/** - * This sample demonstrates how to create a composed model from several individual labeled models. - * - * We build all of the component models used in the composition operation and then finally create the composed model. - * The resulting composed model will have all of the document types of its component submodels. When used for analysis, - * it will first classify the input as belonging to one of the document types.zzs - * - * @summary create a composed model from several individual labeled models - */ - -import DocumentIntelligence, { DocumentModelBuildOperationDetailsOutput, DocumentModelComposeOperationDetailsOutput, getLongRunningPoller, isUnexpected } from "@azure-rest/ai-document-intelligence"; - -import * as dotenv from "dotenv"; -dotenv.config(); - -async function main() { - const client = DocumentIntelligence( - process.env["DOCUMENT_INTELLIGENCE_ENDPOINT"] || "", - { key: process.env["DOCUMENT_INTELLIGENCE_API_KEY"] || "" }) - - // This object will hold the SAS-encoded URLs to containers that hold - // different types of purchase order documents and their labels. 
- const purchaseOrderSasUrls = { - supplies: - process.env["PURCHASE_ORDER_SUPPLIES_SAS_URL"] || - "", - equipment: - process.env["PURCHASE_ORDER_EQUIPMENT_SAS_URL"] || - "", - furniture: - process.env["PURCHASE_ORDER_FURNITURE_SAS_URL"] || - "", - cleaningSupplies: - process.env["PURCHASE_ORDER_CLEANING_SUPPLIES_SAS_URL"] || - "", - }; - - // We'll put the last few digits of the current timestamp into the model IDs, just to make sure they're unique. - const random = Date.now().toString(); - - const modelIds = await Promise.all( - Object.entries(purchaseOrderSasUrls) - .map(async ([kind, sasUrl]) => { - const modelId = kind + "ComponentModel" + random.substring(random.length - 6); - const initialResponse = await client.path("/documentModels:build").post({ - body: { - buildMode: "template", - modelId: modelId, - azureBlobSource: { - containerUrl: sasUrl, - }, - }, - }); - if (isUnexpected(initialResponse)) { - throw initialResponse.body.error; - } - const poller = await getLongRunningPoller(client, initialResponse); - const model = ( - (await (poller).pollUntilDone()).body as DocumentModelBuildOperationDetailsOutput - ).result!; - - return model; - }) - .map(async (model) => { return { modelId: (await model).modelId } }) - ); - - // Finally, create the composed model. - - const composedModelId = "purchaseOrders" + random.substring(random.length - 6); - - const initialResponse = await client.path("/documentModels:compose").post({ - body: { - description: "A composed model that classifies purchase order documents and extracts data from them.", - componentModels: modelIds, - modelId: composedModelId, - - }, - }); - - if (isUnexpected(initialResponse)) { - throw initialResponse.body.error; - } - const poller = await getLongRunningPoller(client, initialResponse); - - const composedModel = ( - (await (poller).pollUntilDone()).body as DocumentModelComposeOperationDetailsOutput - ).result!; - - - console.log("Model ID:", composedModel.modelId); - console.log("Description:", composedModel.description); - console.log("Created:", composedModel.createdDateTime); - - // The composed model should have a document type for each one of the individually built models that are composed into - // this larger model. - - console.log("Document Types:"); - for (const [docType, { description, fieldSchema: schema }] of Object.entries( - composedModel.docTypes || {} - )) { - console.log(`- Name: "${docType}"`); - console.log(` Description: "${description}"`); - - // For simplicity, this example will only show top-level field names - console.log(" Fields:"); - - for (const [fieldName, fieldSchema] of Object.entries(schema)) { - console.log(` - "${fieldName}" (${fieldSchema.type})`); - console.log(` ${fieldSchema.description || ""}`); - } - } -} - -main().catch((err) => { - console.error("The sample encountered an error:", err); -}); diff --git a/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/typescript/src/copyModel.ts b/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/typescript/src/copyModel.ts deleted file mode 100644 index 2da44e98c499..000000000000 --- a/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/typescript/src/copyModel.ts +++ /dev/null @@ -1,66 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT License. - -/** - * This sample shows how to copy a model from one resource to another. 
The model is created with a new model ID (and - * optionally a new description) in the destination resource, but will have the same document types as the source model. - * - * @summary copy a model from one resource to another - */ - -import DocumentIntelligence, { DocumentModelCopyToOperationDetailsOutput, getLongRunningPoller, isUnexpected } from "@azure-rest/ai-document-intelligence"; - -import * as dotenv from "dotenv"; -dotenv.config(); - -async function main() { - - const random = Date.now().toString(); - const destinationModelId = - (process.env.CUSTOM_MODEL_ID || "") + random.substring(random.length - 6); - - // The authorization must be created by the destination resource. - // const destinationClient = new DocumentModelAdministrationClient(endpoint, credential); - const destinationClient = DocumentIntelligence( - process.env["DOCUMENT_INTELLIGENCE_ENDPOINT"] || "", - { key: process.env["DOCUMENT_INTELLIGENCE_API_KEY"] || "" }) - // const authorization = await destinationClient.getCopyAuthorization(destinationModelId); - const targetAuth = await destinationClient.path("/documentModels:authorizeCopy").post({ - body: { - modelId: destinationModelId, - }, - }); - if (isUnexpected(targetAuth)) { - throw targetAuth.body.error; - } - const sourceEndpoint = process.env.DOCUMENT_INTELLIGENCE_SOURCE_ENDPOINT || ""; - const sourceModelId = process.env.COPY_SOURCE_MODEL_ID || ""; - - // Then, the source resource can initiate the copy operation. - const sourceClient = DocumentIntelligence( - sourceEndpoint, - { key: process.env.DOCUMENT_INTELLIGENCE_SOURCE_API_KEY || "" }) - - const copyInitResponse = await sourceClient - .path("/documentModels/{modelId}:copyTo", sourceModelId) - .post({ - body: targetAuth.body, - }); - - if (isUnexpected(copyInitResponse)) { - throw copyInitResponse.body.error; - } - const copyPoller = getLongRunningPoller(sourceClient, copyInitResponse); - const model = ( - (await (await copyPoller).pollUntilDone()).body as DocumentModelCopyToOperationDetailsOutput - ).result!; - - console.log("Model ID:", model.modelId); - console.log("Description:", model.description); - console.log("Created:", model.createdDateTime); -} - -main().catch((error) => { - console.error("An error occurred:", error); - process.exit(1); -}); diff --git a/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/typescript/src/extractLayout.ts b/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/typescript/src/extractLayout.ts deleted file mode 100644 index ea4cb0a9a5d5..000000000000 --- a/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/typescript/src/extractLayout.ts +++ /dev/null @@ -1,80 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT License. - -/** - * This sample shows how to extract only the basic layout information from a document using the `beginExtractLayout` - * method. Layout information consists of the arrangement of basic OCR elements, such as pages (including their contents - * such as lines, words, and selection marks), tables, and text font styles. 
- * - * @summary use the prebuilt layout model to extract basic document elements only - */ - -import DocumentIntelligence, { AnalyzeResultOperationOutput, getLongRunningPoller, isUnexpected } from "@azure-rest/ai-document-intelligence"; - - -import * as dotenv from "dotenv"; -dotenv.config(); - -async function main() { - const client = DocumentIntelligence( - process.env["DOCUMENT_INTELLIGENCE_ENDPOINT"] || "", - { key: process.env["DOCUMENT_INTELLIGENCE_API_KEY"] || "" }) - - const initialResponse = await client - .path("/documentModels/{modelId}:analyze", "prebuilt-layout") - .post({ - contentType: "application/json", - body: { - urlSource: "https://raw.githubusercontent.com/Azure/azure-sdk-for-js/main/sdk/formrecognizer/ai-form-recognizer/assets/forms/Invoice_1.pdf", - } - }); - - if (isUnexpected(initialResponse)) { - throw initialResponse.body.error; - } - - const poller = getLongRunningPoller(client, initialResponse); - const analyzeResult = ( - (await (await poller).pollUntilDone()).body as AnalyzeResultOperationOutput - ).analyzeResult; - - const pages = analyzeResult?.pages; - const tables = analyzeResult?.tables; - - if (!pages || pages.length <= 0) { - console.log("No pages were extracted from the document."); - } else { - console.log("Pages:"); - for (const page of pages) { - console.log("- Page", page.pageNumber, `(unit: ${page.unit})`); - console.log(` ${page.width}x${page.height}, angle: ${page.angle}`); - console.log( - ` ${page.lines && page.lines.length} lines, ${page.words && page.words.length} words` - ); - - if (page.lines && page.lines.length > 0) { - console.log(" Lines:"); - - for (const line of page.lines) { - console.log(` - "${line.content}"`); - } - } - } - } - - if (!tables || tables.length <= 0) { - console.log("No tables were extracted from the document."); - } else { - console.log("Tables:"); - for (const table of tables) { - console.log( - `- Extracted table: ${table.columnCount} columns, ${table.rowCount} rows (${table.cells.length} cells)` - ); - } - } -} - -main().catch((error) => { - console.error("An error occurred:", error); - process.exit(1); -}); diff --git a/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/typescript/src/getClassifier.ts b/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/typescript/src/getClassifier.ts deleted file mode 100644 index 1a6352dba334..000000000000 --- a/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/typescript/src/getClassifier.ts +++ /dev/null @@ -1,41 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT License. - -/** - * This sample shows how to get the details of a custom classifier by its ID, including information about the document - * types that the classifier supports. - * - * @summary get information about a classifier by its ID - */ - -import DocumentIntelligence, { isUnexpected } from "@azure-rest/ai-document-intelligence"; - -import * as dotenv from "dotenv"; -dotenv.config(); - -async function main() { - const client = DocumentIntelligence( - process.env["DOCUMENT_INTELLIGENCE_ENDPOINT"] || "", - { key: process.env["DOCUMENT_INTELLIGENCE_API_KEY"] || "" }) - - const classifierId = process.env.CUSTOM_CLASSIFIER_ID ?? 
""; - const classifier = await client.path("/documentClassifiers/{classifierId}", classifierId).get(); - - if (isUnexpected(classifier)) { - throw classifier.body.error; - } - console.log("ID", classifier.body.classifierId); - console.log("Created:", classifier.body.createdDateTime); - console.log("Description: ", classifier.body.description || ""); - - console.log("Document Types:"); - for (const [docType, details] of Object.entries(classifier.body.docTypes || {})) { - // We can also programmatically access a schema of the fields. - console.log(`- Name "${docType}", source: ${JSON.stringify(details, null, 2)}`); - } -} - -main().catch((error) => { - console.error("An error occurred:", error); - process.exit(1); -}); diff --git a/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/typescript/src/getInfo.ts b/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/typescript/src/getInfo.ts deleted file mode 100644 index b5644db43610..000000000000 --- a/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/typescript/src/getInfo.ts +++ /dev/null @@ -1,32 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT License. - -/** - * This sample shows how to programmatically retrieve metadata about the number of custom models in the Form Recognizer - * resource and the limit of custom models that the resource will allow to be created. - * - * @summary get information about the count and limit of custom models in the resource - */ - -import DocumentIntelligence, { isUnexpected } from "@azure-rest/ai-document-intelligence"; - -import * as dotenv from "dotenv"; -dotenv.config(); - -async function main() { - const client = DocumentIntelligence( - process.env["DOCUMENT_INTELLIGENCE_ENDPOINT"] || "", - { key: process.env["DOCUMENT_INTELLIGENCE_API_KEY"] || "" }) - const info = await client.path("/info").get(); - if (isUnexpected(info)) { - throw info.body.error; - } - console.log( - `Custom document models: ${info.body.customDocumentModels.count} of ${info.body.customDocumentModels.limit}` - ); -} - -main().catch((error) => { - console.error("An error occurred:", error); - process.exit(1); -}); diff --git a/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/typescript/src/getModel.ts b/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/typescript/src/getModel.ts deleted file mode 100644 index 1ca43511e8c0..000000000000 --- a/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/typescript/src/getModel.ts +++ /dev/null @@ -1,45 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT License. - -/** - * This sample shows how to get the full information about a custom model by its model ID, including information about - * the document types in the model and their field schemas. - * - * @summary get information about a model by its ID - */ - -import DocumentIntelligence, { isUnexpected } from "@azure-rest/ai-document-intelligence"; - -import * as dotenv from "dotenv"; -dotenv.config(); - -async function main() { - const client = DocumentIntelligence( - process.env["DOCUMENT_INTELLIGENCE_ENDPOINT"] || "", - { key: process.env["DOCUMENT_INTELLIGENCE_API_KEY"] || "" }) - - // The model ID to query. This can be any model ID, not just a custom model, so for example - // the following sample uses `"prebuilt-idDocument"`, but you can change it to any model ID - // you'd like to inspect. 
- const modelId = "prebuilt-idDocument"; - const model = await client.path("/documentModels/{modelId}", modelId).get(); - - if (isUnexpected(model)) { - throw model.body.error; - } - - console.log("ID", model.body.modelId); - console.log("Created:", model.body.createdDateTime); - console.log("Description: ", model.body.description || ""); - - console.log("Document Types:"); - for (const [docType, { fieldSchema }] of Object.entries(model.body.docTypes || {})) { - // We can also programmatically access a schema of the fields. - console.log("-", docType, JSON.stringify(fieldSchema, undefined, 2)); - } -} - -main().catch((error) => { - console.error("An error occurred:", error); - process.exit(1); -}); diff --git a/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/typescript/src/listModels.ts b/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/typescript/src/listModels.ts deleted file mode 100644 index eb081a1ce61a..000000000000 --- a/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/typescript/src/listModels.ts +++ /dev/null @@ -1,49 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT License. - -/** - * This sample shows how to iterate over the models in a resource. This will include both custom and prebuilt models. - * - * @summary iterate over the models in a resource - */ - -import DocumentIntelligence, { isUnexpected, paginate } from "@azure-rest/ai-document-intelligence"; - -import * as dotenv from "dotenv"; -dotenv.config(); - -async function main() { - const client = DocumentIntelligence( - process.env["DOCUMENT_INTELLIGENCE_ENDPOINT"] || "", - { key: process.env["DOCUMENT_INTELLIGENCE_API_KEY"] || "" }) - - - const response = await client.path("/documentModels").get(); - if (isUnexpected(response)) { - throw response.body.error; - } - - for await (const model of paginate(client, response)) { - console.log("- ID", model.modelId); - console.log(" Created:", model.createdDateTime); - console.log(" Description: ", model.description || ""); - - // The model summary does not include `docTypes`, so we must additionally call `getModel` to retrieve them - const detailedModel = (await client.path("/documentModels/{modelId}", model.modelId).get()); - - if (isUnexpected(detailedModel)) { - throw detailedModel.body.error; - } - const docTypes = detailedModel.body.docTypes; - - console.log(" Document Types:"); - for (const docType of Object.keys(docTypes || {})) { - console.log(" -", docType); - } - } -} - -main().catch((error) => { - console.error("An error occurred:", error); - process.exit(1); -}); diff --git a/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/typescript/src/readDocument.ts b/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/typescript/src/readDocument.ts deleted file mode 100644 index 440c4e16040c..000000000000 --- a/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/typescript/src/readDocument.ts +++ /dev/null @@ -1,93 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT License. - -/** - * This sample shows how to extract the text content of a document using the "prebuilt-read" model. 
- * - * @summary use the prebuilt "read" model to extract information about the text content of a document - */ - -import DocumentIntelligence, { AnalyzeResultOperationOutput, getLongRunningPoller, isUnexpected } from "@azure-rest/ai-document-intelligence"; - -import * as dotenv from "dotenv"; -dotenv.config(); - -async function main() { - const client = DocumentIntelligence( - process.env["DOCUMENT_INTELLIGENCE_ENDPOINT"] || "", - { key: process.env["DOCUMENT_INTELLIGENCE_API_KEY"] || "" }) - - const initialResponse = await client - .path("/documentModels/{modelId}:analyze", "prebuilt-read") - .post({ - contentType: "application/json", - body: { - urlSource: "https://raw.githubusercontent.com/Azure/azure-sdk-for-js/main/sdk/formrecognizer/ai-form-recognizer/assets/forms/Invoice_1.pdf", - }, - queryParameters: { features: ["barcodes"] }, - }); - - if (isUnexpected(initialResponse)) { - throw initialResponse.body.error; - } - const poller = await getLongRunningPoller(client, initialResponse); - const analyzeResult = ( - (await (poller).pollUntilDone()).body as AnalyzeResultOperationOutput - ).analyzeResult; - - - // The "prebuilt-read" model (`beginReadDocument` method) only extracts information about the textual content of the - // document, such as page text elements and information about the language of the text. - const pages = analyzeResult?.pages; - const languages = analyzeResult?.languages; - const styles = analyzeResult?.styles; - - - if (!pages || pages.length <= 0) { - console.log("No pages were extracted from the document."); - } else { - console.log("Pages:"); - for (const page of pages) { - console.log("- Page", page.pageNumber, `(unit: ${page.unit})`); - console.log(` ${page.width}x${page.height}, angle: ${page.angle}`); - console.log( - ` ${page.lines && page.lines.length} lines, ${page.words && page.words.length} words` - ); - - if (page.lines && page.lines.length > 0) { - console.log(" Lines:"); - - for (const line of page.lines) { - console.log(` - "${line.content}"`); - } - } - } - } - - if (!languages || languages.length <= 0) { - console.log("No language spans were extracted from the document."); - } else { - console.log("Languages:"); - for (const languageEntry of languages) { - console.log( - `- Found language: ${languageEntry.locale} (confidence: ${languageEntry.confidence})` - ); - } - } - - if (!styles || styles.length <= 0) { - console.log("No text styles were extracted from the document."); - } else { - console.log("Styles:"); - for (const style of styles) { - console.log( - `- Handwritten: ${style.isHandwritten ? 
"yes" : "no"} (confidence=${style.confidence})` - ); - } - } -} - -main().catch((error) => { - console.error("An error occurred:", error); - process.exit(1); -}); diff --git a/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/typescript/tsconfig.json b/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/typescript/tsconfig.json deleted file mode 100644 index 984eed535aa8..000000000000 --- a/sdk/documentintelligence/ai-document-intelligence-rest/samples/v1-beta/typescript/tsconfig.json +++ /dev/null @@ -1,17 +0,0 @@ -{ - "compilerOptions": { - "target": "ES2020", - "module": "commonjs", - "moduleResolution": "node", - "resolveJsonModule": true, - "esModuleInterop": true, - "allowSyntheticDefaultImports": true, - "strict": true, - "alwaysStrict": true, - "outDir": "dist", - "rootDir": "src" - }, - "include": [ - "src/**/*.ts" - ] -} diff --git a/sdk/documentintelligence/ai-document-intelligence-rest/src/clientDefinitions.ts b/sdk/documentintelligence/ai-document-intelligence-rest/src/clientDefinitions.ts index c336ff164be1..8d905008afb1 100644 --- a/sdk/documentintelligence/ai-document-intelligence-rest/src/clientDefinitions.ts +++ b/sdk/documentintelligence/ai-document-intelligence-rest/src/clientDefinitions.ts @@ -1,7 +1,7 @@ // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. -import type { +import { ListOperationsParameters, GetDocumentModelBuildOperationParameters, GetDocumentModelComposeOperationParameters, @@ -9,14 +9,17 @@ import type { GetDocumentClassifierCopyToOperationParameters, GetDocumentClassifierBuildOperationParameters, GetOperationParameters, - GetResourceInfoParameters, + GetResourceDetailsParameters, GetAnalyzeResultParameters, + DeleteAnalyzeResultParameters, GetAnalyzeResultPdfParameters, GetAnalyzeResultFigureParameters, AnalyzeDocumentFromStreamParameters, AnalyzeDocumentParameters, GetAnalyzeBatchResultParameters, + DeleteAnalyzeBatchResultParameters, AnalyzeBatchDocumentsParameters, + ListAnalyzeBatchResultsParameters, GetModelParameters, DeleteModelParameters, BuildModelParameters, @@ -34,7 +37,7 @@ import type { AuthorizeClassifierCopyParameters, CopyClassifierToParameters, } from "./parameters.js"; -import type { +import { ListOperations200Response, ListOperationsDefaultResponse, GetDocumentModelBuildOperation200Response, @@ -49,10 +52,12 @@ import type { GetDocumentClassifierBuildOperationDefaultResponse, GetOperation200Response, GetOperationDefaultResponse, - GetResourceInfo200Response, - GetResourceInfoDefaultResponse, + GetResourceDetails200Response, + GetResourceDetailsDefaultResponse, GetAnalyzeResult200Response, GetAnalyzeResultDefaultResponse, + DeleteAnalyzeResult204Response, + DeleteAnalyzeResultDefaultResponse, GetAnalyzeResultPdf200Response, GetAnalyzeResultPdfDefaultResponse, GetAnalyzeResultFigure200Response, @@ -63,8 +68,12 @@ import type { AnalyzeDocumentDefaultResponse, GetAnalyzeBatchResult200Response, GetAnalyzeBatchResultDefaultResponse, + DeleteAnalyzeBatchResult204Response, + DeleteAnalyzeBatchResultDefaultResponse, AnalyzeBatchDocuments202Response, AnalyzeBatchDocumentsDefaultResponse, + ListAnalyzeBatchResults200Response, + ListAnalyzeBatchResultsDefaultResponse, GetModel200Response, GetModelDefaultResponse, DeleteModel204Response, @@ -98,13 +107,15 @@ import type { CopyClassifierTo202Response, CopyClassifierToDefaultResponse, } from "./responses.js"; -import type { Client, StreamableMethod } from "@azure-rest/core-client"; +import { Client, StreamableMethod } from 
"@azure-rest/core-client"; export interface ListOperations { /** Lists all operations. */ get( options?: ListOperationsParameters, - ): StreamableMethod; + ): StreamableMethod< + ListOperations200Response | ListOperationsDefaultResponse + >; } export interface GetDocumentModelBuildOperation { @@ -112,19 +123,22 @@ export interface GetDocumentModelBuildOperation { get( options?: GetDocumentModelBuildOperationParameters, ): StreamableMethod< - GetDocumentModelBuildOperation200Response | GetDocumentModelBuildOperationDefaultResponse + | GetDocumentModelBuildOperation200Response + | GetDocumentModelBuildOperationDefaultResponse >; /** Gets operation info. */ get( options?: GetDocumentModelComposeOperationParameters, ): StreamableMethod< - GetDocumentModelComposeOperation200Response | GetDocumentModelComposeOperationDefaultResponse + | GetDocumentModelComposeOperation200Response + | GetDocumentModelComposeOperationDefaultResponse >; /** Gets operation info. */ get( options?: GetDocumentModelCopyToOperationParameters, ): StreamableMethod< - GetDocumentModelCopyToOperation200Response | GetDocumentModelCopyToOperationDefaultResponse + | GetDocumentModelCopyToOperation200Response + | GetDocumentModelCopyToOperationDefaultResponse >; /** Gets operation info. */ get( @@ -146,32 +160,46 @@ export interface GetDocumentModelBuildOperation { ): StreamableMethod; } -export interface GetResourceInfo { +export interface GetResourceDetails { /** Return information about the current resource. */ get( - options?: GetResourceInfoParameters, - ): StreamableMethod; + options?: GetResourceDetailsParameters, + ): StreamableMethod< + GetResourceDetails200Response | GetResourceDetailsDefaultResponse + >; } export interface GetAnalyzeResult { /** Gets the result of document analysis. */ get( options?: GetAnalyzeResultParameters, - ): StreamableMethod; + ): StreamableMethod< + GetAnalyzeResult200Response | GetAnalyzeResultDefaultResponse + >; + /** Mark the result of document analysis for deletion. */ + delete( + options?: DeleteAnalyzeResultParameters, + ): StreamableMethod< + DeleteAnalyzeResult204Response | DeleteAnalyzeResultDefaultResponse + >; } export interface GetAnalyzeResultPdf { /** Gets the generated searchable PDF output from document analysis. */ get( options?: GetAnalyzeResultPdfParameters, - ): StreamableMethod; + ): StreamableMethod< + GetAnalyzeResultPdf200Response | GetAnalyzeResultPdfDefaultResponse + >; } export interface GetAnalyzeResultFigure { /** Gets the generated cropped image of specified figure from document analysis. */ get( options?: GetAnalyzeResultFigureParameters, - ): StreamableMethod; + ): StreamableMethod< + GetAnalyzeResultFigure200Response | GetAnalyzeResultFigureDefaultResponse + >; } export interface AnalyzeDocumentFromStream { @@ -179,26 +207,49 @@ export interface AnalyzeDocumentFromStream { post( options: AnalyzeDocumentFromStreamParameters, ): StreamableMethod< - AnalyzeDocumentFromStream202Response | AnalyzeDocumentFromStreamDefaultResponse + | AnalyzeDocumentFromStream202Response + | AnalyzeDocumentFromStreamDefaultResponse >; /** Analyzes document with document model. */ post( options: AnalyzeDocumentParameters, - ): StreamableMethod; + ): StreamableMethod< + AnalyzeDocument202Response | AnalyzeDocumentDefaultResponse + >; } export interface GetAnalyzeBatchResult { /** Gets the result of batch document analysis. 
*/ get( options?: GetAnalyzeBatchResultParameters, - ): StreamableMethod; + ): StreamableMethod< + GetAnalyzeBatchResult200Response | GetAnalyzeBatchResultDefaultResponse + >; + /** Mark the batch document analysis result for deletion. */ + delete( + options?: DeleteAnalyzeBatchResultParameters, + ): StreamableMethod< + | DeleteAnalyzeBatchResult204Response + | DeleteAnalyzeBatchResultDefaultResponse + >; } export interface AnalyzeBatchDocuments { /** Analyzes batch documents with document model. */ post( options: AnalyzeBatchDocumentsParameters, - ): StreamableMethod; + ): StreamableMethod< + AnalyzeBatchDocuments202Response | AnalyzeBatchDocumentsDefaultResponse + >; +} + +export interface ListAnalyzeBatchResults { + /** List batch document analysis results. */ + get( + options?: ListAnalyzeBatchResultsParameters, + ): StreamableMethod< + ListAnalyzeBatchResults200Response | ListAnalyzeBatchResultsDefaultResponse + >; } export interface GetModel { @@ -233,7 +284,9 @@ export interface AuthorizeModelCopy { */ post( options: AuthorizeModelCopyParameters, - ): StreamableMethod; + ): StreamableMethod< + AuthorizeModelCopy200Response | AuthorizeModelCopyDefaultResponse + >; } export interface CopyModelTo { @@ -254,14 +307,18 @@ export interface BuildClassifier { /** Builds a custom document classifier. */ post( options: BuildClassifierParameters, - ): StreamableMethod; + ): StreamableMethod< + BuildClassifier202Response | BuildClassifierDefaultResponse + >; } export interface ListClassifiers { /** List all document classifiers. */ get( options?: ListClassifiersParameters, - ): StreamableMethod; + ): StreamableMethod< + ListClassifiers200Response | ListClassifiersDefaultResponse + >; } export interface GetClassifier { @@ -272,7 +329,9 @@ export interface GetClassifier { /** Deletes document classifier. */ delete( options?: DeleteClassifierParameters, - ): StreamableMethod; + ): StreamableMethod< + DeleteClassifier204Response | DeleteClassifierDefaultResponse + >; } export interface ClassifyDocumentFromStream { @@ -280,19 +339,24 @@ export interface ClassifyDocumentFromStream { post( options: ClassifyDocumentFromStreamParameters, ): StreamableMethod< - ClassifyDocumentFromStream202Response | ClassifyDocumentFromStreamDefaultResponse + | ClassifyDocumentFromStream202Response + | ClassifyDocumentFromStreamDefaultResponse >; /** Classifies document with document classifier. */ post( options: ClassifyDocumentParameters, - ): StreamableMethod; + ): StreamableMethod< + ClassifyDocument202Response | ClassifyDocumentDefaultResponse + >; } export interface GetClassifyResult { /** Gets the result of document classifier. */ get( options?: GetClassifyResultParameters, - ): StreamableMethod; + ): StreamableMethod< + GetClassifyResult200Response | GetClassifyResultDefaultResponse + >; } export interface AuthorizeClassifierCopy { @@ -302,24 +366,31 @@ export interface AuthorizeClassifierCopy { */ post( options: AuthorizeClassifierCopyParameters, - ): StreamableMethod; + ): StreamableMethod< + AuthorizeClassifierCopy200Response | AuthorizeClassifierCopyDefaultResponse + >; } export interface CopyClassifierTo { /** Copies document classifier to the target resource, region, and classifierId. 
*/ post( options: CopyClassifierToParameters, - ): StreamableMethod; + ): StreamableMethod< + CopyClassifierTo202Response | CopyClassifierToDefaultResponse + >; } export interface Routes { /** Resource for '/operations' has methods for the following verbs: get */ (path: "/operations"): ListOperations; /** Resource for '/operations/\{operationId\}' has methods for the following verbs: get */ - (path: "/operations/{operationId}", operationId: string): GetDocumentModelBuildOperation; + ( + path: "/operations/{operationId}", + operationId: string, + ): GetDocumentModelBuildOperation; /** Resource for '/info' has methods for the following verbs: get */ - (path: "/info"): GetResourceInfo; - /** Resource for '/documentModels/\{modelId\}/analyzeResults/\{resultId\}' has methods for the following verbs: get */ + (path: "/info"): GetResourceDetails; + /** Resource for '/documentModels/\{modelId\}/analyzeResults/\{resultId\}' has methods for the following verbs: get, delete */ ( path: "/documentModels/{modelId}/analyzeResults/{resultId}", modelId: string, @@ -339,15 +410,26 @@ export interface Routes { figureId: string, ): GetAnalyzeResultFigure; /** Resource for '/documentModels/\{modelId\}:analyze' has methods for the following verbs: post */ - (path: "/documentModels/{modelId}:analyze", modelId: string): AnalyzeDocumentFromStream; - /** Resource for '/documentModels/\{modelId\}/analyzeBatchResults/\{resultId\}' has methods for the following verbs: get */ + ( + path: "/documentModels/{modelId}:analyze", + modelId: string, + ): AnalyzeDocumentFromStream; + /** Resource for '/documentModels/\{modelId\}/analyzeBatchResults/\{resultId\}' has methods for the following verbs: get, delete */ ( path: "/documentModels/{modelId}/analyzeBatchResults/{resultId}", modelId: string, resultId: string, ): GetAnalyzeBatchResult; /** Resource for '/documentModels/\{modelId\}:analyzeBatch' has methods for the following verbs: post */ - (path: "/documentModels/{modelId}:analyzeBatch", modelId: string): AnalyzeBatchDocuments; + ( + path: "/documentModels/{modelId}:analyzeBatch", + modelId: string, + ): AnalyzeBatchDocuments; + /** Resource for '/documentModels/\{modelId\}/analyzeBatchResults' has methods for the following verbs: get */ + ( + path: "/documentModels/{modelId}/analyzeBatchResults", + modelId: string, + ): ListAnalyzeBatchResults; /** Resource for '/documentModels/\{modelId\}' has methods for the following verbs: get, delete */ (path: "/documentModels/{modelId}", modelId: string): GetModel; /** Resource for '/documentModels:build' has methods for the following verbs: post */ @@ -365,7 +447,10 @@ export interface Routes { /** Resource for '/documentClassifiers' has methods for the following verbs: get */ (path: "/documentClassifiers"): ListClassifiers; /** Resource for '/documentClassifiers/\{classifierId\}' has methods for the following verbs: get, delete */ - (path: "/documentClassifiers/{classifierId}", classifierId: string): GetClassifier; + ( + path: "/documentClassifiers/{classifierId}", + classifierId: string, + ): GetClassifier; /** Resource for '/documentClassifiers/\{classifierId\}:analyze' has methods for the following verbs: post */ ( path: "/documentClassifiers/{classifierId}:analyze", @@ -380,7 +465,10 @@ export interface Routes { /** Resource for '/documentClassifiers:authorizeCopy' has methods for the following verbs: post */ (path: "/documentClassifiers:authorizeCopy"): AuthorizeClassifierCopy; /** Resource for '/documentClassifiers/\{classifierId\}:copyTo' has methods for the following 
verbs: post */ - (path: "/documentClassifiers/{classifierId}:copyTo", classifierId: string): CopyClassifierTo; + ( + path: "/documentClassifiers/{classifierId}:copyTo", + classifierId: string, + ): CopyClassifierTo; } export type DocumentIntelligenceClient = Client & { diff --git a/sdk/documentintelligence/ai-document-intelligence-rest/src/documentIntelligence.ts b/sdk/documentintelligence/ai-document-intelligence-rest/src/documentIntelligence.ts index 53884360f1cd..fa1f22020a8c 100644 --- a/sdk/documentintelligence/ai-document-intelligence-rest/src/documentIntelligence.ts +++ b/sdk/documentintelligence/ai-document-intelligence-rest/src/documentIntelligence.ts @@ -1,11 +1,10 @@ // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. -import type { ClientOptions } from "@azure-rest/core-client"; -import { getClient } from "@azure-rest/core-client"; +import { getClient, ClientOptions } from "@azure-rest/core-client"; import { logger } from "./logger.js"; -import type { TokenCredential, KeyCredential } from "@azure/core-auth"; -import type { DocumentIntelligenceClient } from "./clientDefinitions.js"; +import { TokenCredential, KeyCredential } from "@azure/core-auth"; +import { DocumentIntelligenceClient } from "./clientDefinitions.js"; /** The optional parameters for the client */ export interface DocumentIntelligenceClientOptions extends ClientOptions { @@ -22,11 +21,16 @@ export interface DocumentIntelligenceClientOptions extends ClientOptions { export default function createClient( endpointParam: string, credentials: TokenCredential | KeyCredential, - { apiVersion = "2024-07-31-preview", ...options }: DocumentIntelligenceClientOptions = {}, + { + apiVersion = "2024-11-30", + ...options + }: DocumentIntelligenceClientOptions = {}, ): DocumentIntelligenceClient { const endpointUrl = - options.endpoint ?? options.baseUrl ?? `${endpointParam}/documentintelligence`; - const userAgentInfo = `azsdk-js-ai-document-intelligence-rest/1.0.0-beta.1`; + options.endpoint ?? + options.baseUrl ?? + `${endpointParam}/documentintelligence`; + const userAgentInfo = `azsdk-js-ai-document-intelligence-rest/1.0.0`; const userAgentPrefix = options.userAgentOptions && options.userAgentOptions.userAgentPrefix ? `${options.userAgentOptions.userAgentPrefix} ${userAgentInfo}` @@ -40,11 +44,18 @@ export default function createClient( logger: options.loggingOptions?.logger ?? logger.info, }, credentials: { - scopes: options.credentials?.scopes ?? ["https://cognitiveservices.azure.com/.default"], - apiKeyHeaderName: options.credentials?.apiKeyHeaderName ?? "Ocp-Apim-Subscription-Key", + scopes: options.credentials?.scopes ?? [ + "https://cognitiveservices.azure.com/.default", + ], + apiKeyHeaderName: + options.credentials?.apiKeyHeaderName ?? "Ocp-Apim-Subscription-Key", }, }; - const client = getClient(endpointUrl, credentials, options) as DocumentIntelligenceClient; + const client = getClient( + endpointUrl, + credentials, + options, + ) as DocumentIntelligenceClient; client.pipeline.removePolicy({ name: "ApiVersionPolicy" }); client.pipeline.addPolicy({ diff --git a/sdk/documentintelligence/ai-document-intelligence-rest/src/isUnexpected.ts b/sdk/documentintelligence/ai-document-intelligence-rest/src/isUnexpected.ts index 472c444bfbc9..ee5570ce7800 100644 --- a/sdk/documentintelligence/ai-document-intelligence-rest/src/isUnexpected.ts +++ b/sdk/documentintelligence/ai-document-intelligence-rest/src/isUnexpected.ts @@ -1,7 +1,7 @@ // Copyright (c) Microsoft Corporation. 
// Licensed under the MIT License. -import type { +import { ListOperations200Response, ListOperationsDefaultResponse, GetDocumentModelBuildOperation200Response, @@ -16,10 +16,12 @@ import type { GetDocumentClassifierBuildOperationDefaultResponse, GetOperation200Response, GetOperationDefaultResponse, - GetResourceInfo200Response, - GetResourceInfoDefaultResponse, + GetResourceDetails200Response, + GetResourceDetailsDefaultResponse, GetAnalyzeResult200Response, GetAnalyzeResultDefaultResponse, + DeleteAnalyzeResult204Response, + DeleteAnalyzeResultDefaultResponse, GetAnalyzeResultPdf200Response, GetAnalyzeResultPdfDefaultResponse, GetAnalyzeResultFigure200Response, @@ -32,9 +34,13 @@ import type { AnalyzeDocumentDefaultResponse, GetAnalyzeBatchResult200Response, GetAnalyzeBatchResultDefaultResponse, + DeleteAnalyzeBatchResult204Response, + DeleteAnalyzeBatchResultDefaultResponse, AnalyzeBatchDocuments202Response, AnalyzeBatchDocumentsLogicalResponse, AnalyzeBatchDocumentsDefaultResponse, + ListAnalyzeBatchResults200Response, + ListAnalyzeBatchResultsDefaultResponse, GetModel200Response, GetModelDefaultResponse, DeleteModel204Response, @@ -81,13 +87,17 @@ const responseMap: Record = { "GET /operations/{operationId}": ["200"], "GET /info": ["200"], "GET /documentModels/{modelId}/analyzeResults/{resultId}": ["200"], + "DELETE /documentModels/{modelId}/analyzeResults/{resultId}": ["204"], "GET /documentModels/{modelId}/analyzeResults/{resultId}/pdf": ["200"], - "GET /documentModels/{modelId}/analyzeResults/{resultId}/figures/{figureId}": ["200"], + "GET /documentModels/{modelId}/analyzeResults/{resultId}/figures/{figureId}": + ["200"], "GET /documentModels/{modelId}:analyze": ["200", "202"], "POST /documentModels/{modelId}:analyze": ["202"], "GET /documentModels/{modelId}/analyzeBatchResults/{resultId}": ["200"], + "DELETE /documentModels/{modelId}/analyzeBatchResults/{resultId}": ["204"], "GET /documentModels/{modelId}:analyzeBatch": ["200", "202"], "POST /documentModels/{modelId}:analyzeBatch": ["202"], + "GET /documentModels/{modelId}/analyzeBatchResults": ["200"], "GET /documentModels/{modelId}": ["200"], "DELETE /documentModels/{modelId}": ["204"], "GET /documentModels:build": ["200", "202"], @@ -143,16 +153,21 @@ export function isUnexpected( response: GetOperation200Response | GetOperationDefaultResponse, ): response is GetOperationDefaultResponse; export function isUnexpected( - response: GetResourceInfo200Response | GetResourceInfoDefaultResponse, -): response is GetResourceInfoDefaultResponse; + response: GetResourceDetails200Response | GetResourceDetailsDefaultResponse, +): response is GetResourceDetailsDefaultResponse; export function isUnexpected( response: GetAnalyzeResult200Response | GetAnalyzeResultDefaultResponse, ): response is GetAnalyzeResultDefaultResponse; +export function isUnexpected( + response: DeleteAnalyzeResult204Response | DeleteAnalyzeResultDefaultResponse, +): response is DeleteAnalyzeResultDefaultResponse; export function isUnexpected( response: GetAnalyzeResultPdf200Response | GetAnalyzeResultPdfDefaultResponse, ): response is GetAnalyzeResultPdfDefaultResponse; export function isUnexpected( - response: GetAnalyzeResultFigure200Response | GetAnalyzeResultFigureDefaultResponse, + response: + | GetAnalyzeResultFigure200Response + | GetAnalyzeResultFigureDefaultResponse, ): response is GetAnalyzeResultFigureDefaultResponse; export function isUnexpected( response: @@ -167,14 +182,26 @@ export function isUnexpected( | AnalyzeDocumentDefaultResponse, ): response is 
AnalyzeDocumentDefaultResponse; export function isUnexpected( - response: GetAnalyzeBatchResult200Response | GetAnalyzeBatchResultDefaultResponse, + response: + | GetAnalyzeBatchResult200Response + | GetAnalyzeBatchResultDefaultResponse, ): response is GetAnalyzeBatchResultDefaultResponse; +export function isUnexpected( + response: + | DeleteAnalyzeBatchResult204Response + | DeleteAnalyzeBatchResultDefaultResponse, +): response is DeleteAnalyzeBatchResultDefaultResponse; export function isUnexpected( response: | AnalyzeBatchDocuments202Response | AnalyzeBatchDocumentsLogicalResponse | AnalyzeBatchDocumentsDefaultResponse, ): response is AnalyzeBatchDocumentsDefaultResponse; +export function isUnexpected( + response: + | ListAnalyzeBatchResults200Response + | ListAnalyzeBatchResultsDefaultResponse, +): response is ListAnalyzeBatchResultsDefaultResponse; export function isUnexpected( response: GetModel200Response | GetModelDefaultResponse, ): response is GetModelDefaultResponse; @@ -182,16 +209,25 @@ export function isUnexpected( response: DeleteModel204Response | DeleteModelDefaultResponse, ): response is DeleteModelDefaultResponse; export function isUnexpected( - response: BuildModel202Response | BuildModelLogicalResponse | BuildModelDefaultResponse, + response: + | BuildModel202Response + | BuildModelLogicalResponse + | BuildModelDefaultResponse, ): response is BuildModelDefaultResponse; export function isUnexpected( - response: ComposeModel202Response | ComposeModelLogicalResponse | ComposeModelDefaultResponse, + response: + | ComposeModel202Response + | ComposeModelLogicalResponse + | ComposeModelDefaultResponse, ): response is ComposeModelDefaultResponse; export function isUnexpected( response: AuthorizeModelCopy200Response | AuthorizeModelCopyDefaultResponse, ): response is AuthorizeModelCopyDefaultResponse; export function isUnexpected( - response: CopyModelTo202Response | CopyModelToLogicalResponse | CopyModelToDefaultResponse, + response: + | CopyModelTo202Response + | CopyModelToLogicalResponse + | CopyModelToDefaultResponse, ): response is CopyModelToDefaultResponse; export function isUnexpected( response: ListModels200Response | ListModelsDefaultResponse, @@ -227,7 +263,9 @@ export function isUnexpected( response: GetClassifyResult200Response | GetClassifyResultDefaultResponse, ): response is GetClassifyResultDefaultResponse; export function isUnexpected( - response: AuthorizeClassifierCopy200Response | AuthorizeClassifierCopyDefaultResponse, + response: + | AuthorizeClassifierCopy200Response + | AuthorizeClassifierCopyDefaultResponse, ): response is AuthorizeClassifierCopyDefaultResponse; export function isUnexpected( response: @@ -251,10 +289,12 @@ export function isUnexpected( | GetDocumentClassifierBuildOperationDefaultResponse | GetOperation200Response | GetOperationDefaultResponse - | GetResourceInfo200Response - | GetResourceInfoDefaultResponse + | GetResourceDetails200Response + | GetResourceDetailsDefaultResponse | GetAnalyzeResult200Response | GetAnalyzeResultDefaultResponse + | DeleteAnalyzeResult204Response + | DeleteAnalyzeResultDefaultResponse | GetAnalyzeResultPdf200Response | GetAnalyzeResultPdfDefaultResponse | GetAnalyzeResultFigure200Response @@ -267,9 +307,13 @@ export function isUnexpected( | AnalyzeDocumentDefaultResponse | GetAnalyzeBatchResult200Response | GetAnalyzeBatchResultDefaultResponse + | DeleteAnalyzeBatchResult204Response + | DeleteAnalyzeBatchResultDefaultResponse | AnalyzeBatchDocuments202Response | AnalyzeBatchDocumentsLogicalResponse | 
AnalyzeBatchDocumentsDefaultResponse + | ListAnalyzeBatchResults200Response + | ListAnalyzeBatchResultsDefaultResponse | GetModel200Response | GetModelDefaultResponse | DeleteModel204Response @@ -317,14 +361,17 @@ export function isUnexpected( | GetDocumentClassifierCopyToOperationDefaultResponse | GetDocumentClassifierBuildOperationDefaultResponse | GetOperationDefaultResponse - | GetResourceInfoDefaultResponse + | GetResourceDetailsDefaultResponse | GetAnalyzeResultDefaultResponse + | DeleteAnalyzeResultDefaultResponse | GetAnalyzeResultPdfDefaultResponse | GetAnalyzeResultFigureDefaultResponse | AnalyzeDocumentFromStreamDefaultResponse | AnalyzeDocumentDefaultResponse | GetAnalyzeBatchResultDefaultResponse + | DeleteAnalyzeBatchResultDefaultResponse | AnalyzeBatchDocumentsDefaultResponse + | ListAnalyzeBatchResultsDefaultResponse | GetModelDefaultResponse | DeleteModelDefaultResponse | BuildModelDefaultResponse @@ -373,17 +420,24 @@ function getParametrizedPathSuccess(method: string, path: string): string[] { // track if we have found a match to return the values found. let found = true; - for (let i = candidateParts.length - 1, j = pathParts.length - 1; i >= 1 && j >= 1; i--, j--) { - if (candidateParts[i]?.startsWith("{") && candidateParts[i]?.indexOf("}") !== -1) { + for ( + let i = candidateParts.length - 1, j = pathParts.length - 1; + i >= 1 && j >= 1; + i--, j-- + ) { + if ( + candidateParts[i]?.startsWith("{") && + candidateParts[i]?.indexOf("}") !== -1 + ) { const start = candidateParts[i]!.indexOf("}") + 1, end = candidateParts[i]?.length; // If the current part of the candidate is a "template" part // Try to use the suffix of pattern to match the path // {guid} ==> $ // {guid}:export ==> :export$ - const isMatched = new RegExp(`${candidateParts[i]?.slice(start, end)}`).test( - pathParts[j] || "", - ); + const isMatched = new RegExp( + `${candidateParts[i]?.slice(start, end)}`, + ).test(pathParts[j] || ""); if (!isMatched) { found = false; diff --git a/sdk/documentintelligence/ai-document-intelligence-rest/src/models.ts b/sdk/documentintelligence/ai-document-intelligence-rest/src/models.ts index b089b1b5a545..1f534a9326f7 100644 --- a/sdk/documentintelligence/ai-document-intelligence-rest/src/models.ts +++ b/sdk/documentintelligence/ai-document-intelligence-rest/src/models.ts @@ -24,7 +24,7 @@ export interface DocumentTypeDetails { /** * Custom document model build mode. * - * Possible values: "template", "neural", "generative" + * Possible values: "template", "neural" */ buildMode?: DocumentBuildMode; /** Description of the document semantic schema using a JSON Schema style syntax. */ @@ -123,7 +123,7 @@ export interface BuildDocumentModelRequest { /** * Custom document model build mode. * - * Possible values: "template", "neural", "generative" + * Possible values: "template", "neural" */ buildMode: DocumentBuildMode; /** @@ -178,7 +178,7 @@ export interface AuthorizeCopyRequest { * Authorization to copy a document model to the specified target resource and * modelId. */ -export interface CopyAuthorization { +export interface ModelCopyAuthorization { /** ID of the target Azure resource where the document model should be copied to. 
*/ targetResourceId: string; /** @@ -265,7 +265,7 @@ export type DocumentAnalysisFeature = string; export type ContentSourceKind = string; /** Alias for StringIndexType */ export type StringIndexType = string; -/** Alias for ContentFormat */ -export type ContentFormat = string; +/** Alias for DocumentContentFormat */ +export type DocumentContentFormat = string; /** Alias for AnalyzeOutputOption */ export type AnalyzeOutputOption = string; diff --git a/sdk/documentintelligence/ai-document-intelligence-rest/src/outputModels.ts b/sdk/documentintelligence/ai-document-intelligence-rest/src/outputModels.ts index 8273ae8968f4..11c75abaeade 100644 --- a/sdk/documentintelligence/ai-document-intelligence-rest/src/outputModels.ts +++ b/sdk/documentintelligence/ai-document-intelligence-rest/src/outputModels.ts @@ -1,18 +1,24 @@ // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. -import type { Paged } from "@azure/core-paging"; +/** Paged collection of DocumentIntelligenceOperationDetails items */ +export interface PagedDocumentIntelligenceOperationDetailsOutput { + /** The DocumentIntelligenceOperationDetails items on this page */ + value: Array; + /** The link to the next page of items */ + nextLink?: string; +} /** Operation info. */ -export interface OperationDetailsOutputParent { +export interface DocumentIntelligenceOperationDetailsOutputParent { /** Operation ID */ operationId: string; /** * Operation status. notStarted, running, completed, or failed * - * Possible values: "notStarted", "running", "failed", "succeeded", "completed", "canceled" + * Possible values: "notStarted", "running", "failed", "succeeded", "canceled", "skipped" */ - status: OperationStatusOutput; + status: DocumentIntelligenceOperationStatusOutput; /** Operation progress (0-100). */ percentCompleted?: number; /** Date and time (UTC) when the operation was created. */ @@ -26,12 +32,12 @@ export interface OperationDetailsOutputParent { /** List of key-value tag attributes associated with the document model. */ tags?: Record; /** Encountered error. */ - error?: ErrorModelOutput; + error?: DocumentIntelligenceErrorOutput; kind: OperationKindOutput; } /** The error object. */ -export interface ErrorModelOutput { +export interface DocumentIntelligenceErrorOutput { /** One of a server-defined set of error codes. */ code: string; /** A human-readable representation of the error. */ @@ -39,23 +45,24 @@ export interface ErrorModelOutput { /** The target of the error. */ target?: string; /** An array of details about specific errors that led to this reported error. */ - details?: Array; + details?: Array; /** An object containing more specific information than the current object about the error. */ - innererror?: InnerErrorOutput; + innererror?: DocumentIntelligenceInnerErrorOutput; } /** An object containing more specific information about the error. */ -export interface InnerErrorOutput { +export interface DocumentIntelligenceInnerErrorOutput { /** One of a server-defined set of error codes. */ code?: string; /** A human-readable representation of the error. */ message?: string; /** Inner error. */ - innererror?: InnerErrorOutput; + innererror?: DocumentIntelligenceInnerErrorOutput; } /** Get Operation response object. */ -export interface DocumentModelBuildOperationDetailsOutput extends OperationDetailsOutputParent { +export interface DocumentModelBuildOperationDetailsOutput + extends DocumentIntelligenceOperationDetailsOutputParent { /** Operation result upon success. 
*/ result?: DocumentModelDetailsOutput; /** Type of operation. */ @@ -72,6 +79,8 @@ export interface DocumentModelDetailsOutput { readonly createdDateTime: string; /** Date and time (UTC) when the document model will expire. */ readonly expirationDateTime?: string; + /** Date and time (UTC) when the document model was last modified. */ + readonly modifiedDateTime?: string; /** API version used to create this document model. */ readonly apiVersion?: string; /** List of key-value tag attributes associated with the document model. */ @@ -79,7 +88,7 @@ export interface DocumentModelDetailsOutput { /** * Custom document model build mode. * - * Possible values: "template", "neural", "generative" + * Possible values: "template", "neural" */ readonly buildMode?: DocumentBuildModeOutput; /** @@ -103,7 +112,7 @@ export interface DocumentModelDetailsOutput { /** Supported document types. */ readonly docTypes?: Record; /** List of warnings encountered while building the model. */ - readonly warnings?: Array; + readonly warnings?: Array; /** Number of V100-equivalent GPU hours consumed for model training. */ readonly trainingHours?: number; } @@ -131,7 +140,7 @@ export interface DocumentTypeDetailsOutput { /** * Custom document model build mode. * - * Possible values: "template", "neural", "generative" + * Possible values: "template", "neural" */ buildMode?: DocumentBuildModeOutput; /** Description of the document semantic schema using a JSON Schema style syntax. */ @@ -169,7 +178,7 @@ export interface DocumentFieldSchemaOutput { } /** The error object. */ -export interface WarningOutput { +export interface DocumentIntelligenceWarningOutput { /** One of a server-defined set of warning codes. */ code: string; /** A human-readable representation of the warning. */ @@ -179,7 +188,8 @@ export interface WarningOutput { } /** Get Operation response object. */ -export interface DocumentModelComposeOperationDetailsOutput extends OperationDetailsOutputParent { +export interface DocumentModelComposeOperationDetailsOutput + extends DocumentIntelligenceOperationDetailsOutputParent { /** Operation result upon success. */ result?: DocumentModelDetailsOutput; /** Type of operation. */ @@ -187,7 +197,8 @@ export interface DocumentModelComposeOperationDetailsOutput extends OperationDet } /** Get Operation response object. */ -export interface DocumentModelCopyToOperationDetailsOutput extends OperationDetailsOutputParent { +export interface DocumentModelCopyToOperationDetailsOutput + extends DocumentIntelligenceOperationDetailsOutputParent { /** Operation result upon success. */ result?: DocumentModelDetailsOutput; /** Type of operation. */ @@ -196,7 +207,7 @@ export interface DocumentModelCopyToOperationDetailsOutput extends OperationDeta /** Get Operation response object. */ export interface DocumentClassifierCopyToOperationDetailsOutput - extends OperationDetailsOutputParent { + extends DocumentIntelligenceOperationDetailsOutputParent { /** Operation result upon success. */ result?: DocumentClassifierDetailsOutput; /** Type of operation. */ @@ -213,6 +224,8 @@ export interface DocumentClassifierDetailsOutput { createdDateTime: string; /** Date and time (UTC) when the document classifier will expire. */ expirationDateTime?: string; + /** Date and time (UTC) when the document model was last modified. */ + readonly modifiedDateTime?: string; /** API version used to create this document classifier. */ apiVersion: string; /** Base classifierId on top of which the classifier was trained. 
*/ @@ -220,7 +233,7 @@ export interface DocumentClassifierDetailsOutput { /** List of document types to classify against. */ docTypes: Record; /** List of warnings encountered while building the classifier. */ - warnings?: Array; + warnings?: Array; } /** Classifier document type info. */ @@ -247,7 +260,7 @@ export interface ClassifierDocumentTypeDetailsOutput { /** Get Operation response object. */ export interface DocumentClassifierBuildOperationDetailsOutput - extends OperationDetailsOutputParent { + extends DocumentIntelligenceOperationDetailsOutputParent { /** Operation result upon success. */ result?: DocumentClassifierDetailsOutput; /** Type of operation. */ @@ -255,13 +268,13 @@ export interface DocumentClassifierBuildOperationDetailsOutput } /** Error response object. */ -export interface ErrorResponseOutput { +export interface DocumentIntelligenceErrorResponseOutput { /** Error info. */ - error: ErrorModelOutput; + error: DocumentIntelligenceErrorOutput; } /** General information regarding the current resource. */ -export interface ResourceDetailsOutput { +export interface DocumentIntelligenceResourceDetailsOutput { /** Details regarding custom document models. */ customDocumentModels: CustomDocumentModelsDetailsOutput; } @@ -275,19 +288,19 @@ export interface CustomDocumentModelsDetailsOutput { } /** Status and result of the analyze operation. */ -export interface AnalyzeResultOperationOutput { +export interface AnalyzeOperationOutput { /** * Operation status. notStarted, running, succeeded, or failed * - * Possible values: "notStarted", "running", "failed", "succeeded", "completed", "canceled" + * Possible values: "notStarted", "running", "failed", "succeeded", "canceled", "skipped" */ - status: OperationStatusOutput; + status: DocumentIntelligenceOperationStatusOutput; /** Date and time (UTC) when the analyze operation was submitted. */ createdDateTime: string; /** Date and time (UTC) when the status was last updated. */ lastUpdatedDateTime: string; /** Encountered error during document analysis. */ - error?: ErrorModelOutput; + error?: DocumentIntelligenceErrorOutput; /** Document analysis result. */ analyzeResult?: AnalyzeResultOutput; } @@ -309,7 +322,7 @@ export interface AnalyzeResultOutput { * * Possible values: "text", "markdown" */ - contentFormat?: ContentFormatOutput; + contentFormat?: DocumentContentFormatOutput; /** * Concatenate string representation of all textual and visual elements in reading * order. @@ -332,9 +345,9 @@ export interface AnalyzeResultOutput { /** Detected languages. */ languages?: Array; /** Extracted documents. */ - documents?: Array; + documents?: Array; /** List of warnings encountered. */ - warnings?: Array; + warnings?: Array; } /** Content and layout elements extracted from a page from the input. */ @@ -655,13 +668,13 @@ export interface DocumentStyleOutput { * * Possible values: "normal", "italic" */ - fontStyle?: FontStyleOutput; + fontStyle?: DocumentFontStyleOutput; /** * Font weight. * * Possible values: "normal", "bold" */ - fontWeight?: FontWeightOutput; + fontWeight?: DocumentFontWeightOutput; /** Foreground color in #rrggbb hexadecimal format. */ color?: string; /** Background color in #rrggbb hexadecimal format.. */ @@ -689,7 +702,7 @@ export interface DocumentLanguageOutput { } /** An object describing the location and semantic content of a document. */ -export interface DocumentOutput { +export interface AnalyzedDocumentOutput { /** Document type. */ docType: string; /** Bounding regions covering the document. 
*/ @@ -804,13 +817,15 @@ export interface AddressValueOutput { } /** Status and result of the analyze batch operation. */ -export interface AnalyzeBatchResultOperationOutput { +export interface AnalyzeBatchOperationOutput { + /** Analyze batch operation result ID. */ + resultId?: string; /** - * Operation status. notStarted, running, completed, or failed + * Operation status. notStarted, running, succeeded, or failed * - * Possible values: "notStarted", "running", "failed", "succeeded", "completed", "canceled" + * Possible values: "notStarted", "running", "failed", "succeeded", "canceled", "skipped" */ - status: OperationStatusOutput; + status: DocumentIntelligenceOperationStatusOutput; /** Date and time (UTC) when the operation was submitted. */ createdDateTime: string; /** Date and time (UTC) when the status was last updated. */ @@ -818,7 +833,7 @@ export interface AnalyzeBatchResultOperationOutput { /** Operation progress (0-100). */ percentCompleted?: number; /** Encountered error during batch document analysis. */ - error?: ErrorModelOutput; + error?: DocumentIntelligenceErrorOutput; /** Batch document analysis result. */ result?: AnalyzeBatchResultOutput; } @@ -832,7 +847,7 @@ export interface AnalyzeBatchResultOutput { /** Number of documents that completed with status skipped. */ skippedCount: number; /** Operation detail for each document in the batch. */ - details: Array; + details?: Array; } /** Operation detail for a document in a batch analysis. */ @@ -840,22 +855,30 @@ export interface AnalyzeBatchOperationDetailOutput { /** * Analyze status. succeeded, failed, or skipped * - * Possible values: "notStarted", "running", "failed", "succeeded", "completed", "canceled" + * Possible values: "notStarted", "running", "failed", "succeeded", "canceled", "skipped" */ - status: OperationStatusOutput; + status: DocumentIntelligenceOperationStatusOutput; /** URL of the source document. */ sourceUrl: string; /** URL of the analyze result JSON. */ resultUrl?: string; /** Encountered error. */ - error?: ErrorModelOutput; + error?: DocumentIntelligenceErrorOutput; +} + +/** Paged collection of AnalyzeBatchOperation items */ +export interface PagedAnalyzeBatchOperationOutput { + /** The AnalyzeBatchOperation items on this page */ + value: Array; + /** The link to the next page of items */ + nextLink?: string; } /** * Authorization to copy a document model to the specified target resource and * modelId. */ -export interface CopyAuthorizationOutput { +export interface ModelCopyAuthorizationOutput { /** ID of the target Azure resource where the document model should be copied to. */ targetResourceId: string; /** @@ -873,6 +896,22 @@ export interface CopyAuthorizationOutput { expirationDateTime: string; } +/** Paged collection of DocumentModelDetails items */ +export interface PagedDocumentModelDetailsOutput { + /** The DocumentModelDetails items on this page */ + value: Array; + /** The link to the next page of items */ + nextLink?: string; +} + +/** Paged collection of DocumentClassifierDetails items */ +export interface PagedDocumentClassifierDetailsOutput { + /** The DocumentClassifierDetails items on this page */ + value: Array; + /** The link to the next page of items */ + nextLink?: string; +} + /** * Authorization to copy a document classifier to the specified target resource and * classifierId. @@ -896,17 +935,15 @@ export interface ClassifierCopyAuthorizationOutput { } /** Operation info. 
*/ -export type OperationDetailsOutput = - | OperationDetailsOutputParent +export type DocumentIntelligenceOperationDetailsOutput = + | DocumentIntelligenceOperationDetailsOutputParent | DocumentModelBuildOperationDetailsOutput | DocumentModelComposeOperationDetailsOutput | DocumentModelCopyToOperationDetailsOutput | DocumentClassifierCopyToOperationDetailsOutput | DocumentClassifierBuildOperationDetailsOutput; -/** Paged collection of OperationDetails items */ -export type PagedOperationDetailsOutput = Paged; -/** Alias for OperationStatusOutput */ -export type OperationStatusOutput = string; +/** Alias for DocumentIntelligenceOperationStatusOutput */ +export type DocumentIntelligenceOperationStatusOutput = string; /** Alias for OperationKindOutput */ export type OperationKindOutput = string; /** Alias for DocumentBuildModeOutput */ @@ -921,8 +958,8 @@ export type DocumentAnalysisFeatureOutput = string; export type ContentSourceKindOutput = string; /** Alias for StringIndexTypeOutput */ export type StringIndexTypeOutput = string; -/** Alias for ContentFormatOutput */ -export type ContentFormatOutput = string; +/** Alias for DocumentContentFormatOutput */ +export type DocumentContentFormatOutput = string; /** Alias for LengthUnitOutput */ export type LengthUnitOutput = string; /** Alias for DocumentSelectionMarkStateOutput */ @@ -935,13 +972,9 @@ export type DocumentFormulaKindOutput = string; export type ParagraphRoleOutput = string; /** Alias for DocumentTableCellKindOutput */ export type DocumentTableCellKindOutput = string; -/** Alias for FontStyleOutput */ -export type FontStyleOutput = string; -/** Alias for FontWeightOutput */ -export type FontWeightOutput = string; +/** Alias for DocumentFontStyleOutput */ +export type DocumentFontStyleOutput = string; +/** Alias for DocumentFontWeightOutput */ +export type DocumentFontWeightOutput = string; /** Alias for DocumentSignatureTypeOutput */ export type DocumentSignatureTypeOutput = string; -/** Paged collection of DocumentModelDetails items */ -export type PagedDocumentModelDetailsOutput = Paged; -/** Paged collection of DocumentClassifierDetails items */ -export type PagedDocumentClassifierDetailsOutput = Paged; diff --git a/sdk/documentintelligence/ai-document-intelligence-rest/src/paginateHelper.ts b/sdk/documentintelligence/ai-document-intelligence-rest/src/paginateHelper.ts index 5d541b4e406d..5ef95e5ea0a3 100644 --- a/sdk/documentintelligence/ai-document-intelligence-rest/src/paginateHelper.ts +++ b/sdk/documentintelligence/ai-document-intelligence-rest/src/paginateHelper.ts @@ -1,10 +1,162 @@ // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. -import type { PagedAsyncIterableIterator, PagedResult } from "@azure/core-paging"; -import { getPagedAsyncIterator } from "@azure/core-paging"; -import type { Client, PathUncheckedResponse } from "@azure-rest/core-client"; -import { createRestError } from "@azure-rest/core-client"; +import { + Client, + createRestError, + PathUncheckedResponse, +} from "@azure-rest/core-client"; + +/** + * returns an async iterator that iterates over results. It also has a `byPage` + * method that returns pages of items at once. + * + * @param pagedResult - an object that specifies how to get pages. + * @returns a paged async iterator that iterates over results. 
+ */ +function getPagedAsyncIterator< + TElement, + TPage = TElement[], + TPageSettings = PageSettings, + TLink = string, +>( + pagedResult: PagedResult, +): PagedAsyncIterableIterator { + const iter = getItemAsyncIterator( + pagedResult, + ); + return { + next() { + return iter.next(); + }, + [Symbol.asyncIterator]() { + return this; + }, + byPage: + pagedResult?.byPage ?? + (((settings?: PageSettings) => { + const { continuationToken } = settings ?? {}; + return getPageAsyncIterator(pagedResult, { + pageLink: continuationToken as unknown as TLink | undefined, + }); + }) as unknown as ( + settings?: TPageSettings, + ) => AsyncIterableIterator), + }; +} + +async function* getItemAsyncIterator( + pagedResult: PagedResult, +): AsyncIterableIterator { + const pages = getPageAsyncIterator(pagedResult); + const firstVal = await pages.next(); + // if the result does not have an array shape, i.e. TPage = TElement, then we return it as is + if (!Array.isArray(firstVal.value)) { + // can extract elements from this page + const { toElements } = pagedResult; + if (toElements) { + yield* toElements(firstVal.value) as TElement[]; + for await (const page of pages) { + yield* toElements(page) as TElement[]; + } + } else { + yield firstVal.value; + // `pages` is of type `AsyncIterableIterator` but TPage = TElement in this case + yield* pages as unknown as AsyncIterableIterator; + } + } else { + yield* firstVal.value; + for await (const page of pages) { + // pages is of type `AsyncIterableIterator` so `page` is of type `TPage`. In this branch, + // it must be the case that `TPage = TElement[]` + yield* page as unknown as TElement[]; + } + } +} + +async function* getPageAsyncIterator( + pagedResult: PagedResult, + options: { + pageLink?: TLink; + } = {}, +): AsyncIterableIterator { + const { pageLink } = options; + let response = await pagedResult.getPage( + pageLink ?? pagedResult.firstPageLink, + ); + if (!response) { + return; + } + yield response.page; + while (response.nextPageLink) { + response = await pagedResult.getPage(response.nextPageLink); + if (!response) { + return; + } + yield response.page; + } +} + +/** + * An interface that tracks the settings for paged iteration + */ +export interface PageSettings { + /** + * The token that keeps track of where to continue the iterator + */ + continuationToken?: string; +} + +/** + * An interface that allows async iterable iteration both to completion and by page. + */ +export interface PagedAsyncIterableIterator< + TElement, + TPage = TElement[], + TPageSettings = PageSettings, +> { + /** + * The next method, part of the iteration protocol + */ + next(): Promise>; + /** + * The connection to the async iterator, part of the iteration protocol + */ + [Symbol.asyncIterator](): PagedAsyncIterableIterator< + TElement, + TPage, + TPageSettings + >; + /** + * Return an AsyncIterableIterator that works a page at a time + */ + byPage: (settings?: TPageSettings) => AsyncIterableIterator; +} + +/** + * An interface that describes how to communicate with the service. + */ +interface PagedResult { + /** + * Link to the first page of results. + */ + firstPageLink: TLink; + /** + * A method that returns a page of results. + */ + getPage: ( + pageLink: TLink, + ) => Promise<{ page: TPage; nextPageLink?: TLink } | undefined>; + /** + * a function to implement the `byPage` method on the paged async iterator. + */ + byPage?: (settings?: TPageSettings) => AsyncIterableIterator; + + /** + * A function to extract elements from a page. 
+ */ + toElements?: (page: TPage) => unknown[]; +} /** * Helper type to extract the type of an array @@ -14,10 +166,7 @@ export type GetArrayType = T extends Array ? TData : never; /** * The type of a custom function that defines how to get a page and a link to the next one if any. */ -export type GetPage = ( - pageLink: string, - maxPageSize?: number, -) => Promise<{ +export type GetPage = (pageLink: string) => Promise<{ page: TPage; nextPageLink?: string; }>; @@ -69,7 +218,9 @@ export function paginate( typeof customGetPage === "function" ? customGetPage : async (pageLink: string) => { - const result = firstRun ? initialResponse : await client.pathUnchecked(pageLink).get(); + const result = firstRun + ? initialResponse + : await client.pathUnchecked(pageLink).get(); firstRun = false; checkPagingRequest(result); const nextLink = getNextLink(result.body, nextLinkName); @@ -95,7 +246,9 @@ function getNextLink(body: unknown, nextLinkName?: string): string | undefined { const nextLink = (body as Record)[nextLinkName]; if (typeof nextLink !== "string" && typeof nextLink !== "undefined") { - throw new Error(`Body Property ${nextLinkName} should be a string or undefined`); + throw new Error( + `Body Property ${nextLinkName} should be a string or undefined`, + ); } return nextLink; @@ -123,7 +276,18 @@ function getElements(body: unknown, itemName: string): T[] { * Checks if a request failed */ function checkPagingRequest(response: PathUncheckedResponse): void { - const Http2xxStatusCodes = ["200", "201", "202", "203", "204", "205", "206", "207", "208", "226"]; + const Http2xxStatusCodes = [ + "200", + "201", + "202", + "203", + "204", + "205", + "206", + "207", + "208", + "226", + ]; if (!Http2xxStatusCodes.includes(response.status)) { throw createRestError( `Pagination failed with unexpected statusCode ${response.status}`, diff --git a/sdk/documentintelligence/ai-document-intelligence-rest/src/parameters.ts b/sdk/documentintelligence/ai-document-intelligence-rest/src/parameters.ts index cd76fb6850be..21f14fb9039e 100644 --- a/sdk/documentintelligence/ai-document-intelligence-rest/src/parameters.ts +++ b/sdk/documentintelligence/ai-document-intelligence-rest/src/parameters.ts @@ -1,19 +1,19 @@ // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. -import type { RawHttpHeadersInput } from "@azure/core-rest-pipeline"; -import type { RequestParameters } from "@azure-rest/core-client"; -import type { +import { RawHttpHeadersInput } from "@azure/core-rest-pipeline"; +import { RequestParameters } from "@azure-rest/core-client"; +import { StringIndexType, DocumentAnalysisFeature, - ContentFormat, + DocumentContentFormat, AnalyzeOutputOption, AnalyzeDocumentRequest, AnalyzeBatchDocumentsRequest, BuildDocumentModelRequest, ComposeDocumentModelRequest, AuthorizeCopyRequest, - CopyAuthorization, + ModelCopyAuthorization, BuildDocumentClassifierRequest, SplitMode, ClassifyDocumentRequest, @@ -30,7 +30,8 @@ export interface ListOperationsHeaderParam { headers?: RawHttpHeadersInput & ListOperationsHeaders; } -export type ListOperationsParameters = ListOperationsHeaderParam & RequestParameters; +export type ListOperationsParameters = ListOperationsHeaderParam & + RequestParameters; export interface GetDocumentModelBuildOperationHeaders { /** An opaque, globally-unique, client-generated string identifier for the request. 
*/ @@ -41,8 +42,8 @@ export interface GetDocumentModelBuildOperationHeaderParam { headers?: RawHttpHeadersInput & GetDocumentModelBuildOperationHeaders; } -export type GetDocumentModelBuildOperationParameters = GetDocumentModelBuildOperationHeaderParam & - RequestParameters; +export type GetDocumentModelBuildOperationParameters = + GetDocumentModelBuildOperationHeaderParam & RequestParameters; export interface GetDocumentModelComposeOperationHeaders { /** An opaque, globally-unique, client-generated string identifier for the request. */ @@ -65,8 +66,8 @@ export interface GetDocumentModelCopyToOperationHeaderParam { headers?: RawHttpHeadersInput & GetDocumentModelCopyToOperationHeaders; } -export type GetDocumentModelCopyToOperationParameters = GetDocumentModelCopyToOperationHeaderParam & - RequestParameters; +export type GetDocumentModelCopyToOperationParameters = + GetDocumentModelCopyToOperationHeaderParam & RequestParameters; export interface GetDocumentClassifierCopyToOperationHeaders { /** An opaque, globally-unique, client-generated string identifier for the request. */ @@ -101,11 +102,13 @@ export interface GetOperationHeaderParam { headers?: RawHttpHeadersInput & GetOperationHeaders; } -export type GetOperationParameters = GetOperationHeaderParam & RequestParameters; -export type GetResourceInfoParameters = RequestParameters; +export type GetOperationParameters = GetOperationHeaderParam & + RequestParameters; +export type GetResourceDetailsParameters = RequestParameters; export type GetAnalyzeResultParameters = RequestParameters; export type GetAnalyzeResultPdfParameters = RequestParameters; export type GetAnalyzeResultFigureParameters = RequestParameters; +export type DeleteAnalyzeResultParameters = RequestParameters; export interface AnalyzeDocumentFromStreamBodyParam { /** @@ -113,11 +116,15 @@ export interface AnalyzeDocumentFromStreamBodyParam { * * Value may contain any sequence of octets */ - body: string | Uint8Array | ReadableStream | NodeJS.ReadableStream; + body: + | string + | Uint8Array + | ReadableStream + | NodeJS.ReadableStream; } export interface AnalyzeDocumentFromStreamQueryParamProperties { - /** List of 1-based page numbers to analyze. Ex. "1-3,5,7-9" */ + /** 1-based page numbers to analyze. Ex. "1-3,5,7-9" */ pages?: string; /** * Locale hint for text recognition and document analysis. Value may contain only @@ -139,7 +146,7 @@ export interface AnalyzeDocumentFromStreamQueryParamProperties { * * Possible values: "text", "markdown" */ - outputContentFormat?: ContentFormat; + outputContentFormat?: DocumentContentFormat; /** Additional outputs to generate during analysis. */ output?: AnalyzeOutputOption[]; } @@ -164,10 +171,11 @@ export interface AnalyzeDocumentFromStreamMediaTypesParam { | "application/vnd.openxmlformats-officedocument.presentationml.presentation"; } -export type AnalyzeDocumentFromStreamParameters = AnalyzeDocumentFromStreamQueryParam & - AnalyzeDocumentFromStreamMediaTypesParam & - AnalyzeDocumentFromStreamBodyParam & - RequestParameters; +export type AnalyzeDocumentFromStreamParameters = + AnalyzeDocumentFromStreamQueryParam & + AnalyzeDocumentFromStreamMediaTypesParam & + AnalyzeDocumentFromStreamBodyParam & + RequestParameters; export interface AnalyzeDocumentBodyParam { /** Analyze request parameters. */ @@ -175,7 +183,7 @@ export interface AnalyzeDocumentBodyParam { } export interface AnalyzeDocumentQueryParamProperties { - /** List of 1-based page numbers to analyze. Ex. "1-3,5,7-9" */ + /** 1-based page numbers to analyze. Ex. 
"1-3,5,7-9" */ pages?: string; /** * Locale hint for text recognition and document analysis. Value may contain only @@ -197,7 +205,7 @@ export interface AnalyzeDocumentQueryParamProperties { * * Possible values: "text", "markdown" */ - outputContentFormat?: ContentFormat; + outputContentFormat?: DocumentContentFormat; /** Additional outputs to generate during analysis. */ output?: AnalyzeOutputOption[]; } @@ -223,7 +231,7 @@ export interface AnalyzeBatchDocumentsBodyParam { } export interface AnalyzeBatchDocumentsQueryParamProperties { - /** List of 1-based page numbers to analyze. Ex. "1-3,5,7-9" */ + /** 1-based page numbers to analyze. Ex. "1-3,5,7-9" */ pages?: string; /** * Locale hint for text recognition and document analysis. Value may contain only @@ -245,7 +253,7 @@ export interface AnalyzeBatchDocumentsQueryParamProperties { * * Possible values: "text", "markdown" */ - outputContentFormat?: ContentFormat; + outputContentFormat?: DocumentContentFormat; /** Additional outputs to generate during analysis. */ output?: AnalyzeOutputOption[]; } @@ -263,6 +271,8 @@ export type AnalyzeBatchDocumentsParameters = AnalyzeBatchDocumentsQueryParam & AnalyzeBatchDocumentsMediaTypesParam & AnalyzeBatchDocumentsBodyParam & RequestParameters; +export type ListAnalyzeBatchResultsParameters = RequestParameters; +export type DeleteAnalyzeBatchResultParameters = RequestParameters; export interface GetModelHeaders { /** An opaque, globally-unique, client-generated string identifier for the request. */ @@ -294,11 +304,12 @@ export interface AuthorizeModelCopyBodyParam { body: AuthorizeCopyRequest; } -export type AuthorizeModelCopyParameters = AuthorizeModelCopyBodyParam & RequestParameters; +export type AuthorizeModelCopyParameters = AuthorizeModelCopyBodyParam & + RequestParameters; export interface CopyModelToBodyParam { /** Copy to request parameters. */ - body: CopyAuthorization; + body: ModelCopyAuthorization; } export type CopyModelToParameters = CopyModelToBodyParam & RequestParameters; @@ -330,7 +341,8 @@ export interface BuildClassifierBodyParam { body: BuildDocumentClassifierRequest; } -export type BuildClassifierParameters = BuildClassifierBodyParam & RequestParameters; +export type BuildClassifierParameters = BuildClassifierBodyParam & + RequestParameters; export interface ListClassifiersHeaders { /** An opaque, globally-unique, client-generated string identifier for the request. */ @@ -341,7 +353,8 @@ export interface ListClassifiersHeaderParam { headers?: RawHttpHeadersInput & ListClassifiersHeaders; } -export type ListClassifiersParameters = ListClassifiersHeaderParam & RequestParameters; +export type ListClassifiersParameters = ListClassifiersHeaderParam & + RequestParameters; export interface GetClassifierHeaders { /** An opaque, globally-unique, client-generated string identifier for the request. */ @@ -352,7 +365,8 @@ export interface GetClassifierHeaderParam { headers?: RawHttpHeadersInput & GetClassifierHeaders; } -export type GetClassifierParameters = GetClassifierHeaderParam & RequestParameters; +export type GetClassifierParameters = GetClassifierHeaderParam & + RequestParameters; export interface DeleteClassifierHeaders { /** An opaque, globally-unique, client-generated string identifier for the request. 
*/ @@ -363,7 +377,8 @@ export interface DeleteClassifierHeaderParam { headers?: RawHttpHeadersInput & DeleteClassifierHeaders; } -export type DeleteClassifierParameters = DeleteClassifierHeaderParam & RequestParameters; +export type DeleteClassifierParameters = DeleteClassifierHeaderParam & + RequestParameters; export interface ClassifyDocumentFromStreamBodyParam { /** @@ -371,7 +386,11 @@ export interface ClassifyDocumentFromStreamBodyParam { * * Value may contain any sequence of octets */ - body: string | Uint8Array | ReadableStream | NodeJS.ReadableStream; + body: + | string + | Uint8Array + | ReadableStream + | NodeJS.ReadableStream; } export interface ClassifyDocumentFromStreamQueryParamProperties { @@ -387,7 +406,7 @@ export interface ClassifyDocumentFromStreamQueryParamProperties { * Possible values: "auto", "none", "perPage" */ split?: SplitMode; - /** List of 1-based page numbers to analyze. Ex. "1-3,5,7-9" */ + /** 1-based page numbers to analyze. Ex. "1-3,5,7-9" */ pages?: string; } @@ -411,10 +430,11 @@ export interface ClassifyDocumentFromStreamMediaTypesParam { | "application/vnd.openxmlformats-officedocument.presentationml.presentation"; } -export type ClassifyDocumentFromStreamParameters = ClassifyDocumentFromStreamQueryParam & - ClassifyDocumentFromStreamMediaTypesParam & - ClassifyDocumentFromStreamBodyParam & - RequestParameters; +export type ClassifyDocumentFromStreamParameters = + ClassifyDocumentFromStreamQueryParam & + ClassifyDocumentFromStreamMediaTypesParam & + ClassifyDocumentFromStreamBodyParam & + RequestParameters; export interface ClassifyDocumentBodyParam { /** Classify request parameters. */ @@ -434,7 +454,7 @@ export interface ClassifyDocumentQueryParamProperties { * Possible values: "auto", "none", "perPage" */ split?: SplitMode; - /** List of 1-based page numbers to analyze. Ex. "1-3,5,7-9" */ + /** 1-based page numbers to analyze. Ex. "1-3,5,7-9" */ pages?: string; } @@ -458,12 +478,13 @@ export interface AuthorizeClassifierCopyBodyParam { body: AuthorizeClassifierCopyRequest; } -export type AuthorizeClassifierCopyParameters = AuthorizeClassifierCopyBodyParam & - RequestParameters; +export type AuthorizeClassifierCopyParameters = + AuthorizeClassifierCopyBodyParam & RequestParameters; export interface CopyClassifierToBodyParam { /** Copy to request parameters. */ body: ClassifierCopyAuthorization; } -export type CopyClassifierToParameters = CopyClassifierToBodyParam & RequestParameters; +export type CopyClassifierToParameters = CopyClassifierToBodyParam & + RequestParameters; diff --git a/sdk/documentintelligence/ai-document-intelligence-rest/src/pollingHelper.ts b/sdk/documentintelligence/ai-document-intelligence-rest/src/pollingHelper.ts index 026a853d89c2..7d9cb3edc706 100644 --- a/sdk/documentintelligence/ai-document-intelligence-rest/src/pollingHelper.ts +++ b/sdk/documentintelligence/ai-document-intelligence-rest/src/pollingHelper.ts @@ -1,17 +1,17 @@ // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. 
-import type { Client, HttpResponse } from "@azure-rest/core-client"; -import type { AbortSignalLike } from "@azure/abort-controller"; -import type { +import { Client, HttpResponse } from "@azure-rest/core-client"; +import { AbortSignalLike } from "@azure/abort-controller"; +import { CancelOnProgress, CreateHttpPollerOptions, RunningOperation, OperationResponse, OperationState, + createHttpPoller, } from "@azure/core-lro"; -import { createHttpPoller } from "@azure/core-lro"; -import type { +import { AnalyzeDocumentFromStream202Response, AnalyzeDocumentFromStreamDefaultResponse, AnalyzeDocumentFromStreamLogicalResponse, @@ -37,12 +37,14 @@ import type { CopyClassifierToDefaultResponse, CopyClassifierToLogicalResponse, } from "./responses.js"; -import type { AnalyzeBatchResultOperationOutput } from "./outputModels.js"; /** * A simple poller that can be used to poll a long running operation. */ -export interface SimplePollerLike, TResult> { +export interface SimplePollerLike< + TState extends OperationState, + TResult, +> { /** * Returns true if the poller has finished polling. */ @@ -51,10 +53,6 @@ export interface SimplePollerLike, TResul * Returns the state of the operation. */ getOperationState(): TState; - /** - * Returns the id of the operation. - */ - getOperationId(): string; /** * Returns the result value of the operation, * regardless of the state of the poller. @@ -70,7 +68,9 @@ export interface SimplePollerLike, TResul /** * Returns a promise that will resolve once the underlying operation is completed. */ - pollUntilDone(pollOptions?: { abortSignal?: AbortSignalLike }): Promise; + pollUntilDone(pollOptions?: { + abortSignal?: AbortSignalLike; + }): Promise; /** * Invokes the provided callback after each polling is completed, * sending the current state of the poller's operation. @@ -117,10 +117,14 @@ export interface SimplePollerLike, TResul * @returns - A poller object to poll for operation state updates and eventually get the final response. 
*/ export async function getLongRunningPoller< - TResult extends AnalyzeBatchDocumentsLogicalResponse | AnalyzeBatchDocumentsDefaultResponse, + TResult extends + | AnalyzeBatchDocumentsLogicalResponse + | AnalyzeBatchDocumentsDefaultResponse, >( client: Client, - initialResponse: AnalyzeBatchDocuments202Response | AnalyzeBatchDocumentsDefaultResponse, + initialResponse: + | AnalyzeBatchDocuments202Response + | AnalyzeBatchDocumentsDefaultResponse, options?: CreateHttpPollerOptions>, ): Promise, TResult>>; export async function getLongRunningPoller< @@ -145,17 +149,23 @@ export async function getLongRunningPoller< options?: CreateHttpPollerOptions>, ): Promise, TResult>>; export async function getLongRunningPoller< - TResult extends BuildClassifierLogicalResponse | BuildClassifierDefaultResponse, + TResult extends + | BuildClassifierLogicalResponse + | BuildClassifierDefaultResponse, >( client: Client, initialResponse: BuildClassifier202Response | BuildClassifierDefaultResponse, options?: CreateHttpPollerOptions>, ): Promise, TResult>>; export async function getLongRunningPoller< - TResult extends CopyClassifierToLogicalResponse | CopyClassifierToDefaultResponse, + TResult extends + | CopyClassifierToLogicalResponse + | CopyClassifierToDefaultResponse, >( client: Client, - initialResponse: CopyClassifierTo202Response | CopyClassifierToDefaultResponse, + initialResponse: + | CopyClassifierTo202Response + | CopyClassifierToDefaultResponse, options?: CreateHttpPollerOptions>, ): Promise, TResult>>; export async function getLongRunningPoller< @@ -164,7 +174,9 @@ export async function getLongRunningPoller< | AnalyzeDocumentFromStreamDefaultResponse, >( client: Client, - initialResponse: AnalyzeDocumentFromStream202Response | AnalyzeDocumentFromStreamDefaultResponse, + initialResponse: + | AnalyzeDocumentFromStream202Response + | AnalyzeDocumentFromStreamDefaultResponse, options?: CreateHttpPollerOptions>, ): Promise, TResult>>; export async function getLongRunningPoller< @@ -191,7 +203,10 @@ export async function getLongRunningPoller( // response we were provided. return getLroResponse(initialResponse); }, - sendPollRequest: async (path: string, pollOptions?: { abortSignal?: AbortSignalLike }) => { + sendPollRequest: async ( + path: string, + pollOptions?: { abortSignal?: AbortSignalLike }, + ) => { // This is the callback that is going to be called to poll the service // to get the latest status. We use the client provided and the polling path // which is an opaque URL provided by caller, the service sends this in one of the following headers: operation-location, azure-asyncoperation or location @@ -217,23 +232,14 @@ export async function getLongRunningPoller( inputAbortSignal?.removeEventListener("abort", abortListener); } const lroResponse = getLroResponse(response as TResult); - lroResponse.rawResponse.headers["x-ms-original-url"] = initialResponse.request.url; + lroResponse.rawResponse.headers["x-ms-original-url"] = + initialResponse.request.url; return lroResponse; }, }; options.resolveOnUnsuccessful = options.resolveOnUnsuccessful ?? 
true; - - const httpPoller = createHttpPoller(poller, { - ...options, - updateState: (state, response) => { - const flatResponse = response.flatResponse; - if (!("body" in flatResponse)) return; - const flatResponseBody = flatResponse.body; - if (!("status" in flatResponseBody && flatResponseBody.status === "completed")) return; - state.status = "succeeded"; - }, - }); + const httpPoller = createHttpPoller(poller, options); const simplePoller: SimplePollerLike, TResult> = { isDone() { return httpPoller.isDone; @@ -270,26 +276,10 @@ export async function getLongRunningPoller( pollUntilDone: httpPoller.pollUntilDone, serialize: httpPoller.serialize, submitted: httpPoller.submitted, - getOperationId: () => parseOperationId(initialResponse.headers["operation-location"]), }; return simplePoller; } -/** - * Returns the operation-id from the operation-location header - */ -function parseOperationId(operationLocationHeader: string): string { - // regex to extract the operation id from the operation-location header with the regex "[^:]+://[^/]+/documentintelligence/.+/([^?/]+)" - const regex = /[^:]+:\/\/[^/]+\/documentintelligence\/.+\/([^?/]+)/; - const match = operationLocationHeader.match(regex); - if (!match) { - throw new Error( - `Failed to parse operation id from the operation-location header: ${operationLocationHeader}`, - ); - } - return match[1]; -} - /** * Converts a Rest Client response to a response that the LRO implementation understands * @param response - a rest client http response @@ -299,7 +289,9 @@ function getLroResponse( response: TResult, ): OperationResponse { if (Number.isNaN(response.status)) { - throw new TypeError(`Status code of the response is not a number. Value: ${response.status}`); + throw new TypeError( + `Status code of the response is not a number. Value: ${response.status}`, + ); } return { diff --git a/sdk/documentintelligence/ai-document-intelligence-rest/src/responses.ts b/sdk/documentintelligence/ai-document-intelligence-rest/src/responses.ts index 0ebe9b71afb3..2ca3f3141b2b 100644 --- a/sdk/documentintelligence/ai-document-intelligence-rest/src/responses.ts +++ b/sdk/documentintelligence/ai-document-intelligence-rest/src/responses.ts @@ -1,22 +1,23 @@ // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. -import type { RawHttpHeaders } from "@azure/core-rest-pipeline"; -import type { HttpResponse } from "@azure-rest/core-client"; -import type { - PagedOperationDetailsOutput, - ErrorResponseOutput, +import { RawHttpHeaders } from "@azure/core-rest-pipeline"; +import { HttpResponse } from "@azure-rest/core-client"; +import { + PagedDocumentIntelligenceOperationDetailsOutput, + DocumentIntelligenceErrorResponseOutput, DocumentModelBuildOperationDetailsOutput, DocumentModelComposeOperationDetailsOutput, DocumentModelCopyToOperationDetailsOutput, DocumentClassifierCopyToOperationDetailsOutput, DocumentClassifierBuildOperationDetailsOutput, - OperationDetailsOutput, - ResourceDetailsOutput, - AnalyzeResultOperationOutput, - AnalyzeBatchResultOperationOutput, + DocumentIntelligenceOperationDetailsOutput, + DocumentIntelligenceResourceDetailsOutput, + AnalyzeOperationOutput, + AnalyzeBatchOperationOutput, + PagedAnalyzeBatchOperationOutput, DocumentModelDetailsOutput, - CopyAuthorizationOutput, + ModelCopyAuthorizationOutput, PagedDocumentModelDetailsOutput, PagedDocumentClassifierDetailsOutput, DocumentClassifierDetailsOutput, @@ -31,13 +32,13 @@ export interface ListOperations200Headers { /** The request has succeeded. 
*/ export interface ListOperations200Response extends HttpResponse { status: "200"; - body: PagedOperationDetailsOutput; + body: PagedDocumentIntelligenceOperationDetailsOutput; headers: RawHttpHeaders & ListOperations200Headers; } export interface ListOperationsDefaultResponse extends HttpResponse { status: string; - body: ErrorResponseOutput; + body: DocumentIntelligenceErrorResponseOutput; } export interface GetDocumentModelBuildOperation200Headers { @@ -46,15 +47,17 @@ export interface GetDocumentModelBuildOperation200Headers { } /** The request has succeeded. */ -export interface GetDocumentModelBuildOperation200Response extends HttpResponse { +export interface GetDocumentModelBuildOperation200Response + extends HttpResponse { status: "200"; body: DocumentModelBuildOperationDetailsOutput; headers: RawHttpHeaders & GetDocumentModelBuildOperation200Headers; } -export interface GetDocumentModelBuildOperationDefaultResponse extends HttpResponse { +export interface GetDocumentModelBuildOperationDefaultResponse + extends HttpResponse { status: string; - body: ErrorResponseOutput; + body: DocumentIntelligenceErrorResponseOutput; } export interface GetDocumentModelComposeOperation200Headers { @@ -63,15 +66,17 @@ export interface GetDocumentModelComposeOperation200Headers { } /** The request has succeeded. */ -export interface GetDocumentModelComposeOperation200Response extends HttpResponse { +export interface GetDocumentModelComposeOperation200Response + extends HttpResponse { status: "200"; body: DocumentModelComposeOperationDetailsOutput; headers: RawHttpHeaders & GetDocumentModelComposeOperation200Headers; } -export interface GetDocumentModelComposeOperationDefaultResponse extends HttpResponse { +export interface GetDocumentModelComposeOperationDefaultResponse + extends HttpResponse { status: string; - body: ErrorResponseOutput; + body: DocumentIntelligenceErrorResponseOutput; } export interface GetDocumentModelCopyToOperation200Headers { @@ -80,15 +85,17 @@ export interface GetDocumentModelCopyToOperation200Headers { } /** The request has succeeded. */ -export interface GetDocumentModelCopyToOperation200Response extends HttpResponse { +export interface GetDocumentModelCopyToOperation200Response + extends HttpResponse { status: "200"; body: DocumentModelCopyToOperationDetailsOutput; headers: RawHttpHeaders & GetDocumentModelCopyToOperation200Headers; } -export interface GetDocumentModelCopyToOperationDefaultResponse extends HttpResponse { +export interface GetDocumentModelCopyToOperationDefaultResponse + extends HttpResponse { status: string; - body: ErrorResponseOutput; + body: DocumentIntelligenceErrorResponseOutput; } export interface GetDocumentClassifierCopyToOperation200Headers { @@ -97,15 +104,17 @@ export interface GetDocumentClassifierCopyToOperation200Headers { } /** The request has succeeded. 
*/ -export interface GetDocumentClassifierCopyToOperation200Response extends HttpResponse { +export interface GetDocumentClassifierCopyToOperation200Response + extends HttpResponse { status: "200"; body: DocumentClassifierCopyToOperationDetailsOutput; headers: RawHttpHeaders & GetDocumentClassifierCopyToOperation200Headers; } -export interface GetDocumentClassifierCopyToOperationDefaultResponse extends HttpResponse { +export interface GetDocumentClassifierCopyToOperationDefaultResponse + extends HttpResponse { status: string; - body: ErrorResponseOutput; + body: DocumentIntelligenceErrorResponseOutput; } export interface GetDocumentClassifierBuildOperation200Headers { @@ -114,15 +123,17 @@ export interface GetDocumentClassifierBuildOperation200Headers { } /** The request has succeeded. */ -export interface GetDocumentClassifierBuildOperation200Response extends HttpResponse { +export interface GetDocumentClassifierBuildOperation200Response + extends HttpResponse { status: "200"; body: DocumentClassifierBuildOperationDetailsOutput; headers: RawHttpHeaders & GetDocumentClassifierBuildOperation200Headers; } -export interface GetDocumentClassifierBuildOperationDefaultResponse extends HttpResponse { +export interface GetDocumentClassifierBuildOperationDefaultResponse + extends HttpResponse { status: string; - body: ErrorResponseOutput; + body: DocumentIntelligenceErrorResponseOutput; } export interface GetOperation200Headers { @@ -133,35 +144,35 @@ export interface GetOperation200Headers { /** The request has succeeded. */ export interface GetOperation200Response extends HttpResponse { status: "200"; - body: OperationDetailsOutput; + body: DocumentIntelligenceOperationDetailsOutput; headers: RawHttpHeaders & GetOperation200Headers; } export interface GetOperationDefaultResponse extends HttpResponse { status: string; - body: ErrorResponseOutput; + body: DocumentIntelligenceErrorResponseOutput; } /** The request has succeeded. */ -export interface GetResourceInfo200Response extends HttpResponse { +export interface GetResourceDetails200Response extends HttpResponse { status: "200"; - body: ResourceDetailsOutput; + body: DocumentIntelligenceResourceDetailsOutput; } -export interface GetResourceInfoDefaultResponse extends HttpResponse { +export interface GetResourceDetailsDefaultResponse extends HttpResponse { status: string; - body: ErrorResponseOutput; + body: DocumentIntelligenceErrorResponseOutput; } /** The request has succeeded. */ export interface GetAnalyzeResult200Response extends HttpResponse { status: "200"; - body: AnalyzeResultOperationOutput; + body: AnalyzeOperationOutput; } export interface GetAnalyzeResultDefaultResponse extends HttpResponse { status: string; - body: ErrorResponseOutput; + body: DocumentIntelligenceErrorResponseOutput; } export interface GetAnalyzeResultPdf200Headers { @@ -179,7 +190,7 @@ export interface GetAnalyzeResultPdf200Response extends HttpResponse { export interface GetAnalyzeResultPdfDefaultResponse extends HttpResponse { status: string; - body: ErrorResponseOutput; + body: DocumentIntelligenceErrorResponseOutput; } export interface GetAnalyzeResultFigure200Headers { @@ -197,7 +208,17 @@ export interface GetAnalyzeResultFigure200Response extends HttpResponse { export interface GetAnalyzeResultFigureDefaultResponse extends HttpResponse { status: string; - body: ErrorResponseOutput; + body: DocumentIntelligenceErrorResponseOutput; +} + +/** There is no content to send for this request, but the headers may be useful. 
*/ +export interface DeleteAnalyzeResult204Response extends HttpResponse { + status: "204"; +} + +export interface DeleteAnalyzeResultDefaultResponse extends HttpResponse { + status: string; + body: DocumentIntelligenceErrorResponseOutput; } export interface AnalyzeDocumentFromStream202Headers { @@ -214,7 +235,7 @@ export interface AnalyzeDocumentFromStream202Response extends HttpResponse { export interface AnalyzeDocumentFromStreamDefaultResponse extends HttpResponse { status: string; - body: ErrorResponseOutput; + body: DocumentIntelligenceErrorResponseOutput; } /** The final response for long-running analyzeDocumentFromStream operation */ @@ -236,7 +257,7 @@ export interface AnalyzeDocument202Response extends HttpResponse { export interface AnalyzeDocumentDefaultResponse extends HttpResponse { status: string; - body: ErrorResponseOutput; + body: DocumentIntelligenceErrorResponseOutput; } /** The final response for long-running analyzeDocument operation */ @@ -247,12 +268,12 @@ export interface AnalyzeDocumentLogicalResponse extends HttpResponse { /** The request has succeeded. */ export interface GetAnalyzeBatchResult200Response extends HttpResponse { status: "200"; - body: AnalyzeBatchResultOperationOutput; + body: AnalyzeBatchOperationOutput; } export interface GetAnalyzeBatchResultDefaultResponse extends HttpResponse { status: string; - body: ErrorResponseOutput; + body: DocumentIntelligenceErrorResponseOutput; } export interface AnalyzeBatchDocuments202Headers { @@ -269,7 +290,7 @@ export interface AnalyzeBatchDocuments202Response extends HttpResponse { export interface AnalyzeBatchDocumentsDefaultResponse extends HttpResponse { status: string; - body: ErrorResponseOutput; + body: DocumentIntelligenceErrorResponseOutput; } /** The final response for long-running analyzeBatchDocuments operation */ @@ -277,6 +298,27 @@ export interface AnalyzeBatchDocumentsLogicalResponse extends HttpResponse { status: "200"; } +/** The request has succeeded. */ +export interface ListAnalyzeBatchResults200Response extends HttpResponse { + status: "200"; + body: PagedAnalyzeBatchOperationOutput; +} + +export interface ListAnalyzeBatchResultsDefaultResponse extends HttpResponse { + status: string; + body: DocumentIntelligenceErrorResponseOutput; +} + +/** There is no content to send for this request, but the headers may be useful. */ +export interface DeleteAnalyzeBatchResult204Response extends HttpResponse { + status: "204"; +} + +export interface DeleteAnalyzeBatchResultDefaultResponse extends HttpResponse { + status: string; + body: DocumentIntelligenceErrorResponseOutput; +} + export interface GetModel200Headers { /** An opaque, globally-unique, client-generated string identifier for the request. 
*/ "x-ms-client-request-id"?: string; @@ -291,7 +333,7 @@ export interface GetModel200Response extends HttpResponse { export interface GetModelDefaultResponse extends HttpResponse { status: string; - body: ErrorResponseOutput; + body: DocumentIntelligenceErrorResponseOutput; } export interface BuildModel202Headers { @@ -308,7 +350,7 @@ export interface BuildModel202Response extends HttpResponse { export interface BuildModelDefaultResponse extends HttpResponse { status: string; - body: ErrorResponseOutput; + body: DocumentIntelligenceErrorResponseOutput; } /** The final response for long-running buildModel operation */ @@ -330,7 +372,7 @@ export interface ComposeModel202Response extends HttpResponse { export interface ComposeModelDefaultResponse extends HttpResponse { status: string; - body: ErrorResponseOutput; + body: DocumentIntelligenceErrorResponseOutput; } /** The final response for long-running composeModel operation */ @@ -341,12 +383,12 @@ export interface ComposeModelLogicalResponse extends HttpResponse { /** The request has succeeded. */ export interface AuthorizeModelCopy200Response extends HttpResponse { status: "200"; - body: CopyAuthorizationOutput; + body: ModelCopyAuthorizationOutput; } export interface AuthorizeModelCopyDefaultResponse extends HttpResponse { status: string; - body: ErrorResponseOutput; + body: DocumentIntelligenceErrorResponseOutput; } export interface CopyModelTo202Headers { @@ -363,7 +405,7 @@ export interface CopyModelTo202Response extends HttpResponse { export interface CopyModelToDefaultResponse extends HttpResponse { status: string; - body: ErrorResponseOutput; + body: DocumentIntelligenceErrorResponseOutput; } /** The final response for long-running copyModelTo operation */ @@ -385,7 +427,7 @@ export interface ListModels200Response extends HttpResponse { export interface ListModelsDefaultResponse extends HttpResponse { status: string; - body: ErrorResponseOutput; + body: DocumentIntelligenceErrorResponseOutput; } export interface DeleteModel204Headers { @@ -401,7 +443,7 @@ export interface DeleteModel204Response extends HttpResponse { export interface DeleteModelDefaultResponse extends HttpResponse { status: string; - body: ErrorResponseOutput; + body: DocumentIntelligenceErrorResponseOutput; } export interface BuildClassifier202Headers { @@ -418,7 +460,7 @@ export interface BuildClassifier202Response extends HttpResponse { export interface BuildClassifierDefaultResponse extends HttpResponse { status: string; - body: ErrorResponseOutput; + body: DocumentIntelligenceErrorResponseOutput; } /** The final response for long-running buildClassifier operation */ @@ -440,7 +482,7 @@ export interface ListClassifiers200Response extends HttpResponse { export interface ListClassifiersDefaultResponse extends HttpResponse { status: string; - body: ErrorResponseOutput; + body: DocumentIntelligenceErrorResponseOutput; } export interface GetClassifier200Headers { @@ -457,7 +499,7 @@ export interface GetClassifier200Response extends HttpResponse { export interface GetClassifierDefaultResponse extends HttpResponse { status: string; - body: ErrorResponseOutput; + body: DocumentIntelligenceErrorResponseOutput; } export interface DeleteClassifier204Headers { @@ -473,7 +515,7 @@ export interface DeleteClassifier204Response extends HttpResponse { export interface DeleteClassifierDefaultResponse extends HttpResponse { status: string; - body: ErrorResponseOutput; + body: DocumentIntelligenceErrorResponseOutput; } export interface ClassifyDocumentFromStream202Headers { @@ 
-488,13 +530,15 @@ export interface ClassifyDocumentFromStream202Response extends HttpResponse { headers: RawHttpHeaders & ClassifyDocumentFromStream202Headers; } -export interface ClassifyDocumentFromStreamDefaultResponse extends HttpResponse { +export interface ClassifyDocumentFromStreamDefaultResponse + extends HttpResponse { status: string; - body: ErrorResponseOutput; + body: DocumentIntelligenceErrorResponseOutput; } /** The final response for long-running classifyDocumentFromStream operation */ -export interface ClassifyDocumentFromStreamLogicalResponse extends HttpResponse { +export interface ClassifyDocumentFromStreamLogicalResponse + extends HttpResponse { status: "200"; } @@ -512,7 +556,7 @@ export interface ClassifyDocument202Response extends HttpResponse { export interface ClassifyDocumentDefaultResponse extends HttpResponse { status: string; - body: ErrorResponseOutput; + body: DocumentIntelligenceErrorResponseOutput; } /** The final response for long-running classifyDocument operation */ @@ -523,12 +567,12 @@ export interface ClassifyDocumentLogicalResponse extends HttpResponse { /** The request has succeeded. */ export interface GetClassifyResult200Response extends HttpResponse { status: "200"; - body: AnalyzeResultOperationOutput; + body: AnalyzeOperationOutput; } export interface GetClassifyResultDefaultResponse extends HttpResponse { status: string; - body: ErrorResponseOutput; + body: DocumentIntelligenceErrorResponseOutput; } /** The request has succeeded. */ @@ -539,7 +583,7 @@ export interface AuthorizeClassifierCopy200Response extends HttpResponse { export interface AuthorizeClassifierCopyDefaultResponse extends HttpResponse { status: string; - body: ErrorResponseOutput; + body: DocumentIntelligenceErrorResponseOutput; } export interface CopyClassifierTo202Headers { @@ -556,7 +600,7 @@ export interface CopyClassifierTo202Response extends HttpResponse { export interface CopyClassifierToDefaultResponse extends HttpResponse { status: string; - body: ErrorResponseOutput; + body: DocumentIntelligenceErrorResponseOutput; } /** The final response for long-running copyClassifierTo operation */ diff --git a/sdk/documentintelligence/ai-document-intelligence-rest/test/public/analysis.spec.ts b/sdk/documentintelligence/ai-document-intelligence-rest/test/public/analysis.spec.ts deleted file mode 100644 index 8d086d962058..000000000000 --- a/sdk/documentintelligence/ai-document-intelligence-rest/test/public/analysis.spec.ts +++ /dev/null @@ -1,1081 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT License. 
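For orientation before the removed test body below: the `responses.ts` hunks above rename every default-response body from `ErrorResponseOutput` to `DocumentIntelligenceErrorResponseOutput` and move the analyze payloads to `AnalyzeOperationOutput`/`AnalyzeBatchOperationOutput`. A minimal, hedged sketch of the GA analyze flow using those renamed types follows; the endpoint/key environment variable names are taken from the removed tests, `documentUrl` is a placeholder, and it is assumed that `AnalyzeOperationOutput` is re-exported from the package index the way `AnalyzeResultOperationOutput` was.

```ts
import DocumentIntelligence, {
  getLongRunningPoller,
  isUnexpected,
  type AnalyzeOperationOutput,
} from "@azure-rest/ai-document-intelligence";

async function analyzeLayout(documentUrl: string): Promise<void> {
  // Placeholder configuration; the removed tests read the same variables from the environment.
  const client = DocumentIntelligence(process.env["DOCUMENT_INTELLIGENCE_ENDPOINT"]!, {
    key: process.env["DOCUMENT_INTELLIGENCE_API_KEY"]!,
  });

  const initialResponse = await client
    .path("/documentModels/{modelId}:analyze", "prebuilt-layout")
    .post({
      contentType: "application/json",
      body: { urlSource: documentUrl },
    });

  if (isUnexpected(initialResponse)) {
    // Default responses now carry DocumentIntelligenceErrorResponseOutput bodies.
    throw initialResponse.body.error;
  }

  const poller = await getLongRunningPoller(client, initialResponse);
  const analyzeResult = ((await poller.pollUntilDone()).body as AnalyzeOperationOutput)
    .analyzeResult;
  console.log(`${analyzeResult?.pages?.length ?? 0} pages analyzed`);
}
```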
- -import type { Recorder } from "@azure-tools/test-recorder"; -import { assertEnvironmentVariable } from "@azure-tools/test-recorder"; -import { createRecorder, testPollingOptions } from "./utils/recorderUtils.js"; -import DocumentIntelligence from "../../src/documentIntelligence.js"; -import { assert, describe, beforeEach, afterEach, it } from "vitest"; -import { - ASSET_PATH, - batchTrainingFilesContainerUrl, - batchTrainingFilesResultContainerUrl, - getRandomNumber, - makeTestUrl, -} from "./utils/utils.js"; -import path from "path"; -import fs from "fs"; -import type { DocumentIntelligenceClient } from "../../src/clientDefinitions.js"; -import type { - AnalyzeResultOperationOutput, - DocumentBarcodeOutput, - DocumentModelBuildOperationDetailsOutput, - DocumentModelDetailsOutput, - DocumentTableOutput, -} from "../../src/index.js"; -import { getLongRunningPoller, isUnexpected } from "../../src/index.js"; - -describe("DocumentIntelligenceClient", () => { - let recorder: Recorder; - let client: DocumentIntelligenceClient; - beforeEach(async function (context) { - recorder = await createRecorder(context); - await recorder.setMatcher("BodilessMatcher"); - client = DocumentIntelligence( - assertEnvironmentVariable("DOCUMENT_INTELLIGENCE_ENDPOINT"), - { key: assertEnvironmentVariable("DOCUMENT_INTELLIGENCE_API_KEY") }, - recorder.configureClientOptions({}), - ); - }); - - afterEach(async function () { - await recorder.stop(); - }); - - describe("content analysis", () => { - it("pdf file stream", async () => { - const filePath = path.join(ASSET_PATH, "forms", "Invoice_1.pdf"); - - const base64Source = fs.readFileSync(filePath, { encoding: "base64" }); - - const initialResponse = await client - .path("/documentModels/{modelId}:analyze", "prebuilt-layout") - .post({ - contentType: "application/json", - body: { - base64Source, - }, - queryParameters: { locale: "en-IN" }, - }); - - if (isUnexpected(initialResponse)) { - throw initialResponse.body.error; - } - - const poller = getLongRunningPoller(client, initialResponse, { ...testPollingOptions }); - const analyzeResult = ( - (await (await poller).pollUntilDone()).body as AnalyzeResultOperationOutput - ).analyzeResult; - - const pages = analyzeResult?.pages; - const tables = analyzeResult?.tables; - assert.ok(pages && pages.length > 0, `Expected non-empty pages but got ${pages}`); - assert.isNotEmpty(pages); - assert.isNotEmpty(tables); - - const [table] = tables!; - assert.ok(table.boundingRegions?.[0]); - assert.equal(table.boundingRegions?.[0].pageNumber, 1); - }); - - it("png file stream", async () => { - const filePath = path.join(ASSET_PATH, "receipt", "contoso-receipt.png"); - - const base64Source = fs.readFileSync(filePath, { encoding: "base64" }); - - const initialResponse = await client - .path("/documentModels/{modelId}:analyze", "prebuilt-layout") - .post({ - contentType: "application/json", - body: { - base64Source, - }, - queryParameters: { locale: "en-IN" }, - }); - - if (isUnexpected(initialResponse)) { - throw initialResponse.body.error; - } - - const poller = getLongRunningPoller(client, initialResponse, { ...testPollingOptions }); - const analyzeResult = ( - (await (await poller).pollUntilDone()).body as AnalyzeResultOperationOutput - ).analyzeResult; - - const pages = analyzeResult?.pages; - const paragraphs = analyzeResult?.paragraphs; - - assert.ok( - paragraphs && paragraphs.length > 0, - `Expected non-empty paragraphs but got ${paragraphs}.`, - ); - - assert.ok(pages && pages.length > 0, `Expect no-empty pages but got 
${pages}`); - }); - - it("jpeg file stream", async () => { - const filePath = path.join(ASSET_PATH, "forms", "Form_1.jpg"); - - const base64Source = fs.readFileSync(filePath, { encoding: "base64" }); - - const initialResponse = await client - .path("/documentModels/{modelId}:analyze", "prebuilt-layout") - .post({ - contentType: "application/json", - body: { - base64Source, - }, - queryParameters: { locale: "en-IN" }, - }); - - if (isUnexpected(initialResponse)) { - throw initialResponse.body.error; - } - - const poller = getLongRunningPoller(client, initialResponse, { ...testPollingOptions }); - const analyzeResult = ( - (await (await poller).pollUntilDone()).body as AnalyzeResultOperationOutput - ).analyzeResult; - - const pages = analyzeResult?.pages; - const tables = analyzeResult?.tables; - assert.isNotEmpty(pages); - assert.isNotEmpty(tables); - const [table] = tables as DocumentTableOutput[]; - assert.ok(table.boundingRegions?.[0].polygon); - assert.equal(table.boundingRegions?.[0].pageNumber, 1); - }); - - it("tiff file stream", async () => { - const filePath = path.join(ASSET_PATH, "forms", "Invoice_1.tiff"); - - const base64Source = fs.readFileSync(filePath, { encoding: "base64" }); - - const initialResponse = await client - .path("/documentModels/{modelId}:analyze", "prebuilt-layout") - .post({ - contentType: "application/json", - body: { - base64Source, - }, - }); - - if (isUnexpected(initialResponse)) { - throw initialResponse.body.error; - } - - const poller = getLongRunningPoller(client, initialResponse, { ...testPollingOptions }); - const analyzeResult = ( - (await (await poller).pollUntilDone()).body as AnalyzeResultOperationOutput - ).analyzeResult; - - const pages = analyzeResult?.pages; - const tables = analyzeResult?.tables; - assert.isNotEmpty(pages); - assert.isNotEmpty(tables); - const [table] = tables as DocumentTableOutput[]; - assert.ok(table.boundingRegions?.[0].polygon); - assert.equal(table.boundingRegions?.[0].pageNumber, 1); - }); - - it("pdf file stream without passing content type", async () => { - const filePath = path.join(ASSET_PATH, "forms", "Invoice_1.pdf"); - - const base64Source = fs.readFileSync(filePath, { encoding: "base64" }); - - const initialResponse = await client - .path("/documentModels/{modelId}:analyze", "prebuilt-layout") - .post({ - contentType: "application/json", - body: { - base64Source, - }, - queryParameters: { locale: "en-IN" }, - }); - - if (isUnexpected(initialResponse)) { - throw initialResponse.body.error; - } - - const poller = getLongRunningPoller(client, initialResponse, { ...testPollingOptions }); - const analyzeResult = ( - (await (await poller).pollUntilDone()).body as AnalyzeResultOperationOutput - ).analyzeResult; - - const pages = analyzeResult?.pages; - const tables = analyzeResult?.tables; - assert.isNotEmpty(pages); - assert.isNotEmpty(tables); - const [table] = tables as DocumentTableOutput[]; - assert.ok(table.boundingRegions?.[0].polygon); - assert.equal(table.boundingRegions?.[0].pageNumber, 1); - }); - - it("url", async () => { - const url = makeTestUrl("/Invoice_1.pdf"); - - const initialResponse = await client - .path("/documentModels/{modelId}:analyze", "prebuilt-layout") - .post({ - contentType: "application/json", - body: { - urlSource: url, - }, - queryParameters: { locale: "en-IN" }, - }); - if (isUnexpected(initialResponse)) { - throw initialResponse.body.error; - } - const poller = getLongRunningPoller(client, initialResponse, { ...testPollingOptions }); - const analyzeResult = ( - (await (await 
poller).pollUntilDone()).body as AnalyzeResultOperationOutput - ).analyzeResult; - - const pages = analyzeResult?.pages; - const tables = analyzeResult?.tables; - - assert.isNotEmpty(pages); - - assert.isNotEmpty(tables); - const [table] = tables as DocumentTableOutput[]; - assert.ok(table.boundingRegions?.[0].polygon); - assert.equal(table.boundingRegions?.[0].pageNumber, 1); - }); - - it("with selection marks", async () => { - const filePath = path.join(ASSET_PATH, "forms", "selection_mark_form.pdf"); - const base64Source = fs.readFileSync(filePath, { encoding: "base64" }); - - const initialResponse = await client - .path("/documentModels/{modelId}:analyze", "prebuilt-layout") - .post({ - contentType: "application/json", - body: { - base64Source, - }, - queryParameters: { locale: "en-IN" }, - }); - if (isUnexpected(initialResponse)) { - throw initialResponse.body.error; - } - - const poller = getLongRunningPoller(client, initialResponse); - const analyzeResult = ( - (await (await poller).pollUntilDone()).body as AnalyzeResultOperationOutput - ).analyzeResult; - const pages = analyzeResult?.pages; - assert.equal(pages?.[0].pageNumber, 1); - assert.isNotEmpty(pages?.[0].selectionMarks); - }); - - it("invalid locale throws", async () => { - const url = makeTestUrl("/Invoice_1.pdf"); - - try { - const initialResponse = await client - .path("/documentModels/{modelId}:analyze", "prebuilt-layout") - .post({ - contentType: "application/json", - body: { - urlSource: url, - }, - queryParameters: { locale: "thisIsNotAValidLanguage" }, - }); - if (isUnexpected(initialResponse)) { - throw initialResponse.body.error; - } - - const poller = getLongRunningPoller(client, initialResponse, { ...testPollingOptions }); - (await (await poller).pollUntilDone()).body as AnalyzeResultOperationOutput; - assert.fail("Expected an exception due to invalid locale."); - } catch (ex: any) { - assert.ok((ex as Error).message.includes("Invalid argument.")); - } - }); - - it("specifying pages", async () => { - const url = makeTestUrl("/Invoice_1.pdf"); - const initialResponse = await client - .path("/documentModels/{modelId}:analyze", "prebuilt-layout") - .post({ - contentType: "application/json", - body: { - urlSource: url, - }, - queryParameters: { locale: "en-IN", pages: "1" }, - }); - if (isUnexpected(initialResponse)) { - throw initialResponse.body.error; - } - const poller = getLongRunningPoller(client, initialResponse, { ...testPollingOptions }); - await (await poller).pollUntilDone(); - }); - - it("invalid pages throws", async () => { - const url = makeTestUrl("/Invoice_1.pdf"); - - try { - const initialResponse = await client - .path("/documentModels/{modelId}:analyze", "prebuilt-layout") - .post({ - contentType: "application/json", - body: { - urlSource: url, - }, - queryParameters: { locale: "en-IN", pages: "2" }, - }); - if (isUnexpected(initialResponse)) { - throw initialResponse.body.error; - } - const poller = getLongRunningPoller(client, initialResponse, { ...testPollingOptions }); - await (await poller).pollUntilDone(); - assert.fail("Expected an exception due to invalid pages."); - } catch (ex: any) { - // Just make sure we didn't get a bad error message - assert.isFalse((ex as Error).message.includes("")); - } - }); - - it("barcode", async function () { - const url = makeTestUrl("/barcode2.tif"); - - const initialResponse = await client - .path("/documentModels/{modelId}:analyze", "prebuilt-read") - .post({ - contentType: "application/json", - body: { - urlSource: url, - }, - queryParameters: { 
features: ["barcodes"] }, - }); - - if (isUnexpected(initialResponse)) { - throw initialResponse.body.error; - } - const poller = getLongRunningPoller(client, initialResponse, { ...testPollingOptions }); - const analyzeResult = ( - (await (await poller).pollUntilDone()).body as AnalyzeResultOperationOutput - ).analyzeResult; - - const pages = analyzeResult?.pages; - - assert.isNotEmpty(pages); - - assert.isNotEmpty(pages?.[0].barcodes); - - const [barcode1, barcode2] = pages?.[0].barcodes as DocumentBarcodeOutput[]; - - assert.equal(barcode1.kind, "Code39"); - assert.equal(barcode1.value, "D589992-X"); - - assert.equal(barcode2.kind, "Code39"); - assert.equal(barcode2.value, "SYN121720213429"); - }); - - it("annotations", async function () { - const url = makeTestUrl("/annotations.jpg"); - - const initialResponse = await client - .path("/documentModels/{modelId}:analyze", "prebuilt-layout") - .post({ - contentType: "application/json", - body: { - urlSource: url, - }, - queryParameters: { locale: "en-IN" }, - }); - if (isUnexpected(initialResponse)) { - throw initialResponse.body.error; - } - const poller = getLongRunningPoller(client, initialResponse, { ...testPollingOptions }); - const analyzeResult = ( - (await (await poller).pollUntilDone()).body as AnalyzeResultOperationOutput - ).analyzeResult; - - const pages = analyzeResult?.pages; - - assert.isNotEmpty(pages); - }); - - it("formula", async function () { - const url = makeTestUrl("/formula1.jpg"); - - const initialResponse = await client - .path("/documentModels/{modelId}:analyze", "prebuilt-read") - .post({ - contentType: "application/json", - body: { - urlSource: url, - }, - queryParameters: { features: ["formulas"] }, - }); - - if (isUnexpected(initialResponse)) { - throw initialResponse.body.error; - } - const poller = getLongRunningPoller(client, initialResponse, { ...testPollingOptions }); - const analyzeResult = ( - (await (await poller).pollUntilDone()).body as AnalyzeResultOperationOutput - ).analyzeResult; - - const pages = analyzeResult?.pages; - - assert.isNotEmpty(pages); - - assert.isNotEmpty(pages?.[0].formulas); - }); - }); - - describe("custom forms", () => { - let _model: DocumentModelDetailsOutput; - let modelName: string; - - // We only want to create the model once, but because of the recorder's - // precedence, we have to create it in a test, so one test will end up - // recording the entire creation and the other tests will still be able - // to use it. 
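The removed tests above also exercise the main request-shaping options of the `:analyze` call: inline content via `base64Source`, content by `urlSource`, and the `locale`, `pages`, and `features` query parameters (barcodes, formulas). A short sketch of those variants, reusing the `client` placeholder from the earlier example; whether `DocumentIntelligenceClient` is re-exported from the package index (the tests import it from `clientDefinitions.js`) is an assumption.

```ts
import fs from "node:fs";
import { isUnexpected, type DocumentIntelligenceClient } from "@azure-rest/ai-document-intelligence";

// filePath and url are illustrative placeholders.
async function analyzeVariants(
  client: DocumentIntelligenceClient,
  filePath: string,
  url: string,
): Promise<void> {
  // 1) Inline content as base64, restricted to page 1 with an explicit locale.
  const fromBytes = await client
    .path("/documentModels/{modelId}:analyze", "prebuilt-layout")
    .post({
      contentType: "application/json",
      body: { base64Source: fs.readFileSync(filePath, { encoding: "base64" }) },
      queryParameters: { locale: "en-IN", pages: "1" },
    });
  if (isUnexpected(fromBytes)) throw fromBytes.body.error;

  // 2) Content by URL, requesting the barcode/formula add-on features.
  const fromUrl = await client
    .path("/documentModels/{modelId}:analyze", "prebuilt-read")
    .post({
      contentType: "application/json",
      body: { urlSource: url },
      queryParameters: { features: ["barcodes", "formulas"] },
    });
  if (isUnexpected(fromUrl)) throw fromUrl.body.error;
}
```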
- async function requireModel(): Promise { - if (!_model) { - modelName = recorder.variable( - "customFormModelName", - `customFormModelName${getRandomNumber()}`, - ); - - const initialResponse = await client.path("/documentModels:build").post({ - body: { - buildMode: "template", - modelId: modelName, - azureBlobSource: { - containerUrl: assertEnvironmentVariable( - "DOCUMENT_INTELLIGENCE_SELECTION_MARK_STORAGE_CONTAINER_SAS_URL", - ), - }, - }, - }); - - if (isUnexpected(initialResponse)) { - throw initialResponse.body.error; - } - const poller = getLongRunningPoller(client, initialResponse); - const response = ( - (await (await poller).pollUntilDone()).body as DocumentModelBuildOperationDetailsOutput - ).result; - if (!response) { - throw new Error("Expected a DocumentModelDetailsOutput response."); - } - _model = response; - assert.ok(_model.modelId); - } - - return _model; - } - - it.skip("with selection marks", async () => { - const { modelId } = await requireModel(); - - const filePath = path.join(ASSET_PATH, "forms", "selection_mark_form.pdf"); - - const base64Source = fs.readFileSync(filePath, { encoding: "base64" }); - - const initialResponse = await client.path("/documentModels/{modelId}:analyze", modelId).post({ - contentType: "application/json", - body: { - base64Source, - }, - }); - - if (isUnexpected(initialResponse)) { - throw initialResponse.body.error; - } - - const poller = getLongRunningPoller(client, initialResponse, { ...testPollingOptions }); - const analyzeResult = ( - (await (await poller).pollUntilDone()).body as AnalyzeResultOperationOutput - ).analyzeResult; - - const documents = analyzeResult?.documents; - const pages = analyzeResult?.pages; - assert.ok(documents); - assert.equal(documents?.[0].docType, `${modelName}:${modelName}`); - assert.ok(pages?.[0]); - - /* There should be a table in the response, but it isn't recognized (maybe because it's too small or sparse) - assert.isNotEmpty(tables); - const [table] = tables!; - assert.ok(table.boundingRegions?.[0].boundingBox); - assert.equal(table.boundingRegions?.[0].pageNumber, 1);*/ - - assert.equal(pages?.[0].pageNumber, 1); - assert.isNotEmpty(pages?.[0].selectionMarks); - }); - - it("png file stream", async () => { - const filePath = path.join(ASSET_PATH, "forms", "Invoice_1.pdf"); - - const base64Source = fs.readFileSync(filePath, { encoding: "base64" }); - - const initialResponse = await client - .path("/documentModels/{modelId}:analyze", "prebuilt-invoice") - .post({ - contentType: "application/json", - body: { - base64Source, - }, - }); - - if (isUnexpected(initialResponse)) { - throw initialResponse.body.error; - } - - const poller = getLongRunningPoller(client, initialResponse, { ...testPollingOptions }); - const analyzeResult = ( - (await (await poller).pollUntilDone()).body as AnalyzeResultOperationOutput - ).analyzeResult; - const documents = analyzeResult?.documents; - assert.isNotEmpty(documents); - - assert.equal(documents?.[0].docType, "invoice"); - }); - }); - - describe("receipts", () => { - it("png file stream", async () => { - const filePath = path.join(ASSET_PATH, "receipt", "contoso-receipt.png"); - - const base64Source = fs.readFileSync(filePath, { encoding: "base64" }); - - const initialResponse = await client - .path("/documentModels/{modelId}:analyze", "prebuilt-receipt") - .post({ - contentType: "application/json", - body: { - base64Source, - }, - }); - - if (isUnexpected(initialResponse)) { - throw initialResponse.body.error; - } - - const poller = getLongRunningPoller(client, 
initialResponse, { ...testPollingOptions }); - const analyzeResult = ( - (await (await poller).pollUntilDone()).body as AnalyzeResultOperationOutput - ).analyzeResult; - const documents = analyzeResult?.documents; - assert.isNotEmpty(documents); - - assert.equal(documents?.[0].docType, "receipt.retailMeal"); - }); - - it("jpeg file stream", async () => { - const filePath = path.join(ASSET_PATH, "receipt", "contoso-allinone.jpg"); - - const base64Source = fs.readFileSync(filePath, { encoding: "base64" }); - - const initialResponse = await client - .path("/documentModels/{modelId}:analyze", "prebuilt-receipt") - .post({ - contentType: "application/json", - body: { - base64Source, - }, - }); - - if (isUnexpected(initialResponse)) { - throw initialResponse.body.error; - } - - const poller = getLongRunningPoller(client, initialResponse, { ...testPollingOptions }); - const analyzeResult = ( - (await (await poller).pollUntilDone()).body as AnalyzeResultOperationOutput - ).analyzeResult; - const documents = analyzeResult?.documents; - assert.isNotEmpty(documents); - - assert.equal(documents?.[0].docType, "receipt.retailMeal"); - }); - - it("url", async () => { - const url = makeTestUrl("/contoso-allinone.jpg"); - - const initialResponse = await client - .path("/documentModels/{modelId}:analyze", "prebuilt-receipt") - .post({ - contentType: "application/json", - body: { - urlSource: url, - }, - }); - - if (isUnexpected(initialResponse)) { - throw initialResponse.body.error; - } - - const poller = getLongRunningPoller(client, initialResponse, { ...testPollingOptions }); - const analyzeResult = ( - (await (await poller).pollUntilDone()).body as AnalyzeResultOperationOutput - ).analyzeResult; - const documents = analyzeResult?.documents; - assert.isNotEmpty(documents); - - assert.equal(documents?.[0].docType, "receipt.retailMeal"); - }); - - it("specifying locale", async () => { - const url = makeTestUrl("/contoso-allinone.jpg"); - - const initialResponse = await client - .path("/documentModels/{modelId}:analyze", "prebuilt-receipt") - .post({ - contentType: "application/json", - body: { - urlSource: url, - }, - queryParameters: { locale: "en-IN" }, - }); - - if (isUnexpected(initialResponse)) { - throw initialResponse.body.error; - } - - const poller = getLongRunningPoller(client, initialResponse, { ...testPollingOptions }); - (await (await poller).pollUntilDone()).body as AnalyzeResultOperationOutput; - }); - - it("invalid locale throws", async () => { - const url = makeTestUrl("/contoso-allinone.jpg"); - - try { - const initialResponse = await client - .path("/documentModels/{modelId}:analyze", "prebuilt-receipt") - .post({ - contentType: "application/json", - body: { - urlSource: url, - }, - queryParameters: { locale: "thisIsNotAValidLanguage" }, - }); - - if (isUnexpected(initialResponse)) { - throw initialResponse.body.error; - } - - const poller = getLongRunningPoller(client, initialResponse, { ...testPollingOptions }); - (await (await poller).pollUntilDone()).body as AnalyzeResultOperationOutput; - assert.fail("Expected an exception due to invalid locale."); - } catch (ex: any) { - assert.ok((ex as Error).message.includes("Invalid argument.")); - } - }); - }); - - describe("invoices", () => { - it("pdf file stream", async () => { - const filePath = path.join(ASSET_PATH, "invoice", "Invoice_1.pdf"); - - const base64Source = fs.readFileSync(filePath, { encoding: "base64" }); - - const initialResponse = await client - .path("/documentModels/{modelId}:analyze", "prebuilt-invoice") - .post({ - 
contentType: "application/json", - body: { - base64Source, - }, - }); - - if (isUnexpected(initialResponse)) { - throw initialResponse.body.error; - } - - const poller = getLongRunningPoller(client, initialResponse, { ...testPollingOptions }); - const analyzeResult = ( - (await (await poller).pollUntilDone()).body as AnalyzeResultOperationOutput - ).analyzeResult; - const documents = analyzeResult?.documents; - const pages = analyzeResult?.pages; - const tables = analyzeResult?.tables; - - assert.isNotEmpty(documents); - assert.isNotEmpty(pages); - assert.isNotEmpty(tables); - const [table] = tables!; - assert.ok(table.boundingRegions?.[0].polygon); - assert.equal(table.boundingRegions?.[0].pageNumber, 1); - }); - - it("url", async () => { - const url = makeTestUrl("/Invoice_1.pdf"); - - const initialResponse = await client - .path("/documentModels/{modelId}:analyze", "prebuilt-invoice") - .post({ - contentType: "application/json", - body: { - urlSource: url, - }, - }); - - if (isUnexpected(initialResponse)) { - throw initialResponse.body.error; - } - - const poller = getLongRunningPoller(client, initialResponse, { ...testPollingOptions }); - const analyzeResult = ( - (await (await poller).pollUntilDone()).body as AnalyzeResultOperationOutput - ).analyzeResult; - const documents = analyzeResult?.documents; - const pages = analyzeResult?.pages; - const tables = analyzeResult?.tables; - - assert.isNotEmpty(documents); - assert.isNotEmpty(pages); - assert.isNotEmpty(tables); - const [table] = tables!; - assert.ok(table.boundingRegions?.[0].polygon); - assert.equal(table.boundingRegions?.[0].pageNumber, 1); - }); - - it("invalid locale throws", async () => { - const url = makeTestUrl("/Invoice_1.pdf"); - - try { - const initialResponse = await client - .path("/documentModels/{modelId}:analyze", "prebuilt-invoice") - .post({ - contentType: "application/json", - body: { - urlSource: url, - }, - queryParameters: { locale: "thisIsNotAValidLanguage" }, - }); - - if (isUnexpected(initialResponse)) { - throw initialResponse.body.error; - } - - const poller = getLongRunningPoller(client, initialResponse, { ...testPollingOptions }); - (await (await poller).pollUntilDone()).body as AnalyzeResultOperationOutput; - assert.fail("Expected an exception due to invalid locale."); - } catch (ex: any) { - assert.ok((ex as Error).message.includes("Invalid argument.")); - } - }); - }); - - describe("identityDocuments", () => { - it("png file stream", async () => { - const filePath = path.join(ASSET_PATH, "identityDocument", "license.png"); - - const base64Source = fs.readFileSync(filePath, { encoding: "base64" }); - - const initialResponse = await client - .path("/documentModels/{modelId}:analyze", "prebuilt-idDocument") - .post({ - contentType: "application/json", - body: { - base64Source, - }, - }); - - if (isUnexpected(initialResponse)) { - throw initialResponse.body.error; - } - - const poller = getLongRunningPoller(client, initialResponse, { ...testPollingOptions }); - const analyzeResult = ( - (await (await poller).pollUntilDone()).body as AnalyzeResultOperationOutput - ).analyzeResult; - - const documents = analyzeResult?.documents; - const receipt = documents?.[0]; - - assert.isNotEmpty(documents); - - assert.equal(receipt?.docType, "idDocument.driverLicense"); - }); - - it("url", async () => { - const url = makeTestUrl("/license.jpg"); - - const initialResponse = await client - .path("/documentModels/{modelId}:analyze", "prebuilt-idDocument") - .post({ - contentType: "application/json", - body: { - 
urlSource: url, - }, - }); - - if (isUnexpected(initialResponse)) { - throw initialResponse.body.error; - } - - const poller = getLongRunningPoller(client, initialResponse, { ...testPollingOptions }); - const analyzeResult = ( - (await (await poller).pollUntilDone()).body as AnalyzeResultOperationOutput - ).analyzeResult; - - const documents = analyzeResult?.documents; - const idDocument = documents?.[0]; - - assert.isNotEmpty(documents); - assert.equal(idDocument?.docType, "idDocument.driverLicense"); - }); - - it("invalid locale throws", async () => { - const url = makeTestUrl("/license.png"); - - try { - await client.path("/documentModels/{modelId}:analyze", "prebuilt-idDocument").post({ - contentType: "application/json", - body: { - urlSource: url, - }, - queryParameters: { - locale: "thisIsNotAValidLocaleString", - }, - }); - assert.fail("Expected an exception due to invalid locale."); - } catch (ex: any) { - // Just make sure we didn't get a bad error message - assert.isFalse((ex as Error).message.includes("")); - } - }); - }); - - describe("tax - US - w2", () => { - it("png file stream", async function () { - const filePath = path.join(ASSET_PATH, "w2", "w2-single.png"); - // - - const base64Source = fs.readFileSync(filePath, { encoding: "base64" }); - - const initialResponse = await client - .path("/documentModels/{modelId}:analyze", "prebuilt-tax.us.w2") - .post({ - contentType: "application/json", - body: { - base64Source, - }, - }); - - if (isUnexpected(initialResponse)) { - throw initialResponse.body.error; - } - - const poller = getLongRunningPoller(client, initialResponse, { ...testPollingOptions }); - const analyzeResult = ( - (await (await poller).pollUntilDone()).body as AnalyzeResultOperationOutput - ).analyzeResult; - - const documents = analyzeResult?.documents; - const w2Naive = documents?.[0]; - - assert.isNotEmpty(documents); - - assert.equal(w2Naive?.docType, "tax.us.w2"); - }); - }); - - describe("healthInsuranceCard - US", function () { - it("png file stream", async function () { - const filePath = path.join(ASSET_PATH, "healthInsuranceCard", "insurance.png"); - - const base64Source = fs.readFileSync(filePath, { encoding: "base64" }); - - const initialResponse = await client - .path("/documentModels/{modelId}:analyze", "prebuilt-healthInsuranceCard.us") - .post({ - contentType: "application/json", - body: { - base64Source, - }, - }); - - if (isUnexpected(initialResponse)) { - throw initialResponse.body.error; - } - - const poller = getLongRunningPoller(client, initialResponse, { ...testPollingOptions }); - const analyzeResult = ( - (await (await poller).pollUntilDone()).body as AnalyzeResultOperationOutput - ).analyzeResult; - - const documents = analyzeResult?.documents; - - assert.isNotEmpty(documents); - }); - }); - - describe("batch analysis", function () { - // We only want to create the model once, but because of the recorder's - // precedence, we have to create it in a test, so one test will end up - // recording the entire creation and the other tests will still be able - // to use it - let _model: DocumentModelDetailsOutput | undefined; - let modelId: string; - - async function requireModel(): Promise { - if (!_model) { - // Compute a unique name for the model - modelId = recorder.variable("batch-model", `modelName${getRandomNumber()}`); - const initialResponse = await client.path("/documentModels:build").post({ - body: { - buildMode: "generative", - modelId: modelId, - azureBlobSource: { - containerUrl: batchTrainingFilesContainerUrl(), - }, - }, - }); - 
if (isUnexpected(initialResponse)) { - throw initialResponse.body.error; - } - const poller = getLongRunningPoller(client, initialResponse, { ...testPollingOptions }); - const response = (await (await poller).pollUntilDone()).body as DocumentModelDetailsOutput; - if (!response) { - throw new Error("Expected a DocumentModelDetailsOutput response."); - } - _model = response; - - assert.equal(_model!.modelId, modelId); - } - - return _model!; - } - - it("batch training", async function () { - const model = await requireModel(); - const initialResponse = await client - .path("/documentModels/{modelId}:analyzeBatch", model.modelId) - .post({ - contentType: "application/json", - body: { - azureBlobSource: { - containerUrl: batchTrainingFilesContainerUrl(), - }, - resultContainerUrl: batchTrainingFilesResultContainerUrl(), - resultPrefix: "result", - }, - }); - - if (isUnexpected(initialResponse)) { - throw initialResponse.body.error; - } - // get the poller - const poller = getLongRunningPoller(client, initialResponse, { ...testPollingOptions }); - // poll until the operation is done - await (await poller).pollUntilDone(); - }); - }); - - describe("get AnalyzeResult methods", function () { - it("getAnalyzeResult", async function () { - const filePath = path.join(ASSET_PATH, "layout-pageobject.pdf"); - - const base64Source = fs.readFileSync(filePath, { encoding: "base64" }); - - const initialResponse = await client - .path("/documentModels/{modelId}:analyze", "prebuilt-read") - .post({ - contentType: "application/json", - body: { - base64Source, - }, - }); - - if (isUnexpected(initialResponse)) { - throw initialResponse.body.error; - } - - const poller = await getLongRunningPoller(client, initialResponse, { ...testPollingOptions }); - - await poller.pollUntilDone(); - - const output = await client - .path( - "/documentModels/{modelId}/analyzeResults/{resultId}", - "prebuilt-read", - poller.getOperationId(), - ) - .get(); - }); - - it("getAnalyzeResult pdf", async function () { - const filePath = path.join(ASSET_PATH, "layout-pageobject.pdf"); - - const base64Source = fs.readFileSync(filePath, { encoding: "base64" }); - - const initialResponse = await client - .path("/documentModels/{modelId}:analyze", "prebuilt-read") - .post({ - contentType: "application/json", - body: { - base64Source, - }, - queryParameters: { output: ["pdf"] }, - }); - - if (isUnexpected(initialResponse)) { - throw initialResponse.body.error; - } - - const poller = await getLongRunningPoller(client, initialResponse, { ...testPollingOptions }); - - await poller.pollUntilDone(); - - const output = await client - .path( - "/documentModels/{modelId}/analyzeResults/{resultId}/pdf", - "prebuilt-read", - poller.getOperationId(), - ) - .get(); - - // A PDF's header is expected to be: %PDF- - assert.ok(output.body.toString().startsWith("%PDF-")); - }); - - it("getAnalyzeResult figures", async function () { - const filePath = path.join(ASSET_PATH, "layout-pageobject.pdf"); - - const base64Source = fs.readFileSync(filePath, { encoding: "base64" }); - - const initialResponse = await client - .path("/documentModels/{modelId}:analyze", "prebuilt-layout") - .post({ - contentType: "application/json", - body: { - base64Source, - }, - queryParameters: { output: ["figures"] }, - }); - - if (isUnexpected(initialResponse)) { - throw initialResponse.body.error; - } - - const poller = await getLongRunningPoller(client, initialResponse, { ...testPollingOptions }); - - const result = (await poller.pollUntilDone()).body as 
AnalyzeResultOperationOutput; - const figures = result.analyzeResult?.figures; - assert.isArray(figures); - assert.isNotEmpty(figures?.[0]); - const figureId = figures?.[0].id; - assert.isDefined(figureId); - - const output = await client - .path( - "/documentModels/{modelId}/analyzeResults/{resultId}/figures/{figureId}", - "prebuilt-layout", - poller.getOperationId(), - figureId, - ) - .get(); - - // Header starts with a special character followed by "PNG" - assert.equal(output.body.toString().slice(1, 4), "PNG"); - }); - }); -}); diff --git a/sdk/documentintelligence/ai-document-intelligence-rest/test/public/classifiers.spec.ts b/sdk/documentintelligence/ai-document-intelligence-rest/test/public/classifiers.spec.ts deleted file mode 100644 index b20f9fe8696f..000000000000 --- a/sdk/documentintelligence/ai-document-intelligence-rest/test/public/classifiers.spec.ts +++ /dev/null @@ -1,180 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT License. - -import type { Recorder } from "@azure-tools/test-recorder"; -import { assertEnvironmentVariable } from "@azure-tools/test-recorder"; -import { createRecorder, testPollingOptions } from "./utils/recorderUtils.js"; -import DocumentIntelligence from "../../src/documentIntelligence.js"; -import type { Context } from "vitest"; -import { assert, describe, beforeEach, afterEach, it } from "vitest"; -import { ASSET_PATH, getRandomNumber, makeTestUrl } from "./utils/utils.js"; -import type { DocumentIntelligenceClient } from "../../src/clientDefinitions.js"; -import type { - AnalyzeResultOperationOutput, - DocumentClassifierBuildOperationDetailsOutput, - DocumentClassifierDetailsOutput, -} from "../../src/index.js"; -import { getLongRunningPoller, isUnexpected } from "../../src/index.js"; -import path from "path"; -import fs from "fs"; - -const containerSasUrl = (): string => - assertEnvironmentVariable("DOCUMENT_INTELLIGENCE_TRAINING_CONTAINER_SAS_URL"); - -describe.skip("classifiers", () => { - let recorder: Recorder; - let client: DocumentIntelligenceClient; - beforeEach(async function (context) { - recorder = await createRecorder(context); - await recorder.setMatcher("BodilessMatcher"); - client = DocumentIntelligence( - assertEnvironmentVariable("DOCUMENT_INTELLIGENCE_ENDPOINT"), - { key: assertEnvironmentVariable("DOCUMENT_INTELLIGENCE_API_KEY") }, - recorder.configureClientOptions({}), - ); - }); - - afterEach(async function () { - await recorder.stop(); - }); - - let _classifier: DocumentClassifierDetailsOutput; - let _classifierId: string; - - const customClassifierDescription = "Custom classifier description"; - - // We only want to create the model once, but because of the recorder's - // precedence, we have to create it in a test, so one test will end up - // recording the entire creation and the other tests will still be able - // to use it. 
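The analysis tests removed above addressed `/analyzeResults/{resultId}/pdf` and `/analyzeResults/{resultId}/figures/{figureId}` via `poller.getOperationId()`, which this PR removes from `SimplePollerLike` together with the internal `parseOperationId` helper. One possible caller-side replacement, assuming the `operation-location` header still has the shape matched by the removed regex, is to parse the result id from the initial 202 response; `client` and `initialResponse` here refer to the earlier sketch.

```ts
// Hypothetical helper: mirrors the regex of the removed parseOperationId().
function resultIdFromOperationLocation(operationLocation: string): string {
  const match = operationLocation.match(/[^:]+:\/\/[^/]+\/documentintelligence\/.+\/([^?/]+)/);
  if (!match) {
    throw new Error(`Could not parse a result id from: ${operationLocation}`);
  }
  return match[1];
}

// Usage sketch: initialResponse is the 202 returned by the :analyze call above.
const resultId = resultIdFromOperationLocation(initialResponse.headers["operation-location"]);
const pdfOutput = await client
  .path("/documentModels/{modelId}/analyzeResults/{resultId}/pdf", "prebuilt-read", resultId)
  .get();
```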
- async function requireClassifier(): Promise { - if (!_classifier) { - _classifierId = recorder.variable( - "customClassifierId", - `customClassifier${getRandomNumber()}`, - ); - - const initialResponse = await client.path("/documentClassifiers:build").post({ - body: { - classifierId: _classifierId, - description: "Custom classifier description", - docTypes: { - foo: { - azureBlobSource: { - containerUrl: containerSasUrl(), - }, - }, - bar: { - azureBlobSource: { - containerUrl: containerSasUrl(), - }, - }, - }, - }, - queryParameters: { customClassifierDescription }, - }); - - if (isUnexpected(initialResponse)) { - throw initialResponse.body.error; - } - const poller = getLongRunningPoller(client, initialResponse); - const response = ( - (await (await poller).pollUntilDone()).body as DocumentClassifierBuildOperationDetailsOutput - ).result; - if (!response) { - throw new Error("Expected a DocumentClassifierDetailsOutput response."); - } - _classifier = response; - - assert.ok(_classifier.classifierId); - } - - return _classifier; - } - - it("build classifier", async function (this: Context) { - const classifier = await requireClassifier(); - - assert.containsAllKeys(classifier.docTypes, ["foo", "bar"]); - assert.equal(classifier.classifierId, _classifierId); - assert.equal(classifier.description, customClassifierDescription); - }); - - it("analyze from PNG file stream", async function (this: Context) { - const filePath = path.join(ASSET_PATH, "forms", "Invoice_1.pdf"); - const { classifierId } = await requireClassifier(); - const base64Source = fs.readFileSync(filePath, { encoding: "base64" }); - - const initialResponse = await client - .path("/documentClassifiers/{classifierId}:analyze", classifierId) - .post({ - contentType: "application/json", - body: { - base64Source, - }, - }); - - if (isUnexpected(initialResponse)) { - throw initialResponse.body.error; - } - - const poller = getLongRunningPoller(client, initialResponse, { ...testPollingOptions }); - const analyzeResult = ( - (await (await poller).pollUntilDone()).body as AnalyzeResultOperationOutput - ).analyzeResult; - - assert.isNotEmpty(analyzeResult?.documents); - assert.oneOf(analyzeResult?.documents![0].docType, ["foo", "bar"]); - - // Additionally check that the pages aren't empty and that there are some common fields set - assert.isNotEmpty(analyzeResult?.pages); - assert.ok(analyzeResult?.pages![0].pageNumber); - assert.isDefined(analyzeResult?.pages![0].angle); - assert.ok(analyzeResult?.pages![0].height); - assert.ok(analyzeResult?.pages![0].width); - assert.ok(analyzeResult?.pages![0].unit); - }); - - it("analyze from PNG file URL", async function (this: Context) { - const url = makeTestUrl("/Invoice_1.pdf"); - const { classifierId } = await requireClassifier(); - - const initialResponse = await client - .path("/documentClassifiers/{classifierId}:analyze", classifierId) - .post({ - contentType: "application/json", - body: { - urlSource: url, - }, - }); - - if (isUnexpected(initialResponse)) { - throw initialResponse.body.error; - } - - const poller = getLongRunningPoller(client, initialResponse, { ...testPollingOptions }); - const analyzeResult = ( - (await (await poller).pollUntilDone()).body as AnalyzeResultOperationOutput - ).analyzeResult; - - assert.isNotEmpty(analyzeResult?.documents); - assert.oneOf(analyzeResult?.documents![0].docType, ["foo", "bar"]); - }); - - it("get & delete classifiers from the account", async function () { - await client.path("/documentClassifiers/{classifierId}", _classifierId).get(); - - 
// Delete the custom classifier we created - if (_classifierId) { - await client.path("/documentClassifiers/{classifierId}", _classifierId).delete(); - } - - // Try to get the classifier and assert that it's gone - try { - await client.path("/documentClassifiers/{classifierId}", _classifierId).get(); - assert.fail("Expected error while accessing a deleted classifier"); - } catch (error: any) { - assert.ok(error); - } - }); -}); diff --git a/sdk/documentintelligence/ai-document-intelligence-rest/test/public/documentIntelligence.spec.ts b/sdk/documentintelligence/ai-document-intelligence-rest/test/public/documentIntelligence.spec.ts deleted file mode 100644 index 775859daa37a..000000000000 --- a/sdk/documentintelligence/ai-document-intelligence-rest/test/public/documentIntelligence.spec.ts +++ /dev/null @@ -1,98 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT License. - -import type { Recorder } from "@azure-tools/test-recorder"; -import { assertEnvironmentVariable } from "@azure-tools/test-recorder"; -import { createTestCredential } from "@azure-tools/test-credential"; -import { createRecorder } from "./utils/recorderUtils.js"; -import DocumentIntelligence from "../../src/documentIntelligence.js"; -import { assert, describe, beforeEach, afterEach, it } from "vitest"; -import { getRandomNumber, containerSasUrl } from "./utils/utils.js"; -import type { DocumentIntelligenceClient } from "../../src/clientDefinitions.js"; -import type { DocumentClassifierBuildOperationDetailsOutput } from "../../src/index.js"; -import { getLongRunningPoller, isUnexpected } from "../../src/index.js"; - -describe("DocumentIntelligenceClient", () => { - let recorder: Recorder; - let client: DocumentIntelligenceClient; - beforeEach(async function (context) { - recorder = await createRecorder(context); - await recorder.setMatcher("BodilessMatcher"); - client = DocumentIntelligence( - assertEnvironmentVariable("DOCUMENT_INTELLIGENCE_ENDPOINT"), - { key: assertEnvironmentVariable("DOCUMENT_INTELLIGENCE_API_KEY") }, - recorder.configureClientOptions({}), - ); - }); - - afterEach(async function () { - await recorder.stop(); - }); - - it("API Key works - getInfo", async function () { - const response = await client.path("/info").get(); - if (isUnexpected(response)) { - throw response.body.error; - } - assert.strictEqual( - response.body.customDocumentModels.limit, - 20000, - "expected customDocumentModels limit should be 20000", - ); - }); - - it.skip("AAD works - getInfo", async function () { - client = DocumentIntelligence( - assertEnvironmentVariable("DOCUMENT_INTELLIGENCE_ENDPOINT"), - createTestCredential(), - recorder.configureClientOptions({}), - ); - const response = await client.path("/info").get(); - if (isUnexpected(response)) { - throw response.body.error; - } - assert.strictEqual( - response.body.customDocumentModels.limit, - 20000, - "expected customDocumentModels limit should be 20000", - ); - }); - - it.skip("documentClassifiers build", async function () { - const initialResponse = await client.path("/documentClassifiers:build").post({ - body: { - classifierId: recorder.variable( - "customClassifierId", - `customClassifier${getRandomNumber()}`, - ), - description: "Custom classifier description", - docTypes: { - foo: { - azureBlobSource: { - containerUrl: containerSasUrl(), - }, - }, - bar: { - // Adding source kind fails with 400 Invalid Argument - azureBlobSource: { - containerUrl: containerSasUrl(), - }, - }, - }, - }, - }); - - if (isUnexpected(initialResponse)) { - throw 
initialResponse.body.error; - } - const poller = getLongRunningPoller(client, initialResponse); - const response = ( - (await (await poller).pollUntilDone()).body - ); - assert.strictEqual( - response.result?.classifierId, - recorder.variable("customClassifierId"), - "expected classifierId to match", - ); - }); -}); diff --git a/sdk/documentintelligence/ai-document-intelligence-rest/test/public/sampleTest.spec.ts b/sdk/documentintelligence/ai-document-intelligence-rest/test/public/sampleTest.spec.ts new file mode 100644 index 000000000000..d4919ac91ac5 --- /dev/null +++ b/sdk/documentintelligence/ai-document-intelligence-rest/test/public/sampleTest.spec.ts @@ -0,0 +1,21 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. + +import { createRecorder } from "./utils/recordedClient.js"; +import { assert, beforeEach, afterEach, it, describe } from "vitest"; + +describe("My test", () => { + // let recorder: Recorder; + + beforeEach(async function () { + // recorder = await createRecorder(this); + }); + + afterEach(async function () { + // await recorder.stop(); + }); + + it("sample test", async function () { + assert.equal(1, 1); + }); +}); diff --git a/sdk/documentintelligence/ai-document-intelligence-rest/test/public/training.spec.ts b/sdk/documentintelligence/ai-document-intelligence-rest/test/public/training.spec.ts deleted file mode 100644 index 8542ee730cc2..000000000000 --- a/sdk/documentintelligence/ai-document-intelligence-rest/test/public/training.spec.ts +++ /dev/null @@ -1,360 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT License. - -import type { Recorder } from "@azure-tools/test-recorder"; -import { assertEnvironmentVariable, testPollingOptions } from "@azure-tools/test-recorder"; -import { createRecorder } from "./utils/recorderUtils.js"; -import DocumentIntelligence from "../../src/documentIntelligence.js"; -import { assert, describe, beforeEach, afterEach, it, Context } from "vitest"; -import { getRandomNumber, containerSasUrl } from "./utils/utils.js"; -import type { DocumentIntelligenceClient } from "../../src/clientDefinitions.js"; -import type { - AnalyzeResultOperationOutput, - DocumentModelBuildOperationDetailsOutput, - DocumentModelComposeOperationDetailsOutput, - DocumentModelCopyToOperationDetailsOutput, - DocumentModelDetailsOutput, - DocumentTypeDetails, -} from "../../src/index.js"; -import { getLongRunningPoller, isUnexpected, paginate } from "../../src/index.js"; - -describe("model management", () => { - let recorder: Recorder; - let client: DocumentIntelligenceClient; - beforeEach(async function (context) { - recorder = await createRecorder(context); - client = DocumentIntelligence( - assertEnvironmentVariable("DOCUMENT_INTELLIGENCE_ENDPOINT"), - { key: assertEnvironmentVariable("DOCUMENT_INTELLIGENCE_API_KEY") }, - recorder.configureClientOptions({}), - ); - }); - - afterEach(async function () { - await recorder.stop(); - }); - - // #region Model Training - - /* - * All test steps that are related to training and validating - * models from source documents are encapsulated in this - * "describe" block - */ - - describe("model build", async function () { - const allModels: string[] = []; - - let id = 0; - function getId(): number { - return (id += 1); - } - - describe.skip(`custom model from trainingdata-v3`, async () => { - let _model: DocumentModelDetailsOutput; - - let modelId: string; - - // We only want to create the model once, but because of the recorder's - // precedence, we have to create it in a 
test, so one test will end up - // recording the entire creation and the other tests will still be able - // to use it - async function requireModel(): Promise { - if (!_model) { - // Compute a unique name for the model - modelId = recorder.variable(getId().toString(), `modelName${getRandomNumber()}`); - const initialResponse = await client.path("/documentModels:build").post({ - body: { - buildMode: "template", - modelId: modelId, - azureBlobSource: { - containerUrl: containerSasUrl(), - }, - }, - }); - if (isUnexpected(initialResponse)) { - throw initialResponse.body.error; - } - const poller = getLongRunningPoller(client, initialResponse); - const response = ( - (await (await poller).pollUntilDone()).body as DocumentModelBuildOperationDetailsOutput - ).result; - if (!response) { - throw new Error("Expected a DocumentModelDetailsOutput response."); - } - _model = response; - - assert.equal(_model.modelId, modelId); - - allModels.push(_model.modelId); - } - - return _model; - } - - /* - * Make sure the model training API returns correct information - * for the model. - */ - it("validate model training response", async () => { - const model = await requireModel(); - - assert.ok(model, "Expecting valid response"); - assert.ok(model.modelId); - - assert.isNotEmpty(model.docTypes); - const submodel = model.docTypes![model.modelId]; - - // When training with labels, we will have expectations for the names - assert.ok( - submodel.fieldSchema!["Signature"], - "Expecting field with name 'Signature' to be valid", - ); - }); - - /* - * Use the model for some simple recognition - */ - describe("recognition", async () => { - it("form from url", async () => { - const model = await requireModel(); - const urlParts = containerSasUrl().split("?"); - const url = `${urlParts[0]}/Form_1.jpg?${urlParts[1]}`; - - const initialResponse = await client - .path("/documentModels/{modelId}:analyze", model.modelId) - .post({ - contentType: "application/json", - body: { - urlSource: url, - }, - }); - - if (isUnexpected(initialResponse)) { - throw initialResponse.body.error; - } - - const poller = getLongRunningPoller(client, initialResponse, { - intervalInMs: testPollingOptions.updateIntervalInMs, - }); - const analyzeResult = ( - (await (await poller).pollUntilDone()).body as AnalyzeResultOperationOutput - ).analyzeResult; - - const documents = analyzeResult?.documents; - const tables = analyzeResult?.tables; - assert.isNotEmpty(documents); - const document = documents?.[0]; - - assert.isNotEmpty(document?.boundingRegions); - - assert.isNotEmpty(tables); - const [table] = tables!; - - assert.ok(table.boundingRegions?.[0].polygon); - assert.equal(table.boundingRegions?.[0].pageNumber, 1); - - assert.ok(document?.fields); - assert.ok(document?.fields?.["Merchant"]); - assert.ok(document?.fields?.["DatedAs"]); - assert.ok(document?.fields?.["CompanyPhoneNumber"]); - assert.ok(document?.fields?.["CompanyName"]); - assert.ok(document?.fields?.["Signature"]); - }); - }); - - it("getModel() verification", async () => { - const model = await requireModel(); - - const modelDetails = await client.path("/documentModels/{modelId}", model.modelId).get(); - - if (isUnexpected(modelDetails)) { - throw modelDetails.body.error; - } - - assert.strictEqual(modelDetails.body.modelId, model.modelId); - assert.strictEqual(modelDetails.body.description, model.description); - assert.ok(modelDetails.body.docTypes); - }); - }); - - /* - * These are tests that check that model querying functions as expected. 
- * This section also cleans up the models by deleting them. - */ - describe("model information", async () => { - it("iterate models in account", async () => { - const response = await client.path("/documentModels").get(); - if (isUnexpected(response)) { - throw response.body.error; - } - - const modelsInAccount: string[] = []; - for await (const model of paginate(client, response)) { - assert.ok(model.modelId); - modelsInAccount.push(model.modelId); - } - - for (const modelId of allModels) { - assert.isTrue(modelsInAccount.includes(modelId)); - } - }); - - it("delete models from the account", async () => { - // Delete all of the models - await Promise.all( - allModels.map((modelId) => client.path("/documentModels/{modelId}", modelId).delete()), - ); - - await Promise.all( - allModels.map(async (modelId) => { - try { - const res = await client.path("/documentModels/{modelId}", modelId).get(); - if (isUnexpected(res)) { - throw res.body.error; - } - console.log(`Model ${res.body.modelId} was not deleted!`); - throw new Error( - `The service returned model info for ${modelId}, but we thought we had deleted it!`, - ); - } catch (e: unknown) { - assert.isTrue((e as Error).message.endsWith(" not found.")); - } - }), - ); - }); - }); - }); - - // #endregion - - it.skip(`compose model`, async function () { - // Helper function to train/validate single model - async function makeModel(prefix: string): Promise> { - const modelId = recorder.variable(prefix, `${prefix}${getRandomNumber()}`); - const initialResponse = await client.path("/documentModels:build").post({ - body: { - buildMode: "template", - modelId: modelId, - azureBlobSource: { - containerUrl: containerSasUrl(), - }, - }, - }); - if (isUnexpected(initialResponse)) { - throw initialResponse.body.error; - } - const poller = getLongRunningPoller(client, initialResponse); - const model = ( - (await (await poller).pollUntilDone()).body as DocumentModelBuildOperationDetailsOutput - ).result!; - - assert.equal(model.modelId, modelId); - assert.equal(model.modelId, modelId); - assert.ok(model.docTypes); - - return { modelId: model.docTypes }; - } - - const modelIdDoctypeMap = await Promise.all([makeModel("input1"), makeModel("input2")]); - - const modelId = recorder.variable("composedModelName", `composedModelName${getRandomNumber()}`); - const component1 = modelIdDoctypeMap[0]; - const component2 = modelIdDoctypeMap[1]; - const initialResponse = await client.path("/documentModels:compose").post({ - body: { - classifierId: recorder.variable("classifierId", `classifierId${getRandomNumber()}`), - docTypes: { component1, component2 }, - modelId, - }, - }); - - if (isUnexpected(initialResponse)) { - throw initialResponse.body.error; - } - const poller = getLongRunningPoller(client, initialResponse); - - const composedModel = ( - (await (await poller).pollUntilDone()).body as DocumentModelComposeOperationDetailsOutput - ).result!; - assert.ok(composedModel.modelId); - assert.equal(composedModel.modelId, modelId); - assert.ok(composedModel.docTypes); - - // Submodels - assert.equal(Object.entries(composedModel.docTypes ?? 
{}).length, 2); - }); - - it.skip(`copy model`, async function () { - // Since this test is isolated, we'll create a fresh set of resources for it - await recorder.addSanitizers( - { - bodyKeySanitizers: [ - { - jsonPath: "$.accessToken", - value: "access_token", - }, - ], - }, - ["playback", "record"], - ); - const modelId = recorder.variable("copySource", `copySource${getRandomNumber()}`); - - const initialResponse = await client.path("/documentModels:build").post({ - body: { - buildMode: "template", - modelId: modelId, - azureBlobSource: { - containerUrl: containerSasUrl(), - }, - }, - }); - if (isUnexpected(initialResponse)) { - throw initialResponse.body.error; - } - const trainingPoller = getLongRunningPoller(client, initialResponse); - const sourceModel = ( - (await (await trainingPoller).pollUntilDone()) - .body as DocumentModelBuildOperationDetailsOutput - ).result!; - - assert.equal(sourceModel.modelId, modelId); - - const targetModelId = recorder.variable("copyTarget", `copyTarget${getRandomNumber()}`); - const targetAuth = await client.path("/documentModels:authorizeCopy").post({ - body: { - modelId: targetModelId, - }, - }); - - if (isUnexpected(targetAuth)) { - throw targetAuth.body.error; - } - const copyInitResponse = await client - .path("/documentModels/{modelId}:copyTo", sourceModel.modelId) - .post({ - body: targetAuth.body, - }); - - if (isUnexpected(copyInitResponse)) { - throw copyInitResponse.body.error; - } - const copyPoller = getLongRunningPoller(client, copyInitResponse); - const copyResult = ( - (await (await copyPoller).pollUntilDone()).body as DocumentModelCopyToOperationDetailsOutput - ).result!; - - assert.ok(copyResult, "Expecting valid copy result"); - assert.equal(copyResult.modelId, targetAuth.body.targetModelId); - - assert.ok(copyResult.createdDateTime, "Expecting valid 'trainingStartedOn' property"); - - const targetModel = await client.path("/documentModels/{modelId}", copyResult.modelId).get(); - - if (isUnexpected(targetModel)) { - throw targetModel.body.error; - } - assert.equal(targetModel.body.modelId, targetAuth.body.targetModelId); - assert.equal(targetModel.body.modelId, copyResult.modelId); - }); -}); diff --git a/sdk/documentintelligence/ai-document-intelligence-rest/test/public/utils/recordedClient.ts b/sdk/documentintelligence/ai-document-intelligence-rest/test/public/utils/recordedClient.ts new file mode 100644 index 000000000000..6e425fdcfdf9 --- /dev/null +++ b/sdk/documentintelligence/ai-document-intelligence-rest/test/public/utils/recordedClient.ts @@ -0,0 +1,29 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. + +import { + Recorder, + RecorderStartOptions, + VitestTestContext, +} from "@azure-tools/test-recorder"; + +const replaceableVariables: Record = { + SUBSCRIPTION_ID: "azure_subscription_id", +}; + +const recorderEnvSetup: RecorderStartOptions = { + envSetupForPlayback: replaceableVariables, +}; + +/** + * creates the recorder and reads the environment variables from the `.env` file. + * Should be called first in the test suite to make sure environment variables are + * read before they are being used. 
+ */ +export async function createRecorder( + context: VitestTestContext, +): Promise { + const recorder = new Recorder(context); + await recorder.start(recorderEnvSetup); + return recorder; +} diff --git a/sdk/documentintelligence/ai-document-intelligence-rest/test/public/utils/recorderUtils.ts b/sdk/documentintelligence/ai-document-intelligence-rest/test/public/utils/recorderUtils.ts deleted file mode 100644 index b9a1b77e8662..000000000000 --- a/sdk/documentintelligence/ai-document-intelligence-rest/test/public/utils/recorderUtils.ts +++ /dev/null @@ -1,111 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT License. - -import type { RecorderStartOptions, TestInfo } from "@azure-tools/test-recorder"; -import { Recorder, env, isPlaybackMode } from "@azure-tools/test-recorder"; - -const envSetupForPlayback: { [k: string]: string } = { - AZURE_CLIENT_ID: "azure_client_id", - AZURE_CLIENT_SECRET: "azure_client_secret", - AZURE_TENANT_ID: "12345678-1234-1234-1234-123456789012", - DOCUMENT_INTELLIGENCE_API_KEY: "api_key", - DOCUMENT_INTELLIGENCE_ENDPOINT: "https://endpoint/", - DOCUMENT_INTELLIGENCE_TRAINING_CONTAINER_SAS_URL: - "https://storageaccount/trainingdata-v3?sastoken", - DOCUMENT_INTELLIGENCE_TESTING_CONTAINER_SAS_URL: "https://storageaccount/testingdata?sastoken", - DOCUMENT_INTELLIGENCE_SELECTION_MARK_STORAGE_CONTAINER_SAS_URL: - "https://storageaccount/selectionmark-v3?sastoken", - DOCUMENT_INTELLIGENCE_TARGET_RESOURCE_REGION: "westus2", - DOCUMENT_INTELLIGENCE_BATCH_TRAINING_DATA_CONTAINER_SAS_URL: - "https://storageaccount/batchtraining?sastoken", - DOCUMENT_INTELLIGENCE_BATCH_TRAINING_DATA_RESULT_CONTAINER_SAS_URL: - "https://storageaccount/batchtrainingresult?sastoken", - // fake resource id - DOCUMENT_INTELLIGENCE_TARGET_RESOURCE_ID: - "/subscriptions/e1367d46-77d4-4f57-8cfe-348edbdc84a3/resourceGroups/jstests/providers/Microsoft.CognitiveServices/accounts/jstests-fr", -}; - -export const recorderOptions: RecorderStartOptions = { - envSetupForPlayback, - removeCentralSanitizers: [ - "AZSDK4001", // envSetupForPlayback handles endpoint sanitization - "AZSDK2030", // no need to sanitize "operation-location" header since the endpoint is already sanitized - "AZSDK3430", // $.id - "AZSDK3496", // $..resourceLocation - ], - sanitizerOptions: { - generalSanitizers: [ - // endpoints - { - target: env["DOCUMENT_INTELLIGENCE_TRAINING_CONTAINER_SAS_URL"]?.split("/")[2] || "", - value: - envSetupForPlayback["DOCUMENT_INTELLIGENCE_TRAINING_CONTAINER_SAS_URL"].split("/")[2], - }, - { - target: env["DOCUMENT_INTELLIGENCE_TESTING_CONTAINER_SAS_URL"]?.split("/")[2] || "", - value: envSetupForPlayback["DOCUMENT_INTELLIGENCE_TESTING_CONTAINER_SAS_URL"].split("/")[2], - }, - { - target: - env["DOCUMENT_INTELLIGENCE_SELECTION_MARK_STORAGE_CONTAINER_SAS_URL"]?.split("/")[2] || - "", - value: - envSetupForPlayback[ - "DOCUMENT_INTELLIGENCE_SELECTION_MARK_STORAGE_CONTAINER_SAS_URL" - ].split("/")[2], - }, - // sas tokens - { - target: env["DOCUMENT_INTELLIGENCE_TRAINING_CONTAINER_SAS_URL"]?.split("?")[1] || "", - value: - envSetupForPlayback["DOCUMENT_INTELLIGENCE_TRAINING_CONTAINER_SAS_URL"].split("?")[1], - }, - { - target: env["DOCUMENT_INTELLIGENCE_TESTING_CONTAINER_SAS_URL"]?.split("?")[1] || "", - value: envSetupForPlayback["DOCUMENT_INTELLIGENCE_TESTING_CONTAINER_SAS_URL"].split("?")[1], - }, - { - target: - env["DOCUMENT_INTELLIGENCE_SELECTION_MARK_STORAGE_CONTAINER_SAS_URL"]?.split("?")[1] || - "", - value: - envSetupForPlayback[ - 
"DOCUMENT_INTELLIGENCE_SELECTION_MARK_STORAGE_CONTAINER_SAS_URL" - ].split("?")[1], - }, - { - target: - env["DOCUMENT_INTELLIGENCE_BATCH_TRAINING_DATA_CONTAINER_SAS_URL"]?.split("?")[1] || "", - value: - envSetupForPlayback["DOCUMENT_INTELLIGENCE_BATCH_TRAINING_DATA_CONTAINER_SAS_URL"].split( - "?", - )[1], - }, - { - target: - env["DOCUMENT_INTELLIGENCE_BATCH_TRAINING_DATA_RESULT_CONTAINER_SAS_URL"]?.split( - "?", - )[1] || "", - value: - envSetupForPlayback[ - "DOCUMENT_INTELLIGENCE_BATCH_TRAINING_DATA_RESULT_CONTAINER_SAS_URL" - ].split("?")[1], - }, - ], - }, -}; - -/** - * creates the recorder and reads the environment variables from the `.env` file. - * Should be called first in the test suite to make sure environment variables are - * read before they are being used. - */ -export async function createRecorder(context: TestInfo): Promise { - const recorder = new Recorder(context); - await recorder.start(recorderOptions); - return recorder; -} - -export const testPollingOptions = { - intervalInMs: isPlaybackMode() ? 0 : undefined, -}; diff --git a/sdk/documentintelligence/ai-document-intelligence-rest/test/public/utils/utils.ts b/sdk/documentintelligence/ai-document-intelligence-rest/test/public/utils/utils.ts deleted file mode 100644 index 3a94e0da4a02..000000000000 --- a/sdk/documentintelligence/ai-document-intelligence-rest/test/public/utils/utils.ts +++ /dev/null @@ -1,32 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT License. - -import { assertEnvironmentVariable } from "@azure-tools/test-recorder"; -import { createClientLogger } from "@azure/logger"; - -import path from "path"; - -export const ASSET_PATH = path.resolve(path.join(process.cwd(), "assets")); - -export function makeTestUrl(urlPath: string): string { - const testingContainerUrl = assertEnvironmentVariable( - "DOCUMENT_INTELLIGENCE_TESTING_CONTAINER_SAS_URL", - ); - const parts = testingContainerUrl.split("?"); - return `${parts[0]}${urlPath}?${parts[1]}`; -} - -export function getRandomNumber(): number { - return Math.ceil(Math.random() * 1000 + 10000); -} - -export const containerSasUrl = (): string => - assertEnvironmentVariable("DOCUMENT_INTELLIGENCE_TRAINING_CONTAINER_SAS_URL"); - -export const batchTrainingFilesContainerUrl = (): string => - assertEnvironmentVariable("DOCUMENT_INTELLIGENCE_BATCH_TRAINING_DATA_CONTAINER_SAS_URL"); - -export const batchTrainingFilesResultContainerUrl = (): string => - assertEnvironmentVariable("DOCUMENT_INTELLIGENCE_BATCH_TRAINING_DATA_RESULT_CONTAINER_SAS_URL"); - -export const logger = createClientLogger("ai-form-recognizer:test"); diff --git a/sdk/documentintelligence/ai-document-intelligence-rest/tests.yml b/sdk/documentintelligence/ai-document-intelligence-rest/tests.yml deleted file mode 100644 index edc529d018e1..000000000000 --- a/sdk/documentintelligence/ai-document-intelligence-rest/tests.yml +++ /dev/null @@ -1,22 +0,0 @@ -# Parameterize Location to the test resources deployment script. This allows -# specifying a different Azure region to use when manually triggering the live -# tests pipeline. This is useful for when we want to test in different -# environments: Prod, Canary, etc. 
-parameters: - - name: Location - displayName: Location - type: string - default: eastus - -trigger: none - -extends: - template: /eng/pipelines/templates/stages/archetype-sdk-tests.yml - parameters: - PackageName: "@azure-rest/ai-document-intelligence" - ServiceDirectory: documentintelligence - Location: "${{ parameters.Location }}" - EnvVars: - AZURE_CLIENT_ID: $(aad-azure-sdk-test-client-id) - AZURE_TENANT_ID: $(aad-azure-sdk-test-tenant-id) - AZURE_CLIENT_SECRET: $(aad-azure-sdk-test-client-secret) diff --git a/sdk/documentintelligence/ai-document-intelligence-rest/tsconfig.browser.config.json b/sdk/documentintelligence/ai-document-intelligence-rest/tsconfig.browser.config.json index 1b37aebc5457..091177fcb991 100644 --- a/sdk/documentintelligence/ai-document-intelligence-rest/tsconfig.browser.config.json +++ b/sdk/documentintelligence/ai-document-intelligence-rest/tsconfig.browser.config.json @@ -1,7 +1,7 @@ { "extends": "./.tshy/build.json", - "include": ["./src/**/*.ts", "./src/**/*.mts", "./test/**/*.spec.ts"], - "exclude": ["./test/**/node/**/*.ts"], + "include": ["src/**/*.ts", "src/**/*.mts", "test/**/*.spec.ts"], + "exclude": ["test/**/node/**/*.ts"], "compilerOptions": { "outDir": "./dist-test/browser", "rootDir": ".", diff --git a/sdk/documentintelligence/ai-document-intelligence-rest/tsconfig.json b/sdk/documentintelligence/ai-document-intelligence-rest/tsconfig.json index a10f22a54c88..71858ab903e4 100644 --- a/sdk/documentintelligence/ai-document-intelligence-rest/tsconfig.json +++ b/sdk/documentintelligence/ai-document-intelligence-rest/tsconfig.json @@ -3,7 +3,14 @@ "compilerOptions": { "module": "NodeNext", "moduleResolution": "NodeNext", - "rootDir": "." + "rootDir": ".", + "skipLibCheck": true }, - "include": ["./src/**/*.ts", "./src/**/*.mts", "./src/**/*.cts", "test/**/*.ts", "./test/**/*.ts"] -} + "include": [ + "src/**/*.ts", + "src/**/*.mts", + "src/**/*.cts", + "test/**/*.ts", + "test/**/*.ts" + ] +} \ No newline at end of file diff --git a/sdk/documentintelligence/ai-document-intelligence-rest/tsp-location.yaml b/sdk/documentintelligence/ai-document-intelligence-rest/tsp-location.yaml index 66ce35132bd2..394a2a5309bf 100644 --- a/sdk/documentintelligence/ai-document-intelligence-rest/tsp-location.yaml +++ b/sdk/documentintelligence/ai-document-intelligence-rest/tsp-location.yaml @@ -1,3 +1,4 @@ directory: specification/ai/DocumentIntelligence -commit: ec2a81edaecf3970e5938936e8256759905163e6 -repo: Azure/azure-rest-api-specs +commit: 2c4c42b3b19b0c27dc045a053822f6753bf8e9bc +repo: /mnt/vss/_work/1/s/azure-rest-api-specs +additionalDirectories: diff --git a/sdk/documentintelligence/ai-document-intelligence-rest/vitest.browser.config.ts b/sdk/documentintelligence/ai-document-intelligence-rest/vitest.browser.config.ts index 5e0dc418cfa2..da68c1d231aa 100644 --- a/sdk/documentintelligence/ai-document-intelligence-rest/vitest.browser.config.ts +++ b/sdk/documentintelligence/ai-document-intelligence-rest/vitest.browser.config.ts @@ -33,5 +33,6 @@ export default defineConfig({ reportsDirectory: "coverage-browser", }, testTimeout: 1200000, + hookTimeout: 1200000, }, }); diff --git a/sdk/documentintelligence/ai-document-intelligence-rest/vitest.config.ts b/sdk/documentintelligence/ai-document-intelligence-rest/vitest.config.ts index c7a030e63be0..2cf5d0e02c2e 100644 --- a/sdk/documentintelligence/ai-document-intelligence-rest/vitest.config.ts +++ b/sdk/documentintelligence/ai-document-intelligence-rest/vitest.config.ts @@ -6,7 +6,7 @@ import { relativeRecordingsPath } 
from "@azure-tools/test-recorder"; export default defineConfig({ test: { - reporters: ["verbose", "basic"], + reporters: ["basic", "junit"], outputFile: { junit: "test-results.browser.xml", }, @@ -29,5 +29,6 @@ export default defineConfig({ reportsDirectory: "coverage", }, testTimeout: 1200000, + hookTimeout: 1200000, }, });