diff --git a/CHANGELOG.md b/CHANGELOG.md index 578569aa..cce4a53e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,11 +1,8 @@ # Changelog -## 4.1.5 - -- Removed the exposed field for configuring the package to use fetch_client instead of http_client manually withe is `isWeb` field, in favor of using `dart.library.js` and `dart.library.io` conditional imports to automatically detect the platform and use the appropriate client for it. - ## 4.1.4 +- Removed the exposed field for configuring the package to use fetch_client instead of http_client manually with the `isWeb` field, in favor of using `dart.library.js` and `dart.library.io` conditional imports to automatically detect the platform and use the appropriate client for it. - Exposed field for configuring the package to use fetch_client instead of http_client for making requests in web apps (flutter web, etc..) ## 4.1.3 diff --git a/README.md b/README.md index cd02b652..9c59f7d2 100644 --- a/README.md +++ b/README.md @@ -171,7 +171,7 @@ OpenAI.showLogs = true; This will only log the requests steps such when the request started and finished, when the decoding started... -But if you want to log raw responses that are returned from the API (JSON, RAW...), you can set the `showResponsesLogs` to `true`: +But if you want to log raw responses that are returned from the API (JSON, RAW...), you can set the `showResponsesLogs` field to `true`: ```dart OpenAI.showResponsesLogs = true; ``` @@ -605,15 +605,14 @@ to get access to the translation API, and translate an audio file to english, yo OpenAIAudioModel translation = await OpenAI.instance.audio.createTranslation( file: File(/* THE FILE PATH*/), model: "whisper-1", - responseFormat: OpenAIAudioResponseFo rmat.text, + responseFormat: OpenAIAudioResponseFormat.text, ); // print the translation. print(translation.text); ``` -Learn more from [here](C:\projects\Flutter_and_Dart\openai -). +Learn more from [here](https://platform.openai.com/docs/api-reference/audio/createTranslation).
diff --git a/lib/src/core/models/edit/sub_models/usage.dart b/lib/src/core/models/edit/sub_models/usage.dart index c41df107..88335b5f 100644 --- a/lib/src/core/models/edit/sub_models/usage.dart +++ b/lib/src/core/models/edit/sub_models/usage.dart @@ -18,7 +18,7 @@ final class OpenAIEditModelUsage { int get hashCode => promptTokens.hashCode ^ completionTokens.hashCode ^ totalTokens.hashCode; - /// {@template openai_edit_model_usage} + /// {@macro openai_edit_model_usage} const OpenAIEditModelUsage({ required this.promptTokens, required this.completionTokens, @@ -27,6 +27,7 @@ final class OpenAIEditModelUsage { /// {@template openai_edit_model_usage} /// This method is used to convert a [Map] object to a [OpenAIEditModelUsage] object. + /// {@endtemplate} factory OpenAIEditModelUsage.fromMap(Map json) { return OpenAIEditModelUsage( promptTokens: json['prompt_tokens'], diff --git a/pubspec.yaml b/pubspec.yaml index 18ff8a37..ce55503c 100644 --- a/pubspec.yaml +++ b/pubspec.yaml @@ -1,6 +1,6 @@ name: dart_openai description: Dart SDK for openAI Apis (GPT-3 & DALL-E), integrate easily the power of OpenAI's state-of-the-art AI models into their Dart applications. -version: 4.1.5 +version: 4.1.4 homepage: https://github.com/anasfik/openai repository: https://github.com/anasfik/openai documentation: https://github.com/anasfik/openai/blob/main/README.md