diff --git a/.github/workflows/amplify_firehose_dart.yaml b/.github/workflows/amplify_firehose_dart.yaml new file mode 100644 index 00000000000..74614db994a --- /dev/null +++ b/.github/workflows/amplify_firehose_dart.yaml @@ -0,0 +1,83 @@ +# Generated with aft. To update, run: `aft generate workflows` +name: amplify_firehose_dart +on: + push: + branches: + - main + - stable + paths: + - '.github/workflows/amplify_firehose_dart.yaml' + - '.github/workflows/dart_vm.yaml' + - 'packages/amplify_core/lib/**/*.dart' + - 'packages/amplify_core/pubspec.yaml' + - 'packages/amplify_foundation/amplify_foundation_dart/lib/**/*.dart' + - 'packages/amplify_foundation/amplify_foundation_dart/pubspec.yaml' + - 'packages/amplify_foundation/amplify_foundation_dart_bridge/lib/**/*.dart' + - 'packages/amplify_foundation/amplify_foundation_dart_bridge/pubspec.yaml' + - 'packages/amplify_lints/lib/**/*.yaml' + - 'packages/amplify_lints/pubspec.yaml' + - 'packages/aws_common/lib/**/*.dart' + - 'packages/aws_common/pubspec.yaml' + - 'packages/aws_signature_v4/lib/**/*.dart' + - 'packages/aws_signature_v4/pubspec.yaml' + - 'packages/common/amplify_db_common_dart/lib/**/*.dart' + - 'packages/common/amplify_db_common_dart/pubspec.yaml' + - 'packages/kinesis/amplify_firehose_dart/**/*.dart' + - 'packages/kinesis/amplify_firehose_dart/**/*.yaml' + - 'packages/kinesis/amplify_firehose_dart/lib/**/*' + - 'packages/kinesis/amplify_firehose_dart/test/**/*' + - 'packages/smithy/smithy/lib/**/*.dart' + - 'packages/smithy/smithy/pubspec.yaml' + - 'packages/smithy/smithy_aws/lib/**/*.dart' + - 'packages/smithy/smithy_aws/pubspec.yaml' + pull_request: + paths: + - '.github/workflows/amplify_firehose_dart.yaml' + - '.github/workflows/dart_vm.yaml' + - 'packages/amplify_core/lib/**/*.dart' + - 'packages/amplify_core/pubspec.yaml' + - 'packages/amplify_foundation/amplify_foundation_dart/lib/**/*.dart' + - 'packages/amplify_foundation/amplify_foundation_dart/pubspec.yaml' + - 
'packages/amplify_foundation/amplify_foundation_dart_bridge/lib/**/*.dart' + - 'packages/amplify_foundation/amplify_foundation_dart_bridge/pubspec.yaml' + - 'packages/amplify_lints/lib/**/*.yaml' + - 'packages/amplify_lints/pubspec.yaml' + - 'packages/aws_common/lib/**/*.dart' + - 'packages/aws_common/pubspec.yaml' + - 'packages/aws_signature_v4/lib/**/*.dart' + - 'packages/aws_signature_v4/pubspec.yaml' + - 'packages/common/amplify_db_common_dart/lib/**/*.dart' + - 'packages/common/amplify_db_common_dart/pubspec.yaml' + - 'packages/kinesis/amplify_firehose_dart/**/*.dart' + - 'packages/kinesis/amplify_firehose_dart/**/*.yaml' + - 'packages/kinesis/amplify_firehose_dart/lib/**/*' + - 'packages/kinesis/amplify_firehose_dart/test/**/*' + - 'packages/smithy/smithy/lib/**/*.dart' + - 'packages/smithy/smithy/pubspec.yaml' + - 'packages/smithy/smithy_aws/lib/**/*.dart' + - 'packages/smithy/smithy_aws/pubspec.yaml' + schedule: + - cron: "0 13 * * 1" # Every Monday at 06:00 PST + workflow_dispatch: +defaults: + run: + shell: bash + +# These permissions are needed to interact with GitHub's OIDC Token endpoint. +permissions: + id-token: write + contents: read + +# Cancels in-progress job when there is another push to same ref. 
+# https://docs.github.com/en/actions/using-jobs/using-concurrency#example-only-cancel-in-progress-jobs-or-runs-for-the-current-workflow +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +jobs: + test: + uses: ./.github/workflows/dart_vm.yaml + secrets: inherit + with: + package-name: amplify_firehose_dart + working-directory: packages/kinesis/amplify_firehose_dart diff --git a/.github/workflows/amplify_kinesis.yaml b/.github/workflows/amplify_kinesis.yaml index 3c71abc57cb..d27eb6e2b14 100644 --- a/.github/workflows/amplify_kinesis.yaml +++ b/.github/workflows/amplify_kinesis.yaml @@ -28,6 +28,8 @@ on: - 'packages/kinesis/amplify_kinesis/test/**/*' - 'packages/kinesis/amplify_kinesis_dart/lib/**/*.dart' - 'packages/kinesis/amplify_kinesis_dart/pubspec.yaml' + - 'packages/kinesis/amplify_record_cache_dart/lib/**/*.dart' + - 'packages/kinesis/amplify_record_cache_dart/pubspec.yaml' - 'packages/smithy/smithy/lib/**/*.dart' - 'packages/smithy/smithy/pubspec.yaml' - 'packages/smithy/smithy_aws/lib/**/*.dart' @@ -56,6 +58,8 @@ on: - 'packages/kinesis/amplify_kinesis/test/**/*' - 'packages/kinesis/amplify_kinesis_dart/lib/**/*.dart' - 'packages/kinesis/amplify_kinesis_dart/pubspec.yaml' + - 'packages/kinesis/amplify_record_cache_dart/lib/**/*.dart' + - 'packages/kinesis/amplify_record_cache_dart/pubspec.yaml' - 'packages/smithy/smithy/lib/**/*.dart' - 'packages/smithy/smithy/pubspec.yaml' - 'packages/smithy/smithy_aws/lib/**/*.dart' diff --git a/.github/workflows/amplify_kinesis_dart.yaml b/.github/workflows/amplify_kinesis_dart.yaml index baaa6e452e2..bb65205e7ad 100644 --- a/.github/workflows/amplify_kinesis_dart.yaml +++ b/.github/workflows/amplify_kinesis_dart.yaml @@ -27,6 +27,8 @@ on: - 'packages/kinesis/amplify_kinesis_dart/**/*.yaml' - 'packages/kinesis/amplify_kinesis_dart/lib/**/*' - 'packages/kinesis/amplify_kinesis_dart/test/**/*' + - 'packages/kinesis/amplify_record_cache_dart/lib/**/*.dart' + - 
'packages/kinesis/amplify_record_cache_dart/pubspec.yaml' - 'packages/smithy/smithy/lib/**/*.dart' - 'packages/smithy/smithy/pubspec.yaml' - 'packages/smithy/smithy_aws/lib/**/*.dart' @@ -54,6 +56,8 @@ on: - 'packages/kinesis/amplify_kinesis_dart/**/*.yaml' - 'packages/kinesis/amplify_kinesis_dart/lib/**/*' - 'packages/kinesis/amplify_kinesis_dart/test/**/*' + - 'packages/kinesis/amplify_record_cache_dart/lib/**/*.dart' + - 'packages/kinesis/amplify_record_cache_dart/pubspec.yaml' - 'packages/smithy/smithy/lib/**/*.dart' - 'packages/smithy/smithy/pubspec.yaml' - 'packages/smithy/smithy_aws/lib/**/*.dart' diff --git a/.github/workflows/amplify_kinesis_example.yaml b/.github/workflows/amplify_kinesis_example.yaml index 160ac0b1731..54da2af128c 100644 --- a/.github/workflows/amplify_kinesis_example.yaml +++ b/.github/workflows/amplify_kinesis_example.yaml @@ -57,6 +57,8 @@ on: - 'packages/kinesis/amplify_kinesis/pubspec.yaml' - 'packages/kinesis/amplify_kinesis_dart/lib/**/*.dart' - 'packages/kinesis/amplify_kinesis_dart/pubspec.yaml' + - 'packages/kinesis/amplify_record_cache_dart/lib/**/*.dart' + - 'packages/kinesis/amplify_record_cache_dart/pubspec.yaml' - 'packages/secure_storage/amplify_secure_storage/android/**/*' - 'packages/secure_storage/amplify_secure_storage/ios/**/*' - 'packages/secure_storage/amplify_secure_storage/lib/**/*.dart' @@ -127,6 +129,8 @@ on: - 'packages/kinesis/amplify_kinesis/pubspec.yaml' - 'packages/kinesis/amplify_kinesis_dart/lib/**/*.dart' - 'packages/kinesis/amplify_kinesis_dart/pubspec.yaml' + - 'packages/kinesis/amplify_record_cache_dart/lib/**/*.dart' + - 'packages/kinesis/amplify_record_cache_dart/pubspec.yaml' - 'packages/secure_storage/amplify_secure_storage/android/**/*' - 'packages/secure_storage/amplify_secure_storage/ios/**/*' - 'packages/secure_storage/amplify_secure_storage/lib/**/*.dart' diff --git a/.github/workflows/amplify_record_cache_dart.yaml b/.github/workflows/amplify_record_cache_dart.yaml new file mode 100644 
index 00000000000..b60dcf6b9a8 --- /dev/null +++ b/.github/workflows/amplify_record_cache_dart.yaml @@ -0,0 +1,75 @@ +# Generated with aft. To update, run: `aft generate workflows` +name: amplify_record_cache_dart +on: + push: + branches: + - main + - stable + paths: + - '.github/workflows/amplify_record_cache_dart.yaml' + - '.github/workflows/dart_vm.yaml' + - 'packages/amplify_core/lib/**/*.dart' + - 'packages/amplify_core/pubspec.yaml' + - 'packages/amplify_foundation/amplify_foundation_dart/lib/**/*.dart' + - 'packages/amplify_foundation/amplify_foundation_dart/pubspec.yaml' + - 'packages/amplify_lints/lib/**/*.yaml' + - 'packages/amplify_lints/pubspec.yaml' + - 'packages/aws_common/lib/**/*.dart' + - 'packages/aws_common/pubspec.yaml' + - 'packages/aws_signature_v4/lib/**/*.dart' + - 'packages/aws_signature_v4/pubspec.yaml' + - 'packages/common/amplify_db_common_dart/lib/**/*.dart' + - 'packages/common/amplify_db_common_dart/pubspec.yaml' + - 'packages/kinesis/amplify_record_cache_dart/**/*.dart' + - 'packages/kinesis/amplify_record_cache_dart/**/*.yaml' + - 'packages/kinesis/amplify_record_cache_dart/lib/**/*' + - 'packages/kinesis/amplify_record_cache_dart/test/**/*' + - 'packages/smithy/smithy/lib/**/*.dart' + - 'packages/smithy/smithy/pubspec.yaml' + pull_request: + paths: + - '.github/workflows/amplify_record_cache_dart.yaml' + - '.github/workflows/dart_vm.yaml' + - 'packages/amplify_core/lib/**/*.dart' + - 'packages/amplify_core/pubspec.yaml' + - 'packages/amplify_foundation/amplify_foundation_dart/lib/**/*.dart' + - 'packages/amplify_foundation/amplify_foundation_dart/pubspec.yaml' + - 'packages/amplify_lints/lib/**/*.yaml' + - 'packages/amplify_lints/pubspec.yaml' + - 'packages/aws_common/lib/**/*.dart' + - 'packages/aws_common/pubspec.yaml' + - 'packages/aws_signature_v4/lib/**/*.dart' + - 'packages/aws_signature_v4/pubspec.yaml' + - 'packages/common/amplify_db_common_dart/lib/**/*.dart' + - 'packages/common/amplify_db_common_dart/pubspec.yaml' + - 
'packages/kinesis/amplify_record_cache_dart/**/*.dart' + - 'packages/kinesis/amplify_record_cache_dart/**/*.yaml' + - 'packages/kinesis/amplify_record_cache_dart/lib/**/*' + - 'packages/kinesis/amplify_record_cache_dart/test/**/*' + - 'packages/smithy/smithy/lib/**/*.dart' + - 'packages/smithy/smithy/pubspec.yaml' + schedule: + - cron: "0 13 * * 1" # Every Monday at 06:00 PST + workflow_dispatch: +defaults: + run: + shell: bash + +# These permissions are needed to interact with GitHub's OIDC Token endpoint. +permissions: + id-token: write + contents: read + +# Cancels in-progress job when there is another push to same ref. +# https://docs.github.com/en/actions/using-jobs/using-concurrency#example-only-cancel-in-progress-jobs-or-runs-for-the-current-workflow +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +jobs: + test: + uses: ./.github/workflows/dart_vm.yaml + secrets: inherit + with: + package-name: amplify_record_cache_dart + working-directory: packages/kinesis/amplify_record_cache_dart diff --git a/packages/kinesis/amplify_firehose_dart/.gitignore b/packages/kinesis/amplify_firehose_dart/.gitignore new file mode 100644 index 00000000000..53f256efc73 --- /dev/null +++ b/packages/kinesis/amplify_firehose_dart/.gitignore @@ -0,0 +1,2 @@ +pubspec_overrides.yaml +pubspec.lock diff --git a/packages/kinesis/amplify_firehose_dart/CHANGELOG.md b/packages/kinesis/amplify_firehose_dart/CHANGELOG.md new file mode 100644 index 00000000000..090fc365989 --- /dev/null +++ b/packages/kinesis/amplify_firehose_dart/CHANGELOG.md @@ -0,0 +1,3 @@ +## 0.1.0 + +- Initial release diff --git a/packages/kinesis/amplify_firehose_dart/LICENSE b/packages/kinesis/amplify_firehose_dart/LICENSE new file mode 100644 index 00000000000..67db8588217 --- /dev/null +++ b/packages/kinesis/amplify_firehose_dart/LICENSE @@ -0,0 +1,175 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, 
REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. 
For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
diff --git a/packages/kinesis/amplify_firehose_dart/README.md b/packages/kinesis/amplify_firehose_dart/README.md new file mode 100644 index 00000000000..090fc365989 --- /dev/null +++ b/packages/kinesis/amplify_firehose_dart/README.md @@ -0,0 +1,3 @@ +# Amplify Amazon Data Firehose for Dart + +Amplify Amazon Data Firehose client for Dart. diff --git a/packages/kinesis/amplify_firehose_dart/analysis_options.yaml b/packages/kinesis/amplify_firehose_dart/analysis_options.yaml new file mode 100644 index 00000000000..13d208784ec --- /dev/null +++ b/packages/kinesis/amplify_firehose_dart/analysis_options.yaml @@ -0,0 +1,8 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 + +include: package:amplify_lints/library.yaml + +analyzer: + exclude: + - "lib/src/sdk/src/**" diff --git a/packages/kinesis/amplify_firehose_dart/dart_test.yaml b/packages/kinesis/amplify_firehose_dart/dart_test.yaml new file mode 100644 index 00000000000..fe6f97158a7 --- /dev/null +++ b/packages/kinesis/amplify_firehose_dart/dart_test.yaml @@ -0,0 +1,3 @@ +tags: + e2e: + skip: "E2E tests require backend configuration" diff --git a/packages/kinesis/amplify_firehose_dart/lib/amplify_firehose_dart.dart b/packages/kinesis/amplify_firehose_dart/lib/amplify_firehose_dart.dart new file mode 100644 index 00000000000..36c4c485d45 --- /dev/null +++ b/packages/kinesis/amplify_firehose_dart/lib/amplify_firehose_dart.dart @@ -0,0 +1,8 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 + +/// Amplify Amazon Data Firehose client for Dart. +library; + +// SDK client (for escape hatch) +// Exports will be added as implementation PRs land. 
diff --git a/packages/kinesis/amplify_firehose_dart/lib/src/firehose_limits.dart b/packages/kinesis/amplify_firehose_dart/lib/src/firehose_limits.dart new file mode 100644 index 00000000000..4b78ea4f51f --- /dev/null +++ b/packages/kinesis/amplify_firehose_dart/lib/src/firehose_limits.dart @@ -0,0 +1,20 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 + +/// Amazon Data Firehose PutRecordBatch API limits. +/// +/// These are AWS service-imposed constants. They are internal to the +/// package and not exported from the public barrel file. +library; + +/// Maximum number of records per PutRecordBatch request (500). +const int maxRecordsPerBatch = 500; + +/// Maximum size of a single record in bytes (1,000 KiB). +/// +/// Per AWS docs, the maximum size of a record sent to Firehose is +/// 1,000 KiB (before base64 encoding by the service). +const int maxRecordSizeBytes = 1000 * 1024; + +/// Maximum total payload size per PutRecordBatch request in bytes (4 MiB). +const int maxBatchSizeBytes = 4 * 1024 * 1024; diff --git a/packages/kinesis/amplify_firehose_dart/lib/src/sdk/firehose.dart b/packages/kinesis/amplify_firehose_dart/lib/src/sdk/firehose.dart new file mode 100644 index 00000000000..f93fdaa173b --- /dev/null +++ b/packages/kinesis/amplify_firehose_dart/lib/src/sdk/firehose.dart @@ -0,0 +1,23 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 + +// Generated with smithy-dart 0.3.2. DO NOT MODIFY. +// ignore_for_file: avoid_unused_constructor_parameters,deprecated_member_use_from_same_package,non_constant_identifier_names,unnecessary_library_name + +/// # Amazon Data Firehose +/// +/// Amazon Data Firehose is a fully managed service that delivers real-time +/// streaming data to destinations such as Amazon S3, Amazon OpenSearch Service, +/// Amazon Redshift, Splunk, and various other supported destinations. 
+library amplify_firehose_dart.firehose; + +export 'package:amplify_firehose_dart/src/sdk/src/firehose/firehose_client.dart'; +export 'package:amplify_firehose_dart/src/sdk/src/firehose/model/invalid_argument_exception.dart'; +export 'package:amplify_firehose_dart/src/sdk/src/firehose/model/invalid_kms_resource_exception.dart'; +export 'package:amplify_firehose_dart/src/sdk/src/firehose/model/invalid_source_exception.dart'; +export 'package:amplify_firehose_dart/src/sdk/src/firehose/model/put_record_batch_input.dart'; +export 'package:amplify_firehose_dart/src/sdk/src/firehose/model/put_record_batch_output.dart'; +export 'package:amplify_firehose_dart/src/sdk/src/firehose/model/put_record_batch_response_entry.dart'; +export 'package:amplify_firehose_dart/src/sdk/src/firehose/model/record.dart'; +export 'package:amplify_firehose_dart/src/sdk/src/firehose/model/resource_not_found_exception.dart'; +export 'package:amplify_firehose_dart/src/sdk/src/firehose/model/service_unavailable_exception.dart'; diff --git a/packages/kinesis/amplify_firehose_dart/lib/src/sdk/src/firehose/common/endpoint_resolver.dart b/packages/kinesis/amplify_firehose_dart/lib/src/sdk/src/firehose/common/endpoint_resolver.dart new file mode 100644 index 00000000000..e14b455b3ec --- /dev/null +++ b/packages/kinesis/amplify_firehose_dart/lib/src/sdk/src/firehose/common/endpoint_resolver.dart @@ -0,0 +1,214 @@ +// Generated with smithy-dart 0.3.2. DO NOT MODIFY. 
+// ignore_for_file: avoid_unused_constructor_parameters,deprecated_member_use_from_same_package,non_constant_identifier_names,unnecessary_library_name + +library amplify_firehose_dart.firehose.common.endpoint_resolver; // ignore_for_file: no_leading_underscores_for_library_prefixes + +import 'package:meta/meta.dart' as _i2; +import 'package:smithy_aws/smithy_aws.dart' as _i1; + +final _partitions = [ + _i1.Partition( + id: 'aws', + regionRegex: RegExp(r'^(us|eu|ap|sa|ca|me|af)\-\w+\-\d+$'), + partitionEndpoint: null, + isRegionalized: true, + defaults: const _i1.EndpointDefinition( + hostname: 'firehose.{region}.amazonaws.com', + protocols: ['https'], + signatureVersions: [_i1.AWSSignatureVersion.v4], + credentialScope: _i1.CredentialScope(), + variants: [], + ), + regions: const { + 'af-south-1', + 'ap-east-1', + 'ap-northeast-1', + 'ap-northeast-2', + 'ap-northeast-3', + 'ap-south-1', + 'ap-southeast-1', + 'ap-southeast-2', + 'ap-southeast-3', + 'ca-central-1', + 'eu-central-1', + 'eu-north-1', + 'eu-south-1', + 'eu-west-1', + 'eu-west-2', + 'eu-west-3', + 'me-south-1', + 'sa-east-1', + 'us-east-1', + 'us-east-2', + 'us-west-1', + 'us-west-2', + }, + endpoints: const { + 'af-south-1': _i1.EndpointDefinition(variants: []), + 'ap-east-1': _i1.EndpointDefinition(variants: []), + 'ap-northeast-1': _i1.EndpointDefinition(variants: []), + 'ap-northeast-2': _i1.EndpointDefinition(variants: []), + 'ap-northeast-3': _i1.EndpointDefinition(variants: []), + 'ap-south-1': _i1.EndpointDefinition(variants: []), + 'ap-southeast-1': _i1.EndpointDefinition(variants: []), + 'ap-southeast-2': _i1.EndpointDefinition(variants: []), + 'ca-central-1': _i1.EndpointDefinition(variants: []), + 'eu-central-1': _i1.EndpointDefinition(variants: []), + 'eu-north-1': _i1.EndpointDefinition(variants: []), + 'eu-south-1': _i1.EndpointDefinition(variants: []), + 'eu-west-1': _i1.EndpointDefinition(variants: []), + 'eu-west-2': _i1.EndpointDefinition(variants: []), + 'eu-west-3': 
_i1.EndpointDefinition(variants: []), + 'fips-us-east-1': _i1.EndpointDefinition( + hostname: 'firehose-fips.us-east-1.amazonaws.com', + credentialScope: _i1.CredentialScope(region: 'us-east-1'), + variants: [], + ), + 'fips-us-east-2': _i1.EndpointDefinition( + hostname: 'firehose-fips.us-east-2.amazonaws.com', + credentialScope: _i1.CredentialScope(region: 'us-east-2'), + variants: [], + ), + 'fips-us-west-1': _i1.EndpointDefinition( + hostname: 'firehose-fips.us-west-1.amazonaws.com', + credentialScope: _i1.CredentialScope(region: 'us-west-1'), + variants: [], + ), + 'fips-us-west-2': _i1.EndpointDefinition( + hostname: 'firehose-fips.us-west-2.amazonaws.com', + credentialScope: _i1.CredentialScope(region: 'us-west-2'), + variants: [], + ), + 'me-south-1': _i1.EndpointDefinition(variants: []), + 'sa-east-1': _i1.EndpointDefinition(variants: []), + 'us-east-1': _i1.EndpointDefinition( + variants: [ + _i1.EndpointDefinitionVariant( + hostname: 'firehose-fips.us-east-1.amazonaws.com', + tags: ['fips'], + ), + ], + ), + 'us-east-2': _i1.EndpointDefinition( + variants: [ + _i1.EndpointDefinitionVariant( + hostname: 'firehose-fips.us-east-2.amazonaws.com', + tags: ['fips'], + ), + ], + ), + 'us-west-1': _i1.EndpointDefinition( + variants: [ + _i1.EndpointDefinitionVariant( + hostname: 'firehose-fips.us-west-1.amazonaws.com', + tags: ['fips'], + ), + ], + ), + 'us-west-2': _i1.EndpointDefinition( + variants: [ + _i1.EndpointDefinitionVariant( + hostname: 'firehose-fips.us-west-2.amazonaws.com', + tags: ['fips'], + ), + ], + ), + }, + ), + _i1.Partition( + id: 'aws-cn', + regionRegex: RegExp(r'^cn\-\w+\-\d+$'), + partitionEndpoint: null, + isRegionalized: true, + defaults: const _i1.EndpointDefinition( + hostname: 'firehose.{region}.amazonaws.com.cn', + protocols: ['https'], + signatureVersions: [_i1.AWSSignatureVersion.v4], + credentialScope: _i1.CredentialScope(), + variants: [], + ), + regions: const {'cn-north-1', 'cn-northwest-1'}, + endpoints: const { + 
'cn-north-1': _i1.EndpointDefinition(variants: []), + 'cn-northwest-1': _i1.EndpointDefinition(variants: []), + }, + ), + _i1.Partition( + id: 'aws-iso', + regionRegex: RegExp(r'^us\-iso\-\w+\-\d+$'), + partitionEndpoint: null, + isRegionalized: true, + defaults: const _i1.EndpointDefinition( + hostname: 'firehose.{region}.c2s.ic.gov', + protocols: ['https'], + signatureVersions: [_i1.AWSSignatureVersion.v4], + credentialScope: _i1.CredentialScope(), + variants: [], + ), + regions: const {'us-iso-east-1', 'us-iso-west-1'}, + endpoints: const {'us-iso-east-1': _i1.EndpointDefinition(variants: [])}, + ), + _i1.Partition( + id: 'aws-iso-b', + regionRegex: RegExp(r'^us\-isob\-\w+\-\d+$'), + partitionEndpoint: null, + isRegionalized: true, + defaults: const _i1.EndpointDefinition( + hostname: 'firehose.{region}.sc2s.sgov.gov', + protocols: ['https'], + signatureVersions: [_i1.AWSSignatureVersion.v4], + credentialScope: _i1.CredentialScope(), + variants: [], + ), + regions: const {'us-isob-east-1'}, + endpoints: const {}, + ), + _i1.Partition( + id: 'aws-us-gov', + regionRegex: RegExp(r'^us\-gov\-\w+\-\d+$'), + partitionEndpoint: null, + isRegionalized: true, + defaults: const _i1.EndpointDefinition( + hostname: 'firehose.{region}.amazonaws.com', + protocols: ['https'], + signatureVersions: [_i1.AWSSignatureVersion.v4], + credentialScope: _i1.CredentialScope(), + variants: [], + ), + regions: const {'us-gov-east-1', 'us-gov-west-1'}, + endpoints: const { + 'fips-us-gov-east-1': _i1.EndpointDefinition( + hostname: 'firehose-fips.us-gov-east-1.amazonaws.com', + credentialScope: _i1.CredentialScope(region: 'us-gov-east-1'), + variants: [], + ), + 'fips-us-gov-west-1': _i1.EndpointDefinition( + hostname: 'firehose-fips.us-gov-west-1.amazonaws.com', + credentialScope: _i1.CredentialScope(region: 'us-gov-west-1'), + variants: [], + ), + 'us-gov-east-1': _i1.EndpointDefinition( + variants: [ + _i1.EndpointDefinitionVariant( + hostname: 
'firehose-fips.us-gov-east-1.amazonaws.com', + tags: ['fips'], + ), + ], + ), + 'us-gov-west-1': _i1.EndpointDefinition( + variants: [ + _i1.EndpointDefinitionVariant( + hostname: 'firehose-fips.us-gov-west-1.amazonaws.com', + tags: ['fips'], + ), + ], + ), + }, + ), +]; +@_i2.internal +final _i1.AWSEndpointResolver endpointResolver = _i1.AWSEndpointResolver( + _partitions, +); +@_i2.internal +const String sdkId = 'Firehose'; diff --git a/packages/kinesis/amplify_firehose_dart/lib/src/sdk/src/firehose/common/serializers.dart b/packages/kinesis/amplify_firehose_dart/lib/src/sdk/src/firehose/common/serializers.dart new file mode 100644 index 00000000000..63556718189 --- /dev/null +++ b/packages/kinesis/amplify_firehose_dart/lib/src/sdk/src/firehose/common/serializers.dart @@ -0,0 +1,35 @@ +// Generated with smithy-dart 0.3.2. DO NOT MODIFY. +// ignore_for_file: avoid_unused_constructor_parameters,deprecated_member_use_from_same_package,non_constant_identifier_names,unnecessary_library_name + +library amplify_firehose_dart.firehose.common.serializers; // ignore_for_file: no_leading_underscores_for_library_prefixes + +import 'package:amplify_firehose_dart/src/sdk/src/firehose/model/invalid_argument_exception.dart'; +import 'package:amplify_firehose_dart/src/sdk/src/firehose/model/invalid_kms_resource_exception.dart'; +import 'package:amplify_firehose_dart/src/sdk/src/firehose/model/invalid_source_exception.dart'; +import 'package:amplify_firehose_dart/src/sdk/src/firehose/model/put_record_batch_input.dart'; +import 'package:amplify_firehose_dart/src/sdk/src/firehose/model/put_record_batch_output.dart'; +import 'package:amplify_firehose_dart/src/sdk/src/firehose/model/put_record_batch_response_entry.dart'; +import 'package:amplify_firehose_dart/src/sdk/src/firehose/model/record.dart'; +import 'package:amplify_firehose_dart/src/sdk/src/firehose/model/resource_not_found_exception.dart'; +import 
'package:amplify_firehose_dart/src/sdk/src/firehose/model/service_unavailable_exception.dart'; +import 'package:built_collection/built_collection.dart' as _i2; +import 'package:built_value/serializer.dart'; +import 'package:smithy/smithy.dart' as _i1; + +const List<_i1.SmithySerializer> serializers = [ + ...Record.serializers, + ...PutRecordBatchInput.serializers, + ...PutRecordBatchResponseEntry.serializers, + ...PutRecordBatchOutput.serializers, + ...InvalidArgumentException.serializers, + ...InvalidKmsResourceException.serializers, + ...InvalidSourceException.serializers, + ...ResourceNotFoundException.serializers, + ...ServiceUnavailableException.serializers, +]; +final Map builderFactories = { + const FullType(_i2.BuiltList, [FullType(Record)]): + _i2.ListBuilder.new, + const FullType(_i2.BuiltList, [FullType(PutRecordBatchResponseEntry)]): + _i2.ListBuilder.new, +}; diff --git a/packages/kinesis/amplify_firehose_dart/lib/src/sdk/src/firehose/firehose_client.dart b/packages/kinesis/amplify_firehose_dart/lib/src/sdk/src/firehose/firehose_client.dart new file mode 100644 index 00000000000..00cd24e6b3a --- /dev/null +++ b/packages/kinesis/amplify_firehose_dart/lib/src/sdk/src/firehose/firehose_client.dart @@ -0,0 +1,91 @@ +// Generated with smithy-dart 0.3.2. DO NOT MODIFY. 
+// ignore_for_file: avoid_unused_constructor_parameters,deprecated_member_use_from_same_package,non_constant_identifier_names,unnecessary_library_name + +library amplify_firehose_dart.firehose.firehose_client; // ignore_for_file: no_leading_underscores_for_library_prefixes + +import 'package:amplify_firehose_dart/src/sdk/src/firehose/model/put_record_batch_input.dart'; +import 'package:amplify_firehose_dart/src/sdk/src/firehose/model/put_record_batch_output.dart'; +import 'package:amplify_firehose_dart/src/sdk/src/firehose/operation/put_record_batch_operation.dart'; +import 'package:aws_common/aws_common.dart' as _i1; +import 'package:aws_signature_v4/aws_signature_v4.dart' as _i2; +import 'package:smithy/smithy.dart' as _i3; + +/// ## Amazon Data Firehose +/// +/// Amazon Data Firehose was previously known as Amazon Kinesis Data Firehose. +/// +/// Amazon Data Firehose is a fully managed service that delivers real-time streaming data to destinations such as Amazon Simple Storage Service (Amazon S3), Amazon OpenSearch Service, Amazon Redshift, Splunk, and various other supported destinations. +class FirehoseClient { + /// ## Amazon Data Firehose + /// + /// Amazon Data Firehose was previously known as Amazon Kinesis Data Firehose. + /// + /// Amazon Data Firehose is a fully managed service that delivers real-time streaming data to destinations such as Amazon Simple Storage Service (Amazon S3), Amazon OpenSearch Service, Amazon Redshift, Splunk, and various other supported destinations. + const FirehoseClient({ + _i1.AWSHttpClient? client, + required String region, + Uri? 
baseUri, + _i2.AWSCredentialsProvider credentialsProvider = + const _i2.AWSCredentialsProvider.defaultChain(), + List<_i3.HttpRequestInterceptor> requestInterceptors = const [], + List<_i3.HttpResponseInterceptor> responseInterceptors = const [], + }) : _client = client, + _region = region, + _baseUri = baseUri, + _credentialsProvider = credentialsProvider, + _requestInterceptors = requestInterceptors, + _responseInterceptors = responseInterceptors; + + final _i1.AWSHttpClient? _client; + + final String _region; + + final Uri? _baseUri; + + final _i2.AWSCredentialsProvider _credentialsProvider; + + final List<_i3.HttpRequestInterceptor> _requestInterceptors; + + final List<_i3.HttpResponseInterceptor> _responseInterceptors; + + /// Writes multiple data records into a Firehose stream in a single call, which can achieve higher throughput per producer than when writing single records. To write single data records into a Firehose stream, use PutRecord. Applications using these operations are referred to as producers. + /// + /// Firehose accumulates and publishes a particular metric for a customer account in one minute intervals. It is possible that the bursts of incoming bytes/records ingested to a Firehose stream last only for a few seconds. Due to this, the actual spikes in the traffic might not be fully visible in the customer's 1 minute CloudWatch metrics. + /// + /// For information about service quota, see [Amazon Firehose Quota](https://docs.aws.amazon.com/firehose/latest/dev/limits.html). + /// + /// Each PutRecordBatch request supports up to 500 records. Each record in the request can be as large as 1,000 KB (before base64 encoding), up to a limit of 4 MB for the entire request. These limits cannot be changed. + /// + /// You must specify the name of the Firehose stream and the data record when using PutRecord. The data record consists of a data blob that can be up to 1,000 KB in size, and any kind of data. 
For example, it could be a segment from a log file, geographic location data, website clickstream data, and so on. + /// + /// For multi record de-aggregation, you can not put more than 500 records even if the data blob length is less than 1000 KiB. If you include more than 500 records, the request succeeds but the record de-aggregation doesn't work as expected and transformation lambda is invoked with the complete base64 encoded data blob instead of de-aggregated base64 decoded records. + /// + /// Firehose buffers records before delivering them to the destination. To disambiguate the data blobs at the destination, a common solution is to use delimiters in the data, such as a newline (`\\n`) or some other character unique within the data. This allows the consumer application to parse individual data items when reading the data from the destination. + /// + /// The PutRecordBatch response includes a count of failed records, `FailedPutCount`, and an array of responses, `RequestResponses`. Even if the PutRecordBatch call succeeds, the value of `FailedPutCount` may be greater than 0, indicating that there are records for which the operation didn't succeed. Each entry in the `RequestResponses` array provides additional information about the processed record. It directly correlates with a record in the request array using the same ordering, from the top to the bottom. The response array always includes the same number of records as the request array. `RequestResponses` includes both successfully and unsuccessfully processed records. Firehose tries to process all records in each PutRecordBatch request. A single record failure does not stop the processing of subsequent records. + /// + /// A successfully processed record includes a `RecordId` value, which is unique for the record. An unsuccessfully processed record includes `ErrorCode` and `ErrorMessage` values. 
`ErrorCode` reflects the type of error, and is one of the following values: `ServiceUnavailableException` or `InternalFailure`. `ErrorMessage` provides more detailed information about the error. + /// + /// If there is an internal server error or a timeout, the write might have completed or it might have failed. If `FailedPutCount` is greater than 0, retry the request, resending only those records that might have failed processing. This minimizes the possible duplicate records and also reduces the total bytes sent (and corresponding charges). We recommend that you handle any duplicates at the destination. + /// + /// If PutRecordBatch throws `ServiceUnavailableException`, the API is automatically reinvoked (retried) 3 times. If the exception persists, it is possible that the throughput limits have been exceeded for the Firehose stream. + /// + /// Re-invoking the Put API operations (for example, PutRecord and PutRecordBatch) can result in data duplicates. For larger data assets, allow for a longer time out before retrying Put API operations. + /// + /// Data records sent to Firehose are stored for 24 hours from the time they are added to a Firehose stream as it attempts to send the records to the destination. If the destination is unreachable for more than 24 hours, the data is no longer available. + /// + /// Don't concatenate two or more base64 strings to form the data fields of your records. Instead, concatenate the raw data, then perform base64 encoding. + _i3.SmithyOperation putRecordBatch( + PutRecordBatchInput input, { + _i1.AWSHttpClient? client, + _i2.AWSCredentialsProvider? credentialsProvider, + }) { + return PutRecordBatchOperation( + region: _region, + baseUri: _baseUri, + credentialsProvider: credentialsProvider ?? _credentialsProvider, + requestInterceptors: _requestInterceptors, + responseInterceptors: _responseInterceptors, + ).run(input, client: client ?? 
_client); + } +} diff --git a/packages/kinesis/amplify_firehose_dart/lib/src/sdk/src/firehose/model/invalid_argument_exception.dart b/packages/kinesis/amplify_firehose_dart/lib/src/sdk/src/firehose/model/invalid_argument_exception.dart new file mode 100644 index 00000000000..a2ded1dff6d --- /dev/null +++ b/packages/kinesis/amplify_firehose_dart/lib/src/sdk/src/firehose/model/invalid_argument_exception.dart @@ -0,0 +1,137 @@ +// Generated with smithy-dart 0.3.2. DO NOT MODIFY. +// ignore_for_file: avoid_unused_constructor_parameters,deprecated_member_use_from_same_package,non_constant_identifier_names,unnecessary_library_name + +library amplify_firehose_dart.firehose.model.invalid_argument_exception; // ignore_for_file: no_leading_underscores_for_library_prefixes + +import 'package:aws_common/aws_common.dart' as _i1; +import 'package:built_value/built_value.dart'; +import 'package:built_value/serializer.dart'; +import 'package:smithy/smithy.dart' as _i2; + +part 'invalid_argument_exception.g.dart'; + +/// The specified input parameter has a value that is not valid. +abstract class InvalidArgumentException + with _i1.AWSEquatable + implements + Built, + _i2.SmithyHttpException { + /// The specified input parameter has a value that is not valid. + factory InvalidArgumentException({String? message}) { + return _$InvalidArgumentException._(message: message); + } + + /// The specified input parameter has a value that is not valid. + factory InvalidArgumentException.build([ + void Function(InvalidArgumentExceptionBuilder) updates, + ]) = _$InvalidArgumentException; + + const InvalidArgumentException._(); + + /// Constructs a [InvalidArgumentException] from a [payload] and [response]. 
+ factory InvalidArgumentException.fromResponse( + InvalidArgumentException payload, + _i1.AWSBaseHttpResponse response, + ) => payload.rebuild((b) { + b.statusCode = response.statusCode; + b.headers = response.headers; + }); + + static const List<_i2.SmithySerializer> + serializers = [InvalidArgumentExceptionAwsJson11Serializer()]; + + /// A message that provides information about the error. + @override + String? get message; + @override + _i2.ShapeId get shapeId => const _i2.ShapeId( + namespace: 'com.amazonaws.firehose', + shape: 'InvalidArgumentException', + ); + + @override + _i2.RetryConfig? get retryConfig => null; + + @override + @BuiltValueField(compare: false) + int? get statusCode; + @override + @BuiltValueField(compare: false) + Map? get headers; + @override + Exception? get underlyingException => null; + + @override + List get props => [message]; + + @override + String toString() { + final helper = newBuiltValueToStringHelper('InvalidArgumentException') + ..add('message', message); + return helper.toString(); + } +} + +class InvalidArgumentExceptionAwsJson11Serializer + extends _i2.StructuredSmithySerializer { + const InvalidArgumentExceptionAwsJson11Serializer() + : super('InvalidArgumentException'); + + @override + Iterable get types => const [ + InvalidArgumentException, + _$InvalidArgumentException, + ]; + + @override + Iterable<_i2.ShapeId> get supportedProtocols => const [ + _i2.ShapeId(namespace: 'aws.protocols', shape: 'awsJson1_1'), + ]; + + @override + InvalidArgumentException deserialize( + Serializers serializers, + Iterable serialized, { + FullType specifiedType = FullType.unspecified, + }) { + final result = InvalidArgumentExceptionBuilder(); + final iterator = serialized.iterator; + while (iterator.moveNext()) { + final key = iterator.current as String; + iterator.moveNext(); + final value = iterator.current; + if (value == null) { + continue; + } + switch (key) { + case 'message': + result.message = + (serializers.deserialize( + value, 
+ specifiedType: const FullType(String), + ) + as String); + } + } + + return result.build(); + } + + @override + Iterable serialize( + Serializers serializers, + InvalidArgumentException object, { + FullType specifiedType = FullType.unspecified, + }) { + final result$ = []; + final InvalidArgumentException(:message) = object; + if (message != null) { + result$ + ..add('message') + ..add( + serializers.serialize(message, specifiedType: const FullType(String)), + ); + } + return result$; + } +} diff --git a/packages/kinesis/amplify_firehose_dart/lib/src/sdk/src/firehose/model/invalid_argument_exception.g.dart b/packages/kinesis/amplify_firehose_dart/lib/src/sdk/src/firehose/model/invalid_argument_exception.g.dart new file mode 100644 index 00000000000..84b29263deb --- /dev/null +++ b/packages/kinesis/amplify_firehose_dart/lib/src/sdk/src/firehose/model/invalid_argument_exception.g.dart @@ -0,0 +1,103 @@ +// GENERATED CODE - DO NOT MODIFY BY HAND + +part of 'invalid_argument_exception.dart'; + +// ************************************************************************** +// BuiltValueGenerator +// ************************************************************************** + +class _$InvalidArgumentException extends InvalidArgumentException { + @override + final String? message; + @override + final int? statusCode; + @override + final Map? headers; + + factory _$InvalidArgumentException([ + void Function(InvalidArgumentExceptionBuilder)? 
updates, + ]) => (InvalidArgumentExceptionBuilder()..update(updates))._build(); + + _$InvalidArgumentException._({this.message, this.statusCode, this.headers}) + : super._(); + @override + InvalidArgumentException rebuild( + void Function(InvalidArgumentExceptionBuilder) updates, + ) => (toBuilder()..update(updates)).build(); + + @override + InvalidArgumentExceptionBuilder toBuilder() => + InvalidArgumentExceptionBuilder()..replace(this); + + @override + bool operator ==(Object other) { + if (identical(other, this)) return true; + return other is InvalidArgumentException && message == other.message; + } + + @override + int get hashCode { + var _$hash = 0; + _$hash = $jc(_$hash, message.hashCode); + _$hash = $jf(_$hash); + return _$hash; + } +} + +class InvalidArgumentExceptionBuilder + implements + Builder { + _$InvalidArgumentException? _$v; + + String? _message; + String? get message => _$this._message; + set message(String? message) => _$this._message = message; + + int? _statusCode; + int? get statusCode => _$this._statusCode; + set statusCode(int? statusCode) => _$this._statusCode = statusCode; + + Map? _headers; + Map? get headers => _$this._headers; + set headers(Map? headers) => _$this._headers = headers; + + InvalidArgumentExceptionBuilder(); + + InvalidArgumentExceptionBuilder get _$this { + final $v = _$v; + if ($v != null) { + _message = $v.message; + _statusCode = $v.statusCode; + _headers = $v.headers; + _$v = null; + } + return this; + } + + @override + void replace(InvalidArgumentException other) { + _$v = other as _$InvalidArgumentException; + } + + @override + void update(void Function(InvalidArgumentExceptionBuilder)? updates) { + if (updates != null) updates(this); + } + + @override + InvalidArgumentException build() => _build(); + + _$InvalidArgumentException _build() { + final _$result = + _$v ?? 
+ _$InvalidArgumentException._( + message: message, + statusCode: statusCode, + headers: headers, + ); + replace(_$result); + return _$result; + } +} + +// ignore_for_file: deprecated_member_use_from_same_package,type=lint diff --git a/packages/kinesis/amplify_firehose_dart/lib/src/sdk/src/firehose/model/invalid_kms_resource_exception.dart b/packages/kinesis/amplify_firehose_dart/lib/src/sdk/src/firehose/model/invalid_kms_resource_exception.dart new file mode 100644 index 00000000000..a165836353a --- /dev/null +++ b/packages/kinesis/amplify_firehose_dart/lib/src/sdk/src/firehose/model/invalid_kms_resource_exception.dart @@ -0,0 +1,152 @@ +// Generated with smithy-dart 0.3.2. DO NOT MODIFY. +// ignore_for_file: avoid_unused_constructor_parameters,deprecated_member_use_from_same_package,non_constant_identifier_names,unnecessary_library_name + +library amplify_firehose_dart.firehose.model.invalid_kms_resource_exception; // ignore_for_file: no_leading_underscores_for_library_prefixes + +import 'package:aws_common/aws_common.dart' as _i1; +import 'package:built_value/built_value.dart'; +import 'package:built_value/serializer.dart'; +import 'package:smithy/smithy.dart' as _i2; + +part 'invalid_kms_resource_exception.g.dart'; + +/// Firehose throws this exception when an attempt to put records or to start or stop Firehose stream encryption fails. This happens when the KMS service throws one of the following exception types: `AccessDeniedException`, `InvalidStateException`, `DisabledException`, or `NotFoundException`. +abstract class InvalidKmsResourceException + with _i1.AWSEquatable + implements + Built, + _i2.SmithyHttpException { + /// Firehose throws this exception when an attempt to put records or to start or stop Firehose stream encryption fails. This happens when the KMS service throws one of the following exception types: `AccessDeniedException`, `InvalidStateException`, `DisabledException`, or `NotFoundException`. + factory InvalidKmsResourceException({String? 
code, String? message}) { + return _$InvalidKmsResourceException._(code: code, message: message); + } + + /// Firehose throws this exception when an attempt to put records or to start or stop Firehose stream encryption fails. This happens when the KMS service throws one of the following exception types: `AccessDeniedException`, `InvalidStateException`, `DisabledException`, or `NotFoundException`. + factory InvalidKmsResourceException.build([ + void Function(InvalidKmsResourceExceptionBuilder) updates, + ]) = _$InvalidKmsResourceException; + + const InvalidKmsResourceException._(); + + /// Constructs a [InvalidKmsResourceException] from a [payload] and [response]. + factory InvalidKmsResourceException.fromResponse( + InvalidKmsResourceException payload, + _i1.AWSBaseHttpResponse response, + ) => payload.rebuild((b) { + b.statusCode = response.statusCode; + b.headers = response.headers; + }); + + static const List<_i2.SmithySerializer> + serializers = [InvalidKmsResourceExceptionAwsJson11Serializer()]; + + String? get code; + @override + String? get message; + @override + _i2.ShapeId get shapeId => const _i2.ShapeId( + namespace: 'com.amazonaws.firehose', + shape: 'InvalidKMSResourceException', + ); + + @override + _i2.RetryConfig? get retryConfig => null; + + @override + @BuiltValueField(compare: false) + int? get statusCode; + @override + @BuiltValueField(compare: false) + Map? get headers; + @override + Exception? 
get underlyingException => null; + + @override + List get props => [code, message]; + + @override + String toString() { + final helper = newBuiltValueToStringHelper('InvalidKmsResourceException') + ..add('code', code) + ..add('message', message); + return helper.toString(); + } +} + +class InvalidKmsResourceExceptionAwsJson11Serializer + extends _i2.StructuredSmithySerializer { + const InvalidKmsResourceExceptionAwsJson11Serializer() + : super('InvalidKmsResourceException'); + + @override + Iterable get types => const [ + InvalidKmsResourceException, + _$InvalidKmsResourceException, + ]; + + @override + Iterable<_i2.ShapeId> get supportedProtocols => const [ + _i2.ShapeId(namespace: 'aws.protocols', shape: 'awsJson1_1'), + ]; + + @override + InvalidKmsResourceException deserialize( + Serializers serializers, + Iterable serialized, { + FullType specifiedType = FullType.unspecified, + }) { + final result = InvalidKmsResourceExceptionBuilder(); + final iterator = serialized.iterator; + while (iterator.moveNext()) { + final key = iterator.current as String; + iterator.moveNext(); + final value = iterator.current; + if (value == null) { + continue; + } + switch (key) { + case 'code': + result.code = + (serializers.deserialize( + value, + specifiedType: const FullType(String), + ) + as String); + case 'message': + result.message = + (serializers.deserialize( + value, + specifiedType: const FullType(String), + ) + as String); + } + } + + return result.build(); + } + + @override + Iterable serialize( + Serializers serializers, + InvalidKmsResourceException object, { + FullType specifiedType = FullType.unspecified, + }) { + final result$ = []; + final InvalidKmsResourceException(:code, :message) = object; + if (code != null) { + result$ + ..add('code') + ..add( + serializers.serialize(code, specifiedType: const FullType(String)), + ); + } + if (message != null) { + result$ + ..add('message') + ..add( + serializers.serialize(message, specifiedType: const FullType(String)), + 
); + } + return result$; + } +} diff --git a/packages/kinesis/amplify_firehose_dart/lib/src/sdk/src/firehose/model/invalid_kms_resource_exception.g.dart b/packages/kinesis/amplify_firehose_dart/lib/src/sdk/src/firehose/model/invalid_kms_resource_exception.g.dart new file mode 100644 index 00000000000..e6cfca7e9b1 --- /dev/null +++ b/packages/kinesis/amplify_firehose_dart/lib/src/sdk/src/firehose/model/invalid_kms_resource_exception.g.dart @@ -0,0 +1,121 @@ +// GENERATED CODE - DO NOT MODIFY BY HAND + +part of 'invalid_kms_resource_exception.dart'; + +// ************************************************************************** +// BuiltValueGenerator +// ************************************************************************** + +class _$InvalidKmsResourceException extends InvalidKmsResourceException { + @override + final String? code; + @override + final String? message; + @override + final int? statusCode; + @override + final Map? headers; + + factory _$InvalidKmsResourceException([ + void Function(InvalidKmsResourceExceptionBuilder)? 
updates, + ]) => (InvalidKmsResourceExceptionBuilder()..update(updates))._build(); + + _$InvalidKmsResourceException._({ + this.code, + this.message, + this.statusCode, + this.headers, + }) : super._(); + @override + InvalidKmsResourceException rebuild( + void Function(InvalidKmsResourceExceptionBuilder) updates, + ) => (toBuilder()..update(updates)).build(); + + @override + InvalidKmsResourceExceptionBuilder toBuilder() => + InvalidKmsResourceExceptionBuilder()..replace(this); + + @override + bool operator ==(Object other) { + if (identical(other, this)) return true; + return other is InvalidKmsResourceException && + code == other.code && + message == other.message; + } + + @override + int get hashCode { + var _$hash = 0; + _$hash = $jc(_$hash, code.hashCode); + _$hash = $jc(_$hash, message.hashCode); + _$hash = $jf(_$hash); + return _$hash; + } +} + +class InvalidKmsResourceExceptionBuilder + implements + Builder< + InvalidKmsResourceException, + InvalidKmsResourceExceptionBuilder + > { + _$InvalidKmsResourceException? _$v; + + String? _code; + String? get code => _$this._code; + set code(String? code) => _$this._code = code; + + String? _message; + String? get message => _$this._message; + set message(String? message) => _$this._message = message; + + int? _statusCode; + int? get statusCode => _$this._statusCode; + set statusCode(int? statusCode) => _$this._statusCode = statusCode; + + Map? _headers; + Map? get headers => _$this._headers; + set headers(Map? headers) => _$this._headers = headers; + + InvalidKmsResourceExceptionBuilder(); + + InvalidKmsResourceExceptionBuilder get _$this { + final $v = _$v; + if ($v != null) { + _code = $v.code; + _message = $v.message; + _statusCode = $v.statusCode; + _headers = $v.headers; + _$v = null; + } + return this; + } + + @override + void replace(InvalidKmsResourceException other) { + _$v = other as _$InvalidKmsResourceException; + } + + @override + void update(void Function(InvalidKmsResourceExceptionBuilder)? 
updates) { + if (updates != null) updates(this); + } + + @override + InvalidKmsResourceException build() => _build(); + + _$InvalidKmsResourceException _build() { + final _$result = + _$v ?? + _$InvalidKmsResourceException._( + code: code, + message: message, + statusCode: statusCode, + headers: headers, + ); + replace(_$result); + return _$result; + } +} + +// ignore_for_file: deprecated_member_use_from_same_package,type=lint diff --git a/packages/kinesis/amplify_firehose_dart/lib/src/sdk/src/firehose/model/invalid_source_exception.dart b/packages/kinesis/amplify_firehose_dart/lib/src/sdk/src/firehose/model/invalid_source_exception.dart new file mode 100644 index 00000000000..7fcb7bfaec1 --- /dev/null +++ b/packages/kinesis/amplify_firehose_dart/lib/src/sdk/src/firehose/model/invalid_source_exception.dart @@ -0,0 +1,152 @@ +// Generated with smithy-dart 0.3.2. DO NOT MODIFY. +// ignore_for_file: avoid_unused_constructor_parameters,deprecated_member_use_from_same_package,non_constant_identifier_names,unnecessary_library_name + +library amplify_firehose_dart.firehose.model.invalid_source_exception; // ignore_for_file: no_leading_underscores_for_library_prefixes + +import 'package:aws_common/aws_common.dart' as _i1; +import 'package:built_value/built_value.dart'; +import 'package:built_value/serializer.dart'; +import 'package:smithy/smithy.dart' as _i2; + +part 'invalid_source_exception.g.dart'; + +/// Only requests from CloudWatch Logs are supported when CloudWatch Logs decompression is enabled. +abstract class InvalidSourceException + with _i1.AWSEquatable + implements + Built, + _i2.SmithyHttpException { + /// Only requests from CloudWatch Logs are supported when CloudWatch Logs decompression is enabled. + factory InvalidSourceException({String? code, String? message}) { + return _$InvalidSourceException._(code: code, message: message); + } + + /// Only requests from CloudWatch Logs are supported when CloudWatch Logs decompression is enabled. 
+ factory InvalidSourceException.build([ + void Function(InvalidSourceExceptionBuilder) updates, + ]) = _$InvalidSourceException; + + const InvalidSourceException._(); + + /// Constructs a [InvalidSourceException] from a [payload] and [response]. + factory InvalidSourceException.fromResponse( + InvalidSourceException payload, + _i1.AWSBaseHttpResponse response, + ) => payload.rebuild((b) { + b.statusCode = response.statusCode; + b.headers = response.headers; + }); + + static const List<_i2.SmithySerializer> serializers = + [InvalidSourceExceptionAwsJson11Serializer()]; + + String? get code; + @override + String? get message; + @override + _i2.ShapeId get shapeId => const _i2.ShapeId( + namespace: 'com.amazonaws.firehose', + shape: 'InvalidSourceException', + ); + + @override + _i2.RetryConfig? get retryConfig => null; + + @override + @BuiltValueField(compare: false) + int? get statusCode; + @override + @BuiltValueField(compare: false) + Map? get headers; + @override + Exception? get underlyingException => null; + + @override + List get props => [code, message]; + + @override + String toString() { + final helper = newBuiltValueToStringHelper('InvalidSourceException') + ..add('code', code) + ..add('message', message); + return helper.toString(); + } +} + +class InvalidSourceExceptionAwsJson11Serializer + extends _i2.StructuredSmithySerializer { + const InvalidSourceExceptionAwsJson11Serializer() + : super('InvalidSourceException'); + + @override + Iterable get types => const [ + InvalidSourceException, + _$InvalidSourceException, + ]; + + @override + Iterable<_i2.ShapeId> get supportedProtocols => const [ + _i2.ShapeId(namespace: 'aws.protocols', shape: 'awsJson1_1'), + ]; + + @override + InvalidSourceException deserialize( + Serializers serializers, + Iterable serialized, { + FullType specifiedType = FullType.unspecified, + }) { + final result = InvalidSourceExceptionBuilder(); + final iterator = serialized.iterator; + while (iterator.moveNext()) { + final key = 
iterator.current as String; + iterator.moveNext(); + final value = iterator.current; + if (value == null) { + continue; + } + switch (key) { + case 'code': + result.code = + (serializers.deserialize( + value, + specifiedType: const FullType(String), + ) + as String); + case 'message': + result.message = + (serializers.deserialize( + value, + specifiedType: const FullType(String), + ) + as String); + } + } + + return result.build(); + } + + @override + Iterable serialize( + Serializers serializers, + InvalidSourceException object, { + FullType specifiedType = FullType.unspecified, + }) { + final result$ = []; + final InvalidSourceException(:code, :message) = object; + if (code != null) { + result$ + ..add('code') + ..add( + serializers.serialize(code, specifiedType: const FullType(String)), + ); + } + if (message != null) { + result$ + ..add('message') + ..add( + serializers.serialize(message, specifiedType: const FullType(String)), + ); + } + return result$; + } +} diff --git a/packages/kinesis/amplify_firehose_dart/lib/src/sdk/src/firehose/model/invalid_source_exception.g.dart b/packages/kinesis/amplify_firehose_dart/lib/src/sdk/src/firehose/model/invalid_source_exception.g.dart new file mode 100644 index 00000000000..8f0081f16ea --- /dev/null +++ b/packages/kinesis/amplify_firehose_dart/lib/src/sdk/src/firehose/model/invalid_source_exception.g.dart @@ -0,0 +1,117 @@ +// GENERATED CODE - DO NOT MODIFY BY HAND + +part of 'invalid_source_exception.dart'; + +// ************************************************************************** +// BuiltValueGenerator +// ************************************************************************** + +class _$InvalidSourceException extends InvalidSourceException { + @override + final String? code; + @override + final String? message; + @override + final int? statusCode; + @override + final Map? headers; + + factory _$InvalidSourceException([ + void Function(InvalidSourceExceptionBuilder)? 
updates, + ]) => (InvalidSourceExceptionBuilder()..update(updates))._build(); + + _$InvalidSourceException._({ + this.code, + this.message, + this.statusCode, + this.headers, + }) : super._(); + @override + InvalidSourceException rebuild( + void Function(InvalidSourceExceptionBuilder) updates, + ) => (toBuilder()..update(updates)).build(); + + @override + InvalidSourceExceptionBuilder toBuilder() => + InvalidSourceExceptionBuilder()..replace(this); + + @override + bool operator ==(Object other) { + if (identical(other, this)) return true; + return other is InvalidSourceException && + code == other.code && + message == other.message; + } + + @override + int get hashCode { + var _$hash = 0; + _$hash = $jc(_$hash, code.hashCode); + _$hash = $jc(_$hash, message.hashCode); + _$hash = $jf(_$hash); + return _$hash; + } +} + +class InvalidSourceExceptionBuilder + implements Builder { + _$InvalidSourceException? _$v; + + String? _code; + String? get code => _$this._code; + set code(String? code) => _$this._code = code; + + String? _message; + String? get message => _$this._message; + set message(String? message) => _$this._message = message; + + int? _statusCode; + int? get statusCode => _$this._statusCode; + set statusCode(int? statusCode) => _$this._statusCode = statusCode; + + Map? _headers; + Map? get headers => _$this._headers; + set headers(Map? headers) => _$this._headers = headers; + + InvalidSourceExceptionBuilder(); + + InvalidSourceExceptionBuilder get _$this { + final $v = _$v; + if ($v != null) { + _code = $v.code; + _message = $v.message; + _statusCode = $v.statusCode; + _headers = $v.headers; + _$v = null; + } + return this; + } + + @override + void replace(InvalidSourceException other) { + _$v = other as _$InvalidSourceException; + } + + @override + void update(void Function(InvalidSourceExceptionBuilder)? 
updates) { + if (updates != null) updates(this); + } + + @override + InvalidSourceException build() => _build(); + + _$InvalidSourceException _build() { + final _$result = + _$v ?? + _$InvalidSourceException._( + code: code, + message: message, + statusCode: statusCode, + headers: headers, + ); + replace(_$result); + return _$result; + } +} + +// ignore_for_file: deprecated_member_use_from_same_package,type=lint diff --git a/packages/kinesis/amplify_firehose_dart/lib/src/sdk/src/firehose/model/put_record_batch_input.dart b/packages/kinesis/amplify_firehose_dart/lib/src/sdk/src/firehose/model/put_record_batch_input.dart new file mode 100644 index 00000000000..a985e3f9fc4 --- /dev/null +++ b/packages/kinesis/amplify_firehose_dart/lib/src/sdk/src/firehose/model/put_record_batch_input.dart @@ -0,0 +1,142 @@ +// Generated with smithy-dart 0.3.2. DO NOT MODIFY. +// ignore_for_file: avoid_unused_constructor_parameters,deprecated_member_use_from_same_package,non_constant_identifier_names,unnecessary_library_name + +library amplify_firehose_dart.firehose.model.put_record_batch_input; // ignore_for_file: no_leading_underscores_for_library_prefixes + +import 'package:amplify_firehose_dart/src/sdk/src/firehose/model/record.dart'; +import 'package:aws_common/aws_common.dart' as _i2; +import 'package:built_collection/built_collection.dart' as _i3; +import 'package:built_value/built_value.dart'; +import 'package:built_value/serializer.dart'; +import 'package:smithy/smithy.dart' as _i1; + +part 'put_record_batch_input.g.dart'; + +abstract class PutRecordBatchInput + with + _i1.HttpInput, + _i2.AWSEquatable + implements Built { + factory PutRecordBatchInput({ + required String deliveryStreamName, + required List records, + }) { + return _$PutRecordBatchInput._( + deliveryStreamName: deliveryStreamName, + records: _i3.BuiltList(records), + ); + } + + factory PutRecordBatchInput.build([ + void Function(PutRecordBatchInputBuilder) updates, + ]) = _$PutRecordBatchInput; + + const 
PutRecordBatchInput._(); + + factory PutRecordBatchInput.fromRequest( + PutRecordBatchInput payload, + _i2.AWSBaseHttpRequest request, { + Map labels = const {}, + }) => payload; + + static const List<_i1.SmithySerializer> serializers = [ + PutRecordBatchInputAwsJson11Serializer(), + ]; + + /// The name of the Firehose stream. + String get deliveryStreamName; + + /// One or more records. + _i3.BuiltList get records; + @override + PutRecordBatchInput getPayload() => this; + + @override + List get props => [deliveryStreamName, records]; + + @override + String toString() { + final helper = newBuiltValueToStringHelper('PutRecordBatchInput') + ..add('deliveryStreamName', deliveryStreamName) + ..add('records', records); + return helper.toString(); + } +} + +class PutRecordBatchInputAwsJson11Serializer + extends _i1.StructuredSmithySerializer { + const PutRecordBatchInputAwsJson11Serializer() : super('PutRecordBatchInput'); + + @override + Iterable get types => const [ + PutRecordBatchInput, + _$PutRecordBatchInput, + ]; + + @override + Iterable<_i1.ShapeId> get supportedProtocols => const [ + _i1.ShapeId(namespace: 'aws.protocols', shape: 'awsJson1_1'), + ]; + + @override + PutRecordBatchInput deserialize( + Serializers serializers, + Iterable serialized, { + FullType specifiedType = FullType.unspecified, + }) { + final result = PutRecordBatchInputBuilder(); + final iterator = serialized.iterator; + while (iterator.moveNext()) { + final key = iterator.current as String; + iterator.moveNext(); + final value = iterator.current; + if (value == null) { + continue; + } + switch (key) { + case 'DeliveryStreamName': + result.deliveryStreamName = + (serializers.deserialize( + value, + specifiedType: const FullType(String), + ) + as String); + case 'Records': + result.records.replace( + (serializers.deserialize( + value, + specifiedType: const FullType(_i3.BuiltList, [ + FullType(Record), + ]), + ) + as _i3.BuiltList), + ); + } + } + + return result.build(); + } + + @override + 
Iterable serialize( + Serializers serializers, + PutRecordBatchInput object, { + FullType specifiedType = FullType.unspecified, + }) { + final result$ = []; + final PutRecordBatchInput(:deliveryStreamName, :records) = object; + result$.addAll([ + 'DeliveryStreamName', + serializers.serialize( + deliveryStreamName, + specifiedType: const FullType(String), + ), + 'Records', + serializers.serialize( + records, + specifiedType: const FullType(_i3.BuiltList, [FullType(Record)]), + ), + ]); + return result$; + } +} diff --git a/packages/kinesis/amplify_firehose_dart/lib/src/sdk/src/firehose/model/put_record_batch_input.g.dart b/packages/kinesis/amplify_firehose_dart/lib/src/sdk/src/firehose/model/put_record_batch_input.g.dart new file mode 100644 index 00000000000..563cece71d1 --- /dev/null +++ b/packages/kinesis/amplify_firehose_dart/lib/src/sdk/src/firehose/model/put_record_batch_input.g.dart @@ -0,0 +1,121 @@ +// GENERATED CODE - DO NOT MODIFY BY HAND + +part of 'put_record_batch_input.dart'; + +// ************************************************************************** +// BuiltValueGenerator +// ************************************************************************** + +class _$PutRecordBatchInput extends PutRecordBatchInput { + @override + final String deliveryStreamName; + @override + final _i3.BuiltList records; + + factory _$PutRecordBatchInput([ + void Function(PutRecordBatchInputBuilder)? 
updates, + ]) => (PutRecordBatchInputBuilder()..update(updates))._build(); + + _$PutRecordBatchInput._({ + required this.deliveryStreamName, + required this.records, + }) : super._(); + @override + PutRecordBatchInput rebuild( + void Function(PutRecordBatchInputBuilder) updates, + ) => (toBuilder()..update(updates)).build(); + + @override + PutRecordBatchInputBuilder toBuilder() => + PutRecordBatchInputBuilder()..replace(this); + + @override + bool operator ==(Object other) { + if (identical(other, this)) return true; + return other is PutRecordBatchInput && + deliveryStreamName == other.deliveryStreamName && + records == other.records; + } + + @override + int get hashCode { + var _$hash = 0; + _$hash = $jc(_$hash, deliveryStreamName.hashCode); + _$hash = $jc(_$hash, records.hashCode); + _$hash = $jf(_$hash); + return _$hash; + } +} + +class PutRecordBatchInputBuilder + implements Builder { + _$PutRecordBatchInput? _$v; + + String? _deliveryStreamName; + String? get deliveryStreamName => _$this._deliveryStreamName; + set deliveryStreamName(String? deliveryStreamName) => + _$this._deliveryStreamName = deliveryStreamName; + + _i3.ListBuilder? _records; + _i3.ListBuilder get records => + _$this._records ??= _i3.ListBuilder(); + set records(_i3.ListBuilder? records) => _$this._records = records; + + PutRecordBatchInputBuilder(); + + PutRecordBatchInputBuilder get _$this { + final $v = _$v; + if ($v != null) { + _deliveryStreamName = $v.deliveryStreamName; + _records = $v.records.toBuilder(); + _$v = null; + } + return this; + } + + @override + void replace(PutRecordBatchInput other) { + _$v = other as _$PutRecordBatchInput; + } + + @override + void update(void Function(PutRecordBatchInputBuilder)? updates) { + if (updates != null) updates(this); + } + + @override + PutRecordBatchInput build() => _build(); + + _$PutRecordBatchInput _build() { + _$PutRecordBatchInput _$result; + try { + _$result = + _$v ?? 
+ _$PutRecordBatchInput._( + deliveryStreamName: BuiltValueNullFieldError.checkNotNull( + deliveryStreamName, + r'PutRecordBatchInput', + 'deliveryStreamName', + ), + records: records.build(), + ); + } catch (_) { + late String _$failedField; + try { + _$failedField = 'records'; + records.build(); + } catch (e) { + throw BuiltValueNestedFieldError( + r'PutRecordBatchInput', + _$failedField, + e.toString(), + ); + } + rethrow; + } + replace(_$result); + return _$result; + } +} + +// ignore_for_file: deprecated_member_use_from_same_package,type=lint diff --git a/packages/kinesis/amplify_firehose_dart/lib/src/sdk/src/firehose/model/put_record_batch_output.dart b/packages/kinesis/amplify_firehose_dart/lib/src/sdk/src/firehose/model/put_record_batch_output.dart new file mode 100644 index 00000000000..8d12465c69d --- /dev/null +++ b/packages/kinesis/amplify_firehose_dart/lib/src/sdk/src/firehose/model/put_record_batch_output.dart @@ -0,0 +1,158 @@ +// Generated with smithy-dart 0.3.2. DO NOT MODIFY. +// ignore_for_file: avoid_unused_constructor_parameters,deprecated_member_use_from_same_package,non_constant_identifier_names,unnecessary_library_name + +library amplify_firehose_dart.firehose.model.put_record_batch_output; // ignore_for_file: no_leading_underscores_for_library_prefixes + +import 'package:amplify_firehose_dart/src/sdk/src/firehose/model/put_record_batch_response_entry.dart'; +import 'package:aws_common/aws_common.dart' as _i1; +import 'package:built_collection/built_collection.dart' as _i2; +import 'package:built_value/built_value.dart'; +import 'package:built_value/serializer.dart'; +import 'package:smithy/smithy.dart' as _i3; + +part 'put_record_batch_output.g.dart'; + +abstract class PutRecordBatchOutput + with _i1.AWSEquatable + implements Built { + factory PutRecordBatchOutput({ + required int failedPutCount, + bool? 
encrypted, + required List requestResponses, + }) { + return _$PutRecordBatchOutput._( + failedPutCount: failedPutCount, + encrypted: encrypted, + requestResponses: _i2.BuiltList(requestResponses), + ); + } + + factory PutRecordBatchOutput.build([ + void Function(PutRecordBatchOutputBuilder) updates, + ]) = _$PutRecordBatchOutput; + + const PutRecordBatchOutput._(); + + /// Constructs a [PutRecordBatchOutput] from a [payload] and [response]. + factory PutRecordBatchOutput.fromResponse( + PutRecordBatchOutput payload, + _i1.AWSBaseHttpResponse response, + ) => payload; + + static const List<_i3.SmithySerializer> serializers = [ + PutRecordBatchOutputAwsJson11Serializer(), + ]; + + /// The number of records that might have failed processing. This number might be greater than 0 even if the PutRecordBatch call succeeds. Check `FailedPutCount` to determine whether there are records that you need to resend. + int get failedPutCount; + + /// Indicates whether server-side encryption (SSE) was enabled during this operation. + bool? get encrypted; + + /// The results array. For each record, the index of the response element is the same as the index used in the request array. 
+ _i2.BuiltList get requestResponses; + @override + List get props => [failedPutCount, encrypted, requestResponses]; + + @override + String toString() { + final helper = newBuiltValueToStringHelper('PutRecordBatchOutput') + ..add('failedPutCount', failedPutCount) + ..add('encrypted', encrypted) + ..add('requestResponses', requestResponses); + return helper.toString(); + } +} + +class PutRecordBatchOutputAwsJson11Serializer + extends _i3.StructuredSmithySerializer { + const PutRecordBatchOutputAwsJson11Serializer() + : super('PutRecordBatchOutput'); + + @override + Iterable get types => const [ + PutRecordBatchOutput, + _$PutRecordBatchOutput, + ]; + + @override + Iterable<_i3.ShapeId> get supportedProtocols => const [ + _i3.ShapeId(namespace: 'aws.protocols', shape: 'awsJson1_1'), + ]; + + @override + PutRecordBatchOutput deserialize( + Serializers serializers, + Iterable serialized, { + FullType specifiedType = FullType.unspecified, + }) { + final result = PutRecordBatchOutputBuilder(); + final iterator = serialized.iterator; + while (iterator.moveNext()) { + final key = iterator.current as String; + iterator.moveNext(); + final value = iterator.current; + if (value == null) { + continue; + } + switch (key) { + case 'FailedPutCount': + result.failedPutCount = + (serializers.deserialize( + value, + specifiedType: const FullType(int), + ) + as int); + case 'Encrypted': + result.encrypted = + (serializers.deserialize( + value, + specifiedType: const FullType(bool), + ) + as bool); + case 'RequestResponses': + result.requestResponses.replace( + (serializers.deserialize( + value, + specifiedType: const FullType(_i2.BuiltList, [ + FullType(PutRecordBatchResponseEntry), + ]), + ) + as _i2.BuiltList), + ); + } + } + + return result.build(); + } + + @override + Iterable serialize( + Serializers serializers, + PutRecordBatchOutput object, { + FullType specifiedType = FullType.unspecified, + }) { + final result$ = []; + final PutRecordBatchOutput(:failedPutCount, :encrypted, 
:requestResponses) = + object; + result$.addAll([ + 'FailedPutCount', + serializers.serialize(failedPutCount, specifiedType: const FullType(int)), + 'RequestResponses', + serializers.serialize( + requestResponses, + specifiedType: const FullType(_i2.BuiltList, [ + FullType(PutRecordBatchResponseEntry), + ]), + ), + ]); + if (encrypted != null) { + result$ + ..add('Encrypted') + ..add( + serializers.serialize(encrypted, specifiedType: const FullType(bool)), + ); + } + return result$; + } +} diff --git a/packages/kinesis/amplify_firehose_dart/lib/src/sdk/src/firehose/model/put_record_batch_output.g.dart b/packages/kinesis/amplify_firehose_dart/lib/src/sdk/src/firehose/model/put_record_batch_output.g.dart new file mode 100644 index 00000000000..ca705a46b70 --- /dev/null +++ b/packages/kinesis/amplify_firehose_dart/lib/src/sdk/src/firehose/model/put_record_batch_output.g.dart @@ -0,0 +1,135 @@ +// GENERATED CODE - DO NOT MODIFY BY HAND + +part of 'put_record_batch_output.dart'; + +// ************************************************************************** +// BuiltValueGenerator +// ************************************************************************** + +class _$PutRecordBatchOutput extends PutRecordBatchOutput { + @override + final int failedPutCount; + @override + final bool? encrypted; + @override + final _i2.BuiltList requestResponses; + + factory _$PutRecordBatchOutput([ + void Function(PutRecordBatchOutputBuilder)? 
updates, + ]) => (PutRecordBatchOutputBuilder()..update(updates))._build(); + + _$PutRecordBatchOutput._({ + required this.failedPutCount, + this.encrypted, + required this.requestResponses, + }) : super._(); + @override + PutRecordBatchOutput rebuild( + void Function(PutRecordBatchOutputBuilder) updates, + ) => (toBuilder()..update(updates)).build(); + + @override + PutRecordBatchOutputBuilder toBuilder() => + PutRecordBatchOutputBuilder()..replace(this); + + @override + bool operator ==(Object other) { + if (identical(other, this)) return true; + return other is PutRecordBatchOutput && + failedPutCount == other.failedPutCount && + encrypted == other.encrypted && + requestResponses == other.requestResponses; + } + + @override + int get hashCode { + var _$hash = 0; + _$hash = $jc(_$hash, failedPutCount.hashCode); + _$hash = $jc(_$hash, encrypted.hashCode); + _$hash = $jc(_$hash, requestResponses.hashCode); + _$hash = $jf(_$hash); + return _$hash; + } +} + +class PutRecordBatchOutputBuilder + implements Builder { + _$PutRecordBatchOutput? _$v; + + int? _failedPutCount; + int? get failedPutCount => _$this._failedPutCount; + set failedPutCount(int? failedPutCount) => + _$this._failedPutCount = failedPutCount; + + bool? _encrypted; + bool? get encrypted => _$this._encrypted; + set encrypted(bool? encrypted) => _$this._encrypted = encrypted; + + _i2.ListBuilder? _requestResponses; + _i2.ListBuilder get requestResponses => + _$this._requestResponses ??= + _i2.ListBuilder(); + set requestResponses( + _i2.ListBuilder? 
requestResponses, + ) => _$this._requestResponses = requestResponses; + + PutRecordBatchOutputBuilder(); + + PutRecordBatchOutputBuilder get _$this { + final $v = _$v; + if ($v != null) { + _failedPutCount = $v.failedPutCount; + _encrypted = $v.encrypted; + _requestResponses = $v.requestResponses.toBuilder(); + _$v = null; + } + return this; + } + + @override + void replace(PutRecordBatchOutput other) { + _$v = other as _$PutRecordBatchOutput; + } + + @override + void update(void Function(PutRecordBatchOutputBuilder)? updates) { + if (updates != null) updates(this); + } + + @override + PutRecordBatchOutput build() => _build(); + + _$PutRecordBatchOutput _build() { + _$PutRecordBatchOutput _$result; + try { + _$result = + _$v ?? + _$PutRecordBatchOutput._( + failedPutCount: BuiltValueNullFieldError.checkNotNull( + failedPutCount, + r'PutRecordBatchOutput', + 'failedPutCount', + ), + encrypted: encrypted, + requestResponses: requestResponses.build(), + ); + } catch (_) { + late String _$failedField; + try { + _$failedField = 'requestResponses'; + requestResponses.build(); + } catch (e) { + throw BuiltValueNestedFieldError( + r'PutRecordBatchOutput', + _$failedField, + e.toString(), + ); + } + rethrow; + } + replace(_$result); + return _$result; + } +} + +// ignore_for_file: deprecated_member_use_from_same_package,type=lint diff --git a/packages/kinesis/amplify_firehose_dart/lib/src/sdk/src/firehose/model/put_record_batch_response_entry.dart b/packages/kinesis/amplify_firehose_dart/lib/src/sdk/src/firehose/model/put_record_batch_response_entry.dart new file mode 100644 index 00000000000..18d99feea8f --- /dev/null +++ b/packages/kinesis/amplify_firehose_dart/lib/src/sdk/src/firehose/model/put_record_batch_response_entry.dart @@ -0,0 +1,162 @@ +// Generated with smithy-dart 0.3.2. DO NOT MODIFY. 
+// ignore_for_file: avoid_unused_constructor_parameters,deprecated_member_use_from_same_package,non_constant_identifier_names,unnecessary_library_name + +library amplify_firehose_dart.firehose.model.put_record_batch_response_entry; // ignore_for_file: no_leading_underscores_for_library_prefixes + +import 'package:aws_common/aws_common.dart' as _i1; +import 'package:built_value/built_value.dart'; +import 'package:built_value/serializer.dart'; +import 'package:smithy/smithy.dart' as _i2; + +part 'put_record_batch_response_entry.g.dart'; + +/// Contains the result for an individual record from a PutRecordBatch request. If the record is successfully added to your Firehose stream, it receives a record ID. If the record fails to be added to your Firehose stream, the result includes an error code and an error message. +abstract class PutRecordBatchResponseEntry + with _i1.AWSEquatable + implements + Built { + /// Contains the result for an individual record from a PutRecordBatch request. If the record is successfully added to your Firehose stream, it receives a record ID. If the record fails to be added to your Firehose stream, the result includes an error code and an error message. + factory PutRecordBatchResponseEntry({ + String? recordId, + String? errorCode, + String? errorMessage, + }) { + return _$PutRecordBatchResponseEntry._( + recordId: recordId, + errorCode: errorCode, + errorMessage: errorMessage, + ); + } + + /// Contains the result for an individual record from a PutRecordBatch request. If the record is successfully added to your Firehose stream, it receives a record ID. If the record fails to be added to your Firehose stream, the result includes an error code and an error message. 
+ factory PutRecordBatchResponseEntry.build([ + void Function(PutRecordBatchResponseEntryBuilder) updates, + ]) = _$PutRecordBatchResponseEntry; + + const PutRecordBatchResponseEntry._(); + + static const List<_i2.SmithySerializer> + serializers = [PutRecordBatchResponseEntryAwsJson11Serializer()]; + + /// The ID of the record. + String? get recordId; + + /// The error code for an individual record result. + String? get errorCode; + + /// The error message for an individual record result. + String? get errorMessage; + @override + List get props => [recordId, errorCode, errorMessage]; + + @override + String toString() { + final helper = newBuiltValueToStringHelper('PutRecordBatchResponseEntry') + ..add('recordId', recordId) + ..add('errorCode', errorCode) + ..add('errorMessage', errorMessage); + return helper.toString(); + } +} + +class PutRecordBatchResponseEntryAwsJson11Serializer + extends _i2.StructuredSmithySerializer { + const PutRecordBatchResponseEntryAwsJson11Serializer() + : super('PutRecordBatchResponseEntry'); + + @override + Iterable get types => const [ + PutRecordBatchResponseEntry, + _$PutRecordBatchResponseEntry, + ]; + + @override + Iterable<_i2.ShapeId> get supportedProtocols => const [ + _i2.ShapeId(namespace: 'aws.protocols', shape: 'awsJson1_1'), + ]; + + @override + PutRecordBatchResponseEntry deserialize( + Serializers serializers, + Iterable serialized, { + FullType specifiedType = FullType.unspecified, + }) { + final result = PutRecordBatchResponseEntryBuilder(); + final iterator = serialized.iterator; + while (iterator.moveNext()) { + final key = iterator.current as String; + iterator.moveNext(); + final value = iterator.current; + if (value == null) { + continue; + } + switch (key) { + case 'RecordId': + result.recordId = + (serializers.deserialize( + value, + specifiedType: const FullType(String), + ) + as String); + case 'ErrorCode': + result.errorCode = + (serializers.deserialize( + value, + specifiedType: const FullType(String), + ) + 
as String); + case 'ErrorMessage': + result.errorMessage = + (serializers.deserialize( + value, + specifiedType: const FullType(String), + ) + as String); + } + } + + return result.build(); + } + + @override + Iterable serialize( + Serializers serializers, + PutRecordBatchResponseEntry object, { + FullType specifiedType = FullType.unspecified, + }) { + final result$ = []; + final PutRecordBatchResponseEntry(:recordId, :errorCode, :errorMessage) = + object; + if (recordId != null) { + result$ + ..add('RecordId') + ..add( + serializers.serialize( + recordId, + specifiedType: const FullType(String), + ), + ); + } + if (errorCode != null) { + result$ + ..add('ErrorCode') + ..add( + serializers.serialize( + errorCode, + specifiedType: const FullType(String), + ), + ); + } + if (errorMessage != null) { + result$ + ..add('ErrorMessage') + ..add( + serializers.serialize( + errorMessage, + specifiedType: const FullType(String), + ), + ); + } + return result$; + } +} diff --git a/packages/kinesis/amplify_firehose_dart/lib/src/sdk/src/firehose/model/put_record_batch_response_entry.g.dart b/packages/kinesis/amplify_firehose_dart/lib/src/sdk/src/firehose/model/put_record_batch_response_entry.g.dart new file mode 100644 index 00000000000..075c5a7badd --- /dev/null +++ b/packages/kinesis/amplify_firehose_dart/lib/src/sdk/src/firehose/model/put_record_batch_response_entry.g.dart @@ -0,0 +1,114 @@ +// GENERATED CODE - DO NOT MODIFY BY HAND + +part of 'put_record_batch_response_entry.dart'; + +// ************************************************************************** +// BuiltValueGenerator +// ************************************************************************** + +class _$PutRecordBatchResponseEntry extends PutRecordBatchResponseEntry { + @override + final String? recordId; + @override + final String? errorCode; + @override + final String? errorMessage; + + factory _$PutRecordBatchResponseEntry([ + void Function(PutRecordBatchResponseEntryBuilder)? 
updates, + ]) => (PutRecordBatchResponseEntryBuilder()..update(updates))._build(); + + _$PutRecordBatchResponseEntry._({ + this.recordId, + this.errorCode, + this.errorMessage, + }) : super._(); + @override + PutRecordBatchResponseEntry rebuild( + void Function(PutRecordBatchResponseEntryBuilder) updates, + ) => (toBuilder()..update(updates)).build(); + + @override + PutRecordBatchResponseEntryBuilder toBuilder() => + PutRecordBatchResponseEntryBuilder()..replace(this); + + @override + bool operator ==(Object other) { + if (identical(other, this)) return true; + return other is PutRecordBatchResponseEntry && + recordId == other.recordId && + errorCode == other.errorCode && + errorMessage == other.errorMessage; + } + + @override + int get hashCode { + var _$hash = 0; + _$hash = $jc(_$hash, recordId.hashCode); + _$hash = $jc(_$hash, errorCode.hashCode); + _$hash = $jc(_$hash, errorMessage.hashCode); + _$hash = $jf(_$hash); + return _$hash; + } +} + +class PutRecordBatchResponseEntryBuilder + implements + Builder< + PutRecordBatchResponseEntry, + PutRecordBatchResponseEntryBuilder + > { + _$PutRecordBatchResponseEntry? _$v; + + String? _recordId; + String? get recordId => _$this._recordId; + set recordId(String? recordId) => _$this._recordId = recordId; + + String? _errorCode; + String? get errorCode => _$this._errorCode; + set errorCode(String? errorCode) => _$this._errorCode = errorCode; + + String? _errorMessage; + String? get errorMessage => _$this._errorMessage; + set errorMessage(String? 
errorMessage) => _$this._errorMessage = errorMessage; + + PutRecordBatchResponseEntryBuilder(); + + PutRecordBatchResponseEntryBuilder get _$this { + final $v = _$v; + if ($v != null) { + _recordId = $v.recordId; + _errorCode = $v.errorCode; + _errorMessage = $v.errorMessage; + _$v = null; + } + return this; + } + + @override + void replace(PutRecordBatchResponseEntry other) { + _$v = other as _$PutRecordBatchResponseEntry; + } + + @override + void update(void Function(PutRecordBatchResponseEntryBuilder)? updates) { + if (updates != null) updates(this); + } + + @override + PutRecordBatchResponseEntry build() => _build(); + + _$PutRecordBatchResponseEntry _build() { + final _$result = + _$v ?? + _$PutRecordBatchResponseEntry._( + recordId: recordId, + errorCode: errorCode, + errorMessage: errorMessage, + ); + replace(_$result); + return _$result; + } +} + +// ignore_for_file: deprecated_member_use_from_same_package,type=lint diff --git a/packages/kinesis/amplify_firehose_dart/lib/src/sdk/src/firehose/model/record.dart b/packages/kinesis/amplify_firehose_dart/lib/src/sdk/src/firehose/model/record.dart new file mode 100644 index 00000000000..9209ae5799d --- /dev/null +++ b/packages/kinesis/amplify_firehose_dart/lib/src/sdk/src/firehose/model/record.dart @@ -0,0 +1,99 @@ +// Generated with smithy-dart 0.3.2. DO NOT MODIFY. +// ignore_for_file: avoid_unused_constructor_parameters,deprecated_member_use_from_same_package,non_constant_identifier_names,unnecessary_library_name + +library amplify_firehose_dart.firehose.model.record; // ignore_for_file: no_leading_underscores_for_library_prefixes + +import 'dart:typed_data' as _i2; + +import 'package:aws_common/aws_common.dart' as _i1; +import 'package:built_value/built_value.dart'; +import 'package:built_value/serializer.dart'; +import 'package:smithy/smithy.dart' as _i3; + +part 'record.g.dart'; + +/// The unit of data in a Firehose stream. 
+abstract class Record + with _i1.AWSEquatable + implements Built { + /// The unit of data in a Firehose stream. + factory Record({required _i2.Uint8List data}) { + return _$Record._(data: data); + } + + /// The unit of data in a Firehose stream. + factory Record.build([void Function(RecordBuilder) updates]) = _$Record; + + const Record._(); + + static const List<_i3.SmithySerializer> serializers = [ + RecordAwsJson11Serializer(), + ]; + + /// The data blob, which is base64-encoded when the blob is serialized. The maximum size of the data blob, before base64-encoding, is 1,000 KiB. + _i2.Uint8List get data; + @override + List get props => [data]; + + @override + String toString() { + final helper = newBuiltValueToStringHelper('Record')..add('data', data); + return helper.toString(); + } +} + +class RecordAwsJson11Serializer extends _i3.StructuredSmithySerializer { + const RecordAwsJson11Serializer() : super('Record'); + + @override + Iterable get types => const [Record, _$Record]; + + @override + Iterable<_i3.ShapeId> get supportedProtocols => const [ + _i3.ShapeId(namespace: 'aws.protocols', shape: 'awsJson1_1'), + ]; + + @override + Record deserialize( + Serializers serializers, + Iterable serialized, { + FullType specifiedType = FullType.unspecified, + }) { + final result = RecordBuilder(); + final iterator = serialized.iterator; + while (iterator.moveNext()) { + final key = iterator.current as String; + iterator.moveNext(); + final value = iterator.current; + if (value == null) { + continue; + } + switch (key) { + case 'Data': + result.data = + (serializers.deserialize( + value, + specifiedType: const FullType(_i2.Uint8List), + ) + as _i2.Uint8List); + } + } + + return result.build(); + } + + @override + Iterable serialize( + Serializers serializers, + Record object, { + FullType specifiedType = FullType.unspecified, + }) { + final result$ = []; + final Record(:data) = object; + result$.addAll([ + 'Data', + serializers.serialize(data, specifiedType: const 
FullType(_i2.Uint8List)), + ]); + return result$; + } +} diff --git a/packages/kinesis/amplify_firehose_dart/lib/src/sdk/src/firehose/model/record.g.dart b/packages/kinesis/amplify_firehose_dart/lib/src/sdk/src/firehose/model/record.g.dart new file mode 100644 index 00000000000..8d445d9de5d --- /dev/null +++ b/packages/kinesis/amplify_firehose_dart/lib/src/sdk/src/firehose/model/record.g.dart @@ -0,0 +1,81 @@ +// GENERATED CODE - DO NOT MODIFY BY HAND + +part of 'record.dart'; + +// ************************************************************************** +// BuiltValueGenerator +// ************************************************************************** + +class _$Record extends Record { + @override + final _i2.Uint8List data; + + factory _$Record([void Function(RecordBuilder)? updates]) => + (RecordBuilder()..update(updates))._build(); + + _$Record._({required this.data}) : super._(); + @override + Record rebuild(void Function(RecordBuilder) updates) => + (toBuilder()..update(updates)).build(); + + @override + RecordBuilder toBuilder() => RecordBuilder()..replace(this); + + @override + bool operator ==(Object other) { + if (identical(other, this)) return true; + return other is Record && data == other.data; + } + + @override + int get hashCode { + var _$hash = 0; + _$hash = $jc(_$hash, data.hashCode); + _$hash = $jf(_$hash); + return _$hash; + } +} + +class RecordBuilder implements Builder { + _$Record? _$v; + + _i2.Uint8List? _data; + _i2.Uint8List? get data => _$this._data; + set data(_i2.Uint8List? data) => _$this._data = data; + + RecordBuilder(); + + RecordBuilder get _$this { + final $v = _$v; + if ($v != null) { + _data = $v.data; + _$v = null; + } + return this; + } + + @override + void replace(Record other) { + _$v = other as _$Record; + } + + @override + void update(void Function(RecordBuilder)? updates) { + if (updates != null) updates(this); + } + + @override + Record build() => _build(); + + _$Record _build() { + final _$result = + _$v ?? 
+ _$Record._( + data: BuiltValueNullFieldError.checkNotNull(data, r'Record', 'data'), + ); + replace(_$result); + return _$result; + } +} + +// ignore_for_file: deprecated_member_use_from_same_package,type=lint diff --git a/packages/kinesis/amplify_firehose_dart/lib/src/sdk/src/firehose/model/resource_not_found_exception.dart b/packages/kinesis/amplify_firehose_dart/lib/src/sdk/src/firehose/model/resource_not_found_exception.dart new file mode 100644 index 00000000000..11c8a5c564c --- /dev/null +++ b/packages/kinesis/amplify_firehose_dart/lib/src/sdk/src/firehose/model/resource_not_found_exception.dart @@ -0,0 +1,137 @@ +// Generated with smithy-dart 0.3.2. DO NOT MODIFY. +// ignore_for_file: avoid_unused_constructor_parameters,deprecated_member_use_from_same_package,non_constant_identifier_names,unnecessary_library_name + +library amplify_firehose_dart.firehose.model.resource_not_found_exception; // ignore_for_file: no_leading_underscores_for_library_prefixes + +import 'package:aws_common/aws_common.dart' as _i1; +import 'package:built_value/built_value.dart'; +import 'package:built_value/serializer.dart'; +import 'package:smithy/smithy.dart' as _i2; + +part 'resource_not_found_exception.g.dart'; + +/// The specified resource could not be found. +abstract class ResourceNotFoundException + with _i1.AWSEquatable + implements + Built, + _i2.SmithyHttpException { + /// The specified resource could not be found. + factory ResourceNotFoundException({String? message}) { + return _$ResourceNotFoundException._(message: message); + } + + /// The specified resource could not be found. + factory ResourceNotFoundException.build([ + void Function(ResourceNotFoundExceptionBuilder) updates, + ]) = _$ResourceNotFoundException; + + const ResourceNotFoundException._(); + + /// Constructs a [ResourceNotFoundException] from a [payload] and [response]. 
+ factory ResourceNotFoundException.fromResponse( + ResourceNotFoundException payload, + _i1.AWSBaseHttpResponse response, + ) => payload.rebuild((b) { + b.statusCode = response.statusCode; + b.headers = response.headers; + }); + + static const List<_i2.SmithySerializer> + serializers = [ResourceNotFoundExceptionAwsJson11Serializer()]; + + /// A message that provides information about the error. + @override + String? get message; + @override + _i2.ShapeId get shapeId => const _i2.ShapeId( + namespace: 'com.amazonaws.firehose', + shape: 'ResourceNotFoundException', + ); + + @override + _i2.RetryConfig? get retryConfig => null; + + @override + @BuiltValueField(compare: false) + int? get statusCode; + @override + @BuiltValueField(compare: false) + Map? get headers; + @override + Exception? get underlyingException => null; + + @override + List get props => [message]; + + @override + String toString() { + final helper = newBuiltValueToStringHelper('ResourceNotFoundException') + ..add('message', message); + return helper.toString(); + } +} + +class ResourceNotFoundExceptionAwsJson11Serializer + extends _i2.StructuredSmithySerializer { + const ResourceNotFoundExceptionAwsJson11Serializer() + : super('ResourceNotFoundException'); + + @override + Iterable get types => const [ + ResourceNotFoundException, + _$ResourceNotFoundException, + ]; + + @override + Iterable<_i2.ShapeId> get supportedProtocols => const [ + _i2.ShapeId(namespace: 'aws.protocols', shape: 'awsJson1_1'), + ]; + + @override + ResourceNotFoundException deserialize( + Serializers serializers, + Iterable serialized, { + FullType specifiedType = FullType.unspecified, + }) { + final result = ResourceNotFoundExceptionBuilder(); + final iterator = serialized.iterator; + while (iterator.moveNext()) { + final key = iterator.current as String; + iterator.moveNext(); + final value = iterator.current; + if (value == null) { + continue; + } + switch (key) { + case 'message': + result.message = + 
(serializers.deserialize( + value, + specifiedType: const FullType(String), + ) + as String); + } + } + + return result.build(); + } + + @override + Iterable serialize( + Serializers serializers, + ResourceNotFoundException object, { + FullType specifiedType = FullType.unspecified, + }) { + final result$ = []; + final ResourceNotFoundException(:message) = object; + if (message != null) { + result$ + ..add('message') + ..add( + serializers.serialize(message, specifiedType: const FullType(String)), + ); + } + return result$; + } +} diff --git a/packages/kinesis/amplify_firehose_dart/lib/src/sdk/src/firehose/model/resource_not_found_exception.g.dart b/packages/kinesis/amplify_firehose_dart/lib/src/sdk/src/firehose/model/resource_not_found_exception.g.dart new file mode 100644 index 00000000000..8eae04a44de --- /dev/null +++ b/packages/kinesis/amplify_firehose_dart/lib/src/sdk/src/firehose/model/resource_not_found_exception.g.dart @@ -0,0 +1,103 @@ +// GENERATED CODE - DO NOT MODIFY BY HAND + +part of 'resource_not_found_exception.dart'; + +// ************************************************************************** +// BuiltValueGenerator +// ************************************************************************** + +class _$ResourceNotFoundException extends ResourceNotFoundException { + @override + final String? message; + @override + final int? statusCode; + @override + final Map? headers; + + factory _$ResourceNotFoundException([ + void Function(ResourceNotFoundExceptionBuilder)? 
updates, + ]) => (ResourceNotFoundExceptionBuilder()..update(updates))._build(); + + _$ResourceNotFoundException._({this.message, this.statusCode, this.headers}) + : super._(); + @override + ResourceNotFoundException rebuild( + void Function(ResourceNotFoundExceptionBuilder) updates, + ) => (toBuilder()..update(updates)).build(); + + @override + ResourceNotFoundExceptionBuilder toBuilder() => + ResourceNotFoundExceptionBuilder()..replace(this); + + @override + bool operator ==(Object other) { + if (identical(other, this)) return true; + return other is ResourceNotFoundException && message == other.message; + } + + @override + int get hashCode { + var _$hash = 0; + _$hash = $jc(_$hash, message.hashCode); + _$hash = $jf(_$hash); + return _$hash; + } +} + +class ResourceNotFoundExceptionBuilder + implements + Builder { + _$ResourceNotFoundException? _$v; + + String? _message; + String? get message => _$this._message; + set message(String? message) => _$this._message = message; + + int? _statusCode; + int? get statusCode => _$this._statusCode; + set statusCode(int? statusCode) => _$this._statusCode = statusCode; + + Map? _headers; + Map? get headers => _$this._headers; + set headers(Map? headers) => _$this._headers = headers; + + ResourceNotFoundExceptionBuilder(); + + ResourceNotFoundExceptionBuilder get _$this { + final $v = _$v; + if ($v != null) { + _message = $v.message; + _statusCode = $v.statusCode; + _headers = $v.headers; + _$v = null; + } + return this; + } + + @override + void replace(ResourceNotFoundException other) { + _$v = other as _$ResourceNotFoundException; + } + + @override + void update(void Function(ResourceNotFoundExceptionBuilder)? updates) { + if (updates != null) updates(this); + } + + @override + ResourceNotFoundException build() => _build(); + + _$ResourceNotFoundException _build() { + final _$result = + _$v ?? 
+ _$ResourceNotFoundException._( + message: message, + statusCode: statusCode, + headers: headers, + ); + replace(_$result); + return _$result; + } +} + +// ignore_for_file: deprecated_member_use_from_same_package,type=lint diff --git a/packages/kinesis/amplify_firehose_dart/lib/src/sdk/src/firehose/model/service_unavailable_exception.dart b/packages/kinesis/amplify_firehose_dart/lib/src/sdk/src/firehose/model/service_unavailable_exception.dart new file mode 100644 index 00000000000..603a0368824 --- /dev/null +++ b/packages/kinesis/amplify_firehose_dart/lib/src/sdk/src/firehose/model/service_unavailable_exception.dart @@ -0,0 +1,137 @@ +// Generated with smithy-dart 0.3.2. DO NOT MODIFY. +// ignore_for_file: avoid_unused_constructor_parameters,deprecated_member_use_from_same_package,non_constant_identifier_names,unnecessary_library_name + +library amplify_firehose_dart.firehose.model.service_unavailable_exception; // ignore_for_file: no_leading_underscores_for_library_prefixes + +import 'package:aws_common/aws_common.dart' as _i1; +import 'package:built_value/built_value.dart'; +import 'package:built_value/serializer.dart'; +import 'package:smithy/smithy.dart' as _i2; + +part 'service_unavailable_exception.g.dart'; + +/// The service is unavailable. Back off and retry the operation. If you continue to see the exception, throughput limits for the Firehose stream may have been exceeded. For more information about limits and how to request an increase, see [Amazon Firehose Limits](https://docs.aws.amazon.com/firehose/latest/dev/limits.html). +abstract class ServiceUnavailableException + with _i1.AWSEquatable + implements + Built, + _i2.SmithyHttpException { + /// The service is unavailable. Back off and retry the operation. If you continue to see the exception, throughput limits for the Firehose stream may have been exceeded. 
For more information about limits and how to request an increase, see [Amazon Firehose Limits](https://docs.aws.amazon.com/firehose/latest/dev/limits.html). + factory ServiceUnavailableException({String? message}) { + return _$ServiceUnavailableException._(message: message); + } + + /// The service is unavailable. Back off and retry the operation. If you continue to see the exception, throughput limits for the Firehose stream may have been exceeded. For more information about limits and how to request an increase, see [Amazon Firehose Limits](https://docs.aws.amazon.com/firehose/latest/dev/limits.html). + factory ServiceUnavailableException.build([ + void Function(ServiceUnavailableExceptionBuilder) updates, + ]) = _$ServiceUnavailableException; + + const ServiceUnavailableException._(); + + /// Constructs a [ServiceUnavailableException] from a [payload] and [response]. + factory ServiceUnavailableException.fromResponse( + ServiceUnavailableException payload, + _i1.AWSBaseHttpResponse response, + ) => payload.rebuild((b) { + b.headers = response.headers; + }); + + static const List<_i2.SmithySerializer> + serializers = [ServiceUnavailableExceptionAwsJson11Serializer()]; + + /// A message that provides information about the error. + @override + String? get message; + @override + _i2.ShapeId get shapeId => const _i2.ShapeId( + namespace: 'com.amazonaws.firehose', + shape: 'ServiceUnavailableException', + ); + + @override + _i2.RetryConfig? get retryConfig => null; + + @override + @BuiltValueField(compare: false) + int get statusCode => 503; + + @override + @BuiltValueField(compare: false) + Map? get headers; + @override + Exception? 
get underlyingException => null; + + @override + List get props => [message]; + + @override + String toString() { + final helper = newBuiltValueToStringHelper('ServiceUnavailableException') + ..add('message', message); + return helper.toString(); + } +} + +class ServiceUnavailableExceptionAwsJson11Serializer + extends _i2.StructuredSmithySerializer { + const ServiceUnavailableExceptionAwsJson11Serializer() + : super('ServiceUnavailableException'); + + @override + Iterable get types => const [ + ServiceUnavailableException, + _$ServiceUnavailableException, + ]; + + @override + Iterable<_i2.ShapeId> get supportedProtocols => const [ + _i2.ShapeId(namespace: 'aws.protocols', shape: 'awsJson1_1'), + ]; + + @override + ServiceUnavailableException deserialize( + Serializers serializers, + Iterable serialized, { + FullType specifiedType = FullType.unspecified, + }) { + final result = ServiceUnavailableExceptionBuilder(); + final iterator = serialized.iterator; + while (iterator.moveNext()) { + final key = iterator.current as String; + iterator.moveNext(); + final value = iterator.current; + if (value == null) { + continue; + } + switch (key) { + case 'message': + result.message = + (serializers.deserialize( + value, + specifiedType: const FullType(String), + ) + as String); + } + } + + return result.build(); + } + + @override + Iterable serialize( + Serializers serializers, + ServiceUnavailableException object, { + FullType specifiedType = FullType.unspecified, + }) { + final result$ = []; + final ServiceUnavailableException(:message) = object; + if (message != null) { + result$ + ..add('message') + ..add( + serializers.serialize(message, specifiedType: const FullType(String)), + ); + } + return result$; + } +} diff --git a/packages/kinesis/amplify_firehose_dart/lib/src/sdk/src/firehose/model/service_unavailable_exception.g.dart b/packages/kinesis/amplify_firehose_dart/lib/src/sdk/src/firehose/model/service_unavailable_exception.g.dart new file mode 100644 index 
00000000000..e6ba9c5fb36 --- /dev/null +++ b/packages/kinesis/amplify_firehose_dart/lib/src/sdk/src/firehose/model/service_unavailable_exception.g.dart @@ -0,0 +1,94 @@ +// GENERATED CODE - DO NOT MODIFY BY HAND + +part of 'service_unavailable_exception.dart'; + +// ************************************************************************** +// BuiltValueGenerator +// ************************************************************************** + +class _$ServiceUnavailableException extends ServiceUnavailableException { + @override + final String? message; + @override + final Map? headers; + + factory _$ServiceUnavailableException([ + void Function(ServiceUnavailableExceptionBuilder)? updates, + ]) => (ServiceUnavailableExceptionBuilder()..update(updates))._build(); + + _$ServiceUnavailableException._({this.message, this.headers}) : super._(); + @override + ServiceUnavailableException rebuild( + void Function(ServiceUnavailableExceptionBuilder) updates, + ) => (toBuilder()..update(updates)).build(); + + @override + ServiceUnavailableExceptionBuilder toBuilder() => + ServiceUnavailableExceptionBuilder()..replace(this); + + @override + bool operator ==(Object other) { + if (identical(other, this)) return true; + return other is ServiceUnavailableException && message == other.message; + } + + @override + int get hashCode { + var _$hash = 0; + _$hash = $jc(_$hash, message.hashCode); + _$hash = $jf(_$hash); + return _$hash; + } +} + +class ServiceUnavailableExceptionBuilder + implements + Builder< + ServiceUnavailableException, + ServiceUnavailableExceptionBuilder + > { + _$ServiceUnavailableException? _$v; + + String? _message; + String? get message => _$this._message; + set message(String? message) => _$this._message = message; + + Map? _headers; + Map? get headers => _$this._headers; + set headers(Map? 
headers) => _$this._headers = headers; + + ServiceUnavailableExceptionBuilder(); + + ServiceUnavailableExceptionBuilder get _$this { + final $v = _$v; + if ($v != null) { + _message = $v.message; + _headers = $v.headers; + _$v = null; + } + return this; + } + + @override + void replace(ServiceUnavailableException other) { + _$v = other as _$ServiceUnavailableException; + } + + @override + void update(void Function(ServiceUnavailableExceptionBuilder)? updates) { + if (updates != null) updates(this); + } + + @override + ServiceUnavailableException build() => _build(); + + _$ServiceUnavailableException _build() { + final _$result = + _$v ?? + _$ServiceUnavailableException._(message: message, headers: headers); + replace(_$result); + return _$result; + } +} + +// ignore_for_file: deprecated_member_use_from_same_package,type=lint diff --git a/packages/kinesis/amplify_firehose_dart/lib/src/sdk/src/firehose/operation/put_record_batch_operation.dart b/packages/kinesis/amplify_firehose_dart/lib/src/sdk/src/firehose/operation/put_record_batch_operation.dart new file mode 100644 index 00000000000..26c47466758 --- /dev/null +++ b/packages/kinesis/amplify_firehose_dart/lib/src/sdk/src/firehose/operation/put_record_batch_operation.dart @@ -0,0 +1,240 @@ +// Generated with smithy-dart 0.3.2. DO NOT MODIFY. 
+// ignore_for_file: avoid_unused_constructor_parameters,deprecated_member_use_from_same_package,non_constant_identifier_names,unnecessary_library_name + +library amplify_firehose_dart.firehose.operation.put_record_batch_operation; // ignore_for_file: no_leading_underscores_for_library_prefixes + +import 'dart:async' as _i5; + +import 'package:amplify_firehose_dart/src/sdk/src/firehose/common/endpoint_resolver.dart'; +import 'package:amplify_firehose_dart/src/sdk/src/firehose/common/serializers.dart'; +import 'package:amplify_firehose_dart/src/sdk/src/firehose/model/invalid_argument_exception.dart'; +import 'package:amplify_firehose_dart/src/sdk/src/firehose/model/invalid_kms_resource_exception.dart'; +import 'package:amplify_firehose_dart/src/sdk/src/firehose/model/invalid_source_exception.dart'; +import 'package:amplify_firehose_dart/src/sdk/src/firehose/model/put_record_batch_input.dart'; +import 'package:amplify_firehose_dart/src/sdk/src/firehose/model/put_record_batch_output.dart'; +import 'package:amplify_firehose_dart/src/sdk/src/firehose/model/resource_not_found_exception.dart'; +import 'package:amplify_firehose_dart/src/sdk/src/firehose/model/service_unavailable_exception.dart'; +import 'package:aws_common/aws_common.dart' as _i4; +import 'package:aws_signature_v4/aws_signature_v4.dart' as _i2; +import 'package:smithy/smithy.dart' as _i1; +import 'package:smithy_aws/smithy_aws.dart' as _i3; + +/// Writes multiple data records into a Firehose stream in a single call, which can achieve higher throughput per producer than when writing single records. To write single data records into a Firehose stream, use PutRecord. Applications using these operations are referred to as producers. +/// +/// Firehose accumulates and publishes a particular metric for a customer account in one minute intervals. It is possible that the bursts of incoming bytes/records ingested to a Firehose stream last only for a few seconds. 
Due to this, the actual spikes in the traffic might not be fully visible in the customer's 1 minute CloudWatch metrics. +/// +/// For information about service quota, see [Amazon Firehose Quota](https://docs.aws.amazon.com/firehose/latest/dev/limits.html). +/// +/// Each PutRecordBatch request supports up to 500 records. Each record in the request can be as large as 1,000 KB (before base64 encoding), up to a limit of 4 MB for the entire request. These limits cannot be changed. +/// +/// You must specify the name of the Firehose stream and the data record when using PutRecord. The data record consists of a data blob that can be up to 1,000 KB in size, and any kind of data. For example, it could be a segment from a log file, geographic location data, website clickstream data, and so on. +/// +/// For multi record de-aggregation, you can not put more than 500 records even if the data blob length is less than 1000 KiB. If you include more than 500 records, the request succeeds but the record de-aggregation doesn't work as expected and transformation lambda is invoked with the complete base64 encoded data blob instead of de-aggregated base64 decoded records. +/// +/// Firehose buffers records before delivering them to the destination. To disambiguate the data blobs at the destination, a common solution is to use delimiters in the data, such as a newline (`\\n`) or some other character unique within the data. This allows the consumer application to parse individual data items when reading the data from the destination. +/// +/// The PutRecordBatch response includes a count of failed records, `FailedPutCount`, and an array of responses, `RequestResponses`. Even if the PutRecordBatch call succeeds, the value of `FailedPutCount` may be greater than 0, indicating that there are records for which the operation didn't succeed. Each entry in the `RequestResponses` array provides additional information about the processed record. 
It directly correlates with a record in the request array using the same ordering, from the top to the bottom. The response array always includes the same number of records as the request array. `RequestResponses` includes both successfully and unsuccessfully processed records. Firehose tries to process all records in each PutRecordBatch request. A single record failure does not stop the processing of subsequent records. +/// +/// A successfully processed record includes a `RecordId` value, which is unique for the record. An unsuccessfully processed record includes `ErrorCode` and `ErrorMessage` values. `ErrorCode` reflects the type of error, and is one of the following values: `ServiceUnavailableException` or `InternalFailure`. `ErrorMessage` provides more detailed information about the error. +/// +/// If there is an internal server error or a timeout, the write might have completed or it might have failed. If `FailedPutCount` is greater than 0, retry the request, resending only those records that might have failed processing. This minimizes the possible duplicate records and also reduces the total bytes sent (and corresponding charges). We recommend that you handle any duplicates at the destination. +/// +/// If PutRecordBatch throws `ServiceUnavailableException`, the API is automatically reinvoked (retried) 3 times. If the exception persists, it is possible that the throughput limits have been exceeded for the Firehose stream. +/// +/// Re-invoking the Put API operations (for example, PutRecord and PutRecordBatch) can result in data duplicates. For larger data assets, allow for a longer time out before retrying Put API operations. +/// +/// Data records sent to Firehose are stored for 24 hours from the time they are added to a Firehose stream as it attempts to send the records to the destination. If the destination is unreachable for more than 24 hours, the data is no longer available. 
+/// +/// Don't concatenate two or more base64 strings to form the data fields of your records. Instead, concatenate the raw data, then perform base64 encoding. +class PutRecordBatchOperation + extends + _i1.HttpOperation< + PutRecordBatchInput, + PutRecordBatchInput, + PutRecordBatchOutput, + PutRecordBatchOutput + > { + /// Writes multiple data records into a Firehose stream in a single call, which can achieve higher throughput per producer than when writing single records. To write single data records into a Firehose stream, use PutRecord. Applications using these operations are referred to as producers. + /// + /// Firehose accumulates and publishes a particular metric for a customer account in one minute intervals. It is possible that the bursts of incoming bytes/records ingested to a Firehose stream last only for a few seconds. Due to this, the actual spikes in the traffic might not be fully visible in the customer's 1 minute CloudWatch metrics. + /// + /// For information about service quota, see [Amazon Firehose Quota](https://docs.aws.amazon.com/firehose/latest/dev/limits.html). + /// + /// Each PutRecordBatch request supports up to 500 records. Each record in the request can be as large as 1,000 KB (before base64 encoding), up to a limit of 4 MB for the entire request. These limits cannot be changed. + /// + /// You must specify the name of the Firehose stream and the data record when using PutRecord. The data record consists of a data blob that can be up to 1,000 KB in size, and any kind of data. For example, it could be a segment from a log file, geographic location data, website clickstream data, and so on. + /// + /// For multi record de-aggregation, you can not put more than 500 records even if the data blob length is less than 1000 KiB. 
If you include more than 500 records, the request succeeds but the record de-aggregation doesn't work as expected and transformation lambda is invoked with the complete base64 encoded data blob instead of de-aggregated base64 decoded records. + /// + /// Firehose buffers records before delivering them to the destination. To disambiguate the data blobs at the destination, a common solution is to use delimiters in the data, such as a newline (`\\n`) or some other character unique within the data. This allows the consumer application to parse individual data items when reading the data from the destination. + /// + /// The PutRecordBatch response includes a count of failed records, `FailedPutCount`, and an array of responses, `RequestResponses`. Even if the PutRecordBatch call succeeds, the value of `FailedPutCount` may be greater than 0, indicating that there are records for which the operation didn't succeed. Each entry in the `RequestResponses` array provides additional information about the processed record. It directly correlates with a record in the request array using the same ordering, from the top to the bottom. The response array always includes the same number of records as the request array. `RequestResponses` includes both successfully and unsuccessfully processed records. Firehose tries to process all records in each PutRecordBatch request. A single record failure does not stop the processing of subsequent records. + /// + /// A successfully processed record includes a `RecordId` value, which is unique for the record. An unsuccessfully processed record includes `ErrorCode` and `ErrorMessage` values. `ErrorCode` reflects the type of error, and is one of the following values: `ServiceUnavailableException` or `InternalFailure`. `ErrorMessage` provides more detailed information about the error. + /// + /// If there is an internal server error or a timeout, the write might have completed or it might have failed. 
If `FailedPutCount` is greater than 0, retry the request, resending only those records that might have failed processing. This minimizes the possible duplicate records and also reduces the total bytes sent (and corresponding charges). We recommend that you handle any duplicates at the destination. + /// + /// If PutRecordBatch throws `ServiceUnavailableException`, the API is automatically reinvoked (retried) 3 times. If the exception persists, it is possible that the throughput limits have been exceeded for the Firehose stream. + /// + /// Re-invoking the Put API operations (for example, PutRecord and PutRecordBatch) can result in data duplicates. For larger data assets, allow for a longer time out before retrying Put API operations. + /// + /// Data records sent to Firehose are stored for 24 hours from the time they are added to a Firehose stream as it attempts to send the records to the destination. If the destination is unreachable for more than 24 hours, the data is no longer available. + /// + /// Don't concatenate two or more base64 strings to form the data fields of your records. Instead, concatenate the raw data, then perform base64 encoding. + PutRecordBatchOperation({ + required String region, + Uri? 
baseUri, + _i2.AWSCredentialsProvider credentialsProvider = + const _i2.AWSCredentialsProvider.defaultChain(), + List<_i1.HttpRequestInterceptor> requestInterceptors = const [], + List<_i1.HttpResponseInterceptor> responseInterceptors = const [], + }) : _region = region, + _baseUri = baseUri, + _credentialsProvider = credentialsProvider, + _requestInterceptors = requestInterceptors, + _responseInterceptors = responseInterceptors; + + @override + late final List< + _i1.HttpProtocol< + PutRecordBatchInput, + PutRecordBatchInput, + PutRecordBatchOutput, + PutRecordBatchOutput + > + > + protocols = [ + _i3.AwsJson1_1Protocol( + serializers: serializers, + builderFactories: builderFactories, + requestInterceptors: + <_i1.HttpRequestInterceptor>[ + const _i1.WithHost(), + const _i1.WithContentLength(), + const _i1.WithHeader( + 'X-Amz-Target', + 'Firehose_20150804.PutRecordBatch', + ), + _i3.WithSigV4( + region: _region, + service: _i4.AWSService.firehose, + credentialsProvider: _credentialsProvider, + ), + const _i1.WithUserAgent('aws-sdk-dart/0.3.2'), + const _i3.WithSdkInvocationId(), + const _i3.WithSdkRequest(), + ] + + _requestInterceptors, + responseInterceptors: + <_i1.HttpResponseInterceptor>[] + _responseInterceptors, + ), + ]; + + late final _i3.AWSEndpoint _awsEndpoint = endpointResolver.resolve( + sdkId, + _region, + ); + + final String _region; + + final Uri? _baseUri; + + final _i2.AWSCredentialsProvider _credentialsProvider; + + final List<_i1.HttpRequestInterceptor> _requestInterceptors; + + final List<_i1.HttpResponseInterceptor> _responseInterceptors; + + @override + _i1.HttpRequest buildRequest(PutRecordBatchInput input) => + _i1.HttpRequest((b) { + b.method = 'POST'; + b.path = r'/'; + }); + + @override + int successCode([PutRecordBatchOutput? 
output]) => 200; + + @override + PutRecordBatchOutput buildOutput( + PutRecordBatchOutput payload, + _i4.AWSBaseHttpResponse response, + ) => PutRecordBatchOutput.fromResponse(payload, response); + + @override + List<_i1.SmithyError> get errorTypes => const [ + _i1.SmithyError( + _i1.ShapeId( + namespace: 'com.amazonaws.firehose', + shape: 'InvalidArgumentException', + ), + _i1.ErrorKind.client, + InvalidArgumentException, + builder: InvalidArgumentException.fromResponse, + ), + _i1.SmithyError( + _i1.ShapeId( + namespace: 'com.amazonaws.firehose', + shape: 'InvalidKMSResourceException', + ), + _i1.ErrorKind.client, + InvalidKmsResourceException, + builder: InvalidKmsResourceException.fromResponse, + ), + _i1.SmithyError( + _i1.ShapeId( + namespace: 'com.amazonaws.firehose', + shape: 'InvalidSourceException', + ), + _i1.ErrorKind.client, + InvalidSourceException, + builder: InvalidSourceException.fromResponse, + ), + _i1.SmithyError( + _i1.ShapeId( + namespace: 'com.amazonaws.firehose', + shape: 'ResourceNotFoundException', + ), + _i1.ErrorKind.client, + ResourceNotFoundException, + builder: ResourceNotFoundException.fromResponse, + ), + _i1.SmithyError( + _i1.ShapeId( + namespace: 'com.amazonaws.firehose', + shape: 'ServiceUnavailableException', + ), + _i1.ErrorKind.server, + ServiceUnavailableException, + statusCode: 503, + builder: ServiceUnavailableException.fromResponse, + ), + ]; + + @override + String get runtimeTypeName => 'PutRecordBatch'; + + @override + _i3.AWSRetryer get retryer => _i3.AWSRetryer(); + + @override + Uri get baseUri => _baseUri ?? endpoint.uri; + + @override + _i1.Endpoint get endpoint => _awsEndpoint.endpoint; + + @override + _i1.SmithyOperation run( + PutRecordBatchInput input, { + _i4.AWSHttpClient? client, + _i1.ShapeId? 
useProtocol, + }) { + return _i5.runZoned( + () => super.run(input, client: client, useProtocol: useProtocol), + zoneValues: { + ...?_awsEndpoint.credentialScope?.zoneValues, + ...{_i4.AWSHeaders.sdkInvocationId: _i4.uuid(secure: true)}, + }, + ); + } +} diff --git a/packages/kinesis/amplify_firehose_dart/lib/src/version.dart b/packages/kinesis/amplify_firehose_dart/lib/src/version.dart new file mode 100644 index 00000000000..455b52f65a8 --- /dev/null +++ b/packages/kinesis/amplify_firehose_dart/lib/src/version.dart @@ -0,0 +1,4 @@ +// Generated code. Do not modify. + +/// The current package version. +const packageVersion = '0.1.0'; diff --git a/packages/kinesis/amplify_firehose_dart/pubspec.yaml b/packages/kinesis/amplify_firehose_dart/pubspec.yaml new file mode 100644 index 00000000000..d93daa271f9 --- /dev/null +++ b/packages/kinesis/amplify_firehose_dart/pubspec.yaml @@ -0,0 +1,42 @@ +name: amplify_firehose_dart +description: A Dart-only implementation for streaming data to Amazon Data Firehose with offline support. For internal use in the amplify_firehose Flutter library. 
+version: 0.1.0 +homepage: https://docs.amplify.aws/lib/q/platform/flutter/ +repository: https://github.com/aws-amplify/amplify-flutter/tree/main/packages/kinesis/amplify_firehose_dart +issue_tracker: https://github.com/aws-amplify/amplify-flutter/issues + +topics: + - aws + - firehose + - streaming + - analytics + - aws-amplify + +environment: + sdk: ^3.9.0 + +dependencies: + amplify_core: ">=2.10.0 <2.11.0" + amplify_db_common_dart: ">=0.4.14 <0.5.0" + amplify_foundation_dart: ">=2.10.0 <2.11.0" + amplify_foundation_dart_bridge: ">=2.10.0 <2.11.0" + aws_common: ">=0.7.12 <0.8.0" + aws_signature_v4: ">=0.6.10 <0.7.0" + built_collection: ^5.1.1 + built_value: ^8.10.1 + drift: ^2.25.0 + meta: ^1.16.0 + smithy: ">=0.7.10 <0.8.0" + smithy_aws: ">=0.7.10 <0.8.0" + synchronized: ^3.3.0 + web: ^1.1.1 + +dev_dependencies: + amplify_lints: ">=3.1.4 <3.2.0" + build_runner: ^2.4.15 + build_version: ^2.1.1 + built_value_generator: ^8.10.1 + drift_dev: ^2.25.1 + fake_async: ^1.3.0 + mocktail: ^1.0.0 + test: ^1.22.1 diff --git a/packages/kinesis/amplify_firehose_dart/sdk.yaml b/packages/kinesis/amplify_firehose_dart/sdk.yaml new file mode 100644 index 00000000000..54d9a1ea4d4 --- /dev/null +++ b/packages/kinesis/amplify_firehose_dart/sdk.yaml @@ -0,0 +1,3 @@ +apis: + com.amazonaws.firehose: + - PutRecordBatch diff --git a/packages/kinesis/amplify_kinesis_dart/lib/amplify_kinesis_dart.dart b/packages/kinesis/amplify_kinesis_dart/lib/amplify_kinesis_dart.dart index 761db258e44..ced24275aa0 100644 --- a/packages/kinesis/amplify_kinesis_dart/lib/amplify_kinesis_dart.dart +++ b/packages/kinesis/amplify_kinesis_dart/lib/amplify_kinesis_dart.dart @@ -4,18 +4,22 @@ /// Amplify Kinesis Data Streams client for Dart. 
library; +// Re-export shared types used in the public API +export 'package:amplify_record_cache_dart/amplify_record_cache_dart.dart' + show + FlushStrategy, + FlushInterval, + FlushNone, + FlushData, + RecordData, + ClearCacheData; + // Main client export 'src/amplify_kinesis_client.dart'; // Options export 'src/amplify_kinesis_client_options.dart'; // Exceptions export 'src/exception/amplify_kinesis_exception.dart'; -// Flush strategies -export 'src/flush_strategy/flush_strategy.dart'; -// Return types -export 'src/model/clear_cache_data.dart'; -export 'src/model/flush_data.dart'; -export 'src/model/record_data.dart'; // SDK client (for escape hatch) export 'src/sdk/kinesis.dart' show diff --git a/packages/kinesis/amplify_kinesis_dart/lib/src/amplify_kinesis_client.dart b/packages/kinesis/amplify_kinesis_dart/lib/src/amplify_kinesis_client.dart index 638edab1483..d3fca0268f5 100644 --- a/packages/kinesis/amplify_kinesis_dart/lib/src/amplify_kinesis_client.dart +++ b/packages/kinesis/amplify_kinesis_dart/lib/src/amplify_kinesis_client.dart @@ -12,17 +12,12 @@ import 'package:amplify_foundation_dart/amplify_foundation_dart.dart' import 'package:amplify_foundation_dart_bridge/amplify_foundation_dart_bridge.dart'; import 'package:amplify_kinesis_dart/src/amplify_kinesis_client_options.dart'; import 'package:amplify_kinesis_dart/src/exception/amplify_kinesis_exception.dart'; -import 'package:amplify_kinesis_dart/src/flush_strategy/flush_strategy.dart'; -import 'package:amplify_kinesis_dart/src/impl/auto_flush_scheduler.dart'; import 'package:amplify_kinesis_dart/src/impl/kinesis_record.dart'; import 'package:amplify_kinesis_dart/src/impl/kinesis_sender.dart'; -import 'package:amplify_kinesis_dart/src/impl/record_client.dart'; -import 'package:amplify_kinesis_dart/src/impl/storage/platform/record_storage_platform.dart'; -import 'package:amplify_kinesis_dart/src/model/clear_cache_data.dart'; -import 'package:amplify_kinesis_dart/src/model/flush_data.dart'; -import 
'package:amplify_kinesis_dart/src/model/record_data.dart'; +import 'package:amplify_kinesis_dart/src/kinesis_limits.dart' as limits; import 'package:amplify_kinesis_dart/src/sdk/kinesis.dart'; import 'package:amplify_kinesis_dart/src/version.dart'; +import 'package:amplify_record_cache_dart/amplify_record_cache_dart.dart'; import 'package:smithy/smithy.dart' show WithUserAgent; /// User agent component identifying this library. @@ -96,12 +91,6 @@ class AmplifyKinesisClient { _logger = AmplifyLogging.logger('AmplifyKinesisClient'); /// {@macro amplify_kinesis.amplify_kinesis_client} - /// - /// [storagePath] is the directory path for the database file on IO - /// platforms. On web, pass `null` (the path is unused; IndexedDB storage - /// is used instead, with an in-memory fallback). - /// The [region] is used as the database identifier to namespace - /// the database file (e.g. `kinesis_records_us-east-1`). static Future create({ required String region, required AWSCredentialsProvider credentialsProvider, @@ -114,6 +103,11 @@ class AmplifyKinesisClient { identifier: region, storagePath: storagePath, maxCacheBytes: opts.cacheMaxBytes, + maxRecordsPerBatch: limits.maxRecordsPerStream, + maxBytesPerBatch: limits.maxPutRecordsSizeBytes, + maxRecordSizeBytes: limits.maxRecordSizeBytes, + dbPrefix: 'kinesis_records', + storeName: 'kinesis_records', ); final kinesisClient = KinesisClient( @@ -170,11 +164,6 @@ class AmplifyKinesisClient { bool get isClosed => _closed; /// Direct access to the underlying Kinesis SDK client. - /// - /// Use this for advanced operations not covered by this client's API. - /// - /// Note: This getter is only available when the client was created with - /// [create] (not [AmplifyKinesisClient.withRecordClient]). KinesisClient get kinesisClient { final client = _kinesisClient; if (client == null) { @@ -188,16 +177,10 @@ class AmplifyKinesisClient { /// Records data to be sent to a Kinesis Data Stream. 
/// - /// The record is persisted to local storage and will be sent during - /// the next flush operation (automatic or manual). - /// /// Returns [Result.ok] with [RecordData] on success, or [Result.error] with: - /// - [KinesisValidationException] for invalid input (e.g. oversized record, - /// empty or too-long partition key) + /// - [KinesisValidationException] for invalid input /// - [KinesisLimitExceededException] if the cache is full /// - [KinesisStorageException] for database errors - /// - /// Returns [Result.ok] silently if the client is disabled. Future> record({ required Uint8List data, required String partitionKey, @@ -208,8 +191,21 @@ class AmplifyKinesisClient { _logger.debug('Record collection is disabled, dropping record'); return const Result.ok(RecordData()); } + // KDS-specific partition key validation + final codePoints = partitionKey.runes.length; + if (codePoints == 0 || codePoints > limits.maxPartitionKeyLength) { + return Result.error( + KinesisValidationException( + 'Partition key length ($codePoints) is outside the allowed ' + 'range of 1-${limits.maxPartitionKeyLength} characters.', + recoverySuggestion: + 'Use a partition key between 1 and ' + '${limits.maxPartitionKeyLength} characters.', + ), + ); + } _logger.verbose('Recording to stream: $streamName'); - final kinesisRecord = RecordInput.now( + final kinesisRecord = createKinesisRecordInputNow( data: data, partitionKey: partitionKey, streamName: streamName, @@ -218,27 +214,6 @@ class AmplifyKinesisClient { } /// Flushes cached records to their respective Kinesis streams. - /// - /// Each invocation sends at most one batch per stream, limited by the Kinesis - /// `PutRecords` constraints (up to 500 records or 5 MB per stream). If the - /// cache contains more records than a single batch can hold, the remaining - /// records are sent on subsequent flush invocations — either manually or via - /// the auto-flush scheduler. 
- /// - /// Records that fail within a batch are marked for retry on the next flush. - /// Records that exceed [AmplifyKinesisClientOptions.maxRetries] are removed - /// from the cache. - /// - /// SDK Kinesis errors (throttling, invalid stream, etc.) are logged and - /// skipped so other streams can still flush. Non-SDK errors (e.g. network, - /// storage) abort the flush and are returned as [Result.error]. - /// - /// If a flush is already in progress, the call returns immediately with - /// `FlushData(recordsFlushed: 0, flushInProgress: true)`. - /// - /// Manual flushes are allowed even when the client is disabled, so that - /// callers can drain cached records without re-enabling collection. - /// Only the automatic flush scheduler is paused when disabled. Future> flush() async { if (_closed) return const Result.error(ClientClosedException()); _logger.verbose('Starting flush'); @@ -246,10 +221,6 @@ class AmplifyKinesisClient { } /// Clears all cached records from local storage. - /// - /// Returns [Result.ok] with [ClearCacheData] containing the count of - /// records cleared, or [Result.error] with: - /// - [KinesisStorageException] for database errors Future> clearCache() async { if (_closed) return const Result.error(ClientClosedException()); _logger.verbose('Clearing cache'); @@ -264,10 +235,6 @@ class AmplifyKinesisClient { } /// Disables record collection and automatic flushing. - /// - /// Records submitted while disabled are silently dropped. Already-cached - /// records remain in storage and will be sent on the next flush after - /// re-enabling. void disable() { _logger.info('Disabling record collection and automatic flushing'); _enabled = false; @@ -275,17 +242,12 @@ class AmplifyKinesisClient { } /// Closes the client and releases all resources. - /// - /// The client cannot be reused after closing. 
Future close() async { _closed = true; _scheduler?.stop(); await _recordClient.close(); } - /// Wraps an async operation, catching any exceptions and returning them - /// as [Result.error] with the appropriate [AmplifyKinesisException] - /// subtype. Future> _wrapError(Future Function() operation) async { try { final value = await operation(); diff --git a/packages/kinesis/amplify_kinesis_dart/lib/src/amplify_kinesis_client_options.dart b/packages/kinesis/amplify_kinesis_dart/lib/src/amplify_kinesis_client_options.dart index c2b896be47f..82b66d15045 100644 --- a/packages/kinesis/amplify_kinesis_dart/lib/src/amplify_kinesis_client_options.dart +++ b/packages/kinesis/amplify_kinesis_dart/lib/src/amplify_kinesis_client_options.dart @@ -3,7 +3,7 @@ import 'package:amplify_kinesis_dart/src/amplify_kinesis_client.dart' show AmplifyKinesisClient; -import 'package:amplify_kinesis_dart/src/flush_strategy/flush_strategy.dart'; +import 'package:amplify_record_cache_dart/amplify_record_cache_dart.dart'; /// {@template amplify_kinesis.amplify_kinesis_client_options} /// Configuration options for [AmplifyKinesisClient]. diff --git a/packages/kinesis/amplify_kinesis_dart/lib/src/exception/amplify_kinesis_exception.dart b/packages/kinesis/amplify_kinesis_dart/lib/src/exception/amplify_kinesis_exception.dart index 815649bca50..5cbea1a8613 100644 --- a/packages/kinesis/amplify_kinesis_dart/lib/src/exception/amplify_kinesis_exception.dart +++ b/packages/kinesis/amplify_kinesis_dart/lib/src/exception/amplify_kinesis_exception.dart @@ -2,7 +2,7 @@ // SPDX-License-Identifier: Apache-2.0 import 'package:amplify_core/amplify_core.dart'; -import 'package:amplify_kinesis_dart/src/exception/record_cache_exception.dart'; +import 'package:amplify_record_cache_dart/amplify_record_cache_dart.dart'; /// Default recovery suggestion for errors. 
const String defaultRecoverySuggestion = diff --git a/packages/kinesis/amplify_kinesis_dart/lib/src/impl/kinesis_record.dart b/packages/kinesis/amplify_kinesis_dart/lib/src/impl/kinesis_record.dart index e1bb025c2fe..91a560ec50e 100644 --- a/packages/kinesis/amplify_kinesis_dart/lib/src/impl/kinesis_record.dart +++ b/packages/kinesis/amplify_kinesis_dart/lib/src/impl/kinesis_record.dart @@ -4,46 +4,39 @@ import 'dart:convert'; import 'dart:typed_data'; -/// Internal representation of a record to be sent to Kinesis. -final class RecordInput { - /// Creates a new Kinesis record. - RecordInput({ - required this.data, - required this.partitionKey, - required this.streamName, - required this.createdAt, - }) : dataSize = data.length + utf8.encode(partitionKey).length; - - /// Creates a Kinesis record with the current timestamp. - factory RecordInput.now({ - required Uint8List data, - required String partitionKey, - required String streamName, - }) { - return RecordInput( - data: data, - partitionKey: partitionKey, - streamName: streamName, - createdAt: DateTime.now(), - ); - } - - /// The data blob to send to Kinesis. - final Uint8List data; - - /// The partition key for the record. - final String partitionKey; - - /// The name of the Kinesis Data Stream. - final String streamName; - - /// The size of the record in bytes (data blob + partition key). - /// - /// Per AWS docs, the record size limit applies to the total size of the - /// partition key and data blob combined. Computed once at construction - /// to avoid repeated UTF-8 encoding of the partition key. - final int dataSize; +import 'package:amplify_record_cache_dart/amplify_record_cache_dart.dart'; + +/// Creates a [RecordInput] for Kinesis Data Streams. +/// +/// Unlike the generic `RecordInput`, this factory computes `dataSize` +/// as `data.length + utf8.encode(partitionKey).length` per the KDS +/// PutRecords API spec. 
+RecordInput createKinesisRecordInput({ + required Uint8List data, + required String partitionKey, + required String streamName, + required DateTime createdAt, +}) { + return RecordInput( + data: data, + streamName: streamName, + partitionKey: partitionKey, + dataSize: data.length + utf8.encode(partitionKey).length, + createdAt: createdAt, + ); +} - /// Timestamp of when the record was created. - final DateTime createdAt; +/// Creates a [RecordInput] for Kinesis Data Streams with the current +/// timestamp. +RecordInput createKinesisRecordInputNow({ + required Uint8List data, + required String partitionKey, + required String streamName, +}) { + return createKinesisRecordInput( + data: data, + partitionKey: partitionKey, + streamName: streamName, + createdAt: DateTime.now(), + ); } diff --git a/packages/kinesis/amplify_kinesis_dart/lib/src/impl/kinesis_sender.dart b/packages/kinesis/amplify_kinesis_dart/lib/src/impl/kinesis_sender.dart index a33eefb3fa1..580be391149 100644 --- a/packages/kinesis/amplify_kinesis_dart/lib/src/impl/kinesis_sender.dart +++ b/packages/kinesis/amplify_kinesis_dart/lib/src/impl/kinesis_sender.dart @@ -1,34 +1,8 @@ // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. // SPDX-License-Identifier: Apache-2.0 -import 'package:amplify_kinesis_dart/src/model/record.dart'; import 'package:amplify_kinesis_dart/src/sdk/kinesis.dart'; - -/// Result of a PutRecords operation. -/// -/// Records are categorized into three buckets: -/// - [successfulIds]: records that were accepted by Kinesis. -/// - [retryableIds]: records that failed with any error code but have not -/// yet exceeded the retry limit. These will be retried in the next flush. -/// - [failedIds]: records that have exceeded the retry limit and should be -/// deleted from the cache. -final class PutRecordsResult { - /// Creates a new [PutRecordsResult]. 
- const PutRecordsResult({ - required this.successfulIds, - required this.retryableIds, - required this.failedIds, - }); - - /// IDs of records that were successfully sent. - final List successfulIds; - - /// IDs of records that failed but can be retried (retry count < max). - final List retryableIds; - - /// IDs of records that exceeded the retry limit and should be deleted. - final List failedIds; -} +import 'package:amplify_record_cache_dart/amplify_record_cache_dart.dart'; /// {@template amplify_kinesis.kinesis_sender} /// Handles communication with AWS Kinesis Data Streams. @@ -37,7 +11,7 @@ final class PutRecordsResult { /// categorization so that all error codes are treated as retryable /// until the record exceeds `maxRetries`. /// {@endtemplate} -class KinesisSender { +class KinesisSender implements Sender { /// {@macro amplify_kinesis.kinesis_sender} KinesisSender({required KinesisClient kinesisClient, required int maxRetries}) : _kinesisClient = kinesisClient, @@ -46,18 +20,13 @@ class KinesisSender { final KinesisClient _kinesisClient; final int _maxRetries; - /// Sends records to a Kinesis stream and categorizes the response. - /// - /// Each record in the response is categorized as: - /// - successful: no error code - /// - failed: has an error code AND retry count >= [_maxRetries] - /// - retryable: has an error code AND retry count < [_maxRetries] - Future putRecords({ + @override + Future sendBatch({ required String streamName, required List records, }) async { if (records.isEmpty) { - return const PutRecordsResult( + return const SendResult( successfulIds: [], retryableIds: [], failedIds: [], @@ -68,7 +37,7 @@ class KinesisSender { .map( (record) => PutRecordsRequestEntry( data: record.data, - partitionKey: record.partitionKey, + partitionKey: record.partitionKey ?? 
'', ), ) .toList(); @@ -84,10 +53,7 @@ class KinesisSender { /// Splits the PutRecords response into successful, retryable, and failed /// record IDs based on error codes and retry counts. - PutRecordsResult _splitResponse( - PutRecordsResponse response, - List records, - ) { + SendResult _splitResponse(PutRecordsResponse response, List records) { final successfulIds = []; final retryableIds = []; final failedIds = []; @@ -104,14 +70,11 @@ class KinesisSender { } else if (retryCount >= _maxRetries) { failedIds.add(recordId); } else { - // Error codes can be: ProvisionedThroughputExceededException or - // InternalFailure. All are treated as retryable until the retry - // limit is reached. retryableIds.add(recordId); } } - return PutRecordsResult( + return SendResult( successfulIds: successfulIds, retryableIds: retryableIds, failedIds: failedIds, diff --git a/packages/kinesis/amplify_kinesis_dart/pubspec.yaml b/packages/kinesis/amplify_kinesis_dart/pubspec.yaml index 69e6378bc7e..bb095c4b9de 100644 --- a/packages/kinesis/amplify_kinesis_dart/pubspec.yaml +++ b/packages/kinesis/amplify_kinesis_dart/pubspec.yaml @@ -17,26 +17,20 @@ environment: dependencies: amplify_core: ">=2.10.0 <2.11.0" - amplify_db_common_dart: ">=0.4.17 <0.5.0" amplify_foundation_dart: ">=2.11.0 <2.12.0" amplify_foundation_dart_bridge: ">=2.11.0 <2.12.0" + amplify_record_cache_dart: ">=0.1.0 <0.2.0" aws_common: ">=0.7.12 <0.8.0" aws_signature_v4: ">=0.6.10 <0.7.0" built_collection: ^5.1.1 built_value: ^8.10.1 - drift: ^2.25.0 meta: ^1.16.0 smithy: ">=0.7.10 <0.8.0" smithy_aws: ">=0.7.10 <0.8.0" - synchronized: ^3.3.0 - web: ^1.1.1 dev_dependencies: amplify_lints: ">=3.1.4 <3.2.0" - build_runner: ^2.4.15 - build_version: ^2.1.1 - built_value_generator: ^8.10.1 - drift_dev: ^2.25.1 + drift: ^2.25.0 fake_async: ^1.3.0 mocktail: ^1.0.0 test: ^1.22.1 diff --git a/packages/kinesis/amplify_kinesis_dart/test/amplify_kinesis_exception_test.dart 
b/packages/kinesis/amplify_kinesis_dart/test/amplify_kinesis_exception_test.dart index 61f9de630b1..5a7106f8436 100644 --- a/packages/kinesis/amplify_kinesis_dart/test/amplify_kinesis_exception_test.dart +++ b/packages/kinesis/amplify_kinesis_dart/test/amplify_kinesis_exception_test.dart @@ -2,7 +2,7 @@ // SPDX-License-Identifier: Apache-2.0 import 'package:amplify_kinesis_dart/src/exception/amplify_kinesis_exception.dart'; -import 'package:amplify_kinesis_dart/src/exception/record_cache_exception.dart'; +import 'package:amplify_record_cache_dart/amplify_record_cache_dart.dart'; import 'package:test/test.dart'; void main() { @@ -16,7 +16,7 @@ void main() { test( 'converts RecordCacheValidationException to KinesisValidationException', () { - final cause = RecordCacheValidationException('bad input', 'fix it'); + const cause = RecordCacheValidationException('bad input', 'fix it'); final result = AmplifyKinesisException.from(cause); expect(result, isA()); expect(result.message, 'bad input'); @@ -44,7 +44,7 @@ void main() { test( 'converts RecordCacheLimitExceededException to KinesisLimitExceededException', () { - final cause = RecordCacheLimitExceededException( + const cause = RecordCacheLimitExceededException( 'cache full', 'flush first', ); diff --git a/packages/kinesis/amplify_kinesis_dart/test/auto_flush_scheduler_test.dart b/packages/kinesis/amplify_kinesis_dart/test/auto_flush_scheduler_test.dart index bf574220d71..c8e1591dfa6 100644 --- a/packages/kinesis/amplify_kinesis_dart/test/auto_flush_scheduler_test.dart +++ b/packages/kinesis/amplify_kinesis_dart/test/auto_flush_scheduler_test.dart @@ -1,9 +1,7 @@ // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
// SPDX-License-Identifier: Apache-2.0 -import 'package:amplify_kinesis_dart/src/impl/auto_flush_scheduler.dart'; -import 'package:amplify_kinesis_dart/src/impl/record_client.dart'; -import 'package:amplify_kinesis_dart/src/model/flush_data.dart'; +import 'package:amplify_record_cache_dart/amplify_record_cache_dart.dart'; import 'package:fake_async/fake_async.dart'; import 'package:mocktail/mocktail.dart'; import 'package:test/test.dart'; diff --git a/packages/kinesis/amplify_kinesis_dart/test/common/mocktail_mocks.dart b/packages/kinesis/amplify_kinesis_dart/test/common/mocktail_mocks.dart index f77e0098ecf..58a4526d99e 100644 --- a/packages/kinesis/amplify_kinesis_dart/test/common/mocktail_mocks.dart +++ b/packages/kinesis/amplify_kinesis_dart/test/common/mocktail_mocks.dart @@ -9,8 +9,8 @@ import 'dart:async'; import 'package:amplify_foundation_dart/amplify_foundation_dart.dart' as foundation; import 'package:amplify_kinesis_dart/src/impl/kinesis_sender.dart'; -import 'package:amplify_kinesis_dart/src/impl/record_client.dart'; import 'package:amplify_kinesis_dart/src/sdk/src/kinesis/kinesis_client.dart'; +import 'package:amplify_record_cache_dart/amplify_record_cache_dart.dart'; import 'package:aws_common/aws_common.dart'; import 'package:mocktail/mocktail.dart'; import 'package:smithy/smithy.dart'; @@ -20,13 +20,6 @@ import 'package:smithy/smithy.dart'; // ============================================================================= /// Creates a mock [SmithyOperation] that returns the result of [fn]. 
-/// -/// Use this helper to mock SDK client method returns: -/// ```dart -/// when(() => mockClient.putRecords(any())).thenReturn( -/// mockSmithyOperation(() => PutRecordsResponse(...)), -/// ); -/// ``` SmithyOperation mockSmithyOperation(FutureOr Function() fn) => SmithyOperation( CancelableOperation.fromFuture(Future.value(fn())), @@ -39,13 +32,6 @@ SmithyOperation mockSmithyOperation(FutureOr Function() fn) => class MockKinesisClient extends Mock implements KinesisClient {} /// Mock implementation of [SmithyOperation]. -/// -/// Use when you need to throw exceptions from SDK operations: -/// ```dart -/// final mockOperation = MockSmithyOperation(); -/// when(() => mockOperation.result).thenThrow(SomeException()); -/// when(() => mockClient.putRecords(any())).thenReturn(mockOperation); -/// ``` class MockSmithyOperation extends Mock implements SmithyOperation {} /// Mock implementation of [AWSHttpException]. diff --git a/packages/kinesis/amplify_kinesis_dart/test/helpers/test_database.dart b/packages/kinesis/amplify_kinesis_dart/test/helpers/test_database.dart index ebde2530ee1..a334ecb2a4c 100644 --- a/packages/kinesis/amplify_kinesis_dart/test/helpers/test_database.dart +++ b/packages/kinesis/amplify_kinesis_dart/test/helpers/test_database.dart @@ -1,10 +1,10 @@ // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. // SPDX-License-Identifier: Apache-2.0 -import 'package:amplify_kinesis_dart/src/db/kinesis_record_database.dart'; +import 'package:amplify_record_cache_dart/amplify_record_cache_dart.dart'; import 'package:drift/native.dart'; /// Creates an in-memory database for testing. 
-KinesisRecordDatabase createTestDatabase() { - return KinesisRecordDatabase.forTesting(NativeDatabase.memory()); +RecordCacheDatabase createTestDatabase() { + return RecordCacheDatabase.forTesting(NativeDatabase.memory()); } diff --git a/packages/kinesis/amplify_kinesis_dart/test/in_memory_record_storage_test.dart b/packages/kinesis/amplify_kinesis_dart/test/in_memory_record_storage_test.dart index 16acbc9b1a2..ccbf01938f6 100644 --- a/packages/kinesis/amplify_kinesis_dart/test/in_memory_record_storage_test.dart +++ b/packages/kinesis/amplify_kinesis_dart/test/in_memory_record_storage_test.dart @@ -4,8 +4,7 @@ import 'dart:typed_data'; import 'package:amplify_kinesis_dart/src/impl/kinesis_record.dart'; -import 'package:amplify_kinesis_dart/src/impl/storage/record_storage.dart'; -import 'package:amplify_kinesis_dart/src/impl/storage/record_storage_memory.dart'; +import 'package:amplify_record_cache_dart/amplify_record_cache_dart.dart'; import 'package:test/test.dart'; void main() { @@ -13,7 +12,12 @@ void main() { late InMemoryRecordStorage storage; setUp(() { - storage = InMemoryRecordStorage(maxCacheBytes: 10 * 1024 * 1024); + storage = InMemoryRecordStorage( + maxCacheBytes: 10 * 1024 * 1024, + maxRecordsPerBatch: 500, + maxBytesPerBatch: 5 * 1024 * 1024, + maxRecordSizeBytes: 10 * 1024 * 1024, + ); }); tearDown(() async { @@ -29,7 +33,7 @@ void main() { group('addRecord', () { test('saves and retrieves a record', () async { await storage.addRecord( - RecordInput.now( + createKinesisRecordInputNow( data: Uint8List.fromList([1, 2, 3, 4, 5]), partitionKey: 'test-partition', streamName: 'test-stream', @@ -52,7 +56,7 @@ void main() { test('removes correct records by ID', () async { for (var i = 0; i < 5; i++) { await storage.addRecord( - RecordInput.now( + createKinesisRecordInputNow( data: Uint8List.fromList([i]), partitionKey: 'pk-$i', streamName: 'stream', @@ -74,7 +78,7 @@ void main() { test('handles empty ID list gracefully', () async { await storage.addRecord( 
- RecordInput.now( + createKinesisRecordInputNow( data: Uint8List.fromList([1]), partitionKey: 'pk', streamName: 'stream', @@ -89,7 +93,7 @@ void main() { group('incrementRetryCount', () { test('increments retry count correctly', () async { await storage.addRecord( - RecordInput.now( + createKinesisRecordInputNow( data: Uint8List.fromList([1]), partitionKey: 'pk', streamName: 'stream', @@ -112,21 +116,21 @@ void main() { group('getRecordsByStream', () { test('returns records grouped by stream name', () async { await storage.addRecord( - RecordInput.now( + createKinesisRecordInputNow( data: Uint8List.fromList([1]), partitionKey: 'pk', streamName: 'stream-a', ), ); await storage.addRecord( - RecordInput.now( + createKinesisRecordInputNow( data: Uint8List.fromList([2]), partitionKey: 'pk', streamName: 'stream-b', ), ); await storage.addRecord( - RecordInput.now( + createKinesisRecordInputNow( data: Uint8List.fromList([3]), partitionKey: 'pk', streamName: 'stream-a', @@ -149,7 +153,7 @@ void main() { test('removes all records', () async { for (var i = 0; i < 5; i++) { await storage.addRecord( - RecordInput.now( + createKinesisRecordInputNow( data: Uint8List.fromList([i]), partitionKey: 'pk-$i', streamName: 'stream', @@ -169,7 +173,7 @@ void main() { expect(await storage.getRecordCount(), equals(0)); for (var i = 0; i < 3; i++) { await storage.addRecord( - RecordInput.now( + createKinesisRecordInputNow( data: Uint8List.fromList([i]), partitionKey: 'pk-$i', streamName: 'stream', diff --git a/packages/kinesis/amplify_kinesis_dart/test/kinesis_data_streams_test.dart b/packages/kinesis/amplify_kinesis_dart/test/kinesis_data_streams_test.dart index 13f107cce5c..522986197d5 100644 --- a/packages/kinesis/amplify_kinesis_dart/test/kinesis_data_streams_test.dart +++ b/packages/kinesis/amplify_kinesis_dart/test/kinesis_data_streams_test.dart @@ -8,11 +8,8 @@ import 'package:amplify_foundation_dart/amplify_foundation_dart.dart' import 
'package:amplify_kinesis_dart/src/amplify_kinesis_client.dart'; import 'package:amplify_kinesis_dart/src/amplify_kinesis_client_options.dart'; import 'package:amplify_kinesis_dart/src/exception/amplify_kinesis_exception.dart'; -import 'package:amplify_kinesis_dart/src/flush_strategy/flush_strategy.dart'; import 'package:amplify_kinesis_dart/src/impl/kinesis_record.dart'; -import 'package:amplify_kinesis_dart/src/model/clear_cache_data.dart'; -import 'package:amplify_kinesis_dart/src/model/flush_data.dart'; -import 'package:amplify_kinesis_dart/src/model/record_data.dart'; +import 'package:amplify_record_cache_dart/amplify_record_cache_dart.dart'; import 'package:mocktail/mocktail.dart'; import 'package:test/test.dart'; @@ -24,7 +21,11 @@ void main() { setUpAll(() { registerFallbackValue( - RecordInput.now(data: Uint8List(0), partitionKey: '', streamName: ''), + createKinesisRecordInputNow( + data: Uint8List(0), + partitionKey: '', + streamName: '', + ), ); }); diff --git a/packages/kinesis/amplify_kinesis_dart/test/kinesis_sender_test.dart b/packages/kinesis/amplify_kinesis_dart/test/kinesis_sender_test.dart index c3a95a760c6..485cdc54ee2 100644 --- a/packages/kinesis/amplify_kinesis_dart/test/kinesis_sender_test.dart +++ b/packages/kinesis/amplify_kinesis_dart/test/kinesis_sender_test.dart @@ -10,8 +10,8 @@ library; import 'dart:typed_data'; import 'package:amplify_kinesis_dart/src/impl/kinesis_sender.dart'; -import 'package:amplify_kinesis_dart/src/model/record.dart'; import 'package:amplify_kinesis_dart/src/sdk/kinesis.dart'; +import 'package:amplify_record_cache_dart/amplify_record_cache_dart.dart'; import 'package:mocktail/mocktail.dart'; import 'package:test/test.dart'; @@ -79,7 +79,7 @@ void main() { final sender = _DirectMockSender(mockClient, maxRetries: maxRetries); - await sender.putRecords( + await sender.sendBatch( streamName: 'my-stream', records: [ _testRecord( @@ -132,7 +132,7 @@ void main() { final sender = _DirectMockSender(mockClient, maxRetries: 
maxRetries); - final result = await sender.putRecords( + final result = await sender.sendBatch( streamName: 'test-stream', records: [ _testRecord( diff --git a/packages/kinesis/amplify_kinesis_dart/test/record_client_concurrent_flush_test.dart b/packages/kinesis/amplify_kinesis_dart/test/record_client_concurrent_flush_test.dart index ec6fc52ad76..566973908fc 100644 --- a/packages/kinesis/amplify_kinesis_dart/test/record_client_concurrent_flush_test.dart +++ b/packages/kinesis/amplify_kinesis_dart/test/record_client_concurrent_flush_test.dart @@ -11,10 +11,7 @@ import 'dart:async'; import 'dart:typed_data'; import 'package:amplify_kinesis_dart/src/impl/kinesis_record.dart'; -import 'package:amplify_kinesis_dart/src/impl/kinesis_sender.dart'; -import 'package:amplify_kinesis_dart/src/impl/record_client.dart'; -import 'package:amplify_kinesis_dart/src/impl/storage/record_storage_sqlite.dart'; -import 'package:amplify_kinesis_dart/src/model/record.dart'; +import 'package:amplify_record_cache_dart/amplify_record_cache_dart.dart'; import 'package:test/test.dart'; import 'helpers/test_database.dart'; @@ -28,6 +25,9 @@ void main() { final storage = SqliteRecordStorage( database: db, maxCacheBytes: 1024 * 1024, + maxRecordsPerBatch: 500, + maxBytesPerBatch: 5 * 1024 * 1024, + maxRecordSizeBytes: 10 * 1024 * 1024, ); final sender = _GatedSender(); final client = RecordClient( @@ -39,7 +39,7 @@ void main() { // Seed records for (var i = 0; i < 5; i++) { await client.record( - RecordInput.now( + createKinesisRecordInputNow( data: Uint8List.fromList([i]), partitionKey: 'key$i', streamName: 'test-stream', @@ -47,7 +47,7 @@ void main() { ); } - // Launch flush1 — it will block inside putRecords until we complete + // Launch flush1 — it will block inside sendBatch until we complete // the gate completer. 
final flush1 = client.flush(); @@ -78,31 +78,28 @@ void main() { }); } -/// A sender that blocks inside [putRecords] until [gate] is completed, +/// A sender that blocks inside [sendBatch] until [gate] is completed, /// giving the test deterministic control over when the flush finishes. -class _GatedSender implements KinesisSender { - /// Completes when [putRecords] is entered, signaling that the flush +class _GatedSender implements Sender { + /// Completes when [sendBatch] is entered, signaling that the flush /// is in progress and holding the `_flushing` flag. final Completer entered = Completer(); - /// The test completes this to unblock [putRecords]. + /// The test completes this to unblock [sendBatch]. final Completer gate = Completer(); @override - dynamic noSuchMethod(Invocation invocation) => super.noSuchMethod(invocation); - - @override - Future putRecords({ + Future sendBatch({ required String streamName, required List records, }) async { - // Signal that we're inside putRecords (flush is in progress). + // Signal that we're inside sendBatch (flush is in progress). if (!entered.isCompleted) entered.complete(); // Block until the test says go. await gate.future; - return PutRecordsResult( + return SendResult( successfulIds: records.map((r) => r.id).toList(), failedIds: const [], retryableIds: const [], diff --git a/packages/kinesis/amplify_kinesis_dart/test/record_client_test.dart b/packages/kinesis/amplify_kinesis_dart/test/record_client_test.dart index 19942d358d2..9004df5df55 100644 --- a/packages/kinesis/amplify_kinesis_dart/test/record_client_test.dart +++ b/packages/kinesis/amplify_kinesis_dart/test/record_client_test.dart @@ -3,7 +3,7 @@ /// Tests for RecordClient. /// -/// Uses mocktail mocks for KinesisSender with pre-built PutRecordsResult +/// Uses mocktail mocks for KinesisSender with pre-built SendResult /// values and explicit IDs, rather than behavioral test doubles with /// callback logic. 
library; @@ -11,13 +11,8 @@ library; import 'dart:typed_data'; import 'package:amplify_kinesis_dart/src/impl/kinesis_record.dart'; -import 'package:amplify_kinesis_dart/src/impl/kinesis_sender.dart'; -import 'package:amplify_kinesis_dart/src/impl/record_client.dart'; -import 'package:amplify_kinesis_dart/src/impl/storage/record_storage.dart'; -import 'package:amplify_kinesis_dart/src/impl/storage/record_storage_sqlite.dart'; -import 'package:amplify_kinesis_dart/src/model/clear_cache_data.dart'; -import 'package:amplify_kinesis_dart/src/model/flush_data.dart'; import 'package:amplify_kinesis_dart/src/sdk/kinesis.dart'; +import 'package:amplify_record_cache_dart/amplify_record_cache_dart.dart'; import 'package:mocktail/mocktail.dart'; import 'package:test/test.dart'; @@ -34,7 +29,10 @@ void main() { final db = createTestDatabase(); storage = SqliteRecordStorage( database: db, - maxCacheBytes: 1024, // 1KB for testing + maxCacheBytes: 1024, + maxRecordsPerBatch: 500, + maxBytesPerBatch: 5 * 1024 * 1024, + maxRecordSizeBytes: 10 * 1024 * 1024, ); mockSender = MockKinesisSender(); client = RecordClient( @@ -59,7 +57,7 @@ void main() { group('record()', () { test('accepts records when enabled', () async { await client.record( - RecordInput.now( + createKinesisRecordInputNow( data: Uint8List.fromList([1, 2, 3]), partitionKey: 'pk', streamName: 'stream', @@ -75,7 +73,7 @@ void main() { test('sends all cached records and returns FlushData', () async { for (var i = 0; i < 3; i++) { await client.record( - RecordInput.now( + createKinesisRecordInputNow( data: Uint8List.fromList([i]), partitionKey: 'pk-$i', streamName: 'stream', @@ -85,14 +83,13 @@ void main() { final allRecords = await getAllRecords(); - // Mock: all records succeed when( - () => mockSender.putRecords( + () => mockSender.sendBatch( streamName: any(named: 'streamName'), records: any(named: 'records'), ), ).thenAnswer( - (_) async => PutRecordsResult( + (_) async => SendResult( successfulIds: allRecords.map((r) 
=> r.id).toList(), retryableIds: [], failedIds: [], @@ -104,7 +101,7 @@ void main() { expect(result, isA()); expect(result.recordsFlushed, equals(3)); verify( - () => mockSender.putRecords( + () => mockSender.sendBatch( streamName: any(named: 'streamName'), records: any(named: 'records'), ), @@ -113,36 +110,35 @@ void main() { test('separates records by stream', () async { await client.record( - RecordInput.now( + createKinesisRecordInputNow( data: Uint8List.fromList([1]), partitionKey: 'pk', streamName: 'stream-a', ), ); await client.record( - RecordInput.now( + createKinesisRecordInputNow( data: Uint8List.fromList([2]), partitionKey: 'pk', streamName: 'stream-b', ), ); await client.record( - RecordInput.now( + createKinesisRecordInputNow( data: Uint8List.fromList([3]), partitionKey: 'pk', streamName: 'stream-a', ), ); - // Mock: all records succeed for any stream when( - () => mockSender.putRecords( + () => mockSender.sendBatch( streamName: any(named: 'streamName'), records: any(named: 'records'), ), ).thenAnswer((invocation) async { final records = invocation.namedArguments[#records] as List; - return PutRecordsResult( + return SendResult( successfulIds: records.map((r) => r.id).toList(), retryableIds: [], failedIds: [], @@ -153,7 +149,7 @@ void main() { expect(result.recordsFlushed, equals(3)); verify( - () => mockSender.putRecords( + () => mockSender.sendBatch( streamName: any(named: 'streamName'), records: any(named: 'records'), ), @@ -162,7 +158,7 @@ void main() { test('deletes successful records after send', () async { await client.record( - RecordInput.now( + createKinesisRecordInputNow( data: Uint8List.fromList([1]), partitionKey: 'pk', streamName: 'stream', @@ -171,12 +167,12 @@ void main() { final allRecords = await getAllRecords(); when( - () => mockSender.putRecords( + () => mockSender.sendBatch( streamName: any(named: 'streamName'), records: any(named: 'records'), ), ).thenAnswer( - (_) async => PutRecordsResult( + (_) async => SendResult( 
successfulIds: allRecords.map((r) => r.id).toList(), retryableIds: [], failedIds: [], @@ -191,7 +187,7 @@ void main() { test('handles mixed success, retryable, and failed', () async { for (var i = 0; i < 3; i++) { await client.record( - RecordInput.now( + createKinesisRecordInputNow( data: Uint8List.fromList([i]), partitionKey: 'pk-$i', streamName: 'stream', @@ -206,14 +202,13 @@ void main() { await storage.incrementRetryCount([allRecords[2].id]); } - // Mock: record 1 succeeds, record 2 retryable, record 3 failed when( - () => mockSender.putRecords( + () => mockSender.sendBatch( streamName: any(named: 'streamName'), records: any(named: 'records'), ), ).thenAnswer( - (_) async => PutRecordsResult( + (_) async => SendResult( successfulIds: [allRecords[0].id], retryableIds: [allRecords[1].id], failedIds: [allRecords[2].id], @@ -235,7 +230,7 @@ void main() { () async { for (var i = 0; i < 3; i++) { await client.record( - RecordInput.now( + createKinesisRecordInputNow( data: Uint8List.fromList([i]), partitionKey: 'key$i', streamName: 'stream', @@ -244,7 +239,7 @@ void main() { } when( - () => mockSender.putRecords( + () => mockSender.sendBatch( streamName: any(named: 'streamName'), records: any(named: 'records'), ), @@ -265,59 +260,62 @@ void main() { }, ); - test('deletes records at max retries when non-SDK error occurs', () async { - for (var i = 0; i < 3; i++) { - await client.record( - RecordInput.now( - data: Uint8List.fromList([i]), - partitionKey: 'key$i', - streamName: 'stream', - ), - ); - } + test( + 'deletes records at max retries when non-SDK error occurs', + () async { + for (var i = 0; i < 3; i++) { + await client.record( + createKinesisRecordInputNow( + data: Uint8List.fromList([i]), + partitionKey: 'key$i', + streamName: 'stream', + ), + ); + } - // Set records 2 and 3 to max retries (3) - final allRecords = await getAllRecords(); - for (var i = 0; i < 3; i++) { - await storage.incrementRetryCount([ - allRecords[1].id, - allRecords[2].id, - ]); - } + // 
Set records 2 and 3 to max retries (3) + final allRecords = await getAllRecords(); + for (var i = 0; i < 3; i++) { + await storage.incrementRetryCount([ + allRecords[1].id, + allRecords[2].id, + ]); + } - when( - () => mockSender.putRecords( - streamName: any(named: 'streamName'), - records: any(named: 'records'), - ), - ).thenThrow(Exception('Network error')); + when( + () => mockSender.sendBatch( + streamName: any(named: 'streamName'), + records: any(named: 'records'), + ), + ).thenThrow(Exception('Network error')); - try { - await client.flush(); - fail('Expected flush to throw'); - } on Exception { - // Expected — non-SDK errors are rethrown - } + try { + await client.flush(); + fail('Expected flush to throw'); + } on Exception { + // Expected — non-SDK errors are rethrown + } - // Only record 1 should remain (records 2 and 3 deleted at max retries) - final remaining = await getAllRecords(); - expect(remaining, hasLength(1)); - expect(remaining[0].id, equals(allRecords[0].id)); - expect(remaining[0].retryCount, equals(1)); - }); + // Only record 1 should remain (records 2 and 3 at max retries) + final remaining = await getAllRecords(); + expect(remaining, hasLength(1)); + expect(remaining[0].id, equals(allRecords[0].id)); + expect(remaining[0].retryCount, equals(1)); + }, + ); test( 'invalid stream records do not block valid stream flushes', () async { await client.record( - RecordInput.now( + createKinesisRecordInputNow( data: Uint8List.fromList([1, 2, 3]), partitionKey: 'pk', streamName: 'invalid-stream', ), ); await client.record( - RecordInput.now( + createKinesisRecordInputNow( data: Uint8List.fromList([4, 5, 6]), partitionKey: 'pk', streamName: 'valid-stream', @@ -330,19 +328,19 @@ void main() { ); when( - () => mockSender.putRecords( + () => mockSender.sendBatch( streamName: 'invalid-stream', records: any(named: 'records'), ), ).thenThrow(ResourceNotFoundException(message: 'Stream not found')); when( - () => mockSender.putRecords( + () => 
mockSender.sendBatch( streamName: 'valid-stream', records: any(named: 'records'), ), ).thenAnswer( - (_) async => PutRecordsResult( + (_) async => SendResult( successfulIds: [validRecord.id], retryableIds: [], failedIds: [], @@ -356,7 +354,7 @@ void main() { test('non-SDK errors abort the flush', () async { await client.record( - RecordInput.now( + createKinesisRecordInputNow( data: Uint8List.fromList([1, 2, 3]), partitionKey: 'pk', streamName: 'stream', @@ -364,7 +362,7 @@ void main() { ); when( - () => mockSender.putRecords( + () => mockSender.sendBatch( streamName: any(named: 'streamName'), records: any(named: 'records'), ), @@ -378,7 +376,7 @@ void main() { test('removes all cached records and returns ClearCacheData', () async { for (var i = 0; i < 5; i++) { await client.record( - RecordInput.now( + createKinesisRecordInputNow( data: Uint8List.fromList([i]), partitionKey: 'pk-$i', streamName: 'stream', diff --git a/packages/kinesis/amplify_kinesis_dart/test/record_validation_test.dart b/packages/kinesis/amplify_kinesis_dart/test/record_validation_test.dart index 46604c31e58..115c26b7900 100644 --- a/packages/kinesis/amplify_kinesis_dart/test/record_validation_test.dart +++ b/packages/kinesis/amplify_kinesis_dart/test/record_validation_test.dart @@ -18,13 +18,13 @@ library; import 'dart:convert'; import 'dart:typed_data'; -import 'package:amplify_kinesis_dart/src/exception/record_cache_exception.dart'; +import 'package:amplify_foundation_dart/amplify_foundation_dart.dart' + show Error, Ok; +import 'package:amplify_kinesis_dart/src/amplify_kinesis_client.dart'; +import 'package:amplify_kinesis_dart/src/exception/amplify_kinesis_exception.dart'; import 'package:amplify_kinesis_dart/src/impl/kinesis_record.dart'; -import 'package:amplify_kinesis_dart/src/impl/kinesis_sender.dart'; -import 'package:amplify_kinesis_dart/src/impl/record_client.dart'; -import 'package:amplify_kinesis_dart/src/impl/storage/record_storage.dart'; -import 
'package:amplify_kinesis_dart/src/impl/storage/record_storage_sqlite.dart'; import 'package:amplify_kinesis_dart/src/kinesis_limits.dart' as limits; +import 'package:amplify_record_cache_dart/amplify_record_cache_dart.dart'; import 'package:test/test.dart'; import 'helpers/test_database.dart'; @@ -48,7 +48,13 @@ void main() { setUp(() { final db = createTestDatabase(); - storage = SqliteRecordStorage(database: db, maxCacheBytes: 10000); + storage = SqliteRecordStorage( + database: db, + maxCacheBytes: 10000, + maxRecordsPerBatch: 500, + maxBytesPerBatch: 5 * 1024 * 1024, + maxRecordSizeBytes: 10 * 1024 * 1024, + ); client = createClient(storage: storage); }); @@ -68,6 +74,9 @@ void main() { final largeStorage = SqliteRecordStorage( database: largeDb, maxCacheBytes: 20 * 1024 * 1024, + maxRecordsPerBatch: 500, + maxBytesPerBatch: 5 * 1024 * 1024, + maxRecordSizeBytes: 10 * 1024 * 1024, ); final largeClient = createClient(storage: largeStorage); @@ -78,7 +87,7 @@ void main() { ); await largeClient.record( - RecordInput.now( + createKinesisRecordInputNow( data: exactLimitData, partitionKey: partitionKey, streamName: 'stream', @@ -102,7 +111,7 @@ void main() { expect( () => client.record( - RecordInput.now( + createKinesisRecordInputNow( data: oversizedData, partitionKey: partitionKey, streamName: 'stream', @@ -121,7 +130,7 @@ void main() { test('dataSize accounts for partition key bytes', () { final partitionKey = 'k' * 10; // 10 bytes UTF-8 final data = Uint8List(50); - final record = RecordInput.now( + final record = createKinesisRecordInputNow( data: data, partitionKey: partitionKey, streamName: 'stream', @@ -134,7 +143,7 @@ void main() { // Each emoji is 4 bytes in UTF-8, 2 emojis = 8 bytes const partitionKey = '😀😀'; final data = Uint8List(10); - final record = RecordInput.now( + final record = createKinesisRecordInputNow( data: data, partitionKey: partitionKey, streamName: 'stream', @@ -156,6 +165,9 @@ void main() { final tightStorage = SqliteRecordStorage( 
database: tightDb, maxCacheBytes: 80, + maxRecordsPerBatch: 500, + maxBytesPerBatch: 5 * 1024 * 1024, + maxRecordSizeBytes: 10 * 1024 * 1024, ); final tightClient = createClient(storage: tightStorage); @@ -165,7 +177,7 @@ void main() { // First record: 40 bytes — fits in 80-byte cache await tightClient.record( - RecordInput.now( + createKinesisRecordInputNow( data: data, partitionKey: partitionKey, streamName: 'stream', @@ -174,7 +186,7 @@ void main() { // Second record: 40 more → total 80 — still fits await tightClient.record( - RecordInput.now( + createKinesisRecordInputNow( data: data, partitionKey: partitionKey, streamName: 'stream', @@ -184,7 +196,7 @@ void main() { // Third record: 40 more → total 120 > 80 limit expect( () => tightClient.record( - RecordInput.now( + createKinesisRecordInputNow( data: data, partitionKey: partitionKey, streamName: 'stream', @@ -203,27 +215,34 @@ void main() { // --------------------------------------------------------------- group('partition key validation', () { + late AmplifyKinesisClient kinesisClient; + + setUp(() { + kinesisClient = AmplifyKinesisClient.withRecordClient( + recordClient: client, + ); + }); + test('empty partition key is rejected', () async { + final result = await kinesisClient.record( + data: Uint8List.fromList([1, 2, 3]), + partitionKey: '', + streamName: 'stream', + ); + expect(result, isA>()); expect( - () => client.record( - RecordInput.now( - data: Uint8List.fromList([1, 2, 3]), - partitionKey: '', - streamName: 'stream', - ), - ), - throwsA(isA()), + (result as Error).error, + isA(), ); }); test('partition key at max length 256 code points is accepted', () async { - await client.record( - RecordInput.now( - data: Uint8List.fromList([1]), - partitionKey: 'k' * 256, - streamName: 'stream', - ), + final result = await kinesisClient.record( + data: Uint8List.fromList([1]), + partitionKey: 'k' * 256, + streamName: 'stream', ); + expect(result, isA>()); final records = (await 
storage.getRecordsByStream()).values .expand((r) => r) @@ -232,15 +251,15 @@ void main() { }); test('partition key exceeding 256 code points is rejected', () async { + final result = await kinesisClient.record( + data: Uint8List.fromList([1]), + partitionKey: 'k' * 257, + streamName: 'stream', + ); + expect(result, isA>()); expect( - () => client.record( - RecordInput.now( - data: Uint8List.fromList([1]), - partitionKey: 'k' * 257, - streamName: 'stream', - ), - ), - throwsA(isA()), + (result as Error).error, + isA(), ); }); @@ -248,13 +267,12 @@ void main() { // Each emoji (😀) is 1 code point but 4 bytes in UTF-8. // 10 emoji = 10 code points (within 256 limit). final partitionKey = '😀' * 10; - await client.record( - RecordInput.now( - data: Uint8List.fromList([1]), - partitionKey: partitionKey, - streamName: 'stream', - ), + final result = await kinesisClient.record( + data: Uint8List.fromList([1]), + partitionKey: partitionKey, + streamName: 'stream', ); + expect(result, isA>()); final records = (await storage.getRecordsByStream()).values .expand((r) => r) @@ -267,15 +285,15 @@ void main() { () async { // 257 emoji = 257 code points > 256 limit final partitionKey = '😀' * 257; + final result = await kinesisClient.record( + data: Uint8List.fromList([1]), + partitionKey: partitionKey, + streamName: 'stream', + ); + expect(result, isA>()); expect( - () => client.record( - RecordInput.now( - data: Uint8List.fromList([1]), - partitionKey: partitionKey, - streamName: 'stream', - ), - ), - throwsA(isA()), + (result as Error).error, + isA(), ); }, ); @@ -292,7 +310,7 @@ void main() { // Oversized record should be rejected expect( () => client.record( - RecordInput.now( + createKinesisRecordInputNow( data: Uint8List(limits.maxRecordSizeBytes), partitionKey: 'k' * 20, streamName: 'stream', @@ -303,7 +321,7 @@ void main() { // Valid record should still work await client.record( - RecordInput.now( + createKinesisRecordInputNow( data: Uint8List.fromList([1, 2, 3]), 
partitionKey: 'a', streamName: 'stream', @@ -320,16 +338,13 @@ void main() { } /// No-op sender for validation tests that don't need to send records. -class _NoOpSender implements KinesisSender { - @override - dynamic noSuchMethod(Invocation invocation) => super.noSuchMethod(invocation); - +class _NoOpSender implements Sender { @override - Future putRecords({ + Future sendBatch({ required String streamName, required List records, }) async { - return PutRecordsResult( + return SendResult( successfulIds: records.map((r) => r.id).toList(), failedIds: const [], retryableIds: const [], diff --git a/packages/kinesis/amplify_kinesis_dart/test/sqlite_record_storage_cache_accuracy_test.dart b/packages/kinesis/amplify_kinesis_dart/test/sqlite_record_storage_cache_accuracy_test.dart index 1a13a96aae7..ecf3e7d9900 100644 --- a/packages/kinesis/amplify_kinesis_dart/test/sqlite_record_storage_cache_accuracy_test.dart +++ b/packages/kinesis/amplify_kinesis_dart/test/sqlite_record_storage_cache_accuracy_test.dart @@ -11,7 +11,7 @@ library; import 'dart:typed_data'; import 'package:amplify_kinesis_dart/src/impl/kinesis_record.dart'; -import 'package:amplify_kinesis_dart/src/impl/storage/record_storage_sqlite.dart'; +import 'package:amplify_record_cache_dart/amplify_record_cache_dart.dart'; import 'package:test/test.dart'; import 'helpers/test_database.dart'; @@ -22,7 +22,13 @@ void main() { setUp(() { final db = createTestDatabase(); - storage = SqliteRecordStorage(database: db, maxCacheBytes: 1024 * 1024); + storage = SqliteRecordStorage( + database: db, + maxCacheBytes: 1024 * 1024, + maxRecordsPerBatch: 500, + maxBytesPerBatch: 5 * 1024 * 1024, + maxRecordSizeBytes: 10 * 1024 * 1024, + ); }); tearDown(() async { @@ -35,14 +41,14 @@ void main() { test('cached size matches database after add operations', () async { await storage.addRecord( - RecordInput.now( + createKinesisRecordInputNow( data: Uint8List.fromList([1, 2, 3]), partitionKey: 'a', streamName: 'stream1', ), ); await 
storage.addRecord( - RecordInput.now( + createKinesisRecordInputNow( data: Uint8List.fromList([4, 5, 6, 7]), partitionKey: 'b', streamName: 'stream1', @@ -56,21 +62,21 @@ void main() { test('cached size matches database after delete operations', () async { await storage.addRecord( - RecordInput.now( + createKinesisRecordInputNow( data: Uint8List.fromList([1, 2, 3]), partitionKey: 'a', streamName: 'stream1', ), ); await storage.addRecord( - RecordInput.now( + createKinesisRecordInputNow( data: Uint8List.fromList([4, 5, 6, 7]), partitionKey: 'b', streamName: 'stream1', ), ); await storage.addRecord( - RecordInput.now( + createKinesisRecordInputNow( data: Uint8List.fromList([8, 9]), partitionKey: 'c', streamName: 'stream2', @@ -90,14 +96,14 @@ void main() { test('cached size matches database after clear operations', () async { await storage.addRecord( - RecordInput.now( + createKinesisRecordInputNow( data: Uint8List.fromList([1, 2, 3]), partitionKey: 'a', streamName: 'stream1', ), ); await storage.addRecord( - RecordInput.now( + createKinesisRecordInputNow( data: Uint8List.fromList([4, 5]), partitionKey: 'b', streamName: 'stream2', @@ -113,7 +119,7 @@ void main() { test('cached size remains accurate through mixed operations', () async { // "a"(1) + data(5) = 6 await storage.addRecord( - RecordInput.now( + createKinesisRecordInputNow( data: Uint8List.fromList([1, 2, 3, 4, 5]), partitionKey: 'a', streamName: 'stream1', @@ -121,7 +127,7 @@ void main() { ); // "b"(1) + data(3) = 4 await storage.addRecord( - RecordInput.now( + createKinesisRecordInputNow( data: Uint8List.fromList([6, 7, 8]), partitionKey: 'b', streamName: 'stream2', @@ -142,7 +148,7 @@ void main() { // Add another record: "c"(1) + data(2) = 3 await storage.addRecord( - RecordInput.now( + createKinesisRecordInputNow( data: Uint8List.fromList([9, 10]), partitionKey: 'c', streamName: 'stream3', @@ -174,7 +180,7 @@ void main() { for (var i = 0; i < recordsPerProducer; i++) { final key = 
'producer${p}_record$i'; await storage.addRecord( - RecordInput.now( + createKinesisRecordInputNow( data: Uint8List(recordSize), partitionKey: key, streamName: 'stream$p', @@ -194,7 +200,7 @@ void main() { if (records.isNotEmpty) { final toDelete = records.first; await storage.deleteRecords([toDelete.id]); - deleted.add(toDelete.partitionKey); + deleted.add(toDelete.partitionKey ?? ''); } } deletedKeys.add(deleted); @@ -215,7 +221,9 @@ void main() { expect(finalCacheSize, equals(expectedCacheSize)); // Verify every created key is either in DB or was deleted - final remainingKeys = finalRecords.map((r) => r.partitionKey).toSet(); + final remainingKeys = finalRecords + .map((r) => r.partitionKey ?? '') + .toSet(); final allCreatedKeys = createdKeys.values .expand((keys) => keys) .toSet(); @@ -253,21 +261,21 @@ void main() { 'getRecordsByStream with empty excludingIds returns all records', () async { await storage.addRecord( - RecordInput.now( + createKinesisRecordInputNow( data: Uint8List.fromList([1]), partitionKey: 'key1', streamName: 'stream1', ), ); await storage.addRecord( - RecordInput.now( + createKinesisRecordInputNow( data: Uint8List.fromList([2]), partitionKey: 'key2', streamName: 'stream1', ), ); await storage.addRecord( - RecordInput.now( + createKinesisRecordInputNow( data: Uint8List.fromList([3]), partitionKey: 'key3', streamName: 'stream2', diff --git a/packages/kinesis/amplify_record_cache_dart/analysis_options.yaml b/packages/kinesis/amplify_record_cache_dart/analysis_options.yaml new file mode 100644 index 00000000000..01538d576cf --- /dev/null +++ b/packages/kinesis/amplify_record_cache_dart/analysis_options.yaml @@ -0,0 +1,5 @@ +include: package:amplify_lints/library.yaml + +analyzer: + exclude: + - '**/*.g.dart' diff --git a/packages/kinesis/amplify_record_cache_dart/lib/amplify_record_cache_dart.dart b/packages/kinesis/amplify_record_cache_dart/lib/amplify_record_cache_dart.dart new file mode 100644 index 00000000000..49c0ed289dd --- /dev/null 
+++ b/packages/kinesis/amplify_record_cache_dart/lib/amplify_record_cache_dart.dart @@ -0,0 +1,28 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 + +/// Shared record caching infrastructure for Amplify streaming clients. +library; + +// Client +export 'src/client/auto_flush_scheduler.dart'; +export 'src/client/record_client.dart'; +// Database +export 'src/db/record_cache_database.dart' show RecordCacheDatabase; +// Exceptions +export 'src/exception/record_cache_exception.dart'; +// Flush strategy +export 'src/flush_strategy/flush_strategy.dart'; +// Models +export 'src/model/clear_cache_data.dart'; +export 'src/model/flush_data.dart'; +export 'src/model/record.dart'; +export 'src/model/record_data.dart'; +export 'src/model/record_input.dart'; +// Sender +export 'src/sender/sender.dart'; +// Storage +export 'src/storage/platform/record_storage_platform.dart'; +export 'src/storage/record_storage.dart' hide defaultRecoverySuggestion; +export 'src/storage/record_storage_memory.dart'; +export 'src/storage/record_storage_sqlite.dart'; diff --git a/packages/kinesis/amplify_kinesis_dart/lib/src/impl/auto_flush_scheduler.dart b/packages/kinesis/amplify_record_cache_dart/lib/src/client/auto_flush_scheduler.dart similarity index 91% rename from packages/kinesis/amplify_kinesis_dart/lib/src/impl/auto_flush_scheduler.dart rename to packages/kinesis/amplify_record_cache_dart/lib/src/client/auto_flush_scheduler.dart index 3332abfa806..3220eb30fa6 100644 --- a/packages/kinesis/amplify_kinesis_dart/lib/src/impl/auto_flush_scheduler.dart +++ b/packages/kinesis/amplify_record_cache_dart/lib/src/client/auto_flush_scheduler.dart @@ -4,9 +4,9 @@ import 'dart:async'; import 'package:amplify_foundation_dart/amplify_foundation_dart.dart'; -import 'package:amplify_kinesis_dart/src/impl/record_client.dart'; +import 'package:amplify_record_cache_dart/src/client/record_client.dart'; -/// {@template 
amplify_kinesis.auto_flush_scheduler} +/// {@template amplify_record_cache.auto_flush_scheduler} /// Manages automatic flush scheduling at a fixed interval. /// /// Takes a [Duration] interval and a [RecordClient]. @@ -17,7 +17,7 @@ import 'package:amplify_kinesis_dart/src/impl/record_client.dart'; /// a new one, preventing duplicate concurrent loops. /// {@endtemplate} final class AutoFlushScheduler { - /// {@macro amplify_kinesis.auto_flush_scheduler} + /// {@macro amplify_record_cache.auto_flush_scheduler} AutoFlushScheduler({required Duration interval, required RecordClient client}) : _interval = interval, _client = client; diff --git a/packages/kinesis/amplify_kinesis_dart/lib/src/impl/record_client.dart b/packages/kinesis/amplify_record_cache_dart/lib/src/client/record_client.dart similarity index 83% rename from packages/kinesis/amplify_kinesis_dart/lib/src/impl/record_client.dart rename to packages/kinesis/amplify_record_cache_dart/lib/src/client/record_client.dart index 873ce07d116..9d564418ca2 100644 --- a/packages/kinesis/amplify_kinesis_dart/lib/src/impl/record_client.dart +++ b/packages/kinesis/amplify_record_cache_dart/lib/src/client/record_client.dart @@ -2,17 +2,16 @@ // SPDX-License-Identifier: Apache-2.0 import 'package:amplify_foundation_dart/amplify_foundation_dart.dart'; -import 'package:amplify_kinesis_dart/src/impl/kinesis_record.dart'; -import 'package:amplify_kinesis_dart/src/impl/kinesis_sender.dart'; -import 'package:amplify_kinesis_dart/src/impl/storage/record_storage.dart'; -import 'package:amplify_kinesis_dart/src/model/clear_cache_data.dart'; -import 'package:amplify_kinesis_dart/src/model/flush_data.dart'; -import 'package:amplify_kinesis_dart/src/model/record_data.dart' - show RecordData; +import 'package:amplify_record_cache_dart/src/model/clear_cache_data.dart'; +import 'package:amplify_record_cache_dart/src/model/flush_data.dart'; +import 'package:amplify_record_cache_dart/src/model/record_data.dart'; +import 
'package:amplify_record_cache_dart/src/model/record_input.dart'; +import 'package:amplify_record_cache_dart/src/sender/sender.dart'; +import 'package:amplify_record_cache_dart/src/storage/record_storage.dart'; import 'package:smithy/smithy.dart' show SmithyHttpException, UnknownSmithyHttpException; -/// {@template amplify_kinesis.record_client} +/// {@template amplify_record_cache.record_client} /// Orchestrates record operations: storage, sending, and retry logic. /// /// - `record()` delegates directly to `storage.addRecord()` (validation @@ -21,22 +20,25 @@ import 'package:smithy/smithy.dart' /// stream and sends it. /// {@endtemplate} class RecordClient { - /// {@macro amplify_kinesis.record_client} + /// {@macro amplify_record_cache.record_client} RecordClient({ required RecordStorage storage, - required KinesisSender sender, + required Sender sender, required int maxRetries, }) : _storage = storage, _sender = sender, _maxRetries = maxRetries; final RecordStorage _storage; - final KinesisSender _sender; + final Sender _sender; final int _maxRetries; final Logger _logger = AmplifyLogging.logger('RecordClient'); bool _flushing = false; + /// Provides access to the underlying storage (for testing). + RecordStorage get storage => _storage; + /// Records data to the local cache. /// /// Delegates to [RecordStorage.addRecord] which handles validation @@ -48,7 +50,7 @@ class RecordClient { return const RecordData(); } - /// Flushes cached records to Kinesis. + /// Flushes cached records to the streaming service. /// /// Single-pass: retrieves one batch of records per stream, sends each /// batch, and returns. Records beyond the per-stream limit are picked @@ -79,8 +81,7 @@ class RecordClient { ? 'HTTP ${e.statusCode}: ${e.body}' : e.message; _logger.warn( - 'Kinesis SDK error flushing stream $streamName: $details. ' - 'Skipping', + 'SDK error flushing stream $streamName: $details. 
Skipping', ); await _handleFailedRequest(records); } catch (e) { @@ -97,7 +98,7 @@ class RecordClient { } Future _sendStreamBatch(String streamName, List records) async { - final result = await _sender.putRecords( + final result = await _sender.sendBatch( streamName: streamName, records: records, ); diff --git a/packages/kinesis/amplify_kinesis_dart/lib/src/db/kinesis_record_database.dart b/packages/kinesis/amplify_record_cache_dart/lib/src/db/record_cache_database.dart similarity index 52% rename from packages/kinesis/amplify_kinesis_dart/lib/src/db/kinesis_record_database.dart rename to packages/kinesis/amplify_record_cache_dart/lib/src/db/record_cache_database.dart index 3f72a6da0b2..42e8b98074d 100644 --- a/packages/kinesis/amplify_kinesis_dart/lib/src/db/kinesis_record_database.dart +++ b/packages/kinesis/amplify_record_cache_dart/lib/src/db/record_cache_database.dart @@ -6,26 +6,30 @@ import 'dart:async'; import 'package:amplify_db_common_dart/amplify_db_common_dart.dart'; import 'package:drift/drift.dart'; -part 'kinesis_record_database.g.dart'; +part 'record_cache_database.g.dart'; -/// Schema of the KinesisRecords table in SQLite. +/// Schema of the cached records table in SQLite. /// -/// When updating this schema, please bump [KinesisRecordDatabase.schemaVersion]. +/// The `partitionKey` column is present for Kinesis Data Streams +/// compatibility. Firehose clients write an empty string. +/// +/// When updating this schema, please bump +/// [RecordCacheDatabase.schemaVersion]. @DataClassName('DriftStoredRecord') -class KinesisRecords extends Table { +class CachedRecords extends Table { /// Auto-incrementing primary key. IntColumn get id => integer().autoIncrement()(); - /// The name of the Kinesis Data Stream. + /// The name of the target stream. TextColumn get streamName => text()(); - /// The partition key for the record. - TextColumn get partitionKey => text()(); + /// The partition key (empty string for services that don't use it). 
+ TextColumn get partitionKey => text().withDefault(const Constant(''))(); - /// The data blob to send to Kinesis. + /// The data blob to send. BlobColumn get data => blob()(); - /// The size of the data blob in bytes. + /// The size of the record in bytes. IntColumn get dataSize => integer()(); /// The number of times this record has been retried. @@ -35,30 +39,33 @@ class KinesisRecords extends Table { IntColumn get createdAt => integer()(); } -/// {@template amplify_kinesis.kinesis_record_database} -/// Drift database for managing stored Kinesis records. +/// {@template amplify_record_cache.record_cache_database} +/// Drift database for managing cached records. /// {@endtemplate} -@DriftDatabase(tables: [KinesisRecords]) -class KinesisRecordDatabase extends _$KinesisRecordDatabase { - /// {@macro amplify_kinesis.kinesis_record_database} +@DriftDatabase(tables: [CachedRecords]) +class RecordCacheDatabase extends _$RecordCacheDatabase { + /// {@macro amplify_record_cache.record_cache_database} /// + /// [dbPrefix] is the database name prefix (e.g. `kinesis_records`, + /// `firehose_records`). /// [identifier] is used to namespace the database (typically the AWS region). - /// [storagePath] is the directory path for the database file - factory KinesisRecordDatabase({ + /// [storagePath] is the directory path for the database file. + factory RecordCacheDatabase({ + required String dbPrefix, required String identifier, required FutureOr? storagePath, }) { final driftQueryExecutor = connect( - name: 'kinesis_records_$identifier', + name: '${dbPrefix}_$identifier', path: storagePath, ); - return KinesisRecordDatabase._(driftQueryExecutor); + return RecordCacheDatabase._(driftQueryExecutor); } /// Creates a database with a custom query executor (for testing). 
- KinesisRecordDatabase.forTesting(super.executor); + RecordCacheDatabase.forTesting(super.executor); - KinesisRecordDatabase._(super.driftQueryExecutor); + RecordCacheDatabase._(super.driftQueryExecutor); // Bump this number whenever you change or add a table definition. @override @@ -69,14 +76,13 @@ class KinesisRecordDatabase extends _$KinesisRecordDatabase { return MigrationStrategy( onCreate: (Migrator m) async { await m.createAll(); - // Indices matching the Android schema. await customStatement( 'CREATE INDEX IF NOT EXISTS idx_stream_id ' - 'ON kinesis_records(stream_name, id)', + 'ON cached_records(stream_name, id)', ); await customStatement( 'CREATE INDEX IF NOT EXISTS idx_data_size ' - 'ON kinesis_records(data_size)', + 'ON cached_records(data_size)', ); }, ); diff --git a/packages/kinesis/amplify_kinesis_dart/lib/src/db/kinesis_record_database.g.dart b/packages/kinesis/amplify_record_cache_dart/lib/src/db/record_cache_database.g.dart similarity index 83% rename from packages/kinesis/amplify_kinesis_dart/lib/src/db/kinesis_record_database.g.dart rename to packages/kinesis/amplify_record_cache_dart/lib/src/db/record_cache_database.g.dart index e76bbb21b87..51023659802 100644 --- a/packages/kinesis/amplify_kinesis_dart/lib/src/db/kinesis_record_database.g.dart +++ b/packages/kinesis/amplify_record_cache_dart/lib/src/db/record_cache_database.g.dart @@ -1,14 +1,14 @@ // GENERATED CODE - DO NOT MODIFY BY HAND -part of 'kinesis_record_database.dart'; +part of 'record_cache_database.dart'; // ignore_for_file: type=lint -class $KinesisRecordsTable extends KinesisRecords - with TableInfo<$KinesisRecordsTable, DriftStoredRecord> { +class $CachedRecordsTable extends CachedRecords + with TableInfo<$CachedRecordsTable, DriftStoredRecord> { @override final GeneratedDatabase attachedDatabase; final String? 
_alias; - $KinesisRecordsTable(this.attachedDatabase, [this._alias]); + $CachedRecordsTable(this.attachedDatabase, [this._alias]); static const VerificationMeta _idMeta = const VerificationMeta('id'); @override late final GeneratedColumn id = GeneratedColumn( @@ -42,7 +42,8 @@ class $KinesisRecordsTable extends KinesisRecords aliasedName, false, type: DriftSqlType.string, - requiredDuringInsert: true, + requiredDuringInsert: false, + defaultValue: const Constant(''), ); static const VerificationMeta _dataMeta = const VerificationMeta('data'); @override @@ -101,7 +102,7 @@ class $KinesisRecordsTable extends KinesisRecords String get aliasedName => _alias ?? actualTableName; @override String get actualTableName => $name; - static const String $name = 'kinesis_records'; + static const String $name = 'cached_records'; @override VerificationContext validateIntegrity( Insertable instance, { @@ -128,8 +129,6 @@ class $KinesisRecordsTable extends KinesisRecords _partitionKeyMeta, ), ); - } else if (isInserting) { - context.missing(_partitionKeyMeta); } if (data.containsKey('data')) { context.handle( @@ -202,8 +201,8 @@ class $KinesisRecordsTable extends KinesisRecords } @override - $KinesisRecordsTable createAlias(String alias) { - return $KinesisRecordsTable(attachedDatabase, alias); + $CachedRecordsTable createAlias(String alias) { + return $CachedRecordsTable(attachedDatabase, alias); } } @@ -212,16 +211,16 @@ class DriftStoredRecord extends DataClass /// Auto-incrementing primary key. final int id; - /// The name of the Kinesis Data Stream. + /// The name of the target stream. final String streamName; - /// The partition key for the record. + /// The partition key (empty string for services that don't use it). final String partitionKey; - /// The data blob to send to Kinesis. + /// The data blob to send. final Uint8List data; - /// The size of the data blob in bytes. + /// The size of the record in bytes. 
final int dataSize; /// The number of times this record has been retried. @@ -251,8 +250,8 @@ class DriftStoredRecord extends DataClass return map; } - KinesisRecordsCompanion toCompanion(bool nullToAbsent) { - return KinesisRecordsCompanion( + CachedRecordsCompanion toCompanion(bool nullToAbsent) { + return CachedRecordsCompanion( id: Value(id), streamName: Value(streamName), partitionKey: Value(partitionKey), @@ -309,7 +308,7 @@ class DriftStoredRecord extends DataClass retryCount: retryCount ?? this.retryCount, createdAt: createdAt ?? this.createdAt, ); - DriftStoredRecord copyWithCompanion(KinesisRecordsCompanion data) { + DriftStoredRecord copyWithCompanion(CachedRecordsCompanion data) { return DriftStoredRecord( id: data.id.present ? data.id.value : this.id, streamName: data.streamName.present @@ -364,7 +363,7 @@ class DriftStoredRecord extends DataClass other.createdAt == this.createdAt); } -class KinesisRecordsCompanion extends UpdateCompanion { +class CachedRecordsCompanion extends UpdateCompanion { final Value id; final Value streamName; final Value partitionKey; @@ -372,7 +371,7 @@ class KinesisRecordsCompanion extends UpdateCompanion { final Value dataSize; final Value retryCount; final Value createdAt; - const KinesisRecordsCompanion({ + const CachedRecordsCompanion({ this.id = const Value.absent(), this.streamName = const Value.absent(), this.partitionKey = const Value.absent(), @@ -381,16 +380,15 @@ class KinesisRecordsCompanion extends UpdateCompanion { this.retryCount = const Value.absent(), this.createdAt = const Value.absent(), }); - KinesisRecordsCompanion.insert({ + CachedRecordsCompanion.insert({ this.id = const Value.absent(), required String streamName, - required String partitionKey, + this.partitionKey = const Value.absent(), required Uint8List data, required int dataSize, this.retryCount = const Value.absent(), required int createdAt, }) : streamName = Value(streamName), - partitionKey = Value(partitionKey), data = Value(data), dataSize = 
Value(dataSize), createdAt = Value(createdAt); @@ -414,7 +412,7 @@ class KinesisRecordsCompanion extends UpdateCompanion { }); } - KinesisRecordsCompanion copyWith({ + CachedRecordsCompanion copyWith({ Value? id, Value? streamName, Value? partitionKey, @@ -423,7 +421,7 @@ class KinesisRecordsCompanion extends UpdateCompanion { Value? retryCount, Value? createdAt, }) { - return KinesisRecordsCompanion( + return CachedRecordsCompanion( id: id ?? this.id, streamName: streamName ?? this.streamName, partitionKey: partitionKey ?? this.partitionKey, @@ -463,7 +461,7 @@ class KinesisRecordsCompanion extends UpdateCompanion { @override String toString() { - return (StringBuffer('KinesisRecordsCompanion(') + return (StringBuffer('CachedRecordsCompanion(') ..write('id: $id, ') ..write('streamName: $streamName, ') ..write('partitionKey: $partitionKey, ') @@ -476,30 +474,29 @@ class KinesisRecordsCompanion extends UpdateCompanion { } } -abstract class _$KinesisRecordDatabase extends GeneratedDatabase { - _$KinesisRecordDatabase(QueryExecutor e) : super(e); - $KinesisRecordDatabaseManager get managers => - $KinesisRecordDatabaseManager(this); - late final $KinesisRecordsTable kinesisRecords = $KinesisRecordsTable(this); +abstract class _$RecordCacheDatabase extends GeneratedDatabase { + _$RecordCacheDatabase(QueryExecutor e) : super(e); + $RecordCacheDatabaseManager get managers => $RecordCacheDatabaseManager(this); + late final $CachedRecordsTable cachedRecords = $CachedRecordsTable(this); @override Iterable> get allTables => allSchemaEntities.whereType>(); @override - List get allSchemaEntities => [kinesisRecords]; + List get allSchemaEntities => [cachedRecords]; } -typedef $$KinesisRecordsTableCreateCompanionBuilder = - KinesisRecordsCompanion Function({ +typedef $$CachedRecordsTableCreateCompanionBuilder = + CachedRecordsCompanion Function({ Value id, required String streamName, - required String partitionKey, + Value partitionKey, required Uint8List data, required int 
dataSize, Value retryCount, required int createdAt, }); -typedef $$KinesisRecordsTableUpdateCompanionBuilder = - KinesisRecordsCompanion Function({ +typedef $$CachedRecordsTableUpdateCompanionBuilder = + CachedRecordsCompanion Function({ Value id, Value streamName, Value partitionKey, @@ -509,9 +506,9 @@ typedef $$KinesisRecordsTableUpdateCompanionBuilder = Value createdAt, }); -class $$KinesisRecordsTableFilterComposer - extends Composer<_$KinesisRecordDatabase, $KinesisRecordsTable> { - $$KinesisRecordsTableFilterComposer({ +class $$CachedRecordsTableFilterComposer + extends Composer<_$RecordCacheDatabase, $CachedRecordsTable> { + $$CachedRecordsTableFilterComposer({ required super.$db, required super.$table, super.joinBuilder, @@ -554,9 +551,9 @@ class $$KinesisRecordsTableFilterComposer ); } -class $$KinesisRecordsTableOrderingComposer - extends Composer<_$KinesisRecordDatabase, $KinesisRecordsTable> { - $$KinesisRecordsTableOrderingComposer({ +class $$CachedRecordsTableOrderingComposer + extends Composer<_$RecordCacheDatabase, $CachedRecordsTable> { + $$CachedRecordsTableOrderingComposer({ required super.$db, required super.$table, super.joinBuilder, @@ -599,9 +596,9 @@ class $$KinesisRecordsTableOrderingComposer ); } -class $$KinesisRecordsTableAnnotationComposer - extends Composer<_$KinesisRecordDatabase, $KinesisRecordsTable> { - $$KinesisRecordsTableAnnotationComposer({ +class $$CachedRecordsTableAnnotationComposer + extends Composer<_$RecordCacheDatabase, $CachedRecordsTable> { + $$CachedRecordsTableAnnotationComposer({ required super.$db, required super.$table, super.joinBuilder, @@ -636,41 +633,41 @@ class $$KinesisRecordsTableAnnotationComposer $composableBuilder(column: $table.createdAt, builder: (column) => column); } -class $$KinesisRecordsTableTableManager +class $$CachedRecordsTableTableManager extends RootTableManager< - _$KinesisRecordDatabase, - $KinesisRecordsTable, + _$RecordCacheDatabase, + $CachedRecordsTable, DriftStoredRecord, - 
$$KinesisRecordsTableFilterComposer, - $$KinesisRecordsTableOrderingComposer, - $$KinesisRecordsTableAnnotationComposer, - $$KinesisRecordsTableCreateCompanionBuilder, - $$KinesisRecordsTableUpdateCompanionBuilder, + $$CachedRecordsTableFilterComposer, + $$CachedRecordsTableOrderingComposer, + $$CachedRecordsTableAnnotationComposer, + $$CachedRecordsTableCreateCompanionBuilder, + $$CachedRecordsTableUpdateCompanionBuilder, ( DriftStoredRecord, BaseReferences< - _$KinesisRecordDatabase, - $KinesisRecordsTable, + _$RecordCacheDatabase, + $CachedRecordsTable, DriftStoredRecord >, ), DriftStoredRecord, PrefetchHooks Function() > { - $$KinesisRecordsTableTableManager( - _$KinesisRecordDatabase db, - $KinesisRecordsTable table, + $$CachedRecordsTableTableManager( + _$RecordCacheDatabase db, + $CachedRecordsTable table, ) : super( TableManagerState( db: db, table: table, createFilteringComposer: () => - $$KinesisRecordsTableFilterComposer($db: db, $table: table), + $$CachedRecordsTableFilterComposer($db: db, $table: table), createOrderingComposer: () => - $$KinesisRecordsTableOrderingComposer($db: db, $table: table), + $$CachedRecordsTableOrderingComposer($db: db, $table: table), createComputedFieldComposer: () => - $$KinesisRecordsTableAnnotationComposer($db: db, $table: table), + $$CachedRecordsTableAnnotationComposer($db: db, $table: table), updateCompanionCallback: ({ Value id = const Value.absent(), @@ -680,7 +677,7 @@ class $$KinesisRecordsTableTableManager Value dataSize = const Value.absent(), Value retryCount = const Value.absent(), Value createdAt = const Value.absent(), - }) => KinesisRecordsCompanion( + }) => CachedRecordsCompanion( id: id, streamName: streamName, partitionKey: partitionKey, @@ -693,12 +690,12 @@ class $$KinesisRecordsTableTableManager ({ Value id = const Value.absent(), required String streamName, - required String partitionKey, + Value partitionKey = const Value.absent(), required Uint8List data, required int dataSize, Value retryCount = 
const Value.absent(), required int createdAt, - }) => KinesisRecordsCompanion.insert( + }) => CachedRecordsCompanion.insert( id: id, streamName: streamName, partitionKey: partitionKey, @@ -715,21 +712,21 @@ class $$KinesisRecordsTableTableManager ); } -typedef $$KinesisRecordsTableProcessedTableManager = +typedef $$CachedRecordsTableProcessedTableManager = ProcessedTableManager< - _$KinesisRecordDatabase, - $KinesisRecordsTable, + _$RecordCacheDatabase, + $CachedRecordsTable, DriftStoredRecord, - $$KinesisRecordsTableFilterComposer, - $$KinesisRecordsTableOrderingComposer, - $$KinesisRecordsTableAnnotationComposer, - $$KinesisRecordsTableCreateCompanionBuilder, - $$KinesisRecordsTableUpdateCompanionBuilder, + $$CachedRecordsTableFilterComposer, + $$CachedRecordsTableOrderingComposer, + $$CachedRecordsTableAnnotationComposer, + $$CachedRecordsTableCreateCompanionBuilder, + $$CachedRecordsTableUpdateCompanionBuilder, ( DriftStoredRecord, BaseReferences< - _$KinesisRecordDatabase, - $KinesisRecordsTable, + _$RecordCacheDatabase, + $CachedRecordsTable, DriftStoredRecord >, ), @@ -737,9 +734,9 @@ typedef $$KinesisRecordsTableProcessedTableManager = PrefetchHooks Function() >; -class $KinesisRecordDatabaseManager { - final _$KinesisRecordDatabase _db; - $KinesisRecordDatabaseManager(this._db); - $$KinesisRecordsTableTableManager get kinesisRecords => - $$KinesisRecordsTableTableManager(_db, _db.kinesisRecords); +class $RecordCacheDatabaseManager { + final _$RecordCacheDatabase _db; + $RecordCacheDatabaseManager(this._db); + $$CachedRecordsTableTableManager get cachedRecords => + $$CachedRecordsTableTableManager(_db, _db.cachedRecords); } diff --git a/packages/kinesis/amplify_kinesis_dart/lib/src/exception/record_cache_exception.dart b/packages/kinesis/amplify_record_cache_dart/lib/src/exception/record_cache_exception.dart similarity index 73% rename from packages/kinesis/amplify_kinesis_dart/lib/src/exception/record_cache_exception.dart rename to 
packages/kinesis/amplify_record_cache_dart/lib/src/exception/record_cache_exception.dart index 969cd892d2d..a5152118c22 100644 --- a/packages/kinesis/amplify_kinesis_dart/lib/src/exception/record_cache_exception.dart +++ b/packages/kinesis/amplify_record_cache_dart/lib/src/exception/record_cache_exception.dart @@ -1,13 +1,19 @@ // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. // SPDX-License-Identifier: Apache-2.0 +/// {@template amplify_record_cache.record_cache_exception} /// Internal error type used by RecordClient / RecordStorage. /// -/// Mapped to the public AmplifyKinesisException hierarchy at the -/// AmplifyKinesisClient boundary via `AmplifyKinesisException.from`. +/// Mapped to the public exception hierarchy at the client boundary +/// (e.g. `AmplifyKinesisException.from` or `AmplifyFirehoseException.from`). +/// {@endtemplate} sealed class RecordCacheException implements Exception { /// Creates a [RecordCacheException]. - RecordCacheException(this.message, this.recoverySuggestion, [this.cause]); + const RecordCacheException( + this.message, + this.recoverySuggestion, [ + this.cause, + ]); /// A message describing the error. final String message; @@ -29,7 +35,7 @@ sealed class RecordCacheException implements Exception { /// Database operation failed. final class RecordCacheDatabaseException extends RecordCacheException { /// Creates a [RecordCacheDatabaseException]. - RecordCacheDatabaseException( + const RecordCacheDatabaseException( super.message, super.recoverySuggestion, [ super.cause, @@ -39,18 +45,17 @@ final class RecordCacheDatabaseException extends RecordCacheException { /// Cache limit exceeded — no space for new records. final class RecordCacheLimitExceededException extends RecordCacheException { /// Creates a [RecordCacheLimitExceededException]. 
- RecordCacheLimitExceededException( + const RecordCacheLimitExceededException( super.message, super.recoverySuggestion, [ super.cause, ]); } -/// Record input validation failed (e.g. oversized record, invalid partition -/// key). +/// Record input validation failed (e.g. oversized record). final class RecordCacheValidationException extends RecordCacheException { /// Creates a [RecordCacheValidationException]. - RecordCacheValidationException( + const RecordCacheValidationException( super.message, super.recoverySuggestion, [ super.cause, diff --git a/packages/kinesis/amplify_kinesis_dart/lib/src/flush_strategy/flush_strategy.dart b/packages/kinesis/amplify_record_cache_dart/lib/src/flush_strategy/flush_strategy.dart similarity index 71% rename from packages/kinesis/amplify_kinesis_dart/lib/src/flush_strategy/flush_strategy.dart rename to packages/kinesis/amplify_record_cache_dart/lib/src/flush_strategy/flush_strategy.dart index 63b0c62135d..106d803be95 100644 --- a/packages/kinesis/amplify_kinesis_dart/lib/src/flush_strategy/flush_strategy.dart +++ b/packages/kinesis/amplify_record_cache_dart/lib/src/flush_strategy/flush_strategy.dart @@ -1,7 +1,7 @@ // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. // SPDX-License-Identifier: Apache-2.0 -/// {@template amplify_kinesis.flush_strategy} +/// {@template amplify_record_cache.flush_strategy} /// Determines when automatic flushing of cached records occurs. /// /// Available strategies: @@ -9,25 +9,25 @@ /// - [FlushNone]: Disable automatic flushing entirely /// {@endtemplate} sealed class FlushStrategy { - /// {@macro amplify_kinesis.flush_strategy} + /// {@macro amplify_record_cache.flush_strategy} const FlushStrategy(); } -/// {@template amplify_kinesis.interval_flush_strategy} +/// {@template amplify_record_cache.interval_flush_strategy} /// A flush strategy that triggers automatic flushes at a fixed interval. 
/// {@endtemplate} final class FlushInterval extends FlushStrategy { - /// {@macro amplify_kinesis.interval_flush_strategy} + /// {@macro amplify_record_cache.interval_flush_strategy} const FlushInterval({this.interval = const Duration(seconds: 30)}); /// The interval between automatic flush operations. final Duration interval; } -/// {@template amplify_kinesis.none_flush_strategy} +/// {@template amplify_record_cache.none_flush_strategy} /// A flush strategy that disables automatic flushing. /// {@endtemplate} final class FlushNone extends FlushStrategy { - /// {@macro amplify_kinesis.none_flush_strategy} + /// {@macro amplify_record_cache.none_flush_strategy} const FlushNone(); } diff --git a/packages/kinesis/amplify_kinesis_dart/lib/src/model/clear_cache_data.dart b/packages/kinesis/amplify_record_cache_dart/lib/src/model/clear_cache_data.dart similarity index 80% rename from packages/kinesis/amplify_kinesis_dart/lib/src/model/clear_cache_data.dart rename to packages/kinesis/amplify_record_cache_dart/lib/src/model/clear_cache_data.dart index e0016ac18ef..b8d8a0e38da 100644 --- a/packages/kinesis/amplify_kinesis_dart/lib/src/model/clear_cache_data.dart +++ b/packages/kinesis/amplify_record_cache_dart/lib/src/model/clear_cache_data.dart @@ -1,11 +1,11 @@ // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. // SPDX-License-Identifier: Apache-2.0 -/// {@template amplify_kinesis.clear_cache_data} +/// {@template amplify_record_cache.clear_cache_data} /// Data returned from a clearCache operation. /// {@endtemplate} final class ClearCacheData { - /// {@macro amplify_kinesis.clear_cache_data} + /// {@macro amplify_record_cache.clear_cache_data} const ClearCacheData({this.recordsCleared = 0}); /// The number of records that were cleared from the cache. 
diff --git a/packages/kinesis/amplify_kinesis_dart/lib/src/model/flush_data.dart b/packages/kinesis/amplify_record_cache_dart/lib/src/model/flush_data.dart similarity index 71% rename from packages/kinesis/amplify_kinesis_dart/lib/src/model/flush_data.dart rename to packages/kinesis/amplify_record_cache_dart/lib/src/model/flush_data.dart index b908b88be8a..b69f968554b 100644 --- a/packages/kinesis/amplify_kinesis_dart/lib/src/model/flush_data.dart +++ b/packages/kinesis/amplify_record_cache_dart/lib/src/model/flush_data.dart @@ -1,11 +1,11 @@ // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. // SPDX-License-Identifier: Apache-2.0 -/// {@template amplify_kinesis.flush_data} +/// {@template amplify_record_cache.flush_data} /// Data returned from a flush operation. /// {@endtemplate} final class FlushData { - /// {@macro amplify_kinesis.flush_data} + /// {@macro amplify_record_cache.flush_data} const FlushData({this.recordsFlushed = 0, this.flushInProgress = false}); /// The number of records successfully flushed. @@ -16,5 +16,6 @@ final class FlushData { @override String toString() => - 'FlushData(recordsFlushed: $recordsFlushed, flushInProgress: $flushInProgress)'; + 'FlushData(recordsFlushed: $recordsFlushed, ' + 'flushInProgress: $flushInProgress)'; } diff --git a/packages/kinesis/amplify_kinesis_dart/lib/src/model/record.dart b/packages/kinesis/amplify_record_cache_dart/lib/src/model/record.dart similarity index 68% rename from packages/kinesis/amplify_kinesis_dart/lib/src/model/record.dart rename to packages/kinesis/amplify_record_cache_dart/lib/src/model/record.dart index 77e89a89d11..3ee0f9f0fb1 100644 --- a/packages/kinesis/amplify_kinesis_dart/lib/src/model/record.dart +++ b/packages/kinesis/amplify_record_cache_dart/lib/src/model/record.dart @@ -3,37 +3,38 @@ import 'dart:typed_data'; -/// {@template amplify_kinesis.record} -/// A record persisted in local storage, ready to be flushed to Kinesis. 
+/// {@template amplify_record_cache.record} +/// A record persisted in local storage, ready to be flushed to a streaming +/// service (Kinesis Data Streams, Firehose, etc.). /// /// This is a plain Dart class with no ORM coupling, shared across all /// storage backends (SQLite, IndexedDB, In-Memory). /// {@endtemplate} final class Record { - /// {@macro amplify_kinesis.record} + /// {@macro amplify_record_cache.record} const Record({ required this.id, required this.streamName, - required this.partitionKey, required this.data, required this.dataSize, required this.retryCount, required this.createdAt, + this.partitionKey, }); /// Auto-incrementing primary key. final int id; - /// The name of the Kinesis Data Stream. + /// The name of the target stream. final String streamName; - /// The partition key for the record. - final String partitionKey; + /// Optional partition key (used by Kinesis Data Streams, null for Firehose). + final String? partitionKey; - /// The data blob to send to Kinesis. + /// The data blob to send. final Uint8List data; - /// The size of the data blob in bytes. + /// The size of the record in bytes. final int dataSize; /// The number of times this record has been retried. 
@@ -45,6 +46,5 @@ final class Record { @override String toString() => 'Record(id: $id, streamName: $streamName, ' - 'partitionKey: $partitionKey, dataSize: $dataSize, ' - 'retryCount: $retryCount)'; + 'dataSize: $dataSize, retryCount: $retryCount)'; } diff --git a/packages/kinesis/amplify_kinesis_dart/lib/src/model/record_data.dart b/packages/kinesis/amplify_record_cache_dart/lib/src/model/record_data.dart similarity index 73% rename from packages/kinesis/amplify_kinesis_dart/lib/src/model/record_data.dart rename to packages/kinesis/amplify_record_cache_dart/lib/src/model/record_data.dart index 149c2302406..5421fbd4ab4 100644 --- a/packages/kinesis/amplify_kinesis_dart/lib/src/model/record_data.dart +++ b/packages/kinesis/amplify_record_cache_dart/lib/src/model/record_data.dart @@ -1,11 +1,11 @@ // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. // SPDX-License-Identifier: Apache-2.0 -/// {@template amplify_kinesis.record_data} +/// {@template amplify_record_cache.record_data} /// Data returned from a record operation. /// {@endtemplate} final class RecordData { - /// {@macro amplify_kinesis.record_data} + /// {@macro amplify_record_cache.record_data} const RecordData(); @override diff --git a/packages/kinesis/amplify_record_cache_dart/lib/src/model/record_input.dart b/packages/kinesis/amplify_record_cache_dart/lib/src/model/record_input.dart new file mode 100644 index 00000000000..f622c604219 --- /dev/null +++ b/packages/kinesis/amplify_record_cache_dart/lib/src/model/record_input.dart @@ -0,0 +1,54 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 + +import 'dart:typed_data'; + +/// {@template amplify_record_cache.record_input} +/// A record to be persisted in the cache before sending. 
+/// +/// The [dataSize] is provided by the caller so that each service can +/// compute it according to its own rules: +/// - Kinesis Data Streams: `data.length + utf8.encode(partitionKey).length` +/// - Firehose: `data.length` +/// {@endtemplate} +final class RecordInput { + /// Creates a new record input. + RecordInput({ + required this.data, + required this.streamName, + required this.dataSize, + required this.createdAt, + this.partitionKey, + }); + + /// Creates a record input with the current timestamp. + factory RecordInput.now({ + required Uint8List data, + required String streamName, + required int dataSize, + String? partitionKey, + }) { + return RecordInput( + data: data, + streamName: streamName, + dataSize: dataSize, + createdAt: DateTime.now(), + partitionKey: partitionKey, + ); + } + + /// The data blob. + final Uint8List data; + + /// The name of the target stream. + final String streamName; + + /// Optional partition key (used by KDS, null for Firehose). + final String? partitionKey; + + /// The size of the record in bytes (caller-computed). + final int dataSize; + + /// Timestamp of when the record was created. + final DateTime createdAt; +} diff --git a/packages/kinesis/amplify_record_cache_dart/lib/src/sender/sender.dart b/packages/kinesis/amplify_record_cache_dart/lib/src/sender/sender.dart new file mode 100644 index 00000000000..0fe6cde261d --- /dev/null +++ b/packages/kinesis/amplify_record_cache_dart/lib/src/sender/sender.dart @@ -0,0 +1,45 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 + +import 'package:amplify_record_cache_dart/src/model/record.dart'; + +/// Result of a batch send operation. +/// +/// Records are categorized into three buckets: +/// - [successfulIds]: records accepted by the service. +/// - [retryableIds]: records that failed but can be retried. +/// - [failedIds]: records that exceeded the retry limit and should be deleted. 
+final class SendResult { + /// Creates a new [SendResult]. + const SendResult({ + required this.successfulIds, + required this.retryableIds, + required this.failedIds, + }); + + /// IDs of records that were successfully sent. + final List successfulIds; + + /// IDs of records that failed but can be retried (retry count < max). + final List retryableIds; + + /// IDs of records that exceeded the retry limit and should be deleted. + final List failedIds; +} + +/// {@template amplify_record_cache.sender} +/// Abstract interface for sending a batch of records to a streaming service. +/// +/// Implementations handle the service-specific API call (e.g. Kinesis +/// `PutRecords`, Firehose `PutRecordBatch`) and categorize the response +/// into successful, retryable, and failed records. +/// {@endtemplate} +// ignore: one_member_abstracts +abstract interface class Sender { + /// Sends a batch of records to the specified stream and returns the + /// categorized result. + Future sendBatch({ + required String streamName, + required List records, + }); +} diff --git a/packages/kinesis/amplify_kinesis_dart/lib/src/impl/storage/platform/record_storage_platform.dart b/packages/kinesis/amplify_record_cache_dart/lib/src/storage/platform/record_storage_platform.dart similarity index 100% rename from packages/kinesis/amplify_kinesis_dart/lib/src/impl/storage/platform/record_storage_platform.dart rename to packages/kinesis/amplify_record_cache_dart/lib/src/storage/platform/record_storage_platform.dart diff --git a/packages/kinesis/amplify_kinesis_dart/lib/src/impl/storage/platform/record_storage_platform_stub.dart b/packages/kinesis/amplify_record_cache_dart/lib/src/storage/platform/record_storage_platform_stub.dart similarity index 73% rename from packages/kinesis/amplify_kinesis_dart/lib/src/impl/storage/platform/record_storage_platform_stub.dart rename to packages/kinesis/amplify_record_cache_dart/lib/src/storage/platform/record_storage_platform_stub.dart index 
2cbf1693390..eda24335ef2 100644 --- a/packages/kinesis/amplify_kinesis_dart/lib/src/impl/storage/platform/record_storage_platform_stub.dart +++ b/packages/kinesis/amplify_record_cache_dart/lib/src/storage/platform/record_storage_platform_stub.dart @@ -3,7 +3,7 @@ import 'dart:async'; -import 'package:amplify_kinesis_dart/src/impl/storage/record_storage.dart'; +import 'package:amplify_record_cache_dart/src/storage/record_storage.dart'; /// Creates a platform-specific [RecordStorage] instance. /// @@ -13,6 +13,11 @@ Future createPlatformRecordStorage({ required String identifier, required FutureOr? storagePath, required int maxCacheBytes, + required int maxRecordsPerBatch, + required int maxBytesPerBatch, + required int maxRecordSizeBytes, + required String dbPrefix, + required String storeName, }) { throw UnsupportedError( 'Cannot create RecordStorage: no platform implementation available.', diff --git a/packages/kinesis/amplify_kinesis_dart/lib/src/impl/storage/platform/record_storage_platform_vm.dart b/packages/kinesis/amplify_record_cache_dart/lib/src/storage/platform/record_storage_platform_vm.dart similarity index 58% rename from packages/kinesis/amplify_kinesis_dart/lib/src/impl/storage/platform/record_storage_platform_vm.dart rename to packages/kinesis/amplify_record_cache_dart/lib/src/storage/platform/record_storage_platform_vm.dart index 273caba33af..9ee9d72b91a 100644 --- a/packages/kinesis/amplify_kinesis_dart/lib/src/impl/storage/platform/record_storage_platform_vm.dart +++ b/packages/kinesis/amplify_record_cache_dart/lib/src/storage/platform/record_storage_platform_vm.dart @@ -4,26 +4,35 @@ import 'dart:async'; import 'package:amplify_foundation_dart/amplify_foundation_dart.dart'; -import 'package:amplify_kinesis_dart/src/db/kinesis_record_database.dart'; -import 'package:amplify_kinesis_dart/src/impl/storage/record_storage.dart'; -import 'package:amplify_kinesis_dart/src/impl/storage/record_storage_sqlite.dart'; +import 
'package:amplify_record_cache_dart/src/db/record_cache_database.dart'; +import 'package:amplify_record_cache_dart/src/storage/record_storage.dart'; +import 'package:amplify_record_cache_dart/src/storage/record_storage_sqlite.dart'; /// Creates a [SqliteRecordStorage] for VM platforms. Future createPlatformRecordStorage({ required String identifier, required FutureOr? storagePath, required int maxCacheBytes, + required int maxRecordsPerBatch, + required int maxBytesPerBatch, + required int maxRecordSizeBytes, + required String dbPrefix, + required String storeName, }) async { assert(storagePath != null, 'storagePath is required on VM platforms.'); AmplifyLogging.logger( 'RecordStorage', ).info('Using SQLite storage (path: $storagePath)'); - final database = KinesisRecordDatabase( + final database = RecordCacheDatabase( + dbPrefix: dbPrefix, identifier: identifier, storagePath: storagePath, ); return SqliteRecordStorage.create( database: database, maxCacheBytes: maxCacheBytes, + maxRecordsPerBatch: maxRecordsPerBatch, + maxBytesPerBatch: maxBytesPerBatch, + maxRecordSizeBytes: maxRecordSizeBytes, ); } diff --git a/packages/kinesis/amplify_kinesis_dart/lib/src/impl/storage/platform/record_storage_platform_web.dart b/packages/kinesis/amplify_record_cache_dart/lib/src/storage/platform/record_storage_platform_web.dart similarity index 59% rename from packages/kinesis/amplify_kinesis_dart/lib/src/impl/storage/platform/record_storage_platform_web.dart rename to packages/kinesis/amplify_record_cache_dart/lib/src/storage/platform/record_storage_platform_web.dart index 8b96184e92c..f1b37aeb35f 100644 --- a/packages/kinesis/amplify_kinesis_dart/lib/src/impl/storage/platform/record_storage_platform_web.dart +++ b/packages/kinesis/amplify_record_cache_dart/lib/src/storage/platform/record_storage_platform_web.dart @@ -4,9 +4,9 @@ import 'dart:async'; import 'package:amplify_foundation_dart/amplify_foundation_dart.dart'; -import 
'package:amplify_kinesis_dart/src/impl/storage/record_storage.dart'; -import 'package:amplify_kinesis_dart/src/impl/storage/record_storage_indexeddb.dart'; -import 'package:amplify_kinesis_dart/src/impl/storage/record_storage_memory.dart'; +import 'package:amplify_record_cache_dart/src/storage/record_storage.dart'; +import 'package:amplify_record_cache_dart/src/storage/record_storage_indexeddb.dart'; +import 'package:amplify_record_cache_dart/src/storage/record_storage_memory.dart'; /// Creates a web [RecordStorage] instance. /// @@ -16,6 +16,11 @@ Future createPlatformRecordStorage({ required String identifier, required FutureOr? storagePath, required int maxCacheBytes, + required int maxRecordsPerBatch, + required int maxBytesPerBatch, + required int maxRecordSizeBytes, + required String dbPrefix, + required String storeName, }) async { final logger = AmplifyLogging.logger('RecordStorage'); // storagePath is ignored on web. @@ -24,11 +29,21 @@ Future createPlatformRecordStorage({ return IndexedDbRecordStorage.create( identifier: identifier, maxCacheBytes: maxCacheBytes, + maxRecordsPerBatch: maxRecordsPerBatch, + maxBytesPerBatch: maxBytesPerBatch, + maxRecordSizeBytes: maxRecordSizeBytes, + dbPrefix: dbPrefix, + storeName: storeName, ); } logger.warn( 'IndexedDB is not available. Falling back to in-memory storage. 
' 'Records will be lost when the page is closed.', ); - return InMemoryRecordStorage(maxCacheBytes: maxCacheBytes); + return InMemoryRecordStorage( + maxCacheBytes: maxCacheBytes, + maxRecordsPerBatch: maxRecordsPerBatch, + maxBytesPerBatch: maxBytesPerBatch, + maxRecordSizeBytes: maxRecordSizeBytes, + ); } diff --git a/packages/kinesis/amplify_kinesis_dart/lib/src/impl/storage/record_storage.dart b/packages/kinesis/amplify_record_cache_dart/lib/src/storage/record_storage.dart similarity index 67% rename from packages/kinesis/amplify_kinesis_dart/lib/src/impl/storage/record_storage.dart rename to packages/kinesis/amplify_record_cache_dart/lib/src/storage/record_storage.dart index 5833c12d7d0..91cf6784e27 100644 --- a/packages/kinesis/amplify_kinesis_dart/lib/src/impl/storage/record_storage.dart +++ b/packages/kinesis/amplify_record_cache_dart/lib/src/storage/record_storage.dart @@ -1,23 +1,27 @@ // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. // SPDX-License-Identifier: Apache-2.0 -import 'package:amplify_kinesis_dart/src/exception/amplify_kinesis_exception.dart' - show defaultRecoverySuggestion; -import 'package:amplify_kinesis_dart/src/exception/record_cache_exception.dart'; -import 'package:amplify_kinesis_dart/src/impl/kinesis_record.dart'; -import 'package:amplify_kinesis_dart/src/kinesis_limits.dart' as limits; -import 'package:amplify_kinesis_dart/src/model/record.dart'; +import 'package:amplify_record_cache_dart/src/exception/record_cache_exception.dart'; +import 'package:amplify_record_cache_dart/src/model/record.dart'; +import 'package:amplify_record_cache_dart/src/model/record_input.dart'; import 'package:meta/meta.dart'; -export 'package:amplify_kinesis_dart/src/model/record.dart'; +export 'package:amplify_record_cache_dart/src/model/record.dart'; -/// {@template amplify_kinesis.record_storage} +/// Default recovery suggestion for wrapped database errors. +const defaultRecoverySuggestion = + 'This is an internal error. 
Please report it as a bug.'; + +/// {@template amplify_record_cache.record_storage} /// Abstract base class for record persistence. /// /// Implementations provide platform-specific storage (SQLite on VM, -/// IndexedDB on web, in-memory fallback). Validation of partition key -/// length, record size, and cache limits is handled here in [addRecord]; -/// subclasses implement [writeRecord] for the actual write. +/// IndexedDB on web, in-memory fallback). Validation of record size +/// and cache limits is handled here in [addRecord]; subclasses +/// implement [writeRecord] for the actual write. +/// +/// Service-specific validation (e.g. partition key length for KDS) +/// should be performed by the client before calling [addRecord]. /// /// All public methods wrap unexpected errors as /// [RecordCacheDatabaseException]. Subclasses throw @@ -25,26 +29,23 @@ export 'package:amplify_kinesis_dart/src/model/record.dart'; /// caught and wrapped automatically. /// {@endtemplate} abstract class RecordStorage { - /// {@macro amplify_kinesis.record_storage} + /// {@macro amplify_record_cache.record_storage} RecordStorage({ required int maxCacheBytes, - int maxRecordsPerStream = limits.maxRecordsPerStream, - int maxBytesPerStream = limits.maxPutRecordsSizeBytes, - int maxRecordSizeBytes = limits.maxRecordSizeBytes, - int maxPartitionKeyLength = limits.maxPartitionKeyLength, + required int maxRecordsPerBatch, + required int maxBytesPerBatch, + required int maxRecordSizeBytes, int initialCachedSize = 0, }) : _maxCacheBytes = maxCacheBytes, - _maxRecordsPerStream = maxRecordsPerStream, - _maxBytesPerStream = maxBytesPerStream, + _maxRecordsPerBatch = maxRecordsPerBatch, + _maxBytesPerBatch = maxBytesPerBatch, _maxRecordSizeBytes = maxRecordSizeBytes, - _maxPartitionKeyLength = maxPartitionKeyLength, cachedSize = initialCachedSize; final int _maxCacheBytes; - final int _maxRecordsPerStream; - final int _maxBytesPerStream; + final int _maxRecordsPerBatch; + final int 
_maxBytesPerBatch; final int _maxRecordSizeBytes; - final int _maxPartitionKeyLength; /// The current total cached size in bytes. @protected @@ -53,11 +54,11 @@ abstract class RecordStorage { /// The maximum cache size in bytes. int get maxCacheBytes => _maxCacheBytes; - /// Maximum number of records per stream in a single batch. - int get maxRecordsPerStream => _maxRecordsPerStream; + /// Maximum number of records per batch. + int get maxRecordsPerBatch => _maxRecordsPerBatch; - /// Maximum total bytes per stream in a single batch. - int get maxBytesPerStream => _maxBytesPerStream; + /// Maximum total bytes per batch. + int get maxBytesPerBatch => _maxBytesPerBatch; /// Validates and saves a record to storage. /// Throws [RecordCacheValidationException] on invalid input. @@ -65,20 +66,11 @@ abstract class RecordStorage { /// Throws [RecordCacheDatabaseException] on storage errors. Future addRecord(RecordInput record) => _wrap('Failed to add record to cache', () async { - final codePoints = record.partitionKey.runes.length; - if (codePoints == 0 || codePoints > _maxPartitionKeyLength) { - throw RecordCacheValidationException( - 'Partition key length ($codePoints) is outside the allowed ' - 'range of 1-$_maxPartitionKeyLength characters.', - 'Use a partition key between 1 and ' - '$_maxPartitionKeyLength characters.', - ); - } if (record.dataSize > _maxRecordSizeBytes) { throw RecordCacheValidationException( 'Record size (${record.dataSize} bytes) exceeds the maximum ' - 'of $_maxRecordSizeBytes bytes (partition key + data blob).', - 'Reduce the record payload size or use a shorter partition key.', + 'of $_maxRecordSizeBytes bytes.', + 'Reduce the record payload size.', ); } if (cachedSize + record.dataSize > _maxCacheBytes) { diff --git a/packages/kinesis/amplify_kinesis_dart/lib/src/impl/storage/record_storage_indexeddb.dart b/packages/kinesis/amplify_record_cache_dart/lib/src/storage/record_storage_indexeddb.dart similarity index 77% rename from 
packages/kinesis/amplify_kinesis_dart/lib/src/impl/storage/record_storage_indexeddb.dart rename to packages/kinesis/amplify_record_cache_dart/lib/src/storage/record_storage_indexeddb.dart index 3b6a82fbe11..edcf9ca4742 100644 --- a/packages/kinesis/amplify_kinesis_dart/lib/src/impl/storage/record_storage_indexeddb.dart +++ b/packages/kinesis/amplify_record_cache_dart/lib/src/storage/record_storage_indexeddb.dart @@ -5,13 +5,13 @@ import 'dart:async'; import 'dart:js_interop'; import 'dart:js_interop_unsafe'; -import 'package:amplify_kinesis_dart/src/impl/kinesis_record.dart'; -import 'package:amplify_kinesis_dart/src/impl/storage/record_storage.dart'; +import 'package:amplify_record_cache_dart/src/model/record_input.dart'; +import 'package:amplify_record_cache_dart/src/storage/record_storage.dart'; // ignore: implementation_imports import 'package:aws_common/src/js/indexed_db.dart'; import 'package:web/web.dart'; -/// {@template amplify_kinesis.indexeddb_record_storage} +/// {@template amplify_record_cache.indexeddb_record_storage} /// IndexedDB-backed [RecordStorage] implementation for web. /// /// Use [create] to open the database and eagerly compute the initial @@ -20,29 +20,48 @@ import 'package:web/web.dart'; final class IndexedDbRecordStorage extends RecordStorage { IndexedDbRecordStorage._({ required super.maxCacheBytes, + required super.maxRecordsPerBatch, + required super.maxBytesPerBatch, + required super.maxRecordSizeBytes, required super.initialCachedSize, required IDBDatabase database, - }) : _database = database; + required String storeName, + }) : _database = database, + _storeName = storeName; - /// {@macro amplify_kinesis.indexeddb_record_storage} + /// {@macro amplify_record_cache.indexeddb_record_storage} /// /// Opens the IndexedDB database and eagerly computes the initial /// cache size. + /// + /// [dbPrefix] is used to namespace the database (e.g. `amplify_kinesis_`, + /// `amplify_firehose_`). + /// [storeName] is the object store name (e.g. 
`kinesis_records`, + /// `firehose_records`). static Future create({ required int maxCacheBytes, + required int maxRecordsPerBatch, + required int maxBytesPerBatch, + required int maxRecordSizeBytes, required String identifier, + required String dbPrefix, + required String storeName, }) async { - final database = await _openDatabase('amplify_kinesis_$identifier'); - final initialSize = await _computeCacheSize(database); + final database = await _openDatabase('$dbPrefix$identifier', storeName); + final initialSize = await _computeCacheSize(database, storeName); return IndexedDbRecordStorage._( maxCacheBytes: maxCacheBytes, + maxRecordsPerBatch: maxRecordsPerBatch, + maxBytesPerBatch: maxBytesPerBatch, + maxRecordSizeBytes: maxRecordSizeBytes, initialCachedSize: initialSize, database: database, + storeName: storeName, ); } final IDBDatabase _database; - static const _storeName = 'kinesis_records'; + final String _storeName; /// Returns an object store handle within a new transaction. IDBObjectStore _getStore([String mode = 'readwrite']) { @@ -54,7 +73,7 @@ final class IndexedDbRecordStorage extends RecordStorage { Future writeRecord(RecordInput record) async { final obj = JSObject() ..setProperty('stream_name'.toJS, record.streamName.toJS) - ..setProperty('partition_key'.toJS, record.partitionKey.toJS) + ..setProperty('partition_key'.toJS, (record.partitionKey ?? '').toJS) ..setProperty('data'.toJS, record.data.toJS) ..setProperty('data_size'.toJS, record.dataSize.toJS) ..setProperty('retry_count'.toJS, 0.toJS) @@ -82,8 +101,8 @@ final class IndexedDbRecordStorage extends RecordStorage { final stream = record.streamName; final count = streamCounts[stream] ?? 0; final size = streamSizes[stream] ?? 
0; - if (count >= maxRecordsPerStream) continue; - if (size + record.dataSize > maxBytesPerStream) continue; + if (count >= maxRecordsPerBatch) continue; + if (size + record.dataSize > maxBytesPerBatch) continue; result.putIfAbsent(stream, () => []).add(record); streamCounts[stream] = count + 1; @@ -104,7 +123,7 @@ final class IndexedDbRecordStorage extends RecordStorage { } @override - Future doQueryCacheSize() => _computeCacheSize(_database); + Future doQueryCacheSize() => _computeCacheSize(_database, _storeName); @override Future doIncrementRetryCount(Iterable ids) async { @@ -161,10 +180,11 @@ final class IndexedDbRecordStorage extends RecordStorage { } static Record _jsToRecord(JSObject obj) { + final pk = obj.getProperty('partition_key'.toJS).toDart; return Record( id: obj.getProperty('id'.toJS).toDartInt, streamName: obj.getProperty('stream_name'.toJS).toDart, - partitionKey: obj.getProperty('partition_key'.toJS).toDart, + partitionKey: pk.isEmpty ? null : pk, data: (obj.getProperty('data'.toJS)).toDart, dataSize: obj.getProperty('data_size'.toJS).toDartInt, retryCount: obj.getProperty('retry_count'.toJS).toDartInt, @@ -173,7 +193,10 @@ final class IndexedDbRecordStorage extends RecordStorage { } /// Opens an IndexedDB database, creating the object store if needed. - static Future _openDatabase(String dbName) async { + static Future _openDatabase( + String dbName, + String storeName, + ) async { final db = indexedDB; if (db == null) { throw StateError('IndexedDB is not available'); @@ -182,10 +205,10 @@ final class IndexedDbRecordStorage extends RecordStorage { void onUpgradeNeeded(IDBVersionChangeEvent event) { final database = event.target?.getProperty('result'.toJS); final names = database?.objectStoreNames; - if (!(names?.contains(_storeName) ?? false)) { + if (!(names?.contains(storeName) ?? false)) { database! 
.createObjectStore( - _storeName, + storeName, IDBObjectStoreParameters(keyPath: 'id'.toJS, autoIncrement: true), ) .createIndex( @@ -205,9 +228,12 @@ final class IndexedDbRecordStorage extends RecordStorage { } /// Computes cache size from DB using a cursor. - static Future _computeCacheSize(IDBDatabase database) async { - final tx = database.transaction(_storeName.toJS, 'readonly'); - final store = tx.objectStore(_storeName); + static Future _computeCacheSize( + IDBDatabase database, + String storeName, + ) async { + final tx = database.transaction(storeName.toJS, 'readonly'); + final store = tx.objectStore(storeName); final request = store.openCursor(); final completer = Completer(); var total = 0; @@ -241,7 +267,7 @@ final class IndexedDbRecordStorage extends RecordStorage { static Future checkIsSupported() async { if (indexedDB == null) return false; try { - final request = indexedDB!.open('kinesis_idb_test', 1); + final request = indexedDB!.open('record_cache_idb_test', 1); await request.future; return true; } on Object { diff --git a/packages/kinesis/amplify_kinesis_dart/lib/src/impl/storage/record_storage_memory.dart b/packages/kinesis/amplify_record_cache_dart/lib/src/storage/record_storage_memory.dart similarity index 84% rename from packages/kinesis/amplify_kinesis_dart/lib/src/impl/storage/record_storage_memory.dart rename to packages/kinesis/amplify_record_cache_dart/lib/src/storage/record_storage_memory.dart index 7131c6ae9a0..24708ffa5b2 100644 --- a/packages/kinesis/amplify_kinesis_dart/lib/src/impl/storage/record_storage_memory.dart +++ b/packages/kinesis/amplify_record_cache_dart/lib/src/storage/record_storage_memory.dart @@ -3,21 +3,20 @@ import 'dart:collection'; -import 'package:amplify_kinesis_dart/src/impl/kinesis_record.dart'; -import 'package:amplify_kinesis_dart/src/impl/storage/record_storage.dart'; +import 'package:amplify_record_cache_dart/src/model/record_input.dart'; +import 
'package:amplify_record_cache_dart/src/storage/record_storage.dart'; -/// {@template amplify_kinesis.in_memory_record_storage} +/// {@template amplify_record_cache.in_memory_record_storage} /// In-memory [RecordStorage] fallback for web when IndexedDB is unavailable. /// Records are not persisted. /// {@endtemplate} final class InMemoryRecordStorage extends RecordStorage { - /// {@macro amplify_kinesis.in_memory_record_storage} + /// {@macro amplify_record_cache.in_memory_record_storage} InMemoryRecordStorage({ required super.maxCacheBytes, - super.maxRecordsPerStream, - super.maxBytesPerStream, - super.maxRecordSizeBytes, - super.maxPartitionKeyLength, + required super.maxRecordsPerBatch, + required super.maxBytesPerBatch, + required super.maxRecordSizeBytes, }); int _nextId = 1; @@ -54,8 +53,8 @@ final class InMemoryRecordStorage extends RecordStorage { final stream = record.streamName; final count = streamCounts[stream] ?? 0; final size = streamSizes[stream] ?? 0; - if (count >= maxRecordsPerStream) continue; - if (size + record.dataSize > maxBytesPerStream) continue; + if (count >= maxRecordsPerBatch) continue; + if (size + record.dataSize > maxBytesPerBatch) continue; result.putIfAbsent(stream, () => []).add(record); streamCounts[stream] = count + 1; diff --git a/packages/kinesis/amplify_kinesis_dart/lib/src/impl/storage/record_storage_sqlite.dart b/packages/kinesis/amplify_record_cache_dart/lib/src/storage/record_storage_sqlite.dart similarity index 60% rename from packages/kinesis/amplify_kinesis_dart/lib/src/impl/storage/record_storage_sqlite.dart rename to packages/kinesis/amplify_record_cache_dart/lib/src/storage/record_storage_sqlite.dart index d3b5a88c9e3..c3de48e4d86 100644 --- a/packages/kinesis/amplify_kinesis_dart/lib/src/impl/storage/record_storage_sqlite.dart +++ b/packages/kinesis/amplify_record_cache_dart/lib/src/storage/record_storage_sqlite.dart @@ -3,60 +3,65 @@ import 'dart:async'; -import 
'package:amplify_kinesis_dart/src/db/kinesis_record_database.dart'; -import 'package:amplify_kinesis_dart/src/impl/kinesis_record.dart'; -import 'package:amplify_kinesis_dart/src/impl/storage/record_storage.dart'; +import 'package:amplify_record_cache_dart/src/db/record_cache_database.dart'; +import 'package:amplify_record_cache_dart/src/model/record_input.dart'; +import 'package:amplify_record_cache_dart/src/storage/record_storage.dart'; import 'package:drift/drift.dart'; import 'package:meta/meta.dart'; -/// {@template amplify_kinesis.sqlite_record_storage} +/// {@template amplify_record_cache.sqlite_record_storage} /// SQLite-backed [RecordStorage] implementation using Drift. /// /// Used on VM (iOS, macOS, Linux, Windows, Android) platforms. /// {@endtemplate} final class SqliteRecordStorage extends RecordStorage { - /// {@macro amplify_kinesis.sqlite_record_storage} + /// {@macro amplify_record_cache.sqlite_record_storage} /// /// Prefer [create] for production use — it eagerly queries the cache /// size from the database. This constructor is available for tests /// where the database starts empty. SqliteRecordStorage({ - required KinesisRecordDatabase database, + required RecordCacheDatabase database, required super.maxCacheBytes, - super.maxRecordsPerStream, - super.maxBytesPerStream, - super.maxRecordSizeBytes, - super.maxPartitionKeyLength, + required super.maxRecordsPerBatch, + required super.maxBytesPerBatch, + required super.maxRecordSizeBytes, super.initialCachedSize, }) : _db = database; - /// {@macro amplify_kinesis.sqlite_record_storage} + /// {@macro amplify_record_cache.sqlite_record_storage} /// - /// Opens the database. + /// Opens the database and eagerly queries the cache size. 
static Future create({ - required KinesisRecordDatabase database, + required RecordCacheDatabase database, required int maxCacheBytes, + required int maxRecordsPerBatch, + required int maxBytesPerBatch, + required int maxRecordSizeBytes, }) async { final initialSize = await _queryCacheSize(database); return SqliteRecordStorage( database: database, maxCacheBytes: maxCacheBytes, + maxRecordsPerBatch: maxRecordsPerBatch, + maxBytesPerBatch: maxBytesPerBatch, + maxRecordSizeBytes: maxRecordSizeBytes, initialCachedSize: initialSize, ); } - final KinesisRecordDatabase _db; + final RecordCacheDatabase _db; /// Provides access to the underlying database (for testing). - KinesisRecordDatabase get database => _db; + RecordCacheDatabase get database => _db; /// Queries the current cache size from the database. - static Future _queryCacheSize(KinesisRecordDatabase db) async { - final query = db.selectOnly(db.kinesisRecords) - ..addColumns([db.kinesisRecords.dataSize.sum()]); + static Future _queryCacheSize(RecordCacheDatabase db) async { + final query = db.selectOnly(db.cachedRecords) + ..addColumns([db.cachedRecords.dataSize.sum()]); final result = await query.getSingleOrNull(); if (result == null) return 0; - return result.read(db.kinesisRecords.dataSize.sum()) ?? 0; + return result.read(db.cachedRecords.dataSize.sum()) ?? 0; } /// Returns the in-memory cached size directly (O(1), no DB query). @@ -66,11 +71,11 @@ final class SqliteRecordStorage extends RecordStorage { @override Future writeRecord(RecordInput record) async { await _db - .into(_db.kinesisRecords) + .into(_db.cachedRecords) .insert( - KinesisRecordsCompanion.insert( + CachedRecordsCompanion.insert( streamName: record.streamName, - partitionKey: record.partitionKey, + partitionKey: Value(record.partitionKey ?? 
''), data: record.data, dataSize: record.dataSize, createdAt: record.createdAt.millisecondsSinceEpoch, @@ -83,21 +88,23 @@ final class SqliteRecordStorage extends RecordStorage { final results = await _db .customSelect( ''' - SELECT id, stream_name, partition_key, data, data_size, retry_count, created_at + SELECT id, stream_name, partition_key, data, data_size, retry_count, + created_at FROM ( SELECT *, ROW_NUMBER() OVER (PARTITION BY stream_name ORDER BY id) as rn, - SUM(data_size) OVER (PARTITION BY stream_name ORDER BY id) as running_size - FROM kinesis_records + SUM(data_size) OVER (PARTITION BY stream_name ORDER BY id) + as running_size + FROM cached_records ) WHERE rn <= ?1 AND running_size <= ?2 ORDER BY stream_name, id ''', variables: [ - Variable.withInt(maxRecordsPerStream), - Variable.withInt(maxBytesPerStream), + Variable.withInt(maxRecordsPerBatch), + Variable.withInt(maxBytesPerBatch), ], - readsFrom: {_db.kinesisRecords}, + readsFrom: {_db.cachedRecords}, ) .get(); @@ -112,7 +119,7 @@ final class SqliteRecordStorage extends RecordStorage { @override Future doDeleteRecords(Iterable ids) async { if (ids.isEmpty) return; - await (_db.delete(_db.kinesisRecords)..where((t) => t.id.isIn(ids))).go(); + await (_db.delete(_db.cachedRecords)..where((t) => t.id.isIn(ids))).go(); } @override @@ -121,25 +128,25 @@ final class SqliteRecordStorage extends RecordStorage { @override Future doIncrementRetryCount(Iterable ids) async { if (ids.isEmpty) return; - await (_db.update(_db.kinesisRecords)..where((t) => t.id.isIn(ids))).write( - KinesisRecordsCompanion.custom( - retryCount: _db.kinesisRecords.retryCount + const Constant(1), + await (_db.update(_db.cachedRecords)..where((t) => t.id.isIn(ids))).write( + CachedRecordsCompanion.custom( + retryCount: _db.cachedRecords.retryCount + const Constant(1), ), ); } @override Future doGetRecordCount() async { - final query = _db.selectOnly(_db.kinesisRecords) - ..addColumns([_db.kinesisRecords.id.count()]); + final query = 
_db.selectOnly(_db.cachedRecords) + ..addColumns([_db.cachedRecords.id.count()]); final result = await query.getSingleOrNull(); if (result == null) return 0; - return result.read(_db.kinesisRecords.id.count()) ?? 0; + return result.read(_db.cachedRecords.id.count()) ?? 0; } @override Future doClearRecords() async { - await _db.delete(_db.kinesisRecords).go(); + await _db.delete(_db.cachedRecords).go(); } @override @@ -148,10 +155,11 @@ final class SqliteRecordStorage extends RecordStorage { } Record _rowToRecord(QueryRow row) { + final pk = row.read('partition_key'); return Record( id: row.read('id'), streamName: row.read('stream_name'), - partitionKey: row.read('partition_key'), + partitionKey: pk.isEmpty ? null : pk, data: row.read('data'), dataSize: row.read('data_size'), retryCount: row.read('retry_count'), diff --git a/packages/kinesis/amplify_record_cache_dart/pubspec.yaml b/packages/kinesis/amplify_record_cache_dart/pubspec.yaml new file mode 100644 index 00000000000..9ba99f60029 --- /dev/null +++ b/packages/kinesis/amplify_record_cache_dart/pubspec.yaml @@ -0,0 +1,33 @@ +name: amplify_record_cache_dart +description: Shared record caching infrastructure for Amplify streaming clients (Kinesis Data Streams, Firehose). Provides offline storage, batching, retry logic, and auto-flush scheduling. 
+version: 0.1.0 +homepage: https://docs.amplify.aws/lib/q/platform/flutter/ +repository: https://github.com/aws-amplify/amplify-flutter/tree/main/packages/kinesis/amplify_record_cache_dart +issue_tracker: https://github.com/aws-amplify/amplify-flutter/issues + +topics: + - aws + - kinesis + - streaming + - caching + - aws-amplify + +environment: + sdk: ^3.9.0 + +dependencies: + amplify_db_common_dart: ">=0.4.17 <0.5.0" + amplify_foundation_dart: ">=2.11.0 <2.12.0" + aws_common: ">=0.7.12 <0.8.0" + drift: ^2.25.0 + meta: ^1.16.0 + smithy: ">=0.7.10 <0.8.0" + web: ^1.1.1 + +dev_dependencies: + amplify_lints: ">=3.1.4 <3.2.0" + build_runner: ^2.4.15 + drift_dev: ^2.25.1 + fake_async: ^1.3.0 + mocktail: ^1.0.0 + test: ^1.22.1 diff --git a/pubspec.yaml b/pubspec.yaml index 41cb24cab33..bef1beb9168 100644 --- a/pubspec.yaml +++ b/pubspec.yaml @@ -106,6 +106,11 @@ aft: packages: - amplify_kinesis - amplify_kinesis_dart + - name: Firehose + summary: amplify_firehose + packages: + - amplify_firehose + - amplify_firehose_dart - name: AWS Common summary: aws_common propagate: none