diff --git a/.github/workflows/gcloud.yml b/.github/workflows/gcloud.yml new file mode 100644 index 00000000..ccc195a0 --- /dev/null +++ b/.github/workflows/gcloud.yml @@ -0,0 +1,73 @@ +name: package:gcloud +permissions: read-all + +on: + pull_request: + branches: [ main ] + paths: + - '.github/workflows/gcloud.yml' + - 'pkgs/gcloud/**' + push: + branches: [ main ] + paths: + - '.github/workflows/gcloud.yml' + - 'pkgs/gcloud/**' + schedule: + - cron: '0 0 * * 0' # weekly + +env: + PUB_ENVIRONMENT: bot.github + +jobs: + # Check code formatting and static analysis on a single OS (linux) + # against Dart dev. + analyze: + runs-on: ubuntu-latest + defaults: + run: + working-directory: pkgs/gcloud + strategy: + fail-fast: false + matrix: + sdk: [dev] + steps: + - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 + - uses: dart-lang/setup-dart@0a8a0fc875eb934c15d08629302413c671d3f672 + with: + sdk: ${{ matrix.sdk }} + - id: install + name: Install dependencies + run: dart pub get + - name: Check formatting + run: dart format --output=none --set-exit-if-changed . + if: always() && steps.install.outcome == 'success' + - name: Analyze code + run: dart analyze --fatal-infos + if: always() && steps.install.outcome == 'success' + + # Run tests on a matrix consisting of two dimensions: + # 1. OS: ubuntu-latest, (macos-latest, windows-latest) + # 2. release channel: dev + test: + needs: analyze + runs-on: ${{ matrix.os }} + defaults: + run: + working-directory: pkgs/gcloud + strategy: + fail-fast: false + matrix: + # Add macos-latest and/or windows-latest if relevant for this package. + os: [ubuntu-latest] + sdk: [2.19.0, dev] + steps: + - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 + - uses: dart-lang/setup-dart@0a8a0fc875eb934c15d08629302413c671d3f672 + with: + sdk: ${{ matrix.sdk }} + - id: install + name: Install dependencies + run: dart pub get + - name: Run VM tests + run: dart test --platform vm -P ci + if: always() && steps.install.outcome == 'success' diff --git a/pkgs/gcloud/.gitignore b/pkgs/gcloud/.gitignore new file mode 100644 index 00000000..96ce5399 --- /dev/null +++ b/pkgs/gcloud/.gitignore @@ -0,0 +1,6 @@ +.dart_tool/ +pubspec.lock +packages +.pub +.packages +.idea \ No newline at end of file diff --git a/pkgs/gcloud/.status b/pkgs/gcloud/.status new file mode 100644 index 00000000..4fce6c26 --- /dev/null +++ b/pkgs/gcloud/.status @@ -0,0 +1,39 @@ +*/packages/*: Skip +*/*/packages/*: Skip +*/*/*/packages/*: Skip + +# We do not run the e2e tests inside the build/ directory in order to prevent +# ./tools/test.py from running several e2e in parallel. 
+build/test/db_all_e2e_test: Skip
+build/test/storage/e2e_test: Skip
+build/test/pubsub/pubsub_e2e_test: Skip
+
+# This test is slow because
+# - eventual consistency forces us to put in sleep()s
+# - it does e2e testing
+# - it combines several tests to avoid concurrent tests touching the same data
+test/db_all_e2e_test: Slow, Pass
+
+# This test is slow because
+# - it does e2e testing
+test/pubsub/pubsub_e2e_test: Slow, Pass
+
+[ $browser ]
+build/test/storage/e2e_test: Skip
+test/storage/e2e_test: Skip
+
+build/test/db_all_e2e_test: Skip
+test/db_all_e2e_test: Skip
+
+build/test/pubsub/pubsub_e2e_test: Skip
+test/pubsub/pubsub_e2e_test: Skip
+
+# Imports common_e2e.dart, which uses dart:io
+build/test/storage/storage_test: Skip
+test/storage/storage_test: Skip
+
+build/test/pubsub/pubsub_test: Skip
+test/pubsub/pubsub_test: Skip
+
+[ $compiler == dart2js ]
+*: Skip
diff --git a/pkgs/gcloud/AUTHORS b/pkgs/gcloud/AUTHORS
new file mode 100644
index 00000000..7c12ae68
--- /dev/null
+++ b/pkgs/gcloud/AUTHORS
@@ -0,0 +1,6 @@
+# Below is a list of people and organizations that have contributed
+# to the Dart project. Names should be added to the list like so:
+#
+#   Name/Organization <email>
+
+Google Inc.
diff --git a/pkgs/gcloud/CHANGELOG.md b/pkgs/gcloud/CHANGELOG.md
new file mode 100644
index 00000000..f7d487e9
--- /dev/null
+++ b/pkgs/gcloud/CHANGELOG.md
@@ -0,0 +1,264 @@
+## 0.8.14
+- Support overriding metadata properties in `copyObject`.
+
+## 0.8.13
+- Support the latest version `^13.0.0` of the `googleapis` package.
+
+## 0.8.12
+- Support the latest version 12.0.0 of the `googleapis` package.
+
+## 0.8.11
+- After the first `Page` created by `Datastore.withRetry()` retries were not
+  happening. This is now fixed, ensuring that `Page.next()` will always retry
+  when `Datastore` is wrapped with `Datastore.withRetry()`.
+- Calling with `wait: false` in `Subscription.pull(wait: false)` for `PubSub`
+  has been deprecated.
+
+## 0.8.10
+
+- Widen the SDK constraint to support Dart 3.0
+- Support retrying Datastore operations.
+
+## 0.8.9
+
+- Support the latest version 1.0.0 of the `http` package.
+- Support the latest version 12.0.0 of the `googleapis` package.
+
+## 0.8.8
+
+- Require Dart 2.19
+- Add topics in `pubspec.yaml`.
+
+## 0.8.7
+
+- Fix `Bucket.write` when size is below 1MB.
+
+## 0.8.6
+
+- Throttle streams piped into `Bucket.write` when the size is not known
+  beforehand.
+- Support the latest version 9.0.0 of the `googleapis` package.
+
+## 0.8.5
+
+- Support the latest version 7.0.0 of the `googleapis` package.
+
+## 0.8.4
+
+- Support the latest version 6.0.0 of the `googleapis` package.
+
+## 0.8.3
+
+- Support the latest version of the `googleapis` package.
+
+## 0.8.2
+
+ * **BREAKING CHANGE:** `Page.next()` throws if `Page.isLast`. This change only
+   affects code not migrated to null-safety, when paging through results in
+   pub-sub and storage without checking `Page.isLast`.
+   Code fully migrated to null-safety will have experienced a runtime null check
+   error, and paging code for datastore already threw an `Error`.
+
+## 0.8.1
+
+ * Added `lookupOrNull` method in `DatastoreDB` and `Transaction`.
+
+## 0.8.0
+
+ * Require Dart 2.12 or later
+ * Migration to null safety.
+
+## 0.7.3
+ * Fixed issue in reflection code affecting `Model<int>` and `Model<String>`,
+   but not `Model`.
+
+## 0.7.2
+
+ * Added `delimiter` to `Bucket.list` and `Bucket.page`
+   (`0.7.1` only added them to the implementation).
+
+## 0.7.1
+
+ * Added `delimiter` to `Bucket.list` and `Bucket.page`.
+ * Fix typing of `ExpandoModel` to `ExpandoModel<T>` as we should have done in
+   version `0.7.0`.
+
+## 0.7.0+2
+
+ * Upgrade dependency on `_discoveryapis_commons`, changing `ApiRequestError`
+   from an `Error` to an `Exception`. Version constraints on
+   `_discoveryapis_commons` allow both new and old versions.
+
+## 0.7.0+1
+
+ * Fix path separator in Bucket.list().
+
+## 0.7.0
+
+ * **BREAKING CHANGE:** Add generics support for `Model.id`.
+   It is now possible to define the type of the id a model has (either `String`
+   or `int`). A model can now be defined as
+   `class MyModel extends Model<String> {}` and `myModel.id` will then
+   be of type `String` and `myModel.key` of type `Key<String>`.
+
+## 0.6.4
+
+ * Require minimum Dart SDK `2.3.0`.
+
+## 0.6.3
+
+ * Added `DatastoreDB.lookupValue()`
+
+## 0.6.2
+
+ * Fixed bug in `Transaction.rollback()`.
+
+## 0.6.1
+
+ * Added examples.
+ * Fixed formatting and lints.
+ * Allow `Model` classes to contain constructors with optional or named
+   arguments (as long as they're annotated with `@required`).
+ * Add generics support to `withTransaction()`.
+
+## 0.6.0+4
+
+ * Updated package description.
+ * Added an example showing how to use Google Cloud Storage.
+
+## 0.6.0+3
+
+ * Fixed code formatting and lints.
+
+## 0.6.0+2
+
+* Support the latest `pkg:http`.
+
+## 0.6.0+1
+
+* Add explicit dependency to `package:_discoveryapis_commons`
+* Widen sdk constraint to <3.0.0
+
+## 0.6.0
+
+* **BREAKING CHANGE:** Add generics support. Instead of writing
+  `db.query(Person).run()` and getting back a generic `Stream<Model>`, you now
+  write `db.query<Person>().run()` and get `Stream<Person>`.
+  The same goes for `.lookup([key])`, which can now be written as
+  `.lookup<Person>([key])` and will return a `List<Person>`.
+
+## 0.5.0
+
+* Fixes to support Dart 2.
+
+## 0.4.0+1
+
+* Made a number of strong-mode improvements.
+
+* Updated dependency on `googleapis` and `googleapis_beta`.
+
+## 0.4.0
+
+* Remove support for `FilterRelation.In` and "propertyname IN" for queries:
+  This is not supported by the newer APIs and was originally part of fat-client
+  libraries which performed multiple queries for each item in the list.
+
+* Adds optional `forComparison` named argument to `Property.encodeValue` which
+  will be set to `true` when encoding a value for comparison in queries.
+
+* Upgrade to newer versions of `package:googleapis` and `package:googleapis_beta`
+
+## 0.3.0
+
+* Upgrade to use stable `package:googleapis/datastore/v1.dart`.
+
+* The internal [DatastoreImpl] class now takes a project name without the `s~`
+  prefix.
+
+## 0.2.0+14
+
+* Fix analyzer warning.
+
+## 0.2.0+13
+
+* Remove crypto dependency and upgrade dart dependency to >=1.13 since
+  this dart version provides the Base64 codec.
+
+## 0.2.0+11
+
+* Throw a [StateError] in case a query returned a kind for which there was no
+  model registered.
+
+## 0.2.0+10
+
+* Address analyzer warnings.
+
+## 0.2.0+9
+
+* Support value transformation in `db.query().filter()`.
+* Widen constraint on `googleapis` and `googleapis_beta`.
+
+## 0.2.0+8
+
+* Widen constraint on `googleapis` and `googleapis_beta`.
+
+## 0.2.0+4
+
+* `Storage.read` now honors `offset` and `length` arguments.
+
+## 0.2.0+2
+
+* Widen constraint on `googleapis/googleapis_beta`
+
+## 0.2.0+1
+
+* Fix broken import of package:googleapis/common/common.dart.
+
+## 0.2.0
+
+* Add support for Cloud Pub/Sub.
+* Require Dart version 1.9.
+
+## 0.1.4+2
+
+* Enforce fully populated entity keys in a number of places.
+ +## 0.1.4+1 + +* Deduce the query partition automatically from query ancestor key. + +## 0.1.4 + +* Added optional `defaultPartition` parameter to the constructor of + `DatastoreDB`. + +## 0.1.3+2 + +* Widened googleapis/googleapis_beta constraints in pubspec.yaml. + +## 0.1.3+1 + +* Change the service scope keys to non-private symbols. + +## 0.1.3 + +* Widen package:googleapis dependency constraint in pubspec.yaml. +* Bugfix in `package:appengine/db.dart`: Correctly handle ListProperties +of length 1. + +## 0.1.2 + +* Introduced `package:gcloud/service_scope.dart` library. +* Added global getters for getting gcloud services from the current service +scope. +* Added an `package:gcloud/http.dart` library using service scopes. + +## 0.1.1 + +* Increased version constraint on googleapis{,_auth,_beta}. + +* Removed unused imports. + +## 0.1.0 + +* First release. diff --git a/pkgs/gcloud/LICENSE b/pkgs/gcloud/LICENSE new file mode 100644 index 00000000..000cd7be --- /dev/null +++ b/pkgs/gcloud/LICENSE @@ -0,0 +1,27 @@ +Copyright 2014, the Dart project authors. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer in the documentation and/or other materials provided + with the distribution. + * Neither the name of Google LLC nor the names of its + contributors may be used to endorse or promote products derived + from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/pkgs/gcloud/README.md b/pkgs/gcloud/README.md new file mode 100644 index 00000000..f8175bb9 --- /dev/null +++ b/pkgs/gcloud/README.md @@ -0,0 +1,277 @@ +[![package:gcloud](https://github.com/dart-lang/labs/actions/workflows/gcloud.yml/badge.svg)](https://github.com/dart-lang/labs/actions/workflows/gcloud.yml) +[![pub package](https://img.shields.io/pub/v/gcloud.svg)](https://pub.dev/packages/gcloud) +[![package publisher](https://img.shields.io/pub/publisher/gcloud.svg)](https://pub.dev/packages/gcloud/publisher) + +The `gcloud` package provides a high level idiomatic Dart interface to some of +the most widely used Google Cloud Platform services. + +## Google Cloud Platform support package (gcloud) + +Currently the following services are supported: + + * Cloud Datastore + * Cloud Storage + * Cloud Pub/Sub + +## Status: Experimental + +**NOTE**: This package is currently experimental and published under the +[labs.dart.dev](https://dart.dev/dart-team-packages) pub publisher in order to +solicit feedback. 
+
+For packages in the labs.dart.dev publisher we generally plan to either graduate
+the package into a supported publisher (dart.dev, tools.dart.dev) after a period
+of feedback and iteration, or discontinue the package. These packages have a
+much higher expected rate of API and breaking changes.
+
+Your feedback is valuable and will help us evolve this package. For general
+feedback, suggestions, and comments, please file an issue in the
+[bug tracker](https://github.com/dart-lang/labs/issues).
+
+## API details
+
+The APIs in this package are all based on the generic generated APIs in the
+[googleapis] and [googleapis_beta][googleapisbeta] packages.
+
+This means that the authentication model for using the APIs in this package
+uses the [googleapis_auth][googleapisauth] package.
+
+Note that this package is only intended for use with the standalone VM
+in a server or command line application. Don't expect this package to work on
+the browser or in Flutter.
+
+The code snippets below demonstrating the use of this package all assume that
+the following imports are present:
+
+```dart
+import 'dart:io';
+
+import 'package:googleapis_auth/auth_io.dart' as auth;
+import 'package:http/http.dart' as http;
+import 'package:gcloud/db.dart';
+import 'package:gcloud/storage.dart';
+import 'package:gcloud/pubsub.dart';
+import 'package:gcloud/service_scope.dart' as ss;
+import 'package:gcloud/datastore.dart' as datastore;
+```
+
+### Getting access to the APIs
+
+The first step in using the APIs is to get an authenticated HTTP client and
+with that create API class instances for accessing the different APIs. The
+code below assumes that you have a Google Cloud Project called `my-project`
+with credentials for a service account from that project stored in the file
+`my-project.json`.
+
+```dart
+// Read the service account credentials from the file.
+var jsonCredentials = new File('my-project.json').readAsStringSync();
+var credentials = new auth.ServiceAccountCredentials.fromJson(jsonCredentials);
+
+// Get an HTTP authenticated client using the service account credentials.
+var scopes = <String>[]
+  ..addAll(datastore.Datastore.Scopes)
+  ..addAll(Storage.SCOPES)
+  ..addAll(PubSub.SCOPES);
+var client = await auth.clientViaServiceAccount(credentials, scopes);
+
+// Instantiate objects to access Cloud Datastore, Cloud Storage
+// and Cloud Pub/Sub APIs.
+var db = new DatastoreDB(
+    new datastore.Datastore(client, 's~my-project'));
+var storage = new Storage(client, 'my-project');
+var pubsub = new PubSub(client, 'my-project');
+```
+
+All the APIs in this package support the use of 'service scopes'. Service
+scopes are described in detail below.
+
+```dart
+ss.fork(() {
+  // register the services in the new service scope.
+  registerDbService(db);
+  registerStorageService(storage);
+  registerPubSubService(pubsub);
+
+  // Run application using these services.
+});
+```
+
+The services registered with the service scope can now be reached from within
+all the code running in the same service scope using the below getters.
+
+```dart
+dbService.
+storageService.
+pubsubService.
+```
+
+This way it is not necessary to pass the service objects around in your code.
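To make the pattern concrete, here is a minimal sketch (not from the original
README) of code deep inside an application; it uses the `Person` model defined
in the Cloud Datastore section below and the `countPersons` name is purely
illustrative:

```dart
// No service objects are passed in; the registered services are
// looked up from the current service scope instead.
Future<int> countPersons() async {
  final persons = await dbService.query<Person>().run().toList();
  return persons.length;
}
```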
+### Use with App Engine
+
+The `gcloud` package is also integrated in the Dart [appengine] package. This
+means the `gcloud` services are available both via the appengine context and
+service scopes. The authentication required to access the Google Cloud Platform
+services is handled automatically.
+
+This means that getting to the App Engine Datastore can be done through either
+the App Engine context
+
+```dart
+var db = context.services.db;
+```
+
+or just using the service scope registration.
+
+```dart
+var db = dbService;
+```
+
+## Cloud Datastore
+Google Cloud Datastore provides a NoSQL, schemaless database for storing
+non-relational data. See the product page
+[https://cloud.google.com/datastore/][Datastore] for more information.
+
+The Cloud Datastore API provides a mapping of Dart objects to entities stored
+in the Datastore. The following example shows how to annotate a class to
+make it possible to store instances of it in the Datastore.
+
+```dart
+@db.Kind()
+class Person extends db.Model {
+  @db.StringProperty()
+  String? name;
+
+  @db.IntProperty()
+  int? age;
+}
+```
+
+The `Kind` annotation indicates that instances of this class can be stored.
+The class must also inherit from `Model`. Now, to store an object into the
+Datastore, create an instance and use the `commit` function.
+
+```dart
+var person = new Person()
+  ..name = ''
+  ..age = 42;
+await db.commit(inserts: [person]);
+```
+
+The function `query` is used to build a `Query` object which can be run to
+perform the query.
+
+```dart
+var persons = await db.query<Person>().run().toList();
+```
+
+To fetch one or multiple existing entities, use `lookup`.
+
+```dart
+var key = db.emptyKey.append(Person, id: 42);
+var person = (await db.lookup<Person>([key])).single;
+// key1 and key2 are obtained the same way.
+var people = await db.lookup<Person>([key1, key2]);
+```
+
+NOTE: This package includes a lower level API provided through the class
+`Datastore` on top of which the `DatastoreDB` API is built. The main reason
+for this additional API level is to bridge the gap between the different APIs
+exposed inside App Engine and through the public REST API. We reserve the
+right to modify and maybe even remove this additional layer at any time.
+
+## Cloud Storage
+Google Cloud Storage provides a highly available object store (aka BLOB
+store). See the product page [https://cloud.google.com/storage/][GCS]
+for more information.
+
+In Cloud Storage the objects (BLOBs) are organized in _buckets_. Each bucket
+has a name in a global namespace. The following code creates a new bucket
+named `my-bucket` and writes the content of the file `my-file.txt` to the
+object named `my-object`.
+
+```dart
+var bucket = await storage.createBucket('my-bucket');
+await new File('my-file.txt').openRead().pipe(bucket.write('my-object'));
+```
+
+The following code will read back the object.
+
+```dart
+await bucket.read('my-object').pipe(new File('my-file-copy.txt').openWrite());
+```
+
+## Cloud Pub/Sub
+Google Cloud Pub/Sub provides many-to-many, asynchronous messaging. See the
+product page [https://cloud.google.com/pubsub/][PubSub] for more information.
+
+Cloud Pub/Sub uses two concepts for messaging. _Topics_ are used if you want
+to send messages and _subscriptions_ are used to subscribe to topics and
+receive the messages. This decouples the producer of a message from the
+consumer of a message.
+
+The following code creates a _topic_ and sends a simple test message:
+
+```dart
+var topic = await pubsub.createTopic('my-topic');
+await topic.publishString('Hello, world!');
+```
+
+With the following code a _subscription_ is created on the _topic_ and
+a message is pulled using the subscription. A received message must be
+acknowledged when the consumer has processed it.
+
+```dart
+var subscription =
+    await pubsub.createSubscription('my-subscription', 'my-topic');
+var pullEvent = await subscription.pull();
+print(pullEvent.message.asString);
+await pullEvent.acknowledge();
+```
+
+It is also possible to receive messages using push events instead of pulling
+from the subscription. To do this the subscription should be configured as a
+push subscription with an HTTP endpoint.
+
+```dart
+await pubsub.createSubscription(
+    'my-subscription',
+    'my-topic',
+    endpoint: Uri.parse('https://server.example.com/push'));
+```
+
+With this subscription all messages will be sent to the URL provided in the
+`endpoint` argument. The server needs to acknowledge the reception of the
+message with a `200 OK` reply.
+
+### Running tests
+
+If you want to run the end-to-end tests, a Google Cloud project is required.
+When running these tests the following environment variable needs to be set:
+
+    GCLOUD_E2E_TEST_PROJECT
+
+The value of the environment variable `GCLOUD_E2E_TEST_PROJECT` is the name
+of the Google Cloud project to use. Authentication for testing uses
+[Application Default Credentials][ADC]; locally you can provide
+`GOOGLE_APPLICATION_CREDENTIALS` or use
+[`gcloud auth application-default login`][gcloud-adc].
+
+You will also need to create indexes as follows:
+
+```bash
+gcloud --project "$GCLOUD_E2E_TEST_PROJECT" datastore indexes create test/index.yaml
+```
+
+[Datastore]: https://cloud.google.com/datastore/
+[GCS]: https://cloud.google.com/storage/
+[PubSub]: https://cloud.google.com/pubsub/
+[googleapis]: https://pub.dartlang.org/packages/googleapis
+[googleapisbeta]: https://pub.dartlang.org/packages/googleapis_beta
+[googleapisauth]: https://pub.dartlang.org/packages/googleapis_auth
+[appengine]: https://pub.dartlang.org/packages/appengine
+[ADC]: https://cloud.google.com/docs/authentication/production
+[gcloud-adc]: https://cloud.google.com/sdk/gcloud/reference/auth/application-default/login
diff --git a/pkgs/gcloud/analysis_options.yaml b/pkgs/gcloud/analysis_options.yaml
new file mode 100644
index 00000000..ecdba569
--- /dev/null
+++ b/pkgs/gcloud/analysis_options.yaml
@@ -0,0 +1,12 @@
+include: package:dart_flutter_team_lints/analysis_options.yaml
+
+analyzer:
+  language:
+    strict-casts: true
+
+linter:
+  rules:
+    - cancel_subscriptions
+    - package_api_docs
+    - prefer_relative_imports
+    - test_types_in_equals
diff --git a/pkgs/gcloud/dart_test.yaml b/pkgs/gcloud/dart_test.yaml
new file mode 100644
index 00000000..e46915dc
--- /dev/null
+++ b/pkgs/gcloud/dart_test.yaml
@@ -0,0 +1,5 @@
+presets:
+  ci:
+    tags:
+      e2e:
+        skip: "e2e tests don't run on CI"
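The `e2e` tag above is how the CI preset skips the end-to-end tests; locally
they run only when the project is configured. A minimal sketch of reading that
configuration in a test helper (the function name is illustrative, not part of
the package):

```dart
import 'dart:io' show Platform;

/// Resolves the project used by the e2e tests, failing fast when unset.
String e2eTestProject() {
  final project = Platform.environment['GCLOUD_E2E_TEST_PROJECT'];
  if (project == null) {
    throw StateError('GCLOUD_E2E_TEST_PROJECT must be set to run e2e tests.');
  }
  return project;
}
```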
diff --git a/pkgs/gcloud/example/main.dart b/pkgs/gcloud/example/main.dart
new file mode 100644
index 00000000..c634c654
--- /dev/null
+++ b/pkgs/gcloud/example/main.dart
@@ -0,0 +1,25 @@
+// Copyright (c) 2019, the Dart project authors. Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+import 'dart:async' show Future;
+import 'dart:convert' show utf8;
+
+import 'package:gcloud/storage.dart';
+import 'package:googleapis_auth/auth_io.dart' as auth;
+
+// Note: The README.md contains more details on how to use this package.
+
+Future main() async {
+  // When running on Google Compute Engine, App Engine or GKE, credentials can
+  // be obtained from a metadata server as follows.
+  final client = await auth.clientViaMetadataServer();
+  try {
+    final storage = Storage(client, 'my_gcp_project');
+    final b = storage.bucket('test-bucket');
+    await b.writeBytes('my-file.txt', utf8.encode('hello world'));
+    print('Wrote "hello world" to "my-file.txt" in "test-bucket"');
+  } finally {
+    client.close();
+  }
+}
diff --git a/pkgs/gcloud/lib/common.dart b/pkgs/gcloud/lib/common.dart
new file mode 100644
index 00000000..e564f51e
--- /dev/null
+++ b/pkgs/gcloud/lib/common.dart
@@ -0,0 +1,86 @@
+// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+import 'dart:async';
+
+/// A single page of paged results from a query.
+///
+/// Use `next` to move to the next page. Check `isLast` to detect the last
+/// page; calling `next` on the last page throws.
+abstract class Page<T> {
+  /// The items in this page.
+  List<T> get items;
+
+  /// Whether this is the last page of results.
+  bool get isLast;
+
+  /// Move to the next page.
+  ///
+  /// The future returned completes with the next page of results.
+  ///
+  /// Throws if [next] is called on the last page.
+  Future<Page<T>> next({int pageSize});
+}
+
+typedef FirstPageProvider<T> = Future<Page<T>> Function(int pageSize);
+
+/// Helper class to turn a series of pages into a stream.
+class StreamFromPages<T> {
+  static const int _pageSize = 50;
+  final FirstPageProvider<T> _firstPageProvider;
+  bool _pendingRequest = false;
+  bool _paused = false;
+  bool _cancelled = false;
+  late Page<T> _currentPage;
+  late final StreamController<T> _controller;
+
+  StreamFromPages(this._firstPageProvider) {
+    _controller = StreamController<T>(
+        sync: true,
+        onListen: _onListen,
+        onPause: _onPause,
+        onResume: _onResume,
+        onCancel: _onCancel);
+  }
+
+  Stream<T> get stream => _controller.stream;
+
+  void _handleError(Object e, StackTrace s) {
+    _controller.addError(e, s);
+    _controller.close();
+  }
+
+  void _handlePage(Page<T> page) {
+    if (_cancelled) return;
+    _pendingRequest = false;
+    _currentPage = page;
+    page.items.forEach(_controller.add);
+    if (page.isLast) {
+      _controller.close();
+    } else if (!_paused && !_cancelled) {
+      page.next().then(_handlePage, onError: _handleError);
+    }
+  }
+
+  void _onListen() {
+    var pageSize = _pageSize;
+    _pendingRequest = true;
+    _firstPageProvider(pageSize).then(_handlePage, onError: _handleError);
+  }
+
+  void _onPause() {
+    _paused = true;
+  }
+
+  void _onResume() {
+    _paused = false;
+    if (_pendingRequest) return;
+    _pendingRequest = true;
+    _currentPage.next().then(_handlePage, onError: _handleError);
+  }
+
+  void _onCancel() {
+    _cancelled = true;
+  }
+}
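A minimal sketch of how this helper is typically used (the `listAll` wrapper
is illustrative, not part of the library): an API produces the first
`Page<T>`, and `StreamFromPages` drives the paging.

```dart
import 'package:gcloud/common.dart';

/// Illustrative: expose any paged API as a single Stream<T>.
Stream<T> listAll<T>(FirstPageProvider<T> firstPage) =>
    StreamFromPages<T>(firstPage).stream;
```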
diff --git a/pkgs/gcloud/lib/datastore.dart b/pkgs/gcloud/lib/datastore.dart
new file mode 100644
index 00000000..f5f2c8fa
--- /dev/null
+++ b/pkgs/gcloud/lib/datastore.dart
@@ -0,0 +1,502 @@
+// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+/// This library provides a low-level API for accessing Google's Cloud
+/// Datastore.
+///
+/// For more information on Cloud Datastore, please refer to the following
+/// developers page: https://cloud.google.com/datastore/docs
+library;
+
+import 'dart:async';
+
+import 'package:http/http.dart' as http;
+import 'package:retry/retry.dart';
+
+import 'common.dart' show Page;
+import 'service_scope.dart' as ss;
+import 'src/datastore_impl.dart' show DatastoreImpl;
+import 'src/retry_datastore_impl.dart';
+
+const Symbol _datastoreKey = #gcloud.datastore;
+
+/// Access the [Datastore] object available in the current service scope.
+///
+/// The returned object will be the one which was previously registered with
+/// [registerDatastoreService] within the current (or a parent) service scope.
+///
+/// Accessing this getter outside of a service scope will result in an error.
+/// See the `package:gcloud/service_scope.dart` library for more information.
+Datastore get datastoreService => ss.lookup(_datastoreKey) as Datastore;
+
+/// Registers the [Datastore] object within the current service scope.
+///
+/// The provided `datastore` object will be available via the top-level
+/// `datastoreService` getter.
+///
+/// Calling this function outside of a service scope will result in an error.
+/// Calling this function more than once inside the same service scope is not
+/// allowed.
+void registerDatastoreService(Datastore datastore) {
+  ss.register(_datastoreKey, datastore);
+}
+
+class ApplicationError implements Exception {
+  final String message;
+  ApplicationError(this.message);
+
+  @override
+  String toString() => 'ApplicationError: $message';
+}
+
+class DatastoreError implements Exception {
+  final String message;
+
+  DatastoreError([String? message])
+      : message = message ?? 'DatastoreError: An unknown error occurred';
+
+  @override
+  String toString() => message;
+}
+
+class UnknownDatastoreError extends DatastoreError {
+  UnknownDatastoreError(error) : super('An unknown error occurred ($error).');
+}
+
+class TransactionAbortedError extends DatastoreError {
+  TransactionAbortedError() : super('The transaction was aborted.');
+}
+
+class TimeoutError extends DatastoreError {
+  TimeoutError() : super('The operation timed out.');
+}
+
+/// Thrown when a query would require an index which was not set.
+///
+/// An application needs to specify indexes in an `index.yaml` file and needs
+/// to create the indexes using the `gcloud datastore indexes create` command.
+class NeedIndexError extends DatastoreError {
+  NeedIndexError() : super('An index is needed for the query to succeed.');
+}
+
+class PermissionDeniedError extends DatastoreError {
+  PermissionDeniedError() : super('Permission denied.');
+}
+
+class InternalError extends DatastoreError {
+  InternalError() : super('Internal service error.');
+}
+
+class QuotaExceededError extends DatastoreError {
+  QuotaExceededError(error) : super('Quota was exceeded ($error).');
+}
+
+/// A datastore entity.
+///
+/// An entity is identified by a unique `key` and consists of a number of
+/// `properties`. If a property should not be indexed, it needs to be included
+/// in the `unIndexedProperties` set.
+///
+/// The `properties` field maps names to values. Values can be of a primitive
+/// type or of a composed type.
+///
+/// The following primitive types are supported:
+///   bool, int, double, String, DateTime, BlobValue, Key
+///
+/// It is possible to have a `List` of values. The values must be primitive.
+/// Lists inside lists are not supported.
+///
+/// Whether a property is indexed or not applies to all values (this is only
+/// relevant if the value is a list of primitive values).
+class Entity {
+  final Key key;
+  final Map<String, Object?> properties;
+  final Set<String> unIndexedProperties;
+
+  Entity(this.key, this.properties,
+      {this.unIndexedProperties = const {}});
+}
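As an illustration of these rules, a minimal sketch of constructing an entity
by hand (the kind, property names, and values are arbitrary):

```dart
import 'package:gcloud/datastore.dart';

// A 'Person' entity with id 1 whose 'bio' property is not indexed.
final entity = Entity(
  Key([KeyElement('Person', 1)]),
  {'name': 'Alice', 'age': 42, 'bio': 'Unindexed free-form text.'},
  unIndexedProperties: {'bio'},
);
```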
+
+/// A complete or partial key.
+///
+/// A key can uniquely identify a datastore `Entity`. It consists of a
+/// partition and a path. The path consists of one or more `KeyElement`s.
+///
+/// A key may be incomplete. This is useful when inserting `Entity`s whose IDs
+/// should be automatically allocated.
+///
+/// Example of a fully populated [Key]:
+///
+///     var fullKey = new Key([new KeyElement('Person', 1),
+///                            new KeyElement('Address', 2)]);
+///
+/// Example of a partially populated [Key] / an incomplete [Key]:
+///
+///     var partialKey = new Key([new KeyElement('Person', 1),
+///                               new KeyElement('Address', null)]);
+class Key {
+  /// The partition of this `Key`.
+  final Partition partition;
+
+  /// The path of `KeyElement`s.
+  final List<KeyElement> elements;
+
+  Key(this.elements, {this.partition = Partition.DEFAULT});
+
+  factory Key.fromParent(String kind, int id, {Key? parent}) {
+    var partition = Partition.DEFAULT;
+    var elements = <KeyElement>[];
+    if (parent != null) {
+      partition = parent.partition;
+      elements.addAll(parent.elements);
+    }
+    elements.add(KeyElement(kind, id));
+    return Key(elements, partition: partition);
+  }
+
+  @override
+  int get hashCode =>
+      elements.fold(partition.hashCode, (a, b) => a ^ b.hashCode);
+
+  @override
+  bool operator ==(Object other) {
+    if (identical(this, other)) return true;
+
+    if (other is Key &&
+        partition == other.partition &&
+        elements.length == other.elements.length) {
+      for (var i = 0; i < elements.length; i++) {
+        if (elements[i] != other.elements[i]) return false;
+      }
+      return true;
+    }
+    return false;
+  }
+
+  @override
+  String toString() {
+    var namespaceString =
+        partition.namespace == null ? 'null' : "'${partition.namespace}'";
+    return "Key(namespace=$namespaceString, path=[${elements.join(', ')}])";
+  }
+}
+
+/// A datastore partition.
+///
+/// A partition is used for partitioning a dataset into multiple namespaces.
+/// The default namespace is `null`. Using empty Strings as namespaces is
+/// invalid.
+///
+// TODO(Issue #6): Add dataset-id here.
+class Partition {
+  // ignore: constant_identifier_names
+  static const Partition DEFAULT = Partition._default();
+
+  /// The namespace of this partition.
+  ///
+  /// The default namespace is `null`.
+  final String? namespace;
+
+  Partition(this.namespace) {
+    if (namespace == '') {
+      throw ArgumentError("'namespace' must not be empty");
+    }
+  }
+
+  const Partition._default() : namespace = null;
+
+  @override
+  int get hashCode => namespace.hashCode;
+
+  @override
+  bool operator ==(Object other) =>
+      other is Partition && namespace == other.namespace;
+}
+
+/// An element in a `Key`'s path.
+class KeyElement {
+  /// The kind of this element.
+  final String kind;
+
+  /// The ID of this element. It must be either an `int` or a `String`.
+  ///
+  /// This may be `null`, in which case it does not identify an Entity. It is
+  /// possible to insert [Entity]s with incomplete keys and let Datastore
+  /// automatically select an unused integer ID.
+  final dynamic id;
+
+  KeyElement(this.kind, this.id) {
+    if (id != null) {
+      if (id is! int && id is! String) {
+        throw ArgumentError("'id' must be either null, a String or an int");
+      }
+    }
+  }
+
+  @override
+  int get hashCode => kind.hashCode ^ id.hashCode;
+
+  @override
+  bool operator ==(Object other) =>
+      other is KeyElement && kind == other.kind && id == other.id;
+
+  @override
+  String toString() => '$kind.$id';
+}
+
+/// A relation used in query filters.
+class FilterRelation {
+  // ignore: constant_identifier_names
+  static const FilterRelation LessThan = FilterRelation._('<');
+  // ignore: constant_identifier_names
+  static const FilterRelation LessThanOrEqual = FilterRelation._('<=');
+  // ignore: constant_identifier_names
+  static const FilterRelation GreaterThan = FilterRelation._('>');
+
+  /// Old misspelled name for [GreaterThan], retained for compatibility.
+  ///
+  /// @nodoc
+  @Deprecated('Use FilterRelation.GreaterThan instead')
+  // ignore: constant_identifier_names
+  static const FilterRelation GreatherThan = GreaterThan;
+  // ignore: constant_identifier_names
+  static const FilterRelation GreaterThanOrEqual = FilterRelation._('>=');
+
+  /// Old misspelled name for [GreaterThanOrEqual], retained for compatibility.
+  ///
+  /// @nodoc
+  @Deprecated('Use FilterRelation.GreaterThanOrEqual instead')
+  // ignore: constant_identifier_names
+  static const FilterRelation GreatherThanOrEqual = GreaterThanOrEqual;
+  // ignore: constant_identifier_names
+  static const FilterRelation Equal = FilterRelation._('==');
+
+  final String name;
+
+  const FilterRelation._(this.name);
+
+  @override
+  String toString() => name;
+}
+
+/// A filter used in queries.
+class Filter {
+  /// The relation used for comparing `name` with `value`.
+  final FilterRelation relation;
+
+  /// The name of the datastore property used in the comparison.
+  final String name;
+
+  /// The value used for comparing against the property named by `name`.
+  final Object value;
+
+  Filter(this.relation, this.name, this.value);
+}
+
+/// The direction of an order.
+///
+// TODO(Issue #6): Make this class private and add the two statics to the
+// 'Order' class [i.e. so one can write Order.Ascending, Order.Descending].
+class OrderDirection {
+  // ignore: constant_identifier_names
+  static const OrderDirection Ascending = OrderDirection._('Ascending');
+  // ignore: constant_identifier_names
+  static const OrderDirection Descending = OrderDirection._('Descending');
+
+  /// Old misspelled name for [Descending], retained for compatibility.
+  ///
+  /// @nodoc
+  @Deprecated('Use OrderDirection.Descending instead')
+  // ignore: constant_identifier_names
+  static const OrderDirection Decending = Descending;
+
+  final String name;
+
+  const OrderDirection._(this.name);
+}
+
+/// An order used in queries.
+class Order {
+  /// The direction of the order.
+  final OrderDirection direction;
+
+  /// The name of the property used for the order.
+  final String propertyName;
+
+  // TODO(Issue #6): Make [direction] the second argument and make it optional.
+  Order(this.direction, this.propertyName);
+}
+
+/// A datastore query.
+///
+/// A query consists of filters (kind, ancestor and property filters), one or
+/// more orders and an offset/limit pair.
+///
+/// All fields may be optional.
+///
+/// Example of building a [Query]:
+///
+///     var personKey = ....;
+///     var query = new Query(ancestorKey: personKey, kind: 'Address');
+class Query {
+  /// Restrict the result set to entities of this kind.
+  final String? kind;
+
+  /// Restrict the result set to entities which have this ancestorKey / parent.
+  final Key? ancestorKey;
+
+  /// Restrict the result set by a list of property [Filter]s.
+  final List<Filter>? filters;
+
+  /// Order the matching entities following the given property [Order]s.
+  final List<Order>? orders;
+
+  /// Skip the first [offset] entities in the result set.
+  final int? offset;
+
+  /// Limit the number of entities returned to [limit].
+  final int? limit;
+
+  Query({
+    this.ancestorKey,
+    this.kind,
+    this.filters,
+    this.orders,
+    this.offset,
+    this.limit,
+  });
+}
+
+/// The result of a commit.
+class CommitResult {
+  /// If the commit included `autoIdInserts`, this list will be the fully
+  /// populated Keys, including the automatically allocated integer IDs.
+  final List<Key> autoIdInsertKeys;
+
+  CommitResult(this.autoIdInsertKeys);
+}
+
+/// A blob value which can be used as a property value in `Entity`s.
+class BlobValue {
+  /// The binary data of this blob.
+  final List<int> bytes;
+
+  BlobValue(this.bytes);
+}
+
+/// An opaque token returned by the `beginTransaction` method of a [Datastore].
+///
+/// This token can be passed to the `commit` and `lookup` calls if they should
+/// operate within this transaction.
+abstract class Transaction {}
+
+/// Interface used to talk to the Google Cloud Datastore service.
+///
+/// It can be used to insert/update/delete [Entity]s, lookup/query [Entity]s
+/// and allocate IDs from the auto ID allocation policy.
+abstract class Datastore {
+  /// List of required OAuth2 scopes for Datastore operation.
+  // ignore: constant_identifier_names
+  static const Scopes = DatastoreImpl.scopes;
+
+  /// Access Datastore using an authenticated client.
+  ///
+  /// The [client] is an authenticated HTTP client. This client must
+  /// provide access to at least the scopes in `Datastore.Scopes`.
+  ///
+  /// The [project] is the name of the Google Cloud project.
+  ///
+  /// Returns an object providing access to Datastore. The passed-in [client]
+  /// will not be closed automatically. The caller is responsible for closing
+  /// it.
+  factory Datastore(http.Client client, String project) {
+    return DatastoreImpl(client, project);
+  }
+
+  /// Retry Datastore operations where the issue seems to be transient.
+  ///
+  /// The [delegate] is the configured [Datastore] implementation that will be
+  /// used.
+  ///
+  /// The operations will be retried at most [maxAttempts] times.
+  factory Datastore.withRetry(
+    Datastore delegate, {
+    int? maxAttempts,
+  }) {
+    return RetryDatastoreImpl(
+      delegate,
+      RetryOptions(maxAttempts: maxAttempts ?? 3),
+    );
+  }
+
+  /// Allocate integer IDs for the partially populated [keys] given as
+  /// argument.
+  ///
+  /// The returned [Key]s will be fully populated with the allocated IDs.
+  Future<List<Key>> allocateIds(List<Key> keys);
+
+  /// Starts a new transaction and returns an opaque value representing it.
+  ///
+  /// If [crossEntityGroup] is `true`, the transaction can work on up to 5
+  /// entity groups. Otherwise the transaction will be limited to only operate
+  /// on a single entity group.
+  Future<Transaction> beginTransaction({bool crossEntityGroup = false});
+
+  /// Make modifications to the datastore.
+  ///
+  /// - `inserts` are [Entity]s which have a fully populated [Key] and should
+  ///   be either added to the datastore or updated.
+  ///
+  /// - `autoIdInserts` are [Entity]s which do not have a fully populated [Key]
+  ///   and should be added to the dataset, automatically assigning integer
+  ///   IDs.
+  ///   The returned [CommitResult] will contain the fully populated keys.
+  ///
+  /// - `deletes` are a list of fully populated [Key]s which uniquely identify
+  ///   the [Entity]s which should be deleted.
+  ///
+  /// If a [transaction] is given, all modifications will be done within that
+  /// transaction.
+  ///
+  /// This method might complete with a [TransactionAbortedError] error.
+  /// Users must take care of retrying transactions.
+  // TODO(Issue #6): Consider splitting `inserts` into insert/update/upsert.
+  Future<CommitResult> commit(
+      {List<Entity> inserts,
+      List<Entity> autoIdInserts,
+      List<Key> deletes,
+      Transaction transaction});
+
+  /// Roll a started transaction back.
+  Future rollback(Transaction transaction);
+
+  /// Looks up the fully populated [keys] in the datastore and returns either
+  /// the [Entity] corresponding to the [Key] or `null`. The order in the
+  /// returned [Entity]s is the same as in [keys].
+  ///
+  /// If a [transaction] is given, the lookup will be within this transaction.
+  Future<List<Entity?>> lookup(List<Key> keys, {Transaction transaction});
+
+  /// Runs a query on the dataset and returns a [Page] of matching [Entity]s.
+  ///
+  /// The [Page] instance returned might not contain all matching [Entity]s -
+  /// in which case `isLast` is set to `false`. The page's `next` method can
+  /// be used to page through the whole result set.
+  /// The maximum number of [Entity]s returned within a single page is
+  /// implementation specific.
+  ///
+  /// - `query` is used to restrict the number of returned [Entity]s and may
+  ///   specify an order.
+  ///
+  /// - `partition` can be used to specify the namespace used for the lookup.
+  ///
+  /// If a [transaction] is given, the query will be within this transaction.
+  /// But note that arbitrary queries within a transaction are not possible.
+  /// A transaction is limited to a very small number of entity groups. Usually
+  /// queries with transactions are restricted by providing an ancestor filter.
+  ///
+  /// Outside of transactions, the result set might be stale. Queries are by
+  /// default eventually consistent.
+  Future<Page<Entity>> query(Query query,
+      {Partition partition, Transaction transaction});
+}
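A brief sketch of the low-level interface in use, assuming a `Datastore`
instance created as shown in the README (names and values are illustrative):

```dart
import 'package:gcloud/datastore.dart';

Future<void> insertAndLookup(Datastore datastore) async {
  // Insert an entity with an incomplete key; Datastore allocates the id.
  final result = await datastore.commit(autoIdInserts: [
    Entity(Key([KeyElement('Person', null)]), {'name': 'Alice', 'age': 42}),
  ]);

  // The commit result carries the fully populated key.
  final key = result.autoIdInsertKeys.single;

  // Look the entity up again; lookup yields null for missing keys.
  final entities = await datastore.lookup([key]);
  print(entities.single?.properties['name']);
}
```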
diff --git a/pkgs/gcloud/lib/db.dart b/pkgs/gcloud/lib/db.dart
new file mode 100644
index 00000000..6996f6f2
--- /dev/null
+++ b/pkgs/gcloud/lib/db.dart
@@ -0,0 +1,50 @@
+// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+library;
+
+import 'dart:collection';
+// dart:core is imported explicitly so it is available at top-level without
+// the `core` prefix defined below.
+import 'dart:core';
+// Importing `dart:core` as `core` to allow access to `String` in `IdType`
+// without conflicts.
+import 'dart:core' as core;
+import 'dart:mirrors' as mirrors;
+
+import 'package:meta/meta.dart';
+
+import 'common.dart' show StreamFromPages;
+import 'datastore.dart' as ds;
+import 'service_scope.dart' as ss;
+
+part 'src/db/annotations.dart';
+part 'src/db/db.dart';
+part 'src/db/exceptions.dart';
+part 'src/db/model_db.dart';
+part 'src/db/model_db_impl.dart';
+part 'src/db/models.dart';
+
+const Symbol _dbKey = #gcloud.db;
+
+/// Access the [DatastoreDB] object available in the current service scope.
+///
+/// The returned object will be the one which was previously registered with
+/// [registerDbService] within the current (or a parent) service scope.
+///
+/// Accessing this getter outside of a service scope will result in an error.
+/// See the `package:gcloud/service_scope.dart` library for more information.
+DatastoreDB get dbService => ss.lookup(_dbKey) as DatastoreDB;
+
+/// Registers the [DatastoreDB] object within the current service scope.
+///
+/// The provided `db` object will be available via the top-level `dbService`
+/// getter.
+///
+/// Calling this function outside of a service scope will result in an error.
+/// Calling this function more than once inside the same service scope is not
+/// allowed.
+void registerDbService(DatastoreDB db) {
+  ss.register(_dbKey, db);
+}
diff --git a/pkgs/gcloud/lib/db/metamodel.dart b/pkgs/gcloud/lib/db/metamodel.dart
new file mode 100644
index 00000000..cc8bab44
--- /dev/null
+++ b/pkgs/gcloud/lib/db/metamodel.dart
@@ -0,0 +1,22 @@
+// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+import '../db.dart' as db;
+
+@db.Kind(name: '__namespace__')
+class Namespace extends db.ExpandoModel {
+  // ignore: constant_identifier_names
+  static const int EmptyNamespaceId = 1;
+
+  String? get name {
+    // The default namespace will be reported with id 1.
+    if (id == Namespace.EmptyNamespaceId) return null;
+    return id as String;
+  }
+}
+
+@db.Kind(name: '__kind__')
+class Kind extends db.Model {
+  String get name => id as String;
+}
diff --git a/pkgs/gcloud/lib/fix_data.yaml b/pkgs/gcloud/lib/fix_data.yaml
new file mode 100644
index 00000000..976365ae
--- /dev/null
+++ b/pkgs/gcloud/lib/fix_data.yaml
@@ -0,0 +1,29 @@
+version: 1
+transforms:
+  - title: 'Rename to Descending'
+    date: 2024-09-11
+    element:
+      uris: ['datastore.dart']
+      field: 'Decending'
+      inClass: 'OrderDirection'
+    changes:
+      - kind: 'rename'
+        newName: 'Descending'
+  - title: 'Rename to GreaterThan'
+    date: 2024-09-11
+    element:
+      uris: ['datastore.dart']
+      field: 'GreatherThan'
+      inClass: 'FilterRelation'
+    changes:
+      - kind: 'rename'
+        newName: 'GreaterThan'
+  - title: 'Rename to GreaterThanOrEqual'
+    date: 2024-09-11
+    element:
+      uris: ['datastore.dart']
+      field: 'GreatherThanOrEqual'
+      inClass: 'FilterRelation'
+    changes:
+      - kind: 'rename'
+        newName: 'GreaterThanOrEqual'
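A short sketch of what the metamodel classes above are for: they let an
application enumerate the namespaces (and, analogously, kinds) present in a
datastore. The `printNamespaces` helper is illustrative, assuming a
`DatastoreDB` configured as in the README:

```dart
import 'package:gcloud/db.dart' as db;
import 'package:gcloud/db/metamodel.dart';

Future<void> printNamespaces(db.DatastoreDB database) async {
  await for (final ns in database.query<Namespace>().run()) {
    print('namespace: ${ns.name ?? '<default>'}');
  }
}
```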
diff --git a/pkgs/gcloud/lib/http.dart b/pkgs/gcloud/lib/http.dart
new file mode 100644
index 00000000..30fd7aef
--- /dev/null
+++ b/pkgs/gcloud/lib/http.dart
@@ -0,0 +1,42 @@
+// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+/// Provides access to an authenticated HTTP client which can be used to access
+/// Google APIs.
+library;
+
+import 'package:http/http.dart' as http;
+
+import 'service_scope.dart' as ss;
+
+const Symbol _authenticatedClientKey = #gcloud.http;
+
+/// Access the [http.Client] object available in the current service scope.
+///
+/// The returned object will be the one which was previously registered with
+/// [registerAuthClientService] within the current (or a parent) service
+/// scope.
+///
+/// Accessing this getter outside of a service scope will result in an error.
+/// See the `package:gcloud/service_scope.dart` library for more information.
+http.Client get authClientService =>
+    ss.lookup(_authenticatedClientKey) as http.Client;
+
+/// Registers the [http.Client] object within the current service scope.
+///
+/// The provided `client` object will be available via the top-level
+/// `authClientService` getter.
+///
+/// Calling this function outside of a service scope will result in an error.
+/// Calling this function more than once inside the same service scope is not
+/// allowed.
+void registerAuthClientService(http.Client client, {bool close = true}) {
+  ss.register(_authenticatedClientKey, client);
+  if (close) {
+    ss.registerScopeExitCallback(() {
+      client.close();
+      return null;
+    });
+  }
+}
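A sketch of wiring an authenticated client into a scope. This is illustrative
only; it assumes Application Default Credentials and leaves the scope list and
application body as placeholders:

```dart
import 'package:gcloud/http.dart';
import 'package:gcloud/service_scope.dart' as ss;
import 'package:googleapis_auth/auth_io.dart' as auth;

Future<void> main() async {
  final client =
      await auth.clientViaApplicationDefaultCredentials(scopes: []);
  await ss.fork(() async {
    // From here on the client is reachable anywhere in this scope via
    // `authClientService`; with `close: true` (the default) it is closed
    // automatically when the scope exits.
    registerAuthClientService(client);
    // ... run the application ...
  });
}
```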
diff --git a/pkgs/gcloud/lib/pubsub.dart b/pkgs/gcloud/lib/pubsub.dart
new file mode 100644
index 00000000..efa220cf
--- /dev/null
+++ b/pkgs/gcloud/lib/pubsub.dart
@@ -0,0 +1,428 @@
+// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+import 'dart:async';
+import 'dart:collection';
+import 'dart:convert';
+import 'dart:io';
+
+import 'package:googleapis/pubsub/v1.dart' as pubsub;
+import 'package:http/http.dart' as http;
+
+import 'common.dart';
+import 'service_scope.dart' as ss;
+import 'src/common_utils.dart';
+
+export 'common.dart';
+
+part 'src/pubsub_impl.dart';
+
+const Symbol _pubsubKey = #gcloud.pubsub;
+
+/// Access the [PubSub] object available in the current service scope.
+///
+/// The returned object will be the one which was previously registered with
+/// [registerPubSubService] within the current (or a parent) service scope.
+///
+/// Accessing this getter outside of a service scope will result in an error.
+/// See the `package:gcloud/service_scope.dart` library for more information.
+PubSub get pubsubService => ss.lookup(_pubsubKey) as PubSub;
+
+/// Registers the [pubsub] object within the current service scope.
+///
+/// The provided `pubsub` object will be available via the top-level
+/// `pubsubService` getter.
+///
+/// Calling this function outside of a service scope will result in an error.
+/// Calling this function more than once inside the same service scope is not
+/// allowed.
+void registerPubSubService(PubSub pubsub) {
+  ss.register(_pubsubKey, pubsub);
+}
+
+/// A Cloud Pub/Sub client.
+///
+/// Connects to the Cloud Pub/Sub service and gives access to its operations.
+///
+/// Google Cloud Pub/Sub is a reliable, many-to-many, asynchronous messaging
+/// service from Google Cloud Platform. A detailed overview is available on
+/// [Pub/Sub docs](https://developers.google.com/pubsub/overview).
+///
+/// To access Pub/Sub, an authenticated HTTP client is required. This client
+/// should as a minimum provide access to the scopes in `PubSub.SCOPES`.
+///
+/// The following example shows how to access Pub/Sub using a service account
+/// and pull a message from a subscription.
+///
+///     import 'package:http/http.dart' as http;
+///     import 'package:googleapis_auth/auth_io.dart' as auth;
+///     import 'package:gcloud/pubsub.dart';
+///
+///     Future<http.Client> createClient() {
+///       // Service account credentials retrieved from Cloud Console.
+///       String creds =
+///           r'''
+///           {
+///             "private_key_id": ...,
+///             "private_key": ...,
+///             "client_email": ...,
+///             "client_id": ...,
+///             "type": "service_account"
+///           }''';
+///       return auth.clientViaServiceAccount(
+///           new auth.ServiceAccountCredentials.fromJson(creds),
+///           PubSub.SCOPES);
+///     }
+///
+///     main() {
+///       var project = 'my-project';
+///       var client;
+///       var pubsub;
+///       createClient().then((c) {
+///         client = c;
+///         pubsub = new PubSub(client, project);
+///         return pubsub.lookupSubscription('my-subscription');
+///       })
+///       .then((Subscription subscription) => subscription.pull())
+///       .then((PullEvent event) => print('Message ${event.message.asString}'))
+///       .whenComplete(() => client.close());
+///     }
+///
+/// When working with topics and subscriptions they are referred to using
+/// names. These names can be either relative names or absolute names.
+///
+/// An absolute name of a topic starts with `projects/` and has the form:
+///
+///     projects/<project-id>/topics/<topic-name>
+///
+/// When a relative topic name is used, its absolute name is generated by
+/// pre-pending `projects/<project-id>/topics/`, where `<project-id>` is the
+/// project id passed to the constructor.
+///
+/// An absolute name of a subscription starts with `projects/` and has the
+/// form:
+///
+///     projects/<project-id>/subscriptions/<subscription-name>
+///
+/// When a relative subscription name is used, its absolute name is
+/// generated by pre-pending `projects/<project-id>/subscriptions/`, where
+/// `<project-id>` is the project id passed to the constructor.
+abstract class PubSub {
+  /// List of required OAuth2 scopes for Pub/Sub operation.
+  // ignore: constant_identifier_names
+  static const SCOPES = [pubsub.PubsubApi.pubsubScope];
+
+  /// Access Pub/Sub using an authenticated client.
+  ///
+  /// The [client] is an authenticated HTTP client. This client must
+  /// provide access to at least the scopes in `PubSub.SCOPES`.
+  ///
+  /// The [project] is the name of the Google Cloud project.
+  ///
+  /// Returns an object providing access to Pub/Sub. The passed-in [client]
+  /// will not be closed automatically. The caller is responsible for closing
+  /// it.
+  factory PubSub(http.Client client, String project) {
+    var emulator = Platform.environment['PUBSUB_EMULATOR_HOST'];
+    return emulator == null
+        ? _PubSubImpl(client, project)
+        : _PubSubImpl.rootUrl(client, project, 'http://$emulator/');
+  }
+
+  /// The name of the project.
+  String get project;
+
+  /// Create a new topic named [name].
+  ///
+  /// The [name] can be either an absolute name or a relative name.
+  ///
+  /// Returns a `Future` which completes with the newly created topic.
+  Future<Topic> createTopic(String name);
+
+  /// Delete topic named [name].
+  ///
+  /// The [name] can be either an absolute name or a relative name.
+  ///
+  /// Returns a `Future` which completes with `null` when the operation
+  /// is finished.
+  Future deleteTopic(String name);
+
+  /// Look up topic named [name].
+  ///
+  /// The [name] can be either an absolute name or a relative name.
+  ///
+  /// Returns a `Future` which completes with the topic.
+  Future<Topic> lookupTopic(String name);
+
+  /// Lists all topics.
+  ///
+  /// Returns a `Stream` of topics.
+  Stream<Topic> listTopics();
+
+  /// Start paging through all topics.
+  ///
+  /// The maximum number of topics in each page is specified in [pageSize].
+  ///
+  /// Returns a `Future` which completes with a `Page` object holding the
+  /// first page. Use the `Page` object to move to the next page of topics.
+  Future<Page<Topic>> pageTopics({int pageSize = 50});
+
+  /// Create a new subscription named [name] listening on topic [topic].
+  ///
+  /// If [endpoint] is passed this will create a push subscription.
+  ///
+  /// Otherwise this will create a pull subscription.
+  ///
+  /// The [name] can be either an absolute name or a relative name.
+  ///
+  /// Returns a `Future` which completes with the newly created subscription.
+  Future<Subscription> createSubscription(String name, String topic,
+      {Uri endpoint});
+
+  /// Delete subscription named [name].
+  ///
+  /// The [name] can be either an absolute name or a relative name.
+  ///
+  /// Returns a `Future` which completes when the operation is finished.
+  Future deleteSubscription(String name);
+
+  /// Look up the subscription named [name].
+  ///
+  /// The [name] can be either an absolute name or a relative name.
+  ///
+  /// Returns a `Future` which completes with the subscription.
+  Future<Subscription> lookupSubscription(String name);
+
+  /// List subscriptions.
+  ///
+  /// If [query] is passed this will list all subscriptions matching the query.
+  ///
+  /// Otherwise this will list all subscriptions.
+  ///
+  /// The only supported query string is the name of a topic. If a name of a
+  /// topic is passed as [query], this will list all subscriptions on that
+  /// topic.
+  ///
+  /// Returns a `Stream` of subscriptions.
+  Stream<Subscription> listSubscriptions([String query]);
+
+  /// Start paging through subscriptions.
+  ///
+  /// If [topic] is passed this will list all subscriptions to that topic.
+  ///
+  /// Otherwise this will list all subscriptions.
+  ///
+  /// The maximum number of subscriptions in each page is specified in
+  /// [pageSize].
+  ///
+  /// Returns a `Future` which completes with a `Page` object holding the
+  /// first page. Use the `Page` object to move to the next page of
+  /// subscriptions.
+  Future<Page<Subscription>> pageSubscriptions(
+      {String topic, int pageSize = 50});
+}
+
+/// A Pub/Sub topic.
+///
+/// A topic is used by a publisher to publish (send) messages.
+abstract class Topic {
+  /// The relative name of this topic.
+  String get name;
+
+  /// The name of the project for this topic.
+  String get project;
+
+  /// The absolute name of this topic.
+  String get absoluteName;
+
+  /// Delete this topic.
+  ///
+  /// Returns a `Future` which completes with `null` when the operation
+  /// is finished.
+  Future delete();
+
+  /// Publish a message.
+  ///
+  /// Returns a `Future` which completes with `null` when the operation
+  /// is finished.
+  Future publish(Message message);
+
+  /// Publish a string as a message.
+  ///
+  /// The message will get the attributes specified in [attributes].
+  ///
+  /// The [attributes] are passed together with the message to the receiver.
+  ///
+  /// Returns a `Future` which completes with `null` when the operation
+  /// is finished.
+  Future publishString(String message, {Map<String, String> attributes});
+
+  /// Publish bytes as a message.
+  ///
+  /// The message will get the attributes specified in [attributes].
+  ///
+  /// The [attributes] are passed together with the message to the receiver.
+  ///
+  /// Returns a `Future` which completes with `null` when the operation
+  /// is finished.
+  Future publishBytes(List<int> message, {Map<String, String> attributes});
+}
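A small sketch of publishing with attributes; the topic name and attribute
values are illustrative:

```dart
import 'package:gcloud/pubsub.dart';

Future<void> publishReading(PubSub pubsub) async {
  final topic = await pubsub.lookupTopic('my-topic');
  await topic.publishString('21.5',
      attributes: {'unit': 'celsius', 'sensor': 'kitchen'});
}
```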
+
+/// A Pub/Sub subscription.
+///
+/// A subscription is used to receive messages. A subscriber application
+/// creates a subscription on a topic in order to receive messages from it.
+///
+/// Subscriptions can be either pull subscriptions or push subscriptions.
+///
+/// For a pull subscription the receiver calls the `Subscription.pull`
+/// method on the subscription object to get the next message.
+///
+/// For a push subscription an HTTPS endpoint is configured. This endpoint
+/// gets POST requests with the messages.
+abstract class Subscription {
+  /// The relative name of this subscription.
+  String get name;
+
+  /// The name of the project for this subscription.
+  String get project;
+
+  /// The absolute name of this subscription.
+  String get absoluteName;
+
+  /// The topic subscribed to.
+  Topic get topic;
+
+  /// Whether this is a push subscription.
+  ///
+  /// A push subscription is configured with an endpoint URI, and messages
+  /// are automatically sent to this endpoint without needing to call [pull].
+  bool get isPush;
+
+  /// Whether this is a pull subscription.
+  ///
+  /// A subscription without a configured endpoint URI is a pull subscription.
+  /// Messages are not delivered automatically, but must instead be requested
+  /// using [pull].
+  bool get isPull;
+
+  /// The URI for the push endpoint.
+  ///
+  /// If this is a pull subscription this is `null`.
+  Uri? get endpoint;
+
+  /// Update the push configuration with a new endpoint.
+  ///
+  /// If [endpoint] is `null`, the subscription stops delivering messages
+  /// automatically, and becomes a pull subscription, if it isn't already.
+  ///
+  /// If [endpoint] is not `null`, the subscription will be a push
+  /// subscription, if it wasn't already, and Pub/Sub will start automatically
+  /// delivering messages to the endpoint URI.
+  ///
+  /// Returns a `Future` which completes when the operation completes.
+  Future updatePushConfiguration(Uri? endpoint);
+
+  /// Delete this subscription.
+  ///
+  /// Returns a `Future` which completes when the operation completes.
+  Future delete();
+
+  /// Pull a message from the subscription.
+  ///
+  /// If [wait] is `true` (the default), the method will wait for a message
+  /// to become available, and will then complete the `Future` with a
+  /// `PullEvent` containing the message.
+  ///
+  /// If [wait] is `false`, the method will complete the returned `Future`
+  /// with `null` if it finds that there are no messages available.
+  Future<PullEvent?> pull({bool wait = true});
+}
+
+/// The content of a Pub/Sub message.
+///
+/// All Pub/Sub messages consist of a body of binary data and have an
+/// optional set of attributes (key-value pairs) associated with them.
+///
+/// A `Message` contains the message body as a list of bytes. The message
+/// body can be read and written as a String, in which case the string is
+/// converted to or from UTF-8 automatically.
+abstract class Message {
+  /// Creates a new message with a String for the body. The String will
+  /// be UTF-8 encoded to create the actual binary body for the message.
+  ///
+  /// Message attributes can be passed in the [attributes] map.
+  factory Message.withString(String message,
+      {Map<String, String>? attributes}) = _MessageImpl.withString;
+
+  /// Creates a new message with a binary body.
+  ///
+  /// Message attributes can be passed in the [attributes] map.
+  factory Message.withBytes(List<int> message,
+      {Map<String, String>? attributes}) = _MessageImpl.withBytes;
+
+  /// The message body as a String.
+  ///
+  /// The binary body is decoded into a String using a UTF-8 decoder.
+  ///
+  /// If the body is not UTF-8 encoded use the [asBytes] getter and manually
+  /// apply the correct decoding.
+  String get asString;
+
+  /// The message body as bytes.
+  List<int> get asBytes;
+
+  /// The attributes for this message.
+  Map<String, String> get attributes;
+}
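+
+// A minimal sketch (not part of the original file): draining a pull
+// subscription without blocking, acknowledging each message; `pubsub` is
+// assumed to be an already constructed `PubSub` instance.
+//
+//     Future drainSubscription(PubSub pubsub) async {
+//       var subscription = await pubsub.lookupSubscription('my-subscription');
+//       while (true) {
+//         var event = await subscription.pull(wait: false);
+//         if (event == null) break; // No message currently available.
+//         print(event.message.asString);
+//         await event.acknowledge();
+//       }
+//     }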
+
+/// A Pub/Sub pull event.
+///
+/// Instances of this class are returned when pulling messages with
+/// [Subscription.pull].
+abstract class PullEvent {
+  /// The message content.
+  Message get message;
+
+  /// Acknowledge reception of this message.
+  ///
+  /// Returns a `Future` which completes with `null` when the acknowledge has
+  /// been processed.
+  Future acknowledge();
+}
+
+/// Pub/Sub push event.
+///
+/// This class can be used in an HTTP server for decoding messages pushed to
+/// an endpoint.
+///
+/// When a message is received on a push endpoint use the [PushEvent.fromJson]
+/// constructor with the HTTP body to decode the received message.
+///
+/// E.g. with a `dart:io` HTTP handler:
+///
+///     void pushHandler(HttpRequest request) {
+///       // Decode the JSON body.
+///       request.transform(utf8.decoder).join('').then((body) {
+///         // Decode the JSON into a push event.
+///         var event = PushEvent.fromJson(body);
+///
+///         // Process the message...
+///
+///         // Respond with a 2xx status code to acknowledge the message.
+///         request.response.statusCode = HttpStatus.ok;
+///         request.response.close();
+///       });
+///     }
+abstract class PushEvent {
+  /// The message content.
+  Message get message;
+
+  /// The absolute name of the subscription.
+  String get subscriptionName;
+
+  /// Create a `PushEvent` from JSON received on a Pub/Sub push endpoint.
+  factory PushEvent.fromJson(String json) = _PushEventImpl.fromJson;
+}
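+
+// A minimal sketch (not part of the original file): creating a topic with
+// one pull subscription and one push subscription; the endpoint URL is a
+// placeholder, and `pubsub` is assumed to be an already constructed `PubSub`
+// instance.
+//
+//     Future setupExample(PubSub pubsub) async {
+//       await pubsub.createTopic('my-topic');
+//       // Pull subscription: messages are fetched via `Subscription.pull`.
+//       await pubsub.createSubscription('my-pull-sub', 'my-topic');
+//       // Push subscription: messages are POSTed to the HTTPS endpoint.
+//       await pubsub.createSubscription('my-push-sub', 'my-topic',
+//           endpoint: Uri.parse('https://example.com/push-handler'));
+//     }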
diff --git a/pkgs/gcloud/lib/service_scope.dart b/pkgs/gcloud/lib/service_scope.dart
new file mode 100644
index 00000000..d85aed45
--- /dev/null
+++ b/pkgs/gcloud/lib/service_scope.dart
@@ -0,0 +1,279 @@
+// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+/// This library enables one to create a service scope in which code can run.
+///
+/// A service scope is an environment in which code runs. The environment is a
+/// [Zone] with added functionality. Code can be run inside a new service
+/// scope by using the `fork(callback)` method. This will call `callback`
+/// inside a new service scope and will keep the scope alive until the Future
+/// returned by the callback completes. At this point the service scope ends.
+///
+/// Code running inside a new service scope can
+///
+/// - register objects (e.g. a database connection pool or a logging service)
+/// - look up previously registered objects
+/// - register on-scope-exit handlers
+///
+/// Service scopes can be nested. All registered values from the parent
+/// service scope are still accessible as long as they have not been
+/// overridden. The callback passed to `fork()` is responsible for not
+/// completing its returned Future until all nested service scopes have ended.
+///
+/// The on-scope-exit callbacks will be called when the service scope ends.
+/// The callbacks are run in reverse registration order and are guaranteed to
+/// be executed. During a scope-exit callback the active service scope cannot
+/// be modified anymore and `lookup()`s will only return values which were
+/// registered before the registration of the on-scope-exit callback.
+///
+/// One use case of this is making services available to a server application.
+/// The server application will run inside a service scope which has all
+/// necessary services registered.
+/// Once the server app shuts down, the registered on-scope-exit callbacks
+/// will automatically be invoked and the process will shut down cleanly.
+///
+/// Here is an example use case:
+///
+///     import 'dart:async';
+///     import 'package:gcloud/service_scope.dart' as scope;
+///
+///     class DBPool { ... }
+///
+///     DBPool get dbService => scope.lookup(#dbpool) as DBPool;
+///
+///     Future runApp() {
+///       // The application can use the registered objects (here the
+///       // dbService). It does not need to pass it around, but can use a
+///       // global getter.
+///       return dbService.query( ... ).listen(print).asFuture();
+///     }
+///
+///     main() {
+///       // Creates a new service scope and runs the given closure inside it.
+///       scope.fork(() {
+///         // We create a new database pool with 10 active connections and
+///         // add it to the current service scope with key `#dbpool`.
+///         // In addition we insert an on-scope-exit callback which will be
+///         // called once the application is done.
+///         var pool = DBPool(connections: 10);
+///         scope.register(#dbpool, pool, onScopeExit: () => pool.close());
+///         return runApp();
+///       }).then((_) {
+///         print('Server application shut down cleanly');
+///       });
+///     }
+///
+/// As an example, the `package:appengine/appengine.dart` package runs request
+/// handlers inside a service scope, which has most `package:gcloud` services
+/// registered.
+///
+/// The core application code can then be independent of `package:appengine`
+/// and instead depend only on the services needed (e.g.
+/// `package:gcloud/storage.dart`) by using getters in the service library
+/// (e.g. the `storageService`) which are implemented with service scope
+/// lookups.
+library;
+
+import 'dart:async';
+
+/// The Symbol used as index in the zone map for the service scope object.
+const Symbol _serviceScopeKey = #gcloud.service_scope;
+
+/// An empty service scope.
+///
+/// New service scopes can be created by calling [fork] on the empty
+/// service scope.
+final _ServiceScope _emptyServiceScope = _ServiceScope();
+
+/// Returns the current [_ServiceScope] object.
+_ServiceScope? get _serviceScope =>
+    Zone.current[_serviceScopeKey] as _ServiceScope?;
+
+/// Start a new zone with a new service scope and run [func] inside it.
+///
+/// The function [func] must return a `Future` and the service scope will end
+/// when this future completes.
+///
+/// If an uncaught error occurs and [onError] is given, it will be called. The
+/// `onError` parameter can take the same values as `Zone.current.fork`.
+Future fork(Future Function() func, {Function? onError}) {
+  var currentServiceScope = _serviceScope;
+  currentServiceScope ??= _emptyServiceScope;
+  return currentServiceScope._fork(func, onError: onError);
+}
+
+/// Register a new [value] into the current service scope using the given
+/// [key].
+///
+/// If [onScopeExit] is provided, it will be called when the service scope
+/// ends.
+///
+/// The registered on-scope-exit functions are executed in reverse
+/// registration order.
+void register(Object key, Object value, {ScopeExitCallback? onScopeExit}) {
+  var serviceScope = _serviceScope;
+  if (serviceScope == null) {
+    throw StateError('Not running inside a service scope zone.');
+  }
+  serviceScope.register(key, value, onScopeExit: onScopeExit);
+}
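+
+// A minimal sketch (not part of the original file) illustrating that
+// on-scope-exit callbacks run in reverse registration order once the future
+// returned by the `fork()` callback completes; this prints "closing B",
+// then "closing A".
+//
+//     import 'package:gcloud/service_scope.dart' as scope;
+//
+//     Future main() async {
+//       await scope.fork(() async {
+//         scope.register(#a, 'A', onScopeExit: () => print('closing A'));
+//         scope.register(#b, 'B', onScopeExit: () => print('closing B'));
+//       });
+//     }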
+
+/// Register a [onScopeExitCallback] to be invoked when this service scope
+/// ends.
+///
+/// The registered on-scope-exit functions are executed in reverse
+/// registration order.
+void registerScopeExitCallback(ScopeExitCallback onScopeExitCallback) {
+  var serviceScope = _serviceScope;
+  if (serviceScope == null) {
+    throw StateError('Not running inside a service scope zone.');
+  }
+  serviceScope.registerOnScopeExitCallback(onScopeExitCallback);
+}
+
+/// Look up an item by its key in the currently active service scope.
+///
+/// Returns `null` if there is no entry with the given key.
+Object? lookup(Object key) {
+  var serviceScope = _serviceScope;
+  if (serviceScope == null) {
+    throw StateError('Not running inside a service scope zone.');
+  }
+  return serviceScope.lookup(key);
+}
+
+/// Represents a global service scope of values stored via zones.
+class _ServiceScope {
+  /// A mapping of keys to values stored inside the service scope.
+  final Map<Object, _RegisteredEntry> _key2Values =
+      <Object, _RegisteredEntry>{};
+
+  /// A set which indicates whether an object was copied from its parent.
+  final Set<Object> _parentCopies = <Object>{};
+
+  /// On-scope-exit functions which will be called in reverse insertion order.
+  final List<_RegisteredEntry> _registeredEntries = [];
+
+  bool _cleaningUp = false;
+  bool _destroyed = false;
+
+  /// Looks up an object by its service scope key - returns `null` if not
+  /// found.
+  Object? lookup(Object serviceScope) {
+    _ensureNotInDestroyingState();
+    var entry = _key2Values[serviceScope];
+    return entry?.value;
+  }
+
+  /// Inserts a new item to the service scope using [serviceScopeKey].
+  ///
+  /// Optionally calls a [onScopeExit] function once this service scope ends.
+  void register(Object serviceScopeKey, Object value,
+      {ScopeExitCallback? onScopeExit}) {
+    _ensureNotInCleaningState();
+    _ensureNotInDestroyingState();
+
+    var isParentCopy = _parentCopies.contains(serviceScopeKey);
+    if (!isParentCopy && _key2Values.containsKey(serviceScopeKey)) {
+      throw ArgumentError(
+          'Service scope already contains key $serviceScopeKey.');
+    }
+
+    var entry = _RegisteredEntry(serviceScopeKey, value, onScopeExit);
+
+    _key2Values[serviceScopeKey] = entry;
+    if (isParentCopy) _parentCopies.remove(serviceScopeKey);
+
+    _registeredEntries.add(entry);
+  }
+
+  /// Inserts a new on-scope-exit function to be called once this service
+  /// scope ends.
+  void registerOnScopeExitCallback(ScopeExitCallback onScopeExitCallback) {
+    _ensureNotInCleaningState();
+    _ensureNotInDestroyingState();
+
+    _registeredEntries.add(_RegisteredEntry(null, null, onScopeExitCallback));
+  }
+
+  /// Start a new zone with a forked service scope.
+  Future _fork(Future Function() func, {Function? onError}) {
+    _ensureNotInCleaningState();
+    _ensureNotInDestroyingState();
+
+    var serviceScope = _copy();
+    var map = {_serviceScopeKey: serviceScope};
+    return runZoned(() {
+      var f = func();
+      return f.whenComplete(serviceScope._runScopeExitHandlers);
+      // ignore: deprecated_member_use
+    }, zoneValues: map, onError: onError);
+  }
+
+  void _ensureNotInDestroyingState() {
+    if (_destroyed) {
+      throw StateError(
+          'The service scope has already been exited. It is therefore '
+          'forbidden to use this service scope anymore. '
+          'Please make sure that your code waits for all asynchronous tasks '
+          'before the closure passed to fork() completes.');
+    }
+  }
+
+  void _ensureNotInCleaningState() {
+    if (_cleaningUp) {
+      throw StateError(
+          'The service scope is in the process of cleaning up. It is '
+          'therefore forbidden to make any modifications to the current '
+          'service scope. '
+          'Please make sure that your code waits for all asynchronous tasks '
+          'before the closure passed to fork() completes.');
+    }
+  }
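+
+  // A minimal sketch (not part of the original file) of the usual pattern
+  // for consuming this class via the top-level `lookup()` function:
+  // application code wraps the key in a typed getter, so callers never touch
+  // the key or the cast. `CachePool` is a hypothetical service type.
+  //
+  //     const Symbol _cachePoolKey = #myapp.cachePool;
+  //
+  //     CachePool get cachePool => lookup(_cachePoolKey) as CachePool;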
+
+  /// Copies all service scope entries to a new service scope, but not their
+  /// on-scope-exit handlers.
+  _ServiceScope _copy() {
+    var serviceScopeCopy = _ServiceScope();
+    serviceScopeCopy._key2Values.addAll(_key2Values);
+    serviceScopeCopy._parentCopies.addAll(_key2Values.keys);
+    return serviceScopeCopy;
+  }
+
+  /// Runs all on-scope-exit functions in [_ServiceScope].
+  Future _runScopeExitHandlers() {
+    _cleaningUp = true;
+    var errors = [];
+
+    // We run all on-scope-exit functions in reverse registration order.
+    // Even if one fails, we continue cleaning up and then report the list of
+    // errors (if there were any).
+    return Future.forEach(_registeredEntries.reversed,
+        (_RegisteredEntry registeredEntry) {
+      if (registeredEntry.key != null) {
+        _key2Values.remove(registeredEntry.key);
+      }
+      if (registeredEntry.scopeExitCallback != null) {
+        return Future.sync(registeredEntry.scopeExitCallback!)
+            .catchError((e, s) => errors.add(e));
+      } else {
+        return Future.value();
+      }
+    }).then((_) {
+      _cleaningUp = true;
+      _destroyed = true;
+      if (errors.isNotEmpty) {
+        throw Exception(
+            'The following errors occurred while running scope exit handlers'
+            ': $errors');
+      }
+    });
+  }
+}
+
+typedef ScopeExitCallback = FutureOr Function();
+
+class _RegisteredEntry {
+  final Object? key;
+  final Object? value;
+  final ScopeExitCallback? scopeExitCallback;
+
+  _RegisteredEntry(this.key, this.value, this.scopeExitCallback);
+}
diff --git a/pkgs/gcloud/lib/src/common_utils.dart b/pkgs/gcloud/lib/src/common_utils.dart
new file mode 100644
index 00000000..d8380109
--- /dev/null
+++ b/pkgs/gcloud/lib/src/common_utils.dart
@@ -0,0 +1,13 @@
+// Copyright (c) 2021, the Dart project authors. Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+import '../common.dart';
+
+extension PageUtils on Page {
+  void throwIfIsLast() {
+    if (isLast) {
+      throw StateError(
+          'Page.next() cannot be called when Page.isLast == true');
+    }
+  }
+}
diff --git a/pkgs/gcloud/lib/src/datastore_impl.dart b/pkgs/gcloud/lib/src/datastore_impl.dart
new file mode 100644
index 00000000..c0a9b366
--- /dev/null
+++ b/pkgs/gcloud/lib/src/datastore_impl.dart
@@ -0,0 +1,627 @@
+// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+import 'dart:async';
+
+import 'package:googleapis/datastore/v1.dart' as api;
+import 'package:http/http.dart' as http;
+
+import '../common.dart' show Page;
+import '../datastore.dart' as datastore;
+import 'common_utils.dart';
+
+class TransactionImpl implements datastore.Transaction {
+  final String data;
+
+  TransactionImpl(this.data);
+}
+
+class DatastoreImpl implements datastore.Datastore {
+  static const List<String> scopes = <String>[
+    api.DatastoreApi.datastoreScope,
+    api.DatastoreApi.cloudPlatformScope,
+  ];
+
+  final api.DatastoreApi _api;
+  final String _project;
+
+  /// The [project] parameter is the name of the cloud project (it should not
+  /// start with an `s~`).
+ DatastoreImpl(http.Client client, String project) + : _api = api.DatastoreApi(client), + _project = project; + + api.Key _convertDatastore2ApiKey(datastore.Key key, {bool enforceId = true}) { + var apiKey = api.Key(); + + apiKey.partitionId = api.PartitionId() + ..projectId = _project + ..namespaceId = key.partition.namespace; + + apiKey.path = key.elements.map((datastore.KeyElement element) { + final part = api.PathElement(); + part.kind = element.kind; + final id = element.id; + if (id is int) { + part.id = '$id'; + } else if (id is String) { + part.name = id; + } else if (enforceId) { + throw datastore.ApplicationError( + 'Error while encoding entity key: Using `null` as the id is not ' + 'allowed.'); + } + return part; + }).toList(); + + return apiKey; + } + + static datastore.Key _convertApi2DatastoreKey(api.Key key) { + var elements = key.path!.map((api.PathElement element) { + if (element.id != null) { + return datastore.KeyElement(element.kind!, int.parse(element.id!)); + } else if (element.name != null) { + return datastore.KeyElement(element.kind!, element.name); + } else { + throw datastore.DatastoreError( + 'Invalid server response: Expected allocated name/id.'); + } + }).toList(); + + var partition = datastore.Partition.DEFAULT; + if (key.partitionId != null) { + partition = datastore.Partition(key.partitionId!.namespaceId); + // TODO: assert projectId. + } + return datastore.Key(elements, partition: partition); + } + + bool _compareApiKey(api.Key a, api.Key b) { + if (a.path!.length != b.path!.length) return false; + + // FIXME(Issue #2): Is this comparison working correctly? + if (a.partitionId != null) { + if (b.partitionId == null) { + return false; + } + if (a.partitionId!.projectId != b.partitionId!.projectId) { + return false; + } + if (a.partitionId!.namespaceId != b.partitionId!.namespaceId) { + return false; + } + } else if (b.partitionId != null) { + return false; + } + + for (var i = 0; i < a.path!.length; i++) { + if (a.path![i].id != b.path![i].id || + a.path![i].name != b.path![i].name || + a.path![i].kind != b.path![i].kind) { + return false; + } + } + return true; + } + + api.Value _convertDatastore2ApiPropertyValue(value, bool indexed, + {bool lists = true}) { + var apiValue = api.Value()..excludeFromIndexes = !indexed; + if (value == null) { + return apiValue..nullValue = 'NULL_VALUE'; + } else if (value is bool) { + return apiValue..booleanValue = value; + } else if (value is int) { + return apiValue..integerValue = '$value'; + } else if (value is double) { + return apiValue..doubleValue = value; + } else if (value is String) { + return apiValue..stringValue = value; + } else if (value is DateTime) { + return apiValue..timestampValue = value.toIso8601String(); + } else if (value is datastore.BlobValue) { + return apiValue..blobValueAsBytes = value.bytes; + } else if (value is datastore.Key) { + return apiValue + ..keyValue = _convertDatastore2ApiKey(value, enforceId: false); + } else if (value is List) { + if (!lists) { + // FIXME(Issue #3): Consistently handle exceptions. 
+        throw Exception('List values are not allowed.');
+      }
+
+      api.Value convertItem(i) =>
+          _convertDatastore2ApiPropertyValue(i, indexed, lists: false);
+
+      return api.Value()
+        ..arrayValue =
+            (api.ArrayValue()..values = value.map(convertItem).toList());
+    } else {
+      throw UnsupportedError(
+          'Type ${value.runtimeType} cannot be used for serializing.');
+    }
+  }
+
+  static dynamic _convertApi2DatastoreProperty(api.Value value) {
+    if (value.booleanValue != null) {
+      return value.booleanValue;
+    } else if (value.integerValue != null) {
+      return int.parse(value.integerValue!);
+    } else if (value.doubleValue != null) {
+      return value.doubleValue;
+    } else if (value.stringValue != null) {
+      return value.stringValue;
+    } else if (value.timestampValue != null) {
+      return DateTime.parse(value.timestampValue!);
+    } else if (value.blobValue != null) {
+      return datastore.BlobValue(value.blobValueAsBytes);
+    } else if (value.keyValue != null) {
+      return _convertApi2DatastoreKey(value.keyValue!);
+    } else if (value.arrayValue != null && value.arrayValue!.values != null) {
+      return value.arrayValue!.values!
+          .map(_convertApi2DatastoreProperty)
+          .toList();
+    } else if (value.entityValue != null) {
+      throw UnsupportedError('Entity values are not supported.');
+    } else if (value.geoPointValue != null) {
+      throw UnsupportedError('GeoPoint values are not supported.');
+    }
+    return null;
+  }
+
+  static datastore.Entity _convertApi2DatastoreEntity(api.Entity entity) {
+    var unindexedProperties = <String>{};
+    var properties = <String, dynamic>{};
+
+    if (entity.properties != null) {
+      entity.properties!.forEach((String name, api.Value value) {
+        properties[name] = _convertApi2DatastoreProperty(value);
+        if (value.excludeFromIndexes != null && value.excludeFromIndexes!) {
+          unindexedProperties.add(name);
+        }
+      });
+    }
+    return datastore.Entity(_convertApi2DatastoreKey(entity.key!), properties,
+        unIndexedProperties: unindexedProperties);
+  }
+
+  api.Entity _convertDatastore2ApiEntity(datastore.Entity entity,
+      {bool enforceId = false}) {
+    var apiEntity = api.Entity();
+
+    apiEntity.key = _convertDatastore2ApiKey(entity.key, enforceId: enforceId);
+    final properties = apiEntity.properties = <String, api.Value>{};
+    if (entity.properties.isNotEmpty) {
+      for (var key in entity.properties.keys) {
+        var value = entity.properties[key];
+        final indexed = !entity.unIndexedProperties.contains(key);
+        properties[key] = _convertDatastore2ApiPropertyValue(value, indexed);
+      }
+    }
+    return apiEntity;
+  }
+
+  static const Map<datastore.FilterRelation, String> relationMapping = {
+    datastore.FilterRelation.LessThan: 'LESS_THAN',
+    datastore.FilterRelation.LessThanOrEqual: 'LESS_THAN_OR_EQUAL',
+    datastore.FilterRelation.Equal: 'EQUAL',
+    datastore.FilterRelation.GreaterThan: 'GREATER_THAN',
+    datastore.FilterRelation.GreaterThanOrEqual: 'GREATER_THAN_OR_EQUAL',
+  };
+
+  api.Filter _convertDatastore2ApiFilter(datastore.Filter filter) {
+    var pf = api.PropertyFilter();
+    var operator = relationMapping[filter.relation];
+    if (operator == null) {
+      throw ArgumentError('Unknown filter relation: ${filter.relation}.');
+    }
+    pf.op = operator;
+    pf.property = api.PropertyReference()..name = filter.name;
+    pf.value =
+        _convertDatastore2ApiPropertyValue(filter.value, true, lists: false);
+    return api.Filter()..propertyFilter = pf;
+  }
+
+  api.Filter _convertDatastoreAncestorKey2ApiFilter(datastore.Key key) {
+    var pf = api.PropertyFilter();
+    pf.op = 'HAS_ANCESTOR';
+    pf.property = api.PropertyReference()..name = '__key__';
+    pf.value = api.Value()
+      ..keyValue = _convertDatastore2ApiKey(key, enforceId:
true); + return api.Filter()..propertyFilter = pf; + } + + api.Filter? _convertDatastore2ApiFilters( + List? filters, + datastore.Key? ancestorKey, + ) { + if ((filters == null || filters.isEmpty) && ancestorKey == null) { + return null; + } + + var compFilter = api.CompositeFilter(); + if (filters != null) { + compFilter.filters = filters.map(_convertDatastore2ApiFilter).toList(); + } + if (ancestorKey != null) { + var filter = _convertDatastoreAncestorKey2ApiFilter(ancestorKey); + if (compFilter.filters == null) { + compFilter.filters = [filter]; + } else { + compFilter.filters!.add(filter); + } + } + compFilter.op = 'AND'; + return api.Filter()..compositeFilter = compFilter; + } + + api.PropertyOrder _convertDatastore2ApiOrder(datastore.Order order) { + var property = api.PropertyReference()..name = order.propertyName; + var direction = order.direction == datastore.OrderDirection.Ascending + ? 'ASCENDING' + : 'DESCENDING'; + return api.PropertyOrder() + ..direction = direction + ..property = property; + } + + List? _convertDatastore2ApiOrders( + List? orders) { + if (orders == null) return null; + + return orders.map(_convertDatastore2ApiOrder).toList(); + } + + static Future _handleError(Object error, StackTrace stack) { + if (error is api.DetailedApiRequestError) { + if (error.status == 400) { + return Future.error( + datastore.ApplicationError( + error.message ?? 'An unknown error occurred', + ), + stack, + ); + } else if (error.status == 409) { + // NOTE: This is reported as: + // "too much contention on these datastore entities" + // TODO: + return Future.error(datastore.TransactionAbortedError(), stack); + } else if (error.status == 412) { + return Future.error(datastore.NeedIndexError(), stack); + } + } + return Future.error(error, stack); + } + + @override + Future> allocateIds(List keys) { + var request = api.AllocateIdsRequest(); + request.keys = keys.map((key) { + return _convertDatastore2ApiKey(key, enforceId: false); + }).toList(); + return _api.projects.allocateIds(request, _project).then((response) { + return (response.keys ?? []).map(_convertApi2DatastoreKey).toList(); + }, onError: _handleError); + } + + @override + Future beginTransaction( + {bool crossEntityGroup = false}) { + var request = api.BeginTransactionRequest(); + return _api.projects.beginTransaction(request, _project).then((result) { + return TransactionImpl(result.transaction!); + }, onError: _handleError); + } + + @override + Future commit({ + List inserts = const [], + List autoIdInserts = const [], + List deletes = const [], + datastore.Transaction? 
transaction, + }) { + final request = api.CommitRequest(); + + if (transaction != null) { + request.mode = 'TRANSACTIONAL'; + request.transaction = (transaction as TransactionImpl).data; + } else { + request.mode = 'NON_TRANSACTIONAL'; + } + + var mutations = request.mutations = []; + if (inserts.isNotEmpty) { + for (var i = 0; i < inserts.length; i++) { + mutations.add(api.Mutation() + ..upsert = _convertDatastore2ApiEntity(inserts[i], enforceId: true)); + } + } + var autoIdStartIndex = -1; + if (autoIdInserts.isNotEmpty) { + autoIdStartIndex = mutations.length; + for (var i = 0; i < autoIdInserts.length; i++) { + mutations.add(api.Mutation() + ..insert = + _convertDatastore2ApiEntity(autoIdInserts[i], enforceId: false)); + } + } + if (deletes.isNotEmpty) { + for (var i = 0; i < deletes.length; i++) { + mutations.add(api.Mutation() + ..delete = _convertDatastore2ApiKey(deletes[i], enforceId: true)); + } + } + return _api.projects.commit(request, _project).then((result) { + var keys = []; + if (autoIdInserts.isNotEmpty) { + assert(result.mutationResults != null); + var mutationResults = result.mutationResults!; + assert(autoIdStartIndex != -1); + assert(mutationResults.length >= + (autoIdStartIndex + autoIdInserts.length)); + keys = mutationResults + .skip(autoIdStartIndex) + .take(autoIdInserts.length) + .map((r) => _convertApi2DatastoreKey(r.key!)) + .toList(); + } + return datastore.CommitResult(keys); + }, onError: _handleError); + } + + @override + Future> lookup( + List keys, { + datastore.Transaction? transaction, + }) { + var apiKeys = keys.map((key) { + return _convertDatastore2ApiKey(key, enforceId: true); + }).toList(); + var request = api.LookupRequest(); + request.keys = apiKeys; + if (transaction != null) { + // TODO: Make readOptions more configurable. + request.readOptions = api.ReadOptions() + ..transaction = (transaction as TransactionImpl).data; + } + return _api.projects.lookup(request, _project).then((response) { + if (response.deferred != null && response.deferred!.isNotEmpty) { + throw datastore.DatastoreError( + 'Could not successfully look up all keys due to resource ' + 'constraints.'); + } + + // NOTE: This is worst-case O(n^2)! + // Maybe we can optimize this somehow. But the API says: + // message LookupResponse { + // // The order of results in these fields is undefined and has no relation to + // // the order of the keys in the input. + // + // // Entities found as ResultType.FULL entities. + // repeated EntityResult found = 1; + // + // // Entities not found as ResultType.KEY_ONLY entities. + // repeated EntityResult missing = 2; + // + // // A list of keys that were not looked up due to resource constraints. + // repeated Key deferred = 3; + // } + var entities = List.filled(apiKeys.length, null); + for (var i = 0; i < apiKeys.length; i++) { + var apiKey = apiKeys[i]; + + var found = false; + + if (response.found != null) { + for (var result in response.found!) { + if (_compareApiKey(apiKey, result.entity!.key!)) { + entities[i] = _convertApi2DatastoreEntity(result.entity!); + found = true; + break; + } + } + } + + if (found) continue; + + if (response.missing != null) { + for (var result in response.missing!) 
{ + if (_compareApiKey(apiKey, result.entity!.key!)) { + entities[i] = null; + found = true; + break; + } + } + } + + if (!found) { + throw datastore.DatastoreError('Invalid server response: ' + 'Tried to lookup ${apiKey.toJson()} but entity was neither in ' + 'missing nor in found.'); + } + } + return entities; + }, onError: _handleError); + } + + @override + Future> query( + datastore.Query query, { + datastore.Partition partition = datastore.Partition.DEFAULT, + datastore.Transaction? transaction, + }) { + // NOTE: We explicitly do not set 'limit' here, since this is handled by + // QueryPageImpl.runQuery. + var apiQuery = api.Query() + ..filter = _convertDatastore2ApiFilters(query.filters, query.ancestorKey) + ..order = _convertDatastore2ApiOrders(query.orders) + ..offset = query.offset; + + if (query.kind != null) { + apiQuery.kind = [api.KindExpression()..name = query.kind]; + } + + var request = api.RunQueryRequest(); + request.query = apiQuery; + if (transaction != null) { + // TODO: Make readOptions more configurable. + request.readOptions = api.ReadOptions() + ..transaction = (transaction as TransactionImpl).data; + } + if (partition != datastore.Partition.DEFAULT) { + request.partitionId = api.PartitionId() + ..namespaceId = partition.namespace; + } + + return QueryPageImpl.runQuery(_api, _project, request, query.limit) + .catchError(_handleError); + } + + @override + Future rollback(datastore.Transaction transaction) { + // TODO: Handle [transaction] + var request = api.RollbackRequest() + ..transaction = (transaction as TransactionImpl).data; + return _api.projects.rollback(request, _project).catchError(_handleError); + } +} + +class QueryPageImpl implements Page { + static const int _maxEntitiesPerResponse = 2000; + + final api.DatastoreApi _api; + final String _project; + final api.RunQueryRequest _nextRequest; + final List _entities; + final bool _isLast; + + // This might be `null` in which case we request as many as we can get. + final int? _remainingNumberOfEntities; + + QueryPageImpl(this._api, this._project, this._nextRequest, this._entities, + this._isLast, this._remainingNumberOfEntities); + + static Future runQuery(api.DatastoreApi api, String project, + api.RunQueryRequest request, int? limit, + {int batchSize = _maxEntitiesPerResponse}) { + if (limit != null && limit < batchSize) { + batchSize = limit; + } + + request.query!.limit = batchSize; + + return api.projects.runQuery(request, project).then((response) { + var returnedEntities = const []; + + final batch = response.batch!; + if (batch.entityResults != null) { + returnedEntities = batch.entityResults! + .map((result) => result.entity!) + .map(DatastoreImpl._convertApi2DatastoreEntity) + .toList(); + } + + // This check is only necessary for the first request/response pair + // (if offset was supplied). + if (request.query!.offset != null && + request.query!.offset! > 0 && + request.query!.offset != batch.skippedResults) { + throw datastore.DatastoreError( + 'Server did not skip over the specified ${request.query!.offset} ' + 'entities.'); + } + + if (limit != null && returnedEntities.length > limit) { + throw datastore.DatastoreError( + 'Server returned more entities then the limit for the request' + '(${request.query!.limit}) was.'); + } + + // FIXME: TODO: Big hack! + // It looks like Apiary/Atlas is currently broken. 
+      /*
+      if (limit != null &&
+          returnedEntities.length < batchSize &&
+          response.batch.moreResults == 'MORE_RESULTS_AFTER_LIMIT') {
+        throw datastore.DatastoreError(
+            'Server returned a response with fewer entities than the limit, '
+            'but signals there are more results after the limit.');
+      }
+      */
+
+      // In case a limit was specified, we need to subtract the number of
+      // entities we already got.
+      // (The checks above guarantee that this subtraction is >= 0.)
+      int? remainingEntities;
+      if (limit != null) {
+        remainingEntities = limit - returnedEntities.length;
+      }
+
+      // If the server signals there are more entities and we either have no
+      // limit or our limit has not been reached, we set `moreBatches` to
+      // `true`.
+      var moreBatches = (remainingEntities == null || remainingEntities > 0) &&
+          batch.moreResults == 'MORE_RESULTS_AFTER_LIMIT';
+
+      var gotAll = limit != null && remainingEntities == 0;
+      var noMore = batch.moreResults == 'NO_MORE_RESULTS';
+      var isLast = gotAll || noMore;
+
+      // As a sanity check, we assert that `moreBatches XOR isLast`.
+      assert(isLast != moreBatches);
+
+      // FIXME: TODO: Big hack!
+      // It looks like Apiary/Atlas is currently broken.
+      if (moreBatches && returnedEntities.isEmpty) {
+        print('Warning: API to Google Cloud Datastore returned bogus '
+            'response. Trying a workaround.');
+        isLast = true;
+        moreBatches = false;
+      }
+
+      if (!isLast && batch.endCursor == null) {
+        throw datastore.DatastoreError(
+            'Server did not supply an end cursor, even though the query '
+            'is not done.');
+      }
+
+      if (isLast) {
+        return QueryPageImpl(
+            api, project, request, returnedEntities, true, null);
+      } else {
+        // NOTE: We reuse the old RunQueryRequest object here.
+
+        // The offset will be 0 from now on, since the first request will have
+        // skipped over the first `offset` results.
+        request.query!.offset = 0;
+
+        // Furthermore we set the startCursor to the endCursor of the previous
+        // result batch, so we can continue where we left off.
+        request.query!.startCursor = batch.endCursor;
+
+        return QueryPageImpl(
+            api, project, request, returnedEntities, false, remainingEntities);
+      }
+    });
+  }
+
+  @override
+  bool get isLast => _isLast;
+
+  @override
+  List<datastore.Entity> get items => _entities;
+
+  @override
+  Future<Page<datastore.Entity>> next({int? pageSize}) async {
+    // NOTE: We do not respect [pageSize] here, the only mechanism we can
+    // really use is `query.limit`, but this is user-specified when making
+    // the query.
+    throwIfIsLast();
+
+    return QueryPageImpl.runQuery(
+            _api, _project, _nextRequest, _remainingNumberOfEntities)
+        .catchError(DatastoreImpl._handleError);
+  }
+}
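+
+// A minimal sketch (not part of the original file) of consuming query
+// results through the `Page` interface implemented above; `db` is assumed to
+// be a constructed [datastore.Datastore] and `query` a [datastore.Query].
+//
+//     Future<int> countEntities(
+//         datastore.Datastore db, datastore.Query query) async {
+//       var count = 0;
+//       var page = await db.query(query);
+//       while (true) {
+//         count += page.items.length;
+//         if (page.isLast) break;
+//         page = await page.next();
+//       }
+//       return count;
+//     }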
diff --git a/pkgs/gcloud/lib/src/db/annotations.dart b/pkgs/gcloud/lib/src/db/annotations.dart
new file mode 100644
index 00000000..de895bc2
--- /dev/null
+++ b/pkgs/gcloud/lib/src/db/annotations.dart
@@ -0,0 +1,327 @@
+// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+part of '../../db.dart';
+
+/// Annotation used to mark Dart classes which can be stored into datastore.
+///
+/// The `Kind` annotation on a class, as well as `Property` annotations on
+/// fields or getters of the class itself (and any of its superclasses) up to
+/// the [Model] class, describe the *mapping* of *Dart objects* to datastore
+/// *entities*.
+///
+/// An "entity" is an object which can be stored into Google Cloud Datastore.
+/// It contains a number of named "properties", some of which may be indexed
+/// while others are not. A "property" value can be of a limited set of
+/// supported types (such as `int` and `String`).
+///
+/// Here is an example of a Dart model class which can be stored into
+/// datastore:
+///
+///     @Kind()
+///     class Person extends db.Model {
+///       @StringProperty()
+///       String? name;
+///
+///       @IntProperty()
+///       int? age;
+///
+///       @DateTimeProperty()
+///       DateTime? dateOfBirth;
+///     }
+class Kind {
+  /// The kind name used when saving objects to datastore.
+  ///
+  /// If `null` the name will be the same as the name of the class at which
+  /// the annotation is placed.
+  final String? name;
+
+  /// The type of id used, either [IdType.Integer] or [IdType.String].
+  final IdType idType;
+
+  /// Annotation specifying the name of this kind and whether to use integer
+  /// or string `id`s.
+  ///
+  /// If [name] is omitted, it will default to the name of the class to which
+  /// this annotation is attached.
+  const Kind({this.name, this.idType = IdType.Integer});
+}
+
+/// The type used for ids of an entity.
+class IdType {
+  /// Use integer ids for identifying entities.
+  // ignore: constant_identifier_names
+  static const IdType Integer = IdType('Integer');
+
+  /// Use string ids for identifying entities.
+  // ignore: constant_identifier_names
+  static const IdType String = IdType('String');
+
+  final core.String _type;
+
+  const IdType(this._type);
+
+  @override
+  core.String toString() => 'IdType: $_type';
+}
+
+/// Describes a property of an Entity.
+///
+/// Please see [Kind] for an example on how to use them.
+abstract class Property {
+  /// The name of the property.
+  ///
+  /// If it is `null`, the name will be the same as used in the
+  /// model class.
+  final String? propertyName;
+
+  /// Specifies whether this property is required or not.
+  ///
+  /// If required is `true`, it will be enforced when saving model objects to
+  /// the datastore and when retrieving them.
+  final bool required;
+
+  /// Specifies whether this property should be indexed or not.
+  ///
+  /// When running queries on this property, it is necessary to set [indexed]
+  /// to `true`.
+  final bool indexed;
+
+  const Property(
+      {this.propertyName, this.required = false, this.indexed = true});
+
+  bool validate(ModelDB db, Object? value) {
+    if (required && value == null) return false;
+    return true;
+  }
+
+  Object? encodeValue(ModelDB db, Object? value, {bool forComparison = false});
+
+  Object? decodePrimitiveValue(ModelDB db, Object? value);
+}
+
+/// An abstract base class for primitive properties which can e.g. be used
+/// within a composed `ListProperty`.
+abstract class PrimitiveProperty extends Property {
+  const PrimitiveProperty(
+      {String? propertyName, bool required = false, bool indexed = true})
+      : super(
+            propertyName: propertyName, required: required, indexed: indexed);
+
+  @override
+  Object? encodeValue(ModelDB db, Object? value,
+          {bool forComparison = false}) =>
+      value;
+
+  @override
+  Object? decodePrimitiveValue(ModelDB db, Object? value) => value;
+}
+
+/// A boolean [Property].
+///
+/// It will validate that values are booleans before writing them to the
+/// datastore and when reading them back.
+class BoolProperty extends PrimitiveProperty {
+  const BoolProperty(
+      {String? propertyName, bool required = false, bool indexed = true})
+      : super(
+            propertyName: propertyName, required: required, indexed: indexed);
+
+  @override
+  bool validate(ModelDB db, Object?
value) => + super.validate(db, value) && (value == null || value is bool); +} + +/// A integer [Property]. +/// +/// It will validate that values are integers before writing them to the +/// datastore and when reading them back. +class IntProperty extends PrimitiveProperty { + const IntProperty( + {String? propertyName, bool required = false, bool indexed = true}) + : super(propertyName: propertyName, required: required, indexed: indexed); + + @override + bool validate(ModelDB db, Object? value) => + super.validate(db, value) && (value == null || value is int); +} + +/// A double [Property]. +/// +/// It will validate that values are doubles before writing them to the +/// datastore and when reading them back. +class DoubleProperty extends PrimitiveProperty { + const DoubleProperty( + {String? propertyName, bool required = false, bool indexed = true}) + : super(propertyName: propertyName, required: required, indexed: indexed); + + @override + bool validate(ModelDB db, Object? value) => + super.validate(db, value) && (value == null || value is double); +} + +/// A string [Property]. +/// +/// It will validate that values are strings before writing them to the +/// datastore and when reading them back. +class StringProperty extends PrimitiveProperty { + const StringProperty( + {String? propertyName, bool required = false, bool indexed = true}) + : super(propertyName: propertyName, required: required, indexed: indexed); + + @override + bool validate(ModelDB db, Object? value) => + super.validate(db, value) && (value == null || value is String); +} + +/// A key [Property]. +/// +/// It will validate that values are keys before writing them to the +/// datastore and when reading them back. +class ModelKeyProperty extends PrimitiveProperty { + const ModelKeyProperty( + {String? propertyName, bool required = false, bool indexed = true}) + : super(propertyName: propertyName, required: required, indexed: indexed); + + @override + bool validate(ModelDB db, Object? value) => + super.validate(db, value) && (value == null || value is Key); + + @override + Object? encodeValue(ModelDB db, Object? value, {bool forComparison = false}) { + if (value == null) return null; + return db.toDatastoreKey(value as Key); + } + + @override + Object? decodePrimitiveValue(ModelDB db, Object? value) { + if (value == null) return null; + return db.fromDatastoreKey(value as ds.Key); + } +} + +/// A binary blob [Property]. +/// +/// It will validate that values are blobs before writing them to the +/// datastore and when reading them back. Blob values will be represented by +/// List. +class BlobProperty extends PrimitiveProperty { + const BlobProperty({String? propertyName, bool required = false}) + : super(propertyName: propertyName, required: required, indexed: false); + + // NOTE: We don't validate that the entries of the list are really integers + // of the range 0..255! + // If an untyped list was created the type check will always succeed. i.e. + // "[1, true, 'bar'] is List" evaluates to `true` + @override + bool validate(ModelDB db, Object? value) => + super.validate(db, value) && (value == null || value is List); + + @override + Object? encodeValue(ModelDB db, Object? value, {bool forComparison = false}) { + if (value == null) return null; + return ds.BlobValue(value as List); + } + + @override + Object? decodePrimitiveValue(ModelDB db, Object? value) { + if (value == null) return null; + + return (value as ds.BlobValue).bytes; + } +} + +/// A datetime [Property]. 
+/// +/// It will validate that values are DateTime objects before writing them to the +/// datastore and when reading them back. +class DateTimeProperty extends PrimitiveProperty { + const DateTimeProperty( + {String? propertyName, bool required = false, bool indexed = true}) + : super(propertyName: propertyName, required: required, indexed: indexed); + + @override + bool validate(ModelDB db, Object? value) => + super.validate(db, value) && (value == null || value is DateTime); + + @override + Object? decodePrimitiveValue(ModelDB db, Object? value) { + if (value is int) { + return DateTime.fromMillisecondsSinceEpoch(value ~/ 1000, isUtc: true); + } + return value; + } +} + +/// A composed list [Property], with a `subProperty` for the list elements. +/// +/// It will validate that values are List objects before writing them to the +/// datastore and when reading them back. It will also validate the elements +/// of the list itself. +class ListProperty extends Property { + final PrimitiveProperty subProperty; + + // TODO: We want to support optional list properties as well. + // Get rid of "required: true" here. + const ListProperty(this.subProperty, + {String? propertyName, bool indexed = true}) + : super(propertyName: propertyName, required: true, indexed: indexed); + + @override + bool validate(ModelDB db, Object? value) { + if (!super.validate(db, value) || value is! List) return false; + + for (var entry in value) { + if (!subProperty.validate(db, entry)) return false; + } + return true; + } + + @override + Object? encodeValue(ModelDB db, Object? value, {bool forComparison = false}) { + if (forComparison) { + // If we have comparison of list properties (i.e. repeated property names) + // the comparison object must not be a list, but the value itself. + // i.e. + // + // class Article { + // ... + // @ListProperty(StringProperty()) + // List tags; + // ... + // } + // + // should be queried via + // + // await db.query(Article, 'tags=', "Dart").toList(); + // + // So the [value] for the comparison is of type `String` and not + // `List`! + return subProperty.encodeValue(db, value, forComparison: true); + } + + if (value == null) return null; + var list = value as List; + if (list.isEmpty) return null; + if (list.length == 1) return subProperty.encodeValue(db, list[0]); + return list.map((value) => subProperty.encodeValue(db, value)).toList(); + } + + @override + Object decodePrimitiveValue(ModelDB db, Object? value) { + if (value == null) return []; + if (value is! List) return [subProperty.decodePrimitiveValue(db, value)]; + return value + .map((entry) => subProperty.decodePrimitiveValue(db, entry)) + .toList(); + } +} + +/// A convenience [Property] for list of strings. +class StringListProperty extends ListProperty { + const StringListProperty({String? propertyName, bool indexed = true}) + : super(const StringProperty(), + propertyName: propertyName, indexed: indexed); + + @override + Object decodePrimitiveValue(ModelDB db, Object? value) { + return (super.decodePrimitiveValue(db, value) as core.List).cast(); + } +} diff --git a/pkgs/gcloud/lib/src/db/db.dart b/pkgs/gcloud/lib/src/db/db.dart new file mode 100644 index 00000000..5c2f888f --- /dev/null +++ b/pkgs/gcloud/lib/src/db/db.dart @@ -0,0 +1,480 @@ +// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file +// for details. All rights reserved. Use of this source code is governed by a +// BSD-style license that can be found in the LICENSE file. 
+ +part of '../../db.dart'; + +/// A function definition for transactional functions. +/// +/// The function will be given a [Transaction] object which can be used to make +/// lookups/queries and queue modifications (inserts/updates/deletes). +typedef TransactionHandler = Future Function(Transaction transaction); + +/// A datastore transaction. +/// +/// It can be used for making lookups/queries and queue modifications +/// (inserts/updates/deletes). Finally the transaction can be either committed +/// or rolled back. +class Transaction { + static const int _transactionStarted = 0; + static const int _transactionRolledBack = 1; + static const int _transactionCommitted = 2; + static const int _transactionCommitFailed = 3; + + final DatastoreDB db; + final ds.Transaction _datastoreTransaction; + + final List _inserts = []; + final List _deletes = []; + + int _state = _transactionStarted; + + Transaction(this.db, this._datastoreTransaction); + + /// Looks up [keys] within this transaction. + Future> lookup(List keys) { + return _lookupHelper( + db, + keys, + datastoreTransaction: _datastoreTransaction, + ); + } + + /// Looks up a single [key] within this transaction, and returns the + /// associated [Model] object. + /// + /// If [orElse] is specified, then it will be consulted to provide a default + /// value for the model object in the event that [key] was not found within + /// the transaction. + /// + /// If the [key] is not found within the transaction and [orElse] was not + /// specified, then a [KeyNotFoundException] will be thrown. + Future lookupValue(Key key, + {T Function()? orElse}) async { + final values = await lookup([key]); + assert(values.length == 1); + var value = values.single; + if (value == null) { + if (orElse != null) { + value = orElse(); + } else { + throw KeyNotFoundException(key); + } + } + return value; + } + + /// Looks up a single [key] in the datastore, and returns the associated + /// [Model] object. + /// + /// If the [key] is not found in the datastore, null will be returned. + Future lookupOrNull(Key key) async { + final values = await lookup([key]); + assert(values.length == 1); + return values.single; + } + + /// Enqueues [inserts] and [deletes] which should be committed at commit time. + void queueMutations({List? inserts, List? deletes}) { + _checkSealed(); + if (inserts != null) { + _inserts.addAll(inserts); + } + if (deletes != null) { + _deletes.addAll(deletes); + } + } + + /// Query for [kind] models with [ancestorKey]. + /// + /// Note that [ancestorKey] is required, since a transaction is not allowed to + /// touch/look at an arbitrary number of rows. + Query query(Key ancestorKey, {Partition? partition}) { + // TODO(#25): The `partition` element is redundant and should be removed. + if (partition == null) { + partition = ancestorKey.partition; + } else if (ancestorKey.partition != partition) { + throw ArgumentError( + 'Ancestor queries must have the same partition in the ancestor key ' + 'as the partition where the query executes in.'); + } + _checkSealed(); + return Query(db, + partition: partition, + ancestorKey: ancestorKey, + datastoreTransaction: _datastoreTransaction); + } + + /// Rolls this transaction back. + Future rollback() { + _checkSealed(changeState: _transactionRolledBack, allowFailed: true); + return db.datastore.rollback(_datastoreTransaction); + } + + /// Commits this transaction including all of the queued mutations. 
+ Future commit() { + _checkSealed(changeState: _transactionCommitted); + try { + return _commitHelper(db, + inserts: _inserts, + deletes: _deletes, + datastoreTransaction: _datastoreTransaction); + } catch (error) { + _state = _transactionCommitFailed; + rethrow; + } + } + + void _checkSealed({int? changeState, bool allowFailed = false}) { + if (_state == _transactionCommitted) { + throw StateError('The transaction has already been committed.'); + } else if (_state == _transactionRolledBack) { + throw StateError('The transaction has already been rolled back.'); + } else if (_state == _transactionCommitFailed && !allowFailed) { + throw StateError('The transaction has attempted commit and failed.'); + } + if (changeState != null) { + _state = changeState; + } + } +} + +class Query { + final _relationMapping = const { + '<': ds.FilterRelation.LessThan, + '<=': ds.FilterRelation.LessThanOrEqual, + '>': ds.FilterRelation.GreaterThan, + '>=': ds.FilterRelation.GreaterThanOrEqual, + '=': ds.FilterRelation.Equal, + }; + + final DatastoreDB _db; + final ds.Transaction? _transaction; + final String _kind; + + final Partition? _partition; + final Key? _ancestorKey; + + final List _filters = []; + final List _orders = []; + int? _offset; + int? _limit; + + Query(DatastoreDB dbImpl, + {Partition? partition, + Key? ancestorKey, + ds.Transaction? datastoreTransaction}) + : _db = dbImpl, + _kind = dbImpl.modelDB.kindName(T), + _partition = partition, + _ancestorKey = ancestorKey, + _transaction = datastoreTransaction; + + /// Adds a filter to this [Query]. + /// + /// [filterString] has form "name OP" where 'name' is a fieldName of the + /// model and OP is an operator. The following operators are supported: + /// + /// * '<' (less than) + /// * '<=' (less than or equal) + /// * '>' (greater than) + /// * '>=' (greater than or equal) + /// * '=' (equal) + /// + /// [comparisonObject] is the object for comparison. + void filter(String filterString, Object? comparisonObject) { + var parts = filterString.split(' '); + if (parts.length != 2 || !_relationMapping.containsKey(parts[1])) { + throw ArgumentError("Invalid filter string '$filterString'."); + } + + var name = parts[0]; + var comparison = parts[1]; + var propertyName = _convertToDatastoreName(name); + + // This is for backwards compatibility: We allow [datastore.Key]s for now. + // TODO: We should remove the condition in a major version update of + // `package:gcloud`. + if (comparisonObject is! ds.Key) { + comparisonObject = _db.modelDB + .toDatastoreValue(_kind, name, comparisonObject, forComparison: true); + } + _filters.add(ds.Filter( + _relationMapping[comparison]!, propertyName, comparisonObject!)); + } + + /// Adds an order to this [Query]. + /// + /// [orderString] has the form "-name" where 'name' is a fieldName of the + /// model and the optional '-' says whether the order is descending or + /// ascending. + void order(String orderString) { + // TODO: validate [orderString] (e.g. is name valid) + if (orderString.startsWith('-')) { + _orders.add(ds.Order(ds.OrderDirection.Descending, + _convertToDatastoreName(orderString.substring(1)))); + } else { + _orders.add(ds.Order( + ds.OrderDirection.Ascending, _convertToDatastoreName(orderString))); + } + } + + /// Sets the [offset] of this [Query]. + /// + /// When running this query, [offset] results will be skipped. + void offset(int offset) { + _offset = offset; + } + + /// Sets the [limit] of this [Query]. + /// + /// When running this query, a maximum of [limit] results will be returned. 
+ void limit(int limit) { + _limit = limit; + } + + /// Execute this [Query] on the datastore. + /// + /// Outside of transactions this method might return stale data or may not + /// return the newest updates performed on the datastore since updates + /// will be reflected in the indices in an eventual consistent way. + Stream run() { + ds.Key? ancestorKey; + if (_ancestorKey != null) { + ancestorKey = _db.modelDB.toDatastoreKey(_ancestorKey!); + } + var query = ds.Query( + ancestorKey: ancestorKey, + kind: _kind, + filters: _filters, + orders: _orders, + offset: _offset, + limit: _limit); + + ds.Partition? partition; + if (_partition != null) { + partition = ds.Partition(_partition!.namespace); + } + + return StreamFromPages((int pageSize) { + if (_transaction != null) { + if (partition != null) { + return _db.datastore + .query(query, transaction: _transaction!, partition: partition); + } + return _db.datastore.query(query, transaction: _transaction!); + } + if (partition != null) { + return _db.datastore.query(query, partition: partition); + } + return _db.datastore.query(query); + }).stream.map((e) => _db.modelDB.fromDatastoreEntity(e)!); + } + + // TODO: + // - add runPaged() returning Page + // - add run*() method once we have EntityResult{Entity,Cursor} in low-level + // API. + + String _convertToDatastoreName(String name) { + var propertyName = _db.modelDB.fieldNameToPropertyName(_kind, name); + if (propertyName == null) { + throw ArgumentError('Field $name is not available for kind $_kind'); + } + return propertyName; + } +} + +class DatastoreDB { + final ds.Datastore datastore; + final ModelDB _modelDB; + final Partition _defaultPartition; + + DatastoreDB(this.datastore, {ModelDB? modelDB, Partition? defaultPartition}) + : _modelDB = modelDB ?? ModelDBImpl(), + _defaultPartition = defaultPartition ?? Partition(null); + + /// The [ModelDB] used to serialize/deserialize objects. + ModelDB get modelDB => _modelDB; + + /// Gets the empty key using the default [Partition]. + /// + /// Model keys with parent set to [emptyKey] will create their own entity + /// groups. + Key get emptyKey => defaultPartition.emptyKey; + + /// Gets the default [Partition]. + Partition get defaultPartition => _defaultPartition; + + /// Creates a new [Partition] with namespace [namespace]. + Partition newPartition(String namespace) { + return Partition(namespace); + } + + /// Begins a new a new transaction. + /// + /// A transaction can touch only a limited number of entity groups. This limit + /// is currently 5. + // TODO: Add retries and/or auto commit/rollback. + Future withTransaction(TransactionHandler transactionHandler) { + return datastore + .beginTransaction(crossEntityGroup: true) + .then((datastoreTransaction) { + var transaction = Transaction(this, datastoreTransaction); + return transactionHandler(transaction); + }); + } + + /// Build a query for [kind] models. + Query query({Partition? partition, Key? ancestorKey}) { + // TODO(#26): There is only one case where `partition` is not redundant + // Namely if `ancestorKey == null` and `partition != null`. We could + // say we get rid of `partition` and enforce `ancestorKey` to + // be `Partition.emptyKey`? 
+  /// Build a query for [kind] models.
+  Query<T> query<T extends Model>({Partition? partition, Key? ancestorKey}) {
+    // TODO(#26): There is only one case where `partition` is not redundant.
+    //     Namely if `ancestorKey == null` and `partition != null`. We could
+    //     say we get rid of `partition` and enforce `ancestorKey` to
+    //     be `Partition.emptyKey`?
+    if (partition == null) {
+      if (ancestorKey != null) {
+        partition = ancestorKey.partition;
+      } else {
+        partition = defaultPartition;
+      }
+    } else if (ancestorKey != null && partition != ancestorKey.partition) {
+      throw ArgumentError(
+          'Ancestor queries must have the same partition in the ancestor key '
+          'as the partition where the query executes in.');
+    }
+    return Query<T>(this, partition: partition, ancestorKey: ancestorKey);
+  }
+
+  /// Looks up [keys] in the datastore and returns a list of [Model] objects.
+  ///
+  /// Any key that is not found in the datastore will have a corresponding
+  /// value of null in the list of model objects that is returned.
+  ///
+  /// For transactions, please use [withTransaction] and call the [lookup]
+  /// method on its [Transaction] object.
+  ///
+  /// See also:
+  ///
+  /// * [lookupValue], which looks a single value up by its key, requiring a
+  ///   successful lookup.
+  Future<List<T?>> lookup<T extends Model>(List<Key> keys) {
+    return _lookupHelper<T>(this, keys);
+  }
+
+  /// Looks up a single [key] in the datastore, and returns the associated
+  /// [Model] object.
+  ///
+  /// If [orElse] is specified, then it will be consulted to provide a default
+  /// value for the model object in the event that [key] was not found in the
+  /// datastore.
+  ///
+  /// If the [key] is not found in the datastore and [orElse] was not
+  /// specified, then a [KeyNotFoundException] will be thrown.
+  Future<T> lookupValue<T extends Model>(Key key,
+      {T Function()? orElse}) async {
+    final values = await lookup<T>([key]);
+    assert(values.length == 1);
+    var value = values.single;
+    if (value == null) {
+      if (orElse != null) {
+        value = orElse();
+      } else {
+        throw KeyNotFoundException(key);
+      }
+    }
+    return value;
+  }
+
+  /// Looks up a single [key] in the datastore, and returns the associated
+  /// [Model] object.
+  ///
+  /// If the [key] is not found in the datastore, null will be returned.
+  Future<T?> lookupOrNull<T extends Model>(Key key) async {
+    final values = await lookup<T>([key]);
+    assert(values.length == 1);
+    return values.single;
+  }
+
+  /// Add [inserts] to the datastore and remove [deletes] from it.
+  ///
+  /// The order of inserts and deletes is not specified. When the commit is
+  /// done, direct lookups will see the effect, but non-ancestor queries will
+  /// see the change in an eventually consistent way.
+  ///
+  /// The inserts are done as upserts unless the provided model does not have
+  /// an id, in which case an autoId will be generated.
+  ///
+  /// For transactions, please use [withTransaction] and its [Transaction]
+  /// object.
+  Future commit({List<Model>? inserts, List<Key>? deletes}) {
+    return _commitHelper(this, inserts: inserts, deletes: deletes);
+  }
+}
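+
+// Illustrative usage sketch (assumes a `Person` model; `oldKey` is a
+// hypothetical [Key] of an entity to delete):
+//
+//   final alice = Person()..name = 'Alice';
+//   await db.commit(inserts: [alice], deletes: [oldKey]);
+//   print(alice.id); // The auto-allocated id, filled in by the commit.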
+
+Future _commitHelper(DatastoreDB db,
+    {List<Model>? inserts,
+    List<Key>? deletes,
+    ds.Transaction? datastoreTransaction}) {
+  List<ds.Entity>? entityInserts, entityAutoIdInserts;
+  List<ds.Key>? entityDeletes;
+  late List<Model> autoIdModelInserts;
+  if (inserts != null) {
+    entityInserts = [];
+    entityAutoIdInserts = [];
+    autoIdModelInserts = [];
+
+    for (var model in inserts) {
+      // If parent was not explicitly set, we assume this model will map to
+      // its own entity group.
+      model.parentKey ??= db.defaultPartition.emptyKey;
+      if (model.id == null) {
+        autoIdModelInserts.add(model);
+        entityAutoIdInserts.add(db.modelDB.toDatastoreEntity(model));
+      } else {
+        entityInserts.add(db.modelDB.toDatastoreEntity(model));
+      }
+    }
+  }
+  if (deletes != null) {
+    entityDeletes = deletes.map(db.modelDB.toDatastoreKey).toList();
+  }
+  Future<ds.CommitResult> r;
+  if (datastoreTransaction != null) {
+    r = db.datastore.commit(
+        inserts: entityInserts ?? [],
+        autoIdInserts: entityAutoIdInserts ?? [],
+        deletes: entityDeletes ?? [],
+        transaction: datastoreTransaction);
+  } else {
+    r = db.datastore.commit(
+        inserts: entityInserts ?? [],
+        autoIdInserts: entityAutoIdInserts ?? [],
+        deletes: entityDeletes ?? []);
+  }
+
+  return r.then((ds.CommitResult result) {
+    if (entityAutoIdInserts != null && entityAutoIdInserts.isNotEmpty) {
+      for (var i = 0; i < result.autoIdInsertKeys.length; i++) {
+        var key = db.modelDB.fromDatastoreKey(result.autoIdInsertKeys[i]);
+        autoIdModelInserts[i].parentKey = key.parent;
+        autoIdModelInserts[i].id = key.id;
+      }
+    }
+  });
+}
+
+Future<List<T?>> _lookupHelper<T extends Model>(DatastoreDB db, List<Key> keys,
+    {ds.Transaction? datastoreTransaction}) {
+  var entityKeys = keys.map(db.modelDB.toDatastoreKey).toList();
+
+  if (datastoreTransaction != null) {
+    return db.datastore
+        .lookup(entityKeys, transaction: datastoreTransaction)
+        .then((List<ds.Entity?> entities) {
+      return entities.map(db.modelDB.fromDatastoreEntity<T>).toList();
+    });
+  }
+  return db.datastore.lookup(entityKeys).then((List<ds.Entity?> entities) {
+    return entities.map(db.modelDB.fromDatastoreEntity<T>).toList();
+  });
+}
diff --git a/pkgs/gcloud/lib/src/db/exceptions.dart b/pkgs/gcloud/lib/src/db/exceptions.dart
new file mode 100644
index 00000000..6eed41d8
--- /dev/null
+++ b/pkgs/gcloud/lib/src/db/exceptions.dart
@@ -0,0 +1,18 @@
+// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+part of '../../db.dart';
+
+/// Exception that gets thrown when a caller attempts to look up a value by
+/// its key, and the key cannot be found in the datastore.
+class KeyNotFoundException implements Exception {
+  /// Creates a new [KeyNotFoundException] for the specified [key].
+  const KeyNotFoundException(this.key);
+
+  /// The [Key] that was not found in the datastore.
+  final Key key;
+
+  @override
+  String toString() => 'Key not found: ${key.type}:${key.id}';
+}
diff --git a/pkgs/gcloud/lib/src/db/model_db.dart b/pkgs/gcloud/lib/src/db/model_db.dart
new file mode 100644
index 00000000..dd30c1bf
--- /dev/null
+++ b/pkgs/gcloud/lib/src/db/model_db.dart
@@ -0,0 +1,34 @@
+// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+part of '../../db.dart';
+
+/// A database of all registered models.
+///
+/// Responsible for converting between Dart model objects and datastore
+/// entities.
+abstract class ModelDB {
+  /// Converts a [ds.Key] to a [Key].
+  Key fromDatastoreKey(ds.Key datastoreKey);
+
+  /// Converts a [Key] to a [ds.Key].
+  ds.Key toDatastoreKey(Key dbKey);
+
+  /// Converts a [Model] instance to a [ds.Entity].
+  ds.Entity toDatastoreEntity(Model model);
+
+  /// Converts a [ds.Entity] to a [Model] instance.
+  T? fromDatastoreEntity<T extends Model>(ds.Entity? entity);
+
+  /// Returns the kind name for instances of [type].
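+  ///
+  /// Illustrative example (assuming the `Person` model class shown in the
+  /// [ModelDBImpl] docs in `model_db_impl.dart`): `kindName(Person)` returns
+  /// `'Person'`, or the name passed to `@Kind(name: ...)` if one was given.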
+ String kindName(Type type); + + /// Returns the property name used for [fieldName] + // TODO: Get rid of this eventually. + String? fieldNameToPropertyName(String kind, String fieldName); + + /// Converts [value] according to the [Property] named [fieldName] in [kind]. + Object? toDatastoreValue(String kind, String fieldName, Object? value, + {bool forComparison = false}); +} diff --git a/pkgs/gcloud/lib/src/db/model_db_impl.dart b/pkgs/gcloud/lib/src/db/model_db_impl.dart new file mode 100644 index 00000000..0703ec5d --- /dev/null +++ b/pkgs/gcloud/lib/src/db/model_db_impl.dart @@ -0,0 +1,604 @@ +// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file +// for details. All rights reserved. Use of this source code is governed by a +// BSD-style license that can be found in the LICENSE file. + +part of '../../db.dart'; + +/// An implementation of [ModelDB] based on model class annotations. +/// +/// The two constructors will scan loaded dart libraries for classes with a +/// [Kind] annotation. +/// +/// An example on how to write a model class is: +/// @Kind +/// class Person extends db.Model { +/// @StringProperty +/// String name; +/// +/// @IntProperty +/// int age; +/// +/// @DateTimeProperty +/// DateTime dateOfBirth; +/// } +/// +/// These classes must either extend [Model] or [ExpandoModel]. Furthermore +/// they must have an empty default constructor which can be used to construct +/// model objects when doing lookups/queries from datastore. +class ModelDBImpl implements ModelDB { + final Map<_ModelDescription, Map> _modelDesc2Properties = + {}; + final Map _kind2ModelDesc = {}; + final Map<_ModelDescription, mirrors.ClassMirror> _modelDesc2ClassMirror = {}; + final Map<_ModelDescription, Type> _type2ModelDesc = {}; + final Map _modelDesc2Type = {}; + + /// Initializes a new [ModelDB] from all libraries. + /// + /// This will scan all libraries for classes with a [Kind] annotation. + /// + /// In case an error is encountered (e.g. two model classes with the same kind + /// name) a [StateError] will be thrown. + ModelDBImpl() { + // WARNING: This is O(n) of the source code, which is very bad! + // Would be nice to have: `currentMirrorSystem().subclassesOf(Model)` + _initialize(mirrors.currentMirrorSystem().libraries.values); + } + + /// Initializes a new [ModelDB] from all libraries. + /// + /// This will scan the given [librarySymbol] for classes with a [Kind] + /// annotation. + /// + /// In case an error is encountered (e.g. two model classes with the same kind + /// name) a [StateError] will be thrown. + ModelDBImpl.fromLibrary(Symbol librarySymbol) { + _initialize([mirrors.currentMirrorSystem().findLibrary(librarySymbol)]); + } + + /// Converts a [ds.Key] to a [Key]. The key returned will have the correct + /// id type which is either `Key` or `Key`. + @override + Key fromDatastoreKey(ds.Key datastoreKey) { + var namespace = Partition(datastoreKey.partition.namespace); + var key = namespace.emptyKey; + for (var element in datastoreKey.elements) { + var type = _type2ModelDesc[_kind2ModelDesc[element.kind]!]; + if (type == null) { + throw StateError( + 'Could not find a model associated with kind "${element.kind}". 
' + 'Please ensure a model class was annotated with ' + '`@Kind(name: "${element.kind}")`.'); + } + final elementId = element.id; + if (elementId is String) { + key = key.append(type, id: elementId); + } else if (elementId is int) { + key = key.append(type, id: elementId); + } else { + throw StateError('Key must be either String or int, but ' + 'was ${elementId.runtimeType} for key ${element.kind}'); + } + } + return key; + } + + /// Converts a [Key] to a [ds.Key]. + @override + ds.Key toDatastoreKey(Key dbKey) { + var elements = []; + var currentKey = dbKey; + while (!currentKey.isEmpty) { + var id = currentKey.id; + + var modelDescription = _modelDescriptionForType(currentKey.type)!; + var kind = modelDescription.kindName(this); + + var useIntegerId = modelDescription.useIntegerId; + + if (useIntegerId && id != null && id is! int) { + throw ArgumentError('Expected an integer id property but ' + 'id was of type ${id.runtimeType}'); + } + if (!useIntegerId && (id != null && id is! String)) { + throw ArgumentError('Expected a string id property but ' + 'id was of type ${id.runtimeType}'); + } + + elements.add(ds.KeyElement(kind, id)); + currentKey = currentKey.parent!; + } + var partition = currentKey._parent as Partition; + return ds.Key(elements.reversed.toList(), + partition: ds.Partition(partition.namespace)); + } + + /// Converts a [Model] instance to a [ds.Entity]. + @override + ds.Entity toDatastoreEntity(Model model) { + try { + var modelDescription = _modelDescriptionForType(model.runtimeType)!; + return modelDescription.encodeModel(this, model); + } catch (error, stack) { + throw ArgumentError('Error while encoding entity ($error, $stack).'); + } + } + + /// Converts a [ds.Entity] to a [Model] instance. + @override + T? fromDatastoreEntity(ds.Entity? entity) { + if (entity == null) { + return null; + } + var key = fromDatastoreKey(entity.key); + var kind = entity.key.elements.last.kind; + var modelDescription = _kind2ModelDesc[kind]; + if (modelDescription == null) { + throw StateError('Trying to deserialize entity of kind ' + '$kind, but no Model class available for it.'); + } + + try { + return modelDescription.decodeEntity(this, key, entity); + } catch (error, stack) { + throw StateError('Error while decoding entity ($error, $stack).'); + } + } + + /// Returns the string representation of the kind of model class [type]. + /// + /// If the model class `type` is not found it will throw an `ArgumentError`. + @override + String kindName(Type type) { + var kind = _modelDesc2Type[type]?.kind; + if (kind == null) { + throw ArgumentError('The class $type was not associated with a kind.'); + } + return kind; + } + + /// Returns the name of the property corresponding to the kind [kind] and + /// [fieldName]. + @override + String? fieldNameToPropertyName(String kind, String fieldName) { + var modelDescription = _kind2ModelDesc[kind]; + if (modelDescription == null) { + throw ArgumentError('The kind "$kind" is unknown.'); + } + return modelDescription.fieldNameToPropertyName(fieldName); + } + + /// Converts [value] according to the [Property] named [name] in [type]. + @override + Object? toDatastoreValue(String kind, String fieldName, Object? 
value, + {bool forComparison = false}) { + var modelDescription = _kind2ModelDesc[kind]; + if (modelDescription == null) { + throw ArgumentError('The kind "$kind" is unknown.'); + } + return modelDescription.encodeField(this, fieldName, value, + forComparison: forComparison); + } + + Iterable<_ModelDescription> get _modelDescriptions { + return _modelDesc2Type.values; + } + + Map _propertiesForModel( + _ModelDescription modelDescription) { + return _modelDesc2Properties[modelDescription]!; + } + + _ModelDescription? _modelDescriptionForType(Type? type) { + return _modelDesc2Type[type!]; + } + + mirrors.ClassMirror? _modelClass(_ModelDescription md) { + return _modelDesc2ClassMirror[md]; + } + + void _initialize(Iterable libraries) { + for (var lm in libraries) { + lm.declarations.values + .whereType() + .where((d) => d.hasReflectedType) + .forEach(_tryLoadNewModelClass); + } + + // Ask every [ModelDescription] to compute whatever global state it wants + // to have. + for (var modelDescription in _modelDescriptions) { + modelDescription.initialize(this); + } + + // Ask every [ModelDescription] whether we should register it with a given + // kind name. + for (var modelDescription in _modelDescriptions) { + var kindName = modelDescription.kindName(this); + if (_kind2ModelDesc.containsKey(kindName)) { + throw StateError('Cannot have two ModelDescriptions ' + 'with the same kind ($kindName)'); + } + _kind2ModelDesc[kindName] = modelDescription; + } + } + + void _tryLoadNewModelClass(mirrors.ClassMirror classMirror) { + Kind? kindAnnotation; + for (var instance in classMirror.metadata) { + if ((instance.reflectee as Object).runtimeType == Kind) { + if (kindAnnotation != null) { + throw StateError( + 'Cannot have more than one ModelMetadata() annotation ' + 'on a Model class'); + } + kindAnnotation = instance.reflectee as Kind?; + } + } + + if (kindAnnotation != null) { + var name = kindAnnotation.name; + var integerId = kindAnnotation.idType == IdType.Integer; + var stringId = kindAnnotation.idType == IdType.String; + + // Fall back to the class name. + name ??= mirrors.MirrorSystem.getName(classMirror.simpleName); + + // This constraint should be guaranteed by the Kind() const constructor. + assert((integerId && !stringId) || (!integerId && stringId)); + + _tryLoadNewModelClassFull(classMirror, name, integerId); + } + } + + static bool _isRequiredAnnotation(mirrors.InstanceMirror annotation) { + return annotation.type.simpleName == #Required; + } + + /// Returns true if a constructor invocation is valid even if the specified + /// [parameter] is omitted. + /// + /// This is true for named parameters, optional parameters, and parameters + /// with a default value. + static bool _canBeOmitted(mirrors.ParameterMirror parameter) { + if (parameter.metadata.any(_isRequiredAnnotation)) { + return false; + } + return parameter.isOptional || + parameter.isNamed || + parameter.hasDefaultValue; + } + + /// Returns true if the specified [classMirror] has a default (unnamed) + /// constructor that accepts an empty arguments list. 
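+ ///
+ /// Illustrative examples: a class with `Person();` (or with no constructor
+ /// at all) has such a constructor; a class whose only constructor is
+ /// `Person(this.name);` does not, because `name` can neither be omitted nor
+ /// defaulted.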
+ @visibleForTesting + static bool hasDefaultConstructor(mirrors.ClassMirror classMirror) { + for (var declaration in classMirror.declarations.values) { + if (declaration is mirrors.MethodMirror) { + if (declaration.isConstructor && + declaration.constructorName == const Symbol('') && + declaration.parameters.every(_canBeOmitted)) { + return true; + } + } + } + return false; + } + + void _tryLoadNewModelClassFull( + mirrors.ClassMirror modelClass, String name, bool useIntegerId) { + assert(!_modelDesc2Type.containsKey(modelClass.reflectedType)); + + _ModelDescription modelDesc; + if (_isExpandoClass(modelClass)) { + modelDesc = _ExpandoModelDescription(name, useIntegerId); + } else { + modelDesc = _ModelDescription(name, useIntegerId); + } + + _type2ModelDesc[modelDesc] = modelClass.reflectedType; + _modelDesc2Type[modelClass.reflectedType] = modelDesc; + _modelDesc2ClassMirror[modelDesc] = modelClass; + _modelDesc2Properties[modelDesc] = + _propertiesFromModelDescription(modelClass); + + // Ensure we have an empty constructor. + if (!hasDefaultConstructor(modelClass)) { + throw StateError('Class ${modelClass.simpleName} does not have a default ' + 'constructor.'); + } + } + + Map _propertiesFromModelDescription( + mirrors.ClassMirror modelClassMirror) { + var properties = {}; + var propertyNames = {}; + + // Loop over all classes in the inheritance path up to the Object class. + while (modelClassMirror.superclass != null) { + var memberMap = modelClassMirror.instanceMembers; + // Loop over all declarations (which includes fields) + modelClassMirror.declarations + .forEach((Symbol fieldSymbol, mirrors.DeclarationMirror decl) { + // Look if the symbol is a getter and we have metadata attached to it. + if (memberMap.containsKey(fieldSymbol) && + memberMap[fieldSymbol]!.isGetter) { + final propertyAnnotations = decl.metadata + .map((mirrors.InstanceMirror mirror) => mirror.reflectee) + .whereType() + .toList(); + + if (propertyAnnotations.length > 1) { + throw StateError( + 'Cannot have more than one Property annotation on a model ' + 'field.'); + } else if (propertyAnnotations.length == 1) { + var property = propertyAnnotations.first; + + // Get a String representation of the field and the value. + var fieldName = mirrors.MirrorSystem.getName(fieldSymbol); + + // Determine the name to use for the property in datastore. + var propertyName = property.propertyName; + propertyName ??= fieldName; + + if (properties.containsKey(fieldName)) { + throw StateError( + 'Cannot have two Property objects describing the same field ' + 'in a model object class hierarchy.'); + } + + if (propertyNames.contains(propertyName)) { + throw StateError( + 'Cannot have two Property objects mapping to the same ' + 'datastore property name "$propertyName".'); + } + properties[fieldName] = property; + propertyNames.add(propertyName); + } + } + }); + modelClassMirror = modelClassMirror.superclass!; + } + + return properties; + } + + final _originalExpandoModelClass = mirrors.reflectClass(ExpandoModel); + final _originalModelClass = mirrors.reflectClass(Model); + + bool _isExpandoClass(mirrors.ClassMirror? 
modelClass) { + while (modelClass != null && modelClass.superclass != modelClass) { + if (modelClass.originalDeclaration == _originalExpandoModelClass) { + return true; + } else if (modelClass.originalDeclaration == _originalModelClass) { + return false; + } + + modelClass = modelClass.superclass; + } + throw StateError('This should be unreachable.'); + } +} + +class _ModelDescription { + final HashMap _property2FieldName = HashMap(); + final HashMap _field2PropertyName = HashMap(); + final Set _indexedProperties = {}; + final Set _unIndexedProperties = {}; + + final String kind; + final bool useIntegerId; + + _ModelDescription(this.kind, this.useIntegerId); + + void initialize(ModelDBImpl db) { + // Compute propertyName -> fieldName mapping. + db._propertiesForModel(this).forEach((String fieldName, Property prop) { + // The default of a datastore property name is the fieldName. + // It can be overridden with [Property.propertyName]. + var propertyName = prop.propertyName; + propertyName ??= fieldName; + + _property2FieldName[propertyName] = fieldName; + _field2PropertyName[fieldName] = propertyName; + }); + + // Compute properties & unindexed properties + db._propertiesForModel(this).forEach((String fieldName, Property prop) { + var propertyName = prop.propertyName; + propertyName ??= fieldName; + + if (prop.indexed) { + _indexedProperties.add(propertyName); + } else { + _unIndexedProperties.add(propertyName); + } + }); + } + + String kindName(ModelDBImpl db) => kind; + + ds.Entity encodeModel(ModelDBImpl db, T model) { + var key = db.toDatastoreKey(model.key); + + var properties = {}; + var mirror = mirrors.reflect(model); + + db._propertiesForModel(this).forEach((String fieldName, Property prop) { + _encodeProperty(db, model, mirror, properties, fieldName, prop); + }); + + return ds.Entity(key, properties, + unIndexedProperties: _unIndexedProperties); + } + + void _encodeProperty( + ModelDBImpl db, + Model model, + mirrors.InstanceMirror mirror, + Map properties, + String fieldName, + Property prop) { + var propertyName = prop.propertyName; + propertyName ??= fieldName; + + var value = + mirror.getField(mirrors.MirrorSystem.getSymbol(fieldName)).reflectee; + if (!prop.validate(db, value)) { + throw StateError('Property validation failed for ' + 'property $fieldName while trying to serialize entity of kind ' + '${model.runtimeType}. '); + } + properties[propertyName] = prop.encodeValue(db, value); + } + + H decodeEntity(ModelDBImpl db, Key key, ds.Entity entity) { + // NOTE: this assumes a default constructor for the model classes! 
+ var classMirror = db._modelClass(this)!; + var mirror = classMirror.newInstance(const Symbol(''), []); + + // Set the id and the parent key + final model = mirror.reflectee as Model; + model.id = key.id; + model.parentKey = key.parent; + + db._propertiesForModel(this).forEach((String fieldName, Property prop) { + _decodeProperty(db, entity, mirror, fieldName, prop); + }); + return mirror.reflectee as H; + } + + void _decodeProperty(ModelDBImpl db, ds.Entity entity, + mirrors.InstanceMirror mirror, String fieldName, Property prop) { + var propertyName = fieldNameToPropertyName(fieldName); + + var rawValue = entity.properties[propertyName!]; + var value = prop.decodePrimitiveValue(db, rawValue); + + if (!prop.validate(db, value)) { + throw StateError('Property validation failed while ' + 'trying to deserialize entity of kind ' + '${entity.key.elements.last.kind} (property name: $propertyName)'); + } + + try { + mirror.setField(mirrors.MirrorSystem.getSymbol(fieldName), value); + // ignore: avoid_catching_errors + } on TypeError catch (error) { + throw StateError( + 'Error trying to set property "${prop.propertyName}" ' + 'to $value for field "$fieldName": $error', + ); + } + } + + String? fieldNameToPropertyName(String fieldName) { + return _field2PropertyName[fieldName]; + } + + String? propertyNameToFieldName(ModelDBImpl db, String propertySearchName) { + return _property2FieldName[propertySearchName]; + } + + Object? encodeField(ModelDBImpl db, String fieldName, Object? value, + {bool enforceFieldExists = true, bool forComparison = false}) { + var property = db._propertiesForModel(this)[fieldName]; + if (property != null) { + return property.encodeValue(db, value, forComparison: forComparison); + } + if (enforceFieldExists) { + throw ArgumentError( + 'A field named "$fieldName" does not exist in kind "$kind".'); + } + return null; + } +} + +// NOTE/TODO: +// Currently expanded properties are only +// * decoded if there are no clashes in [usedNames] +// * encoded if there are no clashes in [usedNames] +// We might want to throw an error if there are clashes, because otherwise +// - we may end up removing properties after a read-write cycle +// - we may end up dropping added properties in a write +// ([usedNames] := [realFieldNames] + [realPropertyNames]) +class _ExpandoModelDescription extends _ModelDescription { + late Set realFieldNames; + late Set realPropertyNames; + late Set usedNames; + + _ExpandoModelDescription(String kind, bool useIntegerId) + : super(kind, useIntegerId); + + @override + void initialize(ModelDBImpl db) { + super.initialize(db); + + realFieldNames = Set.from(_field2PropertyName.keys); + realPropertyNames = Set.from(_property2FieldName.keys); + usedNames = {} + ..addAll(realFieldNames) + ..addAll(realPropertyNames); + } + + @override + ds.Entity encodeModel(ModelDBImpl db, ExpandoModel model) { + var entity = super.encodeModel(db, model); + var properties = entity.properties; + model.additionalProperties.forEach((String key, Object? value) { + // NOTE: All expanded properties will be indexed. + if (!usedNames.contains(key)) { + properties[key] = value; + } + }); + return entity; + } + + @override + T decodeEntity(ModelDBImpl db, Key key, ds.Entity entity) { + var model = super.decodeEntity(db, key, entity) as ExpandoModel; + var properties = entity.properties; + properties.forEach((String key, Object? 
value) {
+      if (!usedNames.contains(key)) {
+        model.additionalProperties[key] = value;
+      }
+    });
+    // TODO: check if there is a more elegant solution than this
+    return model as T;
+  }
+
+  @override
+  String fieldNameToPropertyName(String fieldName) {
+    var propertyName = super.fieldNameToPropertyName(fieldName);
+    // If the ModelDescription doesn't know about [fieldName], it's an
+    // expanded property, where propertyName == fieldName.
+    propertyName ??= fieldName;
+    return propertyName;
+  }
+
+  @override
+  String propertyNameToFieldName(ModelDBImpl db, String propertyName) {
+    var fieldName = super.propertyNameToFieldName(db, propertyName);
+    // If the ModelDescription doesn't know about [propertyName], it's an
+    // expanded property, where propertyName == fieldName.
+    fieldName ??= propertyName;
+    return fieldName;
+  }
+
+  @override
+  Object encodeField(ModelDBImpl db, String fieldName, Object? value,
+      {bool enforceFieldExists = true, bool forComparison = false}) {
+    // The [enforceFieldExists] argument is intentionally ignored.
+
+    var primitiveValue = super.encodeField(db, fieldName, value,
+        enforceFieldExists: false, forComparison: forComparison);
+    // If the superclass can't encode the field, we return the value here
+    // (and assume it's primitive).
+    // NOTE: Implicit assumption:
+    //   If value != null then the superclass will return != null.
+    // TODO: Ensure [value] is primitive in this case.
+    primitiveValue ??= value;
+    return primitiveValue!;
+  }
+}
diff --git a/pkgs/gcloud/lib/src/db/models.dart b/pkgs/gcloud/lib/src/db/models.dart
new file mode 100644
index 00000000..384e9f5f
--- /dev/null
+++ b/pkgs/gcloud/lib/src/db/models.dart
@@ -0,0 +1,132 @@
+// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+part of '../../db.dart';
+
+/// Represents a unique identifier for a [Model] stored in a datastore.
+///
+/// The [Key] can be incomplete if its id is `null`. In this case the id will
+/// be automatically allocated and set at commit time.
+class Key<T> {
+  // Either KeyImpl or PartitionImpl
+  final Object _parent;
+
+  final Type? type;
+  final T? id;
+
+  Key(Key parent, this.type, this.id) : _parent = parent {
+    if (type == null) {
+      throw ArgumentError('The type argument must not be null.');
+    }
+    if (id != null && id is! String && id is! int) {
+      throw ArgumentError('The id argument must be an integer or a String.');
+    }
+  }
+
+  Key.emptyKey(Partition partition)
+      : _parent = partition,
+        type = null,
+        id = null;
+
+  /// Parent of this [Key].
+  Key? get parent {
+    if (_parent is Key) {
+      return _parent as Key;
+    }
+    return null;
+  }
+
+  /// The partition of this [Key].
+  Partition get partition {
+    var obj = _parent;
+    while (obj is! Partition) {
+      obj = (obj as Key)._parent;
+    }
+    return obj;
+  }
+
+  Key<U> append<U>(Type modelType, {U? id}) {
+    return Key<U>(this, modelType, id);
+  }
+
+  bool get isEmpty => _parent is Partition;
+
+  @override
+  bool operator ==(Object other) {
+    return other is Key &&
+        _parent == other._parent &&
+        type == other.type &&
+        id == other.id;
+  }
+
+  @override
+  int get hashCode => _parent.hashCode ^ type.hashCode ^ id.hashCode;
+
+  /// Converts `Key<dynamic>` to `Key<U>`.
+  Key<U> cast<U>() => Key<U>(parent!, type, id as U?);
+}
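+
+// Illustrative sketch (`Person` is a hypothetical model class): keys form a
+// chain rooted in a [Partition]; ids may be `int` or `String`:
+//
+//   final partition = Partition('test-namespace');
+//   final personKey = partition.emptyKey.append<int>(Person, id: 42);
+//   assert(personKey.partition == partition);
+//   assert(personKey.parent!.isEmpty);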
+
+/// Represents a datastore partition.
+///
+/// A datastore is partitioned into namespaces. The default namespace is
+/// `null`.
+class Partition {
+  final String? namespace;
+
+  Partition(this.namespace) {
+    if (namespace == '') {
+      throw ArgumentError('The namespace must not be an empty string');
+    }
+  }
+
+  /// Returns an empty [Key].
+  ///
+  /// Entities where the parent [Key] is empty will create their own entity
+  /// group.
+  Key get emptyKey => Key.emptyKey(this);
+
+  @override
+  bool operator ==(Object other) {
+    return other is Partition && namespace == other.namespace;
+  }
+
+  @override
+  int get hashCode => namespace.hashCode;
+}
+
+/// Superclass for all model classes.
+///
+/// Every model class has an [id] of type [T] which must be `int` or `String`,
+/// and a [parentKey]. The [key] getter returns the key for the model object.
+abstract class Model<T> {
+  T? id;
+  Key? parentKey;
+
+  Key get key => parentKey!.append(runtimeType, id: id);
+}
+
+/// Superclass for all expanded model classes.
+///
+/// The [ExpandoModel] class adds support for having dynamic properties. You
+/// can set arbitrary fields on these models. The expanded values must be
+/// values accepted by the [RawDatastore] implementation.
+abstract class ExpandoModel<T> extends Model<T> {
+  final Map<String, Object?> additionalProperties = {};
+
+  @override
+  Object? noSuchMethod(Invocation invocation) {
+    var name = mirrors.MirrorSystem.getName(invocation.memberName);
+    if (name.endsWith('=')) name = name.substring(0, name.length - 1);
+    if (invocation.isGetter) {
+      return additionalProperties[name];
+    } else if (invocation.isSetter) {
+      var value = invocation.positionalArguments[0];
+      additionalProperties[name] = value;
+      return value;
+    } else {
+      throw ArgumentError('Unsupported noSuchMethod call on ExpandoModel');
+    }
+  }
+}
diff --git a/pkgs/gcloud/lib/src/pubsub_impl.dart b/pkgs/gcloud/lib/src/pubsub_impl.dart
new file mode 100644
index 00000000..cfdb3aca
--- /dev/null
+++ b/pkgs/gcloud/lib/src/pubsub_impl.dart
@@ -0,0 +1,524 @@
+// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+part of '../pubsub.dart';
+
+class _PubSubImpl implements PubSub {
+  @override
+  final String project;
+  final pubsub.PubsubApi _api;
+  final String _topicPrefix;
+  final String _subscriptionPrefix;
+
+  _PubSubImpl(http.Client client, this.project)
+      : _api = pubsub.PubsubApi(client),
+        _topicPrefix = 'projects/$project/topics/',
+        _subscriptionPrefix = 'projects/$project/subscriptions/';
+
+  _PubSubImpl.rootUrl(http.Client client, this.project, String rootUrl)
+      : _api = pubsub.PubsubApi(client, rootUrl: rootUrl),
+        _topicPrefix = 'projects/$project/topics/',
+        _subscriptionPrefix = 'projects/$project/subscriptions/';
+
+  String _fullTopicName(String name) {
+    return name.startsWith('projects/') ? name : '$_topicPrefix$name';
+  }
+
+  String _fullSubscriptionName(String name) {
+    return name.startsWith('projects/') ? name : '$_subscriptionPrefix$name';
+  }
+
+  Future<pubsub.Topic> _createTopic(String name) {
+    return _api.projects.topics.create(pubsub.Topic(), name);
+  }
+
+  Future _deleteTopic(String name) {
+    // The Pub/Sub delete API returns an instance of Empty.
+    return _api.projects.topics.delete(name).then((_) => null);
+  }
+
+  Future<pubsub.Topic> _getTopic(String name) {
+    return _api.projects.topics.get(name);
+  }
+
+  Future<pubsub.ListTopicsResponse> _listTopics(
+      int pageSize, String? nextPageToken) {
+    return _api.projects.topics.list('projects/$project',
+        pageSize: pageSize, pageToken: nextPageToken);
+  }
+
+  Future<pubsub.Subscription> _createSubscription(
+      String name, String topic, Uri?
endpoint) { + var subscription = pubsub.Subscription()..topic = topic; + if (endpoint != null) { + var pushConfig = pubsub.PushConfig()..pushEndpoint = endpoint.toString(); + subscription.pushConfig = pushConfig; + } + return _api.projects.subscriptions.create(subscription, name); + } + + Future _deleteSubscription(String name) { + // The Pub/Sub delete API returns an instance of Empty. + return _api.projects.subscriptions + .delete(_fullSubscriptionName(name)) + .then((_) => null); + } + + Future _getSubscription(String name) { + return _api.projects.subscriptions.get(name); + } + + Future _listSubscriptions( + String? topic, int pageSize, String? nextPageToken) { + return _api.projects.subscriptions.list('projects/$project', + pageSize: pageSize, pageToken: nextPageToken); + } + + Future _modifyPushConfig(String subscription, Uri? endpoint) { + var pushConfig = pubsub.PushConfig()..pushEndpoint = endpoint?.toString(); + var request = pubsub.ModifyPushConfigRequest()..pushConfig = pushConfig; + return _api.projects.subscriptions.modifyPushConfig(request, subscription); + } + + Future _publish( + String topic, List message, Map attributes) { + var request = pubsub.PublishRequest() + ..messages = [ + (pubsub.PubsubMessage() + ..dataAsBytes = message + ..attributes = attributes.isEmpty ? null : attributes) + ]; + // TODO(sgjesse): Handle PublishResponse containing message ids. + return _api.projects.topics.publish(request, topic).then((_) => null); + } + + Future _pull( + String subscription, bool returnImmediately) { + var request = pubsub.PullRequest() + ..maxMessages = 1 + // ignore: deprecated_member_use + ..returnImmediately = returnImmediately; + return _api.projects.subscriptions.pull(request, subscription); + } + + Future _ack(String ackId, String subscription) { + var request = pubsub.AcknowledgeRequest()..ackIds = [ackId]; + // The Pub/Sub acknowledge API returns an instance of Empty. + return _api.projects.subscriptions + .acknowledge(request, subscription) + .then((_) => null); + } + + void _checkTopicName(String name) { + if (name.startsWith('projects/') && !name.contains('/topics/')) { + throw ArgumentError( + 'Illegal topic name. Absolute topic names must have the form ' + "'projects/[project-id]/topics/[topic-name]"); + } + if (name.endsWith('/topics/')) { + throw ArgumentError( + 'Illegal topic name. Relative part of the name cannot be empty'); + } + } + + void _checkSubscriptionName(String name) { + if (name.startsWith('projects/') && !name.contains('/subscriptions/')) { + throw ArgumentError( + 'Illegal subscription name. Absolute subscription names must have ' + "the form 'projects/[project-id]/subscriptions/[subscription-name]"); + } + if (name.endsWith('/subscriptions/')) { + throw ArgumentError( + 'Illegal subscription name. 
Relative part of the name cannot be ' + 'empty'); + } + } + + @override + Future createTopic(String name) { + _checkTopicName(name); + return _createTopic(_fullTopicName(name)) + .then((top) => _TopicImpl(this, top)); + } + + @override + Future deleteTopic(String name) { + _checkTopicName(name); + return _deleteTopic(_fullTopicName(name)); + } + + @override + Future lookupTopic(String name) { + _checkTopicName(name); + return _getTopic(_fullTopicName(name)).then((top) => _TopicImpl(this, top)); + } + + @override + Stream listTopics() { + Future> firstPage(int pageSize) { + return _listTopics(pageSize, null) + .then((response) => _TopicPageImpl(this, pageSize, response)); + } + + return StreamFromPages(firstPage).stream; + } + + @override + Future> pageTopics({int pageSize = 50}) { + return _listTopics(pageSize, null).then((response) { + return _TopicPageImpl(this, pageSize, response); + }); + } + + @override + Future createSubscription(String name, String topic, + {Uri? endpoint}) { + _checkSubscriptionName(name); + _checkTopicName(topic); + return _createSubscription( + _fullSubscriptionName(name), _fullTopicName(topic), endpoint) + .then((sub) => _SubscriptionImpl(this, sub)); + } + + @override + Future deleteSubscription(String name) { + _checkSubscriptionName(name); + return _deleteSubscription(_fullSubscriptionName(name)); + } + + @override + Future lookupSubscription(String name) { + _checkSubscriptionName(name); + return _getSubscription(_fullSubscriptionName(name)) + .then((sub) => _SubscriptionImpl(this, sub)); + } + + @override + Stream listSubscriptions([String? query]) { + Future> firstPage(int pageSize) { + return _listSubscriptions(query, pageSize, null).then( + (response) => _SubscriptionPageImpl(this, query, pageSize, response)); + } + + return StreamFromPages(firstPage).stream; + } + + @override + Future> pageSubscriptions( + {String? topic, int pageSize = 50}) { + return _listSubscriptions(topic, pageSize, null).then((response) { + return _SubscriptionPageImpl(this, topic, pageSize, response); + }); + } +} + +/// Message class for messages constructed through 'new Message()'. It stores +/// the user supplied body as either String or bytes. +class _MessageImpl implements Message { + // The message body, if it is a `String`. In that case, [bytesMessage] is + // null. + final String? _stringMessage; + + // The message body, if it is a byte list. In that case, [stringMessage] is + // null. + final List? _bytesMessage; + + @override + final Map attributes; + + _MessageImpl.withString( + this._stringMessage, { + Map? attributes, + }) : _bytesMessage = null, + attributes = attributes ?? {}; + + _MessageImpl.withBytes(this._bytesMessage, {Map? attributes}) + : _stringMessage = null, + attributes = attributes ?? {}; + + @override + List get asBytes => _bytesMessage ?? utf8.encode(_stringMessage!); + + @override + String get asString => _stringMessage ?? utf8.decode(_bytesMessage!); +} + +/// Message received using [Subscription.pull]. +/// +/// Contains the [pubsub.PubsubMessage] received from Pub/Sub, and +/// makes the message body and labels available on request. +/// +/// The labels map is lazily created when first accessed. +class _PullMessage implements Message { + final pubsub.PubsubMessage _message; + List? _bytes; + String? 
_string; + + _PullMessage(this._message); + + @override + List get asBytes { + _bytes ??= _message.dataAsBytes; + return _bytes!; + } + + @override + String get asString { + _string ??= utf8.decode(_message.dataAsBytes); + return _string!; + } + + @override + Map get attributes => + _message.attributes ?? {}; +} + +/// Message received through Pub/Sub push delivery. +/// +/// Stores the message body received from Pub/Sub as the Base64 encoded string +/// from the wire protocol. +/// +/// The labels have been decoded into a Map. +class _PushMessage implements Message { + final String _base64Message; + @override + final Map attributes; + + _PushMessage(this._base64Message, this.attributes); + + @override + List get asBytes => base64.decode(_base64Message); + + @override + String get asString => utf8.decode(asBytes); +} + +/// Pull event received from Pub/Sub pull delivery. +/// +/// Stores the pull response received from Pub/Sub. +class _PullEventImpl implements PullEvent { + /// Pub/Sub API object. + final _PubSubImpl _api; + + /// Subscription this was received from. + final String _subscriptionName; + + /// Low level response received from Pub/Sub. + final pubsub.PullResponse _response; + @override + final Message message; + + _PullEventImpl( + this._api, this._subscriptionName, pubsub.PullResponse response) + : _response = response, + message = _PullMessage(response.receivedMessages![0].message!); + + @override + Future acknowledge() { + return _api._ack(_response.receivedMessages![0].ackId!, _subscriptionName); + } +} + +/// Push event received from Pub/Sub push delivery. +/// +/// decoded from JSON encoded push HTTP request body. +class _PushEventImpl implements PushEvent { + static const _prefix = '/subscriptions/'; + final Message _message; + final String _subscriptionName; + + @override + Message get message => _message; + + @override + String get subscriptionName => _subscriptionName; + + _PushEventImpl(this._message, this._subscriptionName); + + factory _PushEventImpl.fromJson(String json) { + Map body = jsonDecode(json) as Map; + var data = (body['message'] as Map)['data'] as String; + Map labels = HashMap(); + for (var label in (body['message'] as Map)['labels'] as List) { + final l = label as Map; + var key = l['key'] as String; + var value = l['strValue'] ?? l['numValue']; + labels[key] = value.toString(); + } + var subscription = body['subscription'] as String; + // TODO(#1): Remove this when the push event subscription name is prefixed + // with '/subscriptions/'. + if (!subscription.startsWith(_prefix)) { + subscription = _prefix + subscription; + } + return _PushEventImpl(_PushMessage(data, labels), subscription); + } +} + +class _TopicImpl implements Topic { + final _PubSubImpl _api; + final pubsub.Topic _topic; + + _TopicImpl(this._api, this._topic); + + @override + String get name { + assert(_topic.name!.startsWith(_api._topicPrefix)); + return _topic.name!.substring(_api._topicPrefix.length); + } + + @override + String get project { + assert(_topic.name!.startsWith(_api._topicPrefix)); + return _api.project; + } + + @override + String get absoluteName => _topic.name!; + + @override + Future publish(Message message) { + return _api._publish(_topic.name!, message.asBytes, message.attributes); + } + + @override + Future delete() => _api._deleteTopic(_topic.name!); + + @override + Future publishString(String message, {Map? 
attributes}) { + attributes ??= {}; + return _api._publish(_topic.name!, utf8.encode(message), attributes); + } + + @override + Future publishBytes(List message, {Map? attributes}) { + attributes ??= {}; + return _api._publish(_topic.name!, message, attributes); + } +} + +class _SubscriptionImpl implements Subscription { + final _PubSubImpl _api; + final pubsub.Subscription _subscription; + + _SubscriptionImpl(this._api, this._subscription); + + @override + String get name { + assert(_subscription.name!.startsWith(_api._subscriptionPrefix)); + return _subscription.name!.substring(_api._subscriptionPrefix.length); + } + + @override + String get project { + assert(_subscription.name!.startsWith(_api._subscriptionPrefix)); + return _api.project; + } + + @override + String get absoluteName => _subscription.name!; + + @override + Topic get topic { + var topic = pubsub.Topic()..name = _subscription.topic; + return _TopicImpl(_api, topic); + } + + @override + Future delete() => _api._deleteSubscription(_subscription.name!); + + @override + Future pull({ + @Deprecated('returnImmediately has been deprecated from pubsub') + bool wait = true, + }) { + return _api._pull(_subscription.name!, !wait).then((response) { + // The documentation says 'Returns an empty list if there are no + // messages available in the backlog'. However the receivedMessages + // property can also be null in that case. + if (response.receivedMessages == null || + response.receivedMessages!.isEmpty) { + return null; + } + return _PullEventImpl(_api, _subscription.name!, response); + }).catchError((e) => null, + test: (e) => e is pubsub.DetailedApiRequestError && e.status == 400); + } + + @override + Uri? get endpoint => null; + + @override + bool get isPull => endpoint == null; + + @override + bool get isPush => endpoint != null; + + @override + Future updatePushConfiguration(Uri endpoint) { + return _api._modifyPushConfig(_subscription.name!, endpoint); + } +} + +class _TopicPageImpl implements Page { + final _PubSubImpl _api; + final int _pageSize; + final String? _nextPageToken; + @override + final List items = []; + + _TopicPageImpl(this._api, this._pageSize, pubsub.ListTopicsResponse response) + : _nextPageToken = response.nextPageToken { + final topics = response.topics; + if (topics != null) { + items.addAll(topics.map((t) => _TopicImpl(_api, t))); + } + } + + @override + bool get isLast => _nextPageToken == null; + + @override + Future> next({int? pageSize}) async { + throwIfIsLast(); + final pageSize_ = pageSize ?? _pageSize; + + return _api._listTopics(pageSize_, _nextPageToken).then((response) { + return _TopicPageImpl(_api, pageSize_, response); + }); + } +} + +class _SubscriptionPageImpl implements Page { + final _PubSubImpl _api; + final String? _topic; + final int _pageSize; + final String? _nextPageToken; + @override + final List items = []; + + _SubscriptionPageImpl(this._api, this._topic, this._pageSize, + pubsub.ListSubscriptionsResponse response) + : _nextPageToken = response.nextPageToken { + final subscriptions = response.subscriptions; + if (subscriptions != null) { + items.addAll(subscriptions.map((s) => _SubscriptionImpl(_api, s))); + } + } + + @override + bool get isLast => _nextPageToken == null; + + @override + Future> next({int? pageSize}) { + throwIfIsLast(); + final pageSize_ = pageSize ?? 
_pageSize; + + return _api + ._listSubscriptions(_topic, pageSize_, _nextPageToken) + .then((response) { + return _SubscriptionPageImpl(_api, _topic, pageSize_, response); + }); + } +} diff --git a/pkgs/gcloud/lib/src/retry_datastore_impl.dart b/pkgs/gcloud/lib/src/retry_datastore_impl.dart new file mode 100644 index 00000000..72b75277 --- /dev/null +++ b/pkgs/gcloud/lib/src/retry_datastore_impl.dart @@ -0,0 +1,160 @@ +// Copyright (c) 2023, the Dart project authors. Please see the AUTHORS file +// for details. All rights reserved. Use of this source code is governed by a +// BSD-style license that can be found in the LICENSE file. + +import 'package:retry/retry.dart'; + +import '../common.dart'; +import '../datastore.dart' as datastore; + +/// Datastore implementation which retries most operations +class RetryDatastoreImpl implements datastore.Datastore { + final datastore.Datastore _delegate; + final RetryOptions _retryOptions; + + RetryDatastoreImpl(this._delegate, this._retryOptions); + + @override + Future> allocateIds(List keys) async { + return await _retryOptions.retry( + () => _delegate.allocateIds(keys), + retryIf: _retryIf, + ); + } + + @override + Future beginTransaction({ + bool crossEntityGroup = false, + }) async { + return await _retryOptions.retry( + () => _delegate.beginTransaction(crossEntityGroup: crossEntityGroup), + retryIf: _retryIf, + ); + } + + @override + Future commit({ + List inserts = const [], + List autoIdInserts = const [], + List deletes = const [], + datastore.Transaction? transaction, + }) async { + Future fn() async { + if (transaction == null) { + return await _delegate.commit( + inserts: inserts, + autoIdInserts: autoIdInserts, + deletes: deletes, + ); + } else { + return await _delegate.commit( + inserts: inserts, + autoIdInserts: autoIdInserts, + deletes: deletes, + transaction: transaction, + ); + } + } + + final shouldNotRetry = autoIdInserts.isNotEmpty && transaction == null; + if (shouldNotRetry) { + return await fn(); + } else { + return await _retryOptions.retry(fn, retryIf: _retryIf); + } + } + + @override + Future> lookup( + List keys, { + datastore.Transaction? transaction, + }) async { + return await _retryOptions.retry( + () async { + if (transaction == null) { + return await _delegate.lookup(keys); + } else { + return await _delegate.lookup(keys, transaction: transaction); + } + }, + retryIf: _retryIf, + ); + } + + @override + Future> query( + datastore.Query query, { + datastore.Partition? partition, + datastore.Transaction? 
transaction, + }) async { + Future> fn() async { + if (partition != null && transaction != null) { + return await _delegate.query( + query, + partition: partition, + transaction: transaction, + ); + } else if (partition != null) { + return await _delegate.query(query, partition: partition); + } else if (transaction != null) { + return await _delegate.query( + query, + transaction: transaction, + ); + } else { + return await _delegate.query(query); + } + } + + return await _retryOptions.retry( + () async => _RetryPage(await fn(), _retryOptions), + retryIf: _retryIf, + ); + } + + @override + Future rollback(datastore.Transaction transaction) async { + return await _retryOptions.retry( + () => _delegate.rollback(transaction), + retryIf: _retryIf, + ); + } +} + +class _RetryPage implements Page { + final Page _delegate; + final RetryOptions _retryOptions; + + _RetryPage(this._delegate, this._retryOptions); + + @override + bool get isLast => _delegate.isLast; + + @override + List get items => _delegate.items; + + @override + Future> next({int? pageSize}) async { + final nextPage = await _retryOptions.retry( + () async { + if (pageSize == null) { + return await _delegate.next(); + } else { + return await _delegate.next(pageSize: pageSize); + } + }, + retryIf: _retryIf, + ); + return _RetryPage(nextPage, _retryOptions); + } +} + +bool _retryIf(Exception e) { + if (e is datastore.TransactionAbortedError || + e is datastore.NeedIndexError || + e is datastore.QuotaExceededError || + e is datastore.PermissionDeniedError) { + return false; + } + return true; +} diff --git a/pkgs/gcloud/lib/src/storage_impl.dart b/pkgs/gcloud/lib/src/storage_impl.dart new file mode 100644 index 00000000..afaf8a70 --- /dev/null +++ b/pkgs/gcloud/lib/src/storage_impl.dart @@ -0,0 +1,666 @@ +// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file +// for details. All rights reserved. Use of this source code is governed by a +// BSD-style license that can be found in the LICENSE file. + +part of '../storage.dart'; + +const String _absolutePrefix = 'gs://'; +const String _directoryDelimiter = '/'; + +/// Representation of an absolute name consisting of bucket name and object +/// name. +class _AbsoluteName { + final String bucketName; + final String objectName; + + _AbsoluteName._(this.bucketName, this.objectName); + + factory _AbsoluteName.parse(String absoluteName) { + if (!absoluteName.startsWith(_absolutePrefix)) { + throw FormatException("Absolute name '$absoluteName' does not start " + "with '$_absolutePrefix'"); + } + var index = absoluteName.indexOf('/', _absolutePrefix.length); + if (index == -1 || index == _absolutePrefix.length) { + throw FormatException("Absolute name '$absoluteName' does not have " + 'a bucket name'); + } + if (index == absoluteName.length - 1) { + throw FormatException("Absolute name '$absoluteName' does not have " + 'an object name'); + } + final bucketName = absoluteName.substring(_absolutePrefix.length, index); + final objectName = absoluteName.substring(index + 1); + + return _AbsoluteName._(bucketName, objectName); + } +} + +/// Storage API implementation providing access to buckets. +class _StorageImpl implements Storage { + final String project; + final storage_api.StorageApi _api; + + _StorageImpl(http.Client client, this.project) + : _api = storage_api.StorageApi(client); + + @override + Future createBucket(String bucketName, + {PredefinedAcl? predefinedAcl, Acl? 
acl}) { + var bucket = storage_api.Bucket()..name = bucketName; + var predefinedName = predefinedAcl?._name; + if (acl != null) { + bucket.acl = acl._toBucketAccessControlList(); + } + return _api.buckets + .insert(bucket, project, predefinedAcl: predefinedName) + .then((bucket) => null); + } + + @override + Future deleteBucket(String bucketName) { + return _api.buckets.delete(bucketName); + } + + @override + Bucket bucket(String bucketName, + {PredefinedAcl? defaultPredefinedObjectAcl, Acl? defaultObjectAcl}) { + return _BucketImpl( + this, bucketName, defaultPredefinedObjectAcl, defaultObjectAcl); + } + + @override + Future bucketExists(String bucketName) { + bool notFoundError(e) { + return e is storage_api.DetailedApiRequestError && e.status == 404; + } + + return _api.buckets + .get(bucketName) + .then((_) => true) + .catchError((e) => false, test: notFoundError); + } + + @override + Future bucketInfo(String bucketName) { + return _api.buckets + .get(bucketName, projection: 'full') + .then(_BucketInfoImpl.new); + } + + @override + Stream listBucketNames() { + Future<_BucketPageImpl> firstPage(int pageSize) { + return _listBuckets(pageSize, null) + .then((response) => _BucketPageImpl(this, pageSize, response)); + } + + return StreamFromPages(firstPage).stream; + } + + @override + Future> pageBucketNames({int pageSize = 50}) { + return _listBuckets(pageSize, null).then((response) { + return _BucketPageImpl(this, pageSize, response); + }); + } + + @override + Future copyObject(String src, String dest, {ObjectMetadata? metadata}) { + var srcName = _AbsoluteName.parse(src); + var destName = _AbsoluteName.parse(dest); + metadata ??= _ObjectMetadata(); + var objectMetadata = metadata as _ObjectMetadata; + final object = objectMetadata._object; + return _api.objects + .copy(object, srcName.bucketName, srcName.objectName, + destName.bucketName, destName.objectName) + .then((_) => null); + } + + Future _listBuckets( + int pageSize, String? nextPageToken) { + return _api.buckets + .list(project, maxResults: pageSize, pageToken: nextPageToken); + } +} + +class _BucketInfoImpl implements BucketInfo { + final storage_api.Bucket _bucket; + + _BucketInfoImpl(this._bucket); + + @override + String get bucketName => _bucket.name!; + + @override + String get etag => _bucket.etag!; + + @override + DateTime get created => _bucket.timeCreated!; + + @override + String get id => _bucket.id!; + + @override + Acl get acl => Acl._fromBucketAcl(_bucket); +} + +/// Bucket API implementation providing access to objects. +class _BucketImpl implements Bucket { + final storage_api.StorageApi _api; + final PredefinedAcl? _defaultPredefinedObjectAcl; + final Acl? _defaultObjectAcl; + @override + final String bucketName; + + _BucketImpl(_StorageImpl storage, this.bucketName, + this._defaultPredefinedObjectAcl, this._defaultObjectAcl) + : _api = storage._api; + + @override + String absoluteObjectName(String objectName) { + return '$_absolutePrefix$bucketName/$objectName'; + } + + @override + StreamSink> write(String objectName, + {int? length, + ObjectMetadata? metadata, + Acl? acl, + PredefinedAcl? predefinedAcl, + String? 
contentType}) {
+    storage_api.Object object;
+    if (metadata == null) {
+      metadata = _ObjectMetadata(acl: acl, contentType: contentType);
+    } else {
+      if (acl != null) {
+        metadata = metadata.replace(acl: acl);
+      }
+      if (contentType != null) {
+        metadata = metadata.replace(contentType: contentType);
+      }
+    }
+    var objectMetadata = metadata as _ObjectMetadata;
+    object = objectMetadata._object;
+
+    // If no predefined ACL is passed use the default (if any).
+    String? predefinedName;
+    if (predefinedAcl != null || _defaultPredefinedObjectAcl != null) {
+      var predefined = predefinedAcl ?? _defaultPredefinedObjectAcl!;
+      predefinedName = predefined._name;
+    }
+
+    // If no ACL is passed use the default (if any).
+    if (object.acl == null && _defaultObjectAcl != null) {
+      object.acl = _defaultObjectAcl!._toObjectAccessControlList();
+    }
+
+    // Fill properties not passed in metadata.
+    object.name = objectName;
+
+    var sink = _MediaUploadStreamSink(
+        _api, bucketName, objectName, object, predefinedName, length);
+    return sink;
+  }
+
+  @override
+  Future writeBytes(String objectName, List<int> bytes,
+      {ObjectMetadata? metadata,
+      Acl? acl,
+      PredefinedAcl? predefinedAcl,
+      String? contentType}) {
+    var sink = write(objectName,
+        length: bytes.length,
+        metadata: metadata,
+        acl: acl,
+        predefinedAcl: predefinedAcl,
+        contentType: contentType) as _MediaUploadStreamSink;
+    sink.add(bytes);
+    return sink.close();
+  }
+
+  @override
+  Stream<List<int>> read(String objectName,
+      {int? offset, int? length}) async* {
+    offset ??= 0;
+
+    if (offset != 0 && length == null) {
+      throw ArgumentError('length must have a value if offset is non-zero.');
+    }
+
+    var options = storage_api.DownloadOptions.fullMedia;
+
+    if (length != null) {
+      if (length <= 0) {
+        throw ArgumentError.value(length, 'length',
+            'If provided, length must be greater than zero.');
+      }
+      // For ByteRange, end is *inclusive*.
+      var end = offset + length - 1;
+      var range = storage_api.ByteRange(offset, end);
+      assert(range.length == length);
+      options = storage_api.PartialDownloadOptions(range);
+    }
+
+    var media = (await _api.objects.get(bucketName, objectName,
+        downloadOptions: options)) as commons.Media;
+
+    yield* media.stream;
+  }
+
+  @override
+  Future<ObjectInfo> info(String objectName) {
+    return _api.objects
+        .get(bucketName, objectName, projection: 'full')
+        .then((object) => _ObjectInfoImpl(object as storage_api.Object));
+  }
+
+  @override
+  Future delete(String objectName) {
+    return _api.objects.delete(bucketName, objectName);
+  }
+
+  @override
+  Stream<BucketEntry> list({String? prefix, String? delimiter}) {
+    delimiter ??= _directoryDelimiter;
+    Future<_ObjectPageImpl> firstPage(int pageSize) async {
+      final response =
+          await _listObjects(bucketName, prefix, delimiter, 50, null);
+      return _ObjectPageImpl(this, prefix, delimiter, pageSize, response);
+    }
+
+    return StreamFromPages<BucketEntry>(firstPage).stream;
+  }
+
+  @override
+  Future<Page<BucketEntry>> page(
+      {String? prefix, String? delimiter, int pageSize = 50}) async {
+    delimiter ??= _directoryDelimiter;
+    final response =
+        await _listObjects(bucketName, prefix, delimiter, pageSize, null);
+    return _ObjectPageImpl(this, prefix, delimiter, pageSize, response);
+  }
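+
+  // Illustrative usage sketch (hypothetical object name): `length` is
+  // required whenever `offset` is non-zero, as enforced in read() above:
+  //
+  //   final bytes = <int>[];
+  //   await bucket
+  //       .read('logs/2024-01-01.txt', offset: 100, length: 50)
+  //       .forEach(bytes.addAll);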
+ var md = metadata as _ObjectMetadata; + var object = md._object; + if (md._object.acl == null && _defaultObjectAcl == null) { + throw ArgumentError('ACL is required for update'); + } + if (md.contentType == null) { + throw ArgumentError('Content-Type is required for update'); + } + md._object.acl ??= _defaultObjectAcl!._toObjectAccessControlList(); + return _api.objects.update(object, bucketName, objectName); + } + + Future _listObjects(String bucketName, String? prefix, + String? delimiter, int pageSize, String? nextPageToken) { + return _api.objects.list(bucketName, + prefix: prefix, + delimiter: delimiter, + maxResults: pageSize, + pageToken: nextPageToken); + } +} + +class _BucketPageImpl implements Page { + final _StorageImpl _storage; + final int? _pageSize; + final String? _nextPageToken; + @override + final List items; + + _BucketPageImpl(this._storage, this._pageSize, storage_api.Buckets response) + : items = [ + for (final item in response.items ?? const []) + item.name! + ], + _nextPageToken = response.nextPageToken; + + @override + bool get isLast => _nextPageToken == null; + + @override + Future> next({int? pageSize}) async { + if (isLast) { + throw StateError('Page.next() cannot be called when Page.isLast == true'); + } + pageSize ??= _pageSize; + + return _storage._listBuckets(pageSize!, _nextPageToken).then((response) { + return _BucketPageImpl(_storage, pageSize, response); + }); + } +} + +class _ObjectPageImpl implements Page { + final _BucketImpl _bucket; + final String? _prefix; + final String? _delimiter; + final int? _pageSize; + final String? _nextPageToken; + @override + final List items; + + _ObjectPageImpl(this._bucket, this._prefix, this._delimiter, this._pageSize, + storage_api.Objects response) + : items = [ + for (final item in response.prefixes ?? const []) + BucketEntry._directory(item), + for (final item in response.items ?? const []) + BucketEntry._object(item.name!) + ], + _nextPageToken = response.nextPageToken; + + @override + bool get isLast => _nextPageToken == null; + + @override + Future> next({int? pageSize}) async { + if (isLast) { + throw StateError('Page.next() cannot be called when Page.isLast == true'); + } + pageSize ??= _pageSize; + + return _bucket + ._listObjects( + _bucket.bucketName, _prefix, _delimiter, pageSize!, _nextPageToken) + .then((response) { + return _ObjectPageImpl(_bucket, _prefix, _delimiter, pageSize, response); + }); + } +} + +class _ObjectGenerationImpl implements ObjectGeneration { + @override + final String objectGeneration; + @override + final int metaGeneration; + + _ObjectGenerationImpl(this.objectGeneration, this.metaGeneration); +} + +class _ObjectInfoImpl implements ObjectInfo { + final storage_api.Object _object; + final ObjectMetadata _metadata; + Uri? _downloadLink; + ObjectGeneration? 
_generation;
+
+  _ObjectInfoImpl(storage_api.Object object)
+      : _object = object,
+        _metadata = _ObjectMetadata._(object);
+
+  @override
+  String get name => _object.name!;
+
+  @override
+  int get length => int.parse(_object.size!);
+
+  @override
+  DateTime get updated => _object.updated!;
+
+  @override
+  String get etag => _object.etag!;
+
+  @override
+  List<int> get md5Hash => base64.decode(_object.md5Hash!);
+
+  @override
+  int get crc32CChecksum {
+    var list = base64.decode(_object.crc32c!);
+    return (list[3] << 24) | (list[2] << 16) | (list[1] << 8) | list[0];
+  }
+
+  @override
+  Uri get downloadLink {
+    return _downloadLink ??= Uri.parse(_object.mediaLink!);
+  }
+
+  @override
+  ObjectGeneration get generation {
+    return _generation ??= _ObjectGenerationImpl(
+        _object.generation!, int.parse(_object.metageneration!));
+  }
+
+  /// Additional metadata.
+  @override
+  ObjectMetadata get metadata => _metadata;
+}
+
+class _ObjectMetadata implements ObjectMetadata {
+  final storage_api.Object _object;
+  Acl? _cachedAcl;
+  ObjectGeneration? _cachedGeneration;
+  Map<String, String>? _cachedCustom;
+
+  _ObjectMetadata(
+      {Acl? acl,
+      String? contentType,
+      String? contentEncoding,
+      String? cacheControl,
+      String? contentDisposition,
+      String? contentLanguage,
+      Map<String, String>? custom})
+      : _object = storage_api.Object() {
+    _object.acl = acl?._toObjectAccessControlList();
+    _object.contentType = contentType;
+    _object.contentEncoding = contentEncoding;
+    _object.cacheControl = cacheControl;
+    _object.contentDisposition = contentDisposition;
+    _object.contentLanguage = contentLanguage;
+    if (custom != null) _object.metadata = custom;
+  }
+
+  _ObjectMetadata._(this._object);
+
+  @override
+  Acl? get acl {
+    _cachedAcl ??= Acl._fromObjectAcl(_object);
+    return _cachedAcl;
+  }
+
+  @override
+  String? get contentType => _object.contentType;
+
+  @override
+  String? get contentEncoding => _object.contentEncoding;
+
+  @override
+  String? get cacheControl => _object.cacheControl;
+
+  @override
+  String? get contentDisposition => _object.contentDisposition;
+
+  @override
+  String? get contentLanguage => _object.contentLanguage;
+
+  ObjectGeneration? get generation {
+    _cachedGeneration ??= ObjectGeneration(
+        _object.generation!, int.parse(_object.metageneration!));
+    return _cachedGeneration;
+  }
+
+  @override
+  Map<String, String>? get custom {
+    if (_object.metadata == null) return null;
+    _cachedCustom ??= UnmodifiableMapView(_object.metadata!);
+    return _cachedCustom;
+  }
+
+  @override
+  ObjectMetadata replace(
+      {Acl? acl,
+      String? contentType,
+      String? contentEncoding,
+      String? cacheControl,
+      String? contentDisposition,
+      String? contentLanguage,
+      Map<String, String>? custom}) {
+    return _ObjectMetadata(
+        acl: acl ?? this.acl,
+        contentType: contentType ?? this.contentType,
+        contentEncoding: contentEncoding ?? this.contentEncoding,
+        cacheControl: cacheControl ?? this.cacheControl,
+        contentDisposition: contentDisposition ?? this.contentDisposition,
+        contentLanguage: contentLanguage ?? this.contentLanguage,
+        custom: custom != null ? Map.from(custom) : this.custom);
+  }
+}
+
+/// Implementation of StreamSink which handles Google media upload.
+/// It provides a StreamSink and logic which selects whether to use normal
+/// media upload (multipart mime) or resumable media upload.
+class _MediaUploadStreamSink implements StreamSink<List<int>> {
+  static const _maxNormalUploadLength = 1024 * 1024;
+  final storage_api.StorageApi _api;
+  final String _bucketName;
+  final String _objectName;
+  final storage_api.Object _object;
+  final String?
_predefinedAcl; + final int? _length; + final BytesBuilder _buffer = BytesBuilder(); + final _controller = StreamController>(sync: true); + late StreamSubscription _subscription; + late StreamController> _resumableController; + final _doneCompleter = Completer(); + + static const int _stateLengthKnown = 0; + static const int _stateProbingLength = 1; + static const int _stateDecidedResumable = 2; + int? _state; + + _MediaUploadStreamSink(this._api, this._bucketName, this._objectName, + this._object, this._predefinedAcl, this._length) { + if (_length != null) { + // If the length is known in advance decide on the upload strategy + // immediately + _state = _stateLengthKnown; + if (_length! <= _maxNormalUploadLength) { + _startNormalUpload(_controller.stream, _length); + } else { + _startResumableUpload(_controller.stream, _length); + } + } else { + _state = _stateProbingLength; + // If the length is not known in advance decide on the upload strategy + // later. Start buffering until enough data has been read to decide. + _subscription = _controller.stream + .listen(_onData, onDone: _onDone, onError: _onError); + } + } + + @override + void add(List event) { + _controller.add(event); + } + + @override + void addError(Object errorEvent, [StackTrace? stackTrace]) { + _controller.addError(errorEvent, stackTrace); + } + + @override + Future addStream(Stream> stream) { + return _controller.addStream(stream); + } + + @override + Future close() { + _controller.close(); + return _doneCompleter.future; + } + + @override + Future get done => _doneCompleter.future; + + void _onData(List data) { + assert(_state != _stateLengthKnown); + if (_state == _stateProbingLength) { + _buffer.add(data); + if (_buffer.length > _maxNormalUploadLength) { + // Start resumable upload. + // TODO: Avoid using another stream-controller. + _resumableController = StreamController>(sync: true); + _resumableController.add(_buffer.takeBytes()); + _startResumableUpload(_resumableController.stream, _length); + _state = _stateDecidedResumable; + + // At this point, we're forwarding events to the synchronous controller, + // so let's also forward pause and resume requests. + _resumableController + ..onPause = _subscription.pause + ..onResume = _subscription.resume; + // We don't have to handle `onCancel`: The upload will only cancel the + // stream in case of errors, which we already handle by closing the + // subscription. + } + } else { + assert(_state == _stateDecidedResumable); + _resumableController.add(data); + } + } + + void _onDone() { + if (_state == _stateProbingLength) { + // As the data is already cached don't bother to wait on somebody + // listening on the stream before adding the data. + final length = _buffer.length; + _startNormalUpload(Stream.value(_buffer.takeBytes()), length); + } else { + _resumableController.close(); + } + } + + void _onError(Object e, StackTrace s) { + // If still deciding on the strategy complete with error. Otherwise + // forward the error for default processing. + if (_state == _stateProbingLength) { + _completeError(e, s); + } else { + _resumableController.addError(e, s); + } + } + + void _completeError(Object e, StackTrace s) { + if (_state != _stateLengthKnown) { + // Always cancel subscription on error. + _subscription.cancel(); + } + _doneCompleter.completeError(e, s); + } + + void _startNormalUpload(Stream> stream, int? length) async { + var contentType = _object.contentType ?? 
'application/octet-stream';
+    var media = storage_api.Media(stream, length, contentType: contentType);
+    try {
+      final response = await _api.objects.insert(
+        _object,
+        _bucketName,
+        name: _objectName,
+        predefinedAcl: _predefinedAcl,
+        uploadMedia: media,
+        uploadOptions: storage_api.UploadOptions.defaultOptions,
+      );
+      _doneCompleter.complete(_ObjectInfoImpl(response));
+    } catch (e, st) {
+      _completeError(e, st);
+    }
+  }
+
+  void _startResumableUpload(Stream<List<int>> stream, int? length) {
+    var contentType = _object.contentType ?? 'application/octet-stream';
+    var media = storage_api.Media(stream, length, contentType: contentType);
+    _api.objects
+        .insert(_object, _bucketName,
+            name: _objectName,
+            predefinedAcl: _predefinedAcl,
+            uploadMedia: media,
+            uploadOptions: storage_api.UploadOptions.resumable)
+        .then((response) {
+      _doneCompleter.complete(_ObjectInfoImpl(response));
+    }, onError: _completeError);
+  }
+}
diff --git a/pkgs/gcloud/lib/storage.dart b/pkgs/gcloud/lib/storage.dart
new file mode 100644
index 00000000..21e7705f
--- /dev/null
+++ b/pkgs/gcloud/lib/storage.dart
@@ -0,0 +1,792 @@
+// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+/// This library provides access to Google Cloud Storage.
+///
+/// Google Cloud Storage is an object store for binary objects. Each
+/// object has a set of metadata attached to it. For more information on
+/// Google Cloud Storage see https://developers.google.com/storage/.
+///
+/// There are two main concepts in Google Cloud Storage: Buckets and Objects.
+/// A bucket is a container for objects and objects are the actual binary
+/// objects.
+///
+/// The API has two main classes for dealing with buckets and objects.
+///
+/// The class `Storage` is the main API class providing access to working
+/// with buckets. This is the 'bucket service' interface.
+///
+/// The class `Bucket` provides access to working with objects in a specific
+/// bucket. This is the 'object service' interface.
+///
+/// Both buckets and objects have names. The bucket namespace is flat and
+/// global across all projects. This means that a bucket is always
+/// addressable using its name without requiring further context.
+///
+/// Within buckets the object namespace is also flat. Objects are *not*
+/// organized hierarchically. However, as object names allow the slash `/`
+/// character this is often used to simulate a hierarchical structure
+/// based on common prefixes.
+///
+/// This package uses relative and absolute names to refer to objects. A
+/// relative name is just the object name within a bucket, and requires the
+/// context of a bucket to be used. A relative name just looks like this:
+///
+///     object_name
+///
+/// An absolute name includes the bucket name and uses the `gs://` prefix
+/// also used by the `gsutil` tool. An absolute name looks like this:
+///
+///     gs://bucket_name/object_name
+///
+/// In most cases relative names are used. Absolute names are typically
+/// only used for operations involving objects in different buckets.
+///
+/// For most of the APIs in this library which take instances of other
+/// classes from this library, it is assumed that the actual implementations
+/// provided here are used.
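+///
+/// As a brief, illustrative sketch (the project, bucket, and object names
+/// are placeholders), a typical round trip with an authenticated HTTP
+/// client looks like this:
+///
+///     final storage = Storage(client, 'my-project');
+///     final bucket = storage.bucket('my-bucket');
+///     await bucket.writeBytes('hello.txt', 'hello, world'.codeUnits,
+///         contentType: 'text/plain');
+///     final info = await bucket.info('hello.txt');
+///     print('Wrote ${info.length} bytes.');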
+library; + +import 'dart:async'; +import 'dart:collection' show UnmodifiableListView, UnmodifiableMapView; +import 'dart:convert'; +import 'dart:typed_data'; + +import 'package:_discoveryapis_commons/_discoveryapis_commons.dart' as commons; +import 'package:googleapis/storage/v1.dart' as storage_api; +import 'package:http/http.dart' as http; + +import 'common.dart'; +import 'service_scope.dart' as ss; + +export 'common.dart'; + +part 'src/storage_impl.dart'; + +const Symbol _storageKey = #gcloud.storage; + +/// Access the [Storage] object available in the current service scope. +/// +/// The returned object will be the one which was previously registered with +/// [registerStorageService] within the current (or a parent) service scope. +/// +/// Accessing this getter outside of a service scope will result in an error. +/// See the `package:gcloud/service_scope.dart` library for more information. +Storage get storageService => ss.lookup(_storageKey) as Storage; + +/// Registers the [storage] object within the current service scope. +/// +/// The provided `storage` object will be available via the top-level +/// `storageService` getter. +/// +/// Calling this function outside of a service scope will result in an error. +/// Calling this function more than once inside the same service scope is not +/// allowed. +void registerStorageService(Storage storage) { + ss.register(_storageKey, storage); +} + +/// An ACL (Access Control List) describes access rights to buckets and +/// objects. +/// +/// An ACL is a prioritized sequence of access control specifications, +/// which individually prevent or grant access. +/// The access controls are described by [AclEntry] objects. +class Acl { + final List _entries; + + /// The entries in the ACL. + List get entries => UnmodifiableListView(_entries); + + /// Create a new ACL with a list of ACL entries. + Acl(Iterable entries) : _entries = List.from(entries); + + Acl._fromBucketAcl(storage_api.Bucket bucket) + : _entries = [ + for (final control + in bucket.acl ?? const []) + AclEntry(_aclScopeFromEntity(control.entity!), + _aclPermissionFromRole(control.role)) + ]; + + Acl._fromObjectAcl(storage_api.Object object) + : _entries = [ + for (final entry in object.acl ?? []) + AclEntry(_aclScopeFromEntity(entry.entity!), + _aclPermissionFromRole(entry.role)), + ]; + + static AclScope _aclScopeFromEntity(String entity) { + if (entity.startsWith('user-')) { + var tmp = entity.substring(5); + var at = tmp.indexOf('@'); + if (at != -1) { + return AccountScope(tmp); + } else { + return StorageIdScope(tmp); + } + } else if (entity.startsWith('group-')) { + return GroupScope(entity.substring(6)); + } else if (entity.startsWith('domain-')) { + return DomainScope(entity.substring(7)); + } else if (entity.startsWith('allAuthenticatedUsers-')) { + return AclScope.allAuthenticated; + } else if (entity.startsWith('allUsers-')) { + return AclScope.allUsers; + } else if (entity.startsWith('project-')) { + var tmp = entity.substring(8); + var dash = tmp.indexOf('-'); + if (dash != -1) { + return ProjectScope(tmp.substring(dash + 1), tmp.substring(0, dash)); + } + } + return OpaqueScope(entity); + } + + static AclPermission _aclPermissionFromRole(String? 
role) {
+    if (role == 'READER') return AclPermission.READ;
+    if (role == 'WRITER') return AclPermission.WRITE;
+    if (role == 'OWNER') return AclPermission.FULL_CONTROL;
+    throw UnsupportedError(
+        "Server returned an unsupported permission role '$role'");
+  }
+
+  List<storage_api.BucketAccessControl> _toBucketAccessControlList() {
+    return _entries.map((entry) => entry._toBucketAccessControl()).toList();
+  }
+
+  List<storage_api.ObjectAccessControl> _toObjectAccessControlList() {
+    return _entries.map((entry) => entry._toObjectAccessControl()).toList();
+  }
+
+  @override
+  late final int hashCode = Object.hashAll(_entries);
+
+  @override
+  bool operator ==(Object other) {
+    if (other is Acl) {
+      List<AclEntry> entries = _entries;
+      List<AclEntry> otherEntries = other._entries;
+      if (entries.length != otherEntries.length) return false;
+      for (var i = 0; i < entries.length; i++) {
+        if (entries[i] != otherEntries[i]) return false;
+      }
+      return true;
+    } else {
+      return false;
+    }
+  }
+
+  @override
+  String toString() => 'Acl($_entries)';
+}
+
+/// An ACL entry specifies that an entity has a specific access permission.
+///
+/// The permission defines the kind of access granted to the entity.
+class AclEntry {
+  final AclScope scope;
+  final AclPermission permission;
+
+  AclEntry(this.scope, this.permission);
+
+  storage_api.BucketAccessControl _toBucketAccessControl() {
+    var acl = storage_api.BucketAccessControl();
+    acl.entity = scope._storageEntity;
+    acl.role = permission._storageBucketRole;
+    return acl;
+  }
+
+  storage_api.ObjectAccessControl _toObjectAccessControl() {
+    var acl = storage_api.ObjectAccessControl();
+    acl.entity = scope._storageEntity;
+    acl.role = permission._storageObjectRole;
+    return acl;
+  }
+
+  @override
+  late final int hashCode = Object.hash(scope, permission);
+
+  @override
+  bool operator ==(Object other) {
+    return other is AclEntry &&
+        scope == other.scope &&
+        permission == other.permission;
+  }
+
+  @override
+  String toString() => 'AclEntry($scope, $permission)';
+}
+
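+// As an illustrative sketch (the email address is a placeholder), a
+// world-readable ACL could be composed as:
+//
+//     final acl = Acl([
+//       AclEntry(AccountScope('owner@example.com'), AclPermission.FULL_CONTROL),
+//       AclEntry(AclScope.allUsers, AclPermission.READ),
+//     ]);
+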
+/// An ACL scope specifies an entity for which a permission applies.
+///
+/// A scope can be one of:
+///
+/// * Google Storage ID
+/// * Google account email address
+/// * Google group email address
+/// * Google Apps domain
+/// * Special identifier for all Google account holders
+/// * Special identifier for all users
+///
+/// See https://cloud.google.com/storage/docs/accesscontrol for more details.
+abstract class AclScope {
+  /// ACL type for scope representing a Google Storage id.
+  static const int _typeStorageId = 0;
+
+  /// ACL type for scope representing a project entity.
+  static const int _typeProject = 1;
+
+  /// ACL type for scope representing an account holder.
+  static const int _typeAccount = 2;
+
+  /// ACL type for scope representing a group.
+  static const int _typeGroup = 3;
+
+  /// ACL type for scope representing a domain.
+  static const int _typeDomain = 4;
+
+  /// ACL type for scope representing all authenticated users.
+  static const int _typeAllAuthenticated = 5;
+
+  /// ACL type for scope representing all users.
+  static const int _typeAllUsers = 6;
+
+  /// ACL type for scope representing an unsupported scope.
+  static const int _typeOpaque = 7;
+
+  /// The id of the actual entity this ACL scope represents. The actual values
+  /// are set in the different subclasses.
+  final String _id;
+
+  /// The type of entity this ACL scope represents.
+  final int _type;
+
+  /// ACL scope for all authenticated users.
+  static AllAuthenticatedScope allAuthenticated = AllAuthenticatedScope();
+
+  /// ACL scope for all users.
+  static AllUsersScope allUsers = AllUsersScope();
+
+  AclScope._(this._type, this._id);
+
+  @override
+  late final int hashCode = Object.hash(_type, _id);
+
+  @override
+  bool operator ==(Object other) {
+    return other is AclScope && _type == other._type && _id == other._id;
+  }
+
+  @override
+  String toString() => 'AclScope($_storageEntity)';
+
+  String get _storageEntity;
+}
+
+/// An ACL scope for an entity identified by a 'Google Storage ID'.
+///
+/// The [storageId] is a string of 64 hexadecimal digits that identifies a
+/// specific Google account holder or a specific Google group.
+class StorageIdScope extends AclScope {
+  StorageIdScope(String storageId)
+      : super._(AclScope._typeStorageId, storageId);
+
+  /// Google Storage ID.
+  String get storageId => _id;
+
+  @override
+  String get _storageEntity => 'user-$_id';
+}
+
+/// An ACL scope for an entity identified by an individual email address.
+class AccountScope extends AclScope {
+  AccountScope(String email) : super._(AclScope._typeAccount, email);
+
+  /// Email address.
+  String get email => _id;
+
+  @override
+  String get _storageEntity => 'user-$_id';
+}
+
+/// An ACL scope for an entity identified by a Google Groups email address.
+class GroupScope extends AclScope {
+  GroupScope(String group) : super._(AclScope._typeGroup, group);
+
+  /// Group name.
+  String get group => _id;
+
+  @override
+  String get _storageEntity => 'group-$_id';
+}
+
+/// An ACL scope for an entity identified by a domain name.
+class DomainScope extends AclScope {
+  DomainScope(String domain) : super._(AclScope._typeDomain, domain);
+
+  /// Domain name.
+  String get domain => _id;
+
+  @override
+  String get _storageEntity => 'domain-$_id';
+}
+
+/// An ACL scope for a project-related entity.
+class ProjectScope extends AclScope {
+  /// Project role.
+  ///
+  /// Possible values are `owners`, `editors` and `viewers`.
+  final String role;
+
+  ProjectScope(String project, this.role)
+      : super._(AclScope._typeProject, project);
+
+  /// Project ID.
+  String get project => _id;
+
+  @override
+  String get _storageEntity => 'project-$role-$_id';
+}
+
+/// An ACL scope for an unsupported scope.
+class OpaqueScope extends AclScope {
+  OpaqueScope(String id) : super._(AclScope._typeOpaque, id);
+
+  @override
+  String get _storageEntity => _id;
+}
+
+/// ACL scope for all authenticated users.
+class AllAuthenticatedScope extends AclScope {
+  AllAuthenticatedScope() : super._(AclScope._typeAllAuthenticated, 'invalid');
+
+  @override
+  String get _storageEntity => 'allAuthenticatedUsers';
+}
+
+/// ACL scope for all users.
+class AllUsersScope extends AclScope {
+  AllUsersScope() : super._(AclScope._typeAllUsers, 'invalid');
+
+  @override
+  String get _storageEntity => 'allUsers';
+}
+
+/// Permissions for individual scopes in an ACL.
+class AclPermission {
+  /// Provide read access.
+  // ignore: constant_identifier_names
+  static const READ = AclPermission._('READER');
+
+  /// Provide write access.
+  ///
+  /// For objects this permission is the same as [FULL_CONTROL].
+  // ignore: constant_identifier_names
+  static const WRITE = AclPermission._('WRITER');
+
+  /// Provide full control.
+  ///
+  /// For objects this permission is the same as [WRITE].
+  // ignore: constant_identifier_names
+  static const FULL_CONTROL = AclPermission._('OWNER');
+
+  final String _id;
+
+  const AclPermission._(this._id);
+
+  String get _storageBucketRole => _id;
+
+  String get _storageObjectRole => this == WRITE ? FULL_CONTROL._id : _id;
+
+  @override
+  int get hashCode => _id.hashCode;
+
+  @override
+  bool operator ==(Object other) {
+    return other is AclPermission && _id == other._id;
+  }
+
+  @override
+  String toString() => 'AclPermission($_id)';
+}
+
+/// Definition of predefined ACLs.
+///
+/// There is a convenient way of referring to a number of _predefined_ ACLs.
+/// These predefined ACLs have explicit names, and can _only_ be used to set
+/// an ACL, when either creating or updating a bucket or object. This set of
+/// predefined ACLs is expanded on the server to its actual list of [AclEntry]
+/// objects. When information is retrieved on a bucket or object, this
+/// expanded list will be present. For a description of these predefined ACLs
+/// see: https://cloud.google.com/storage/docs/accesscontrol#extension.
+class PredefinedAcl {
+  final String _name;
+  const PredefinedAcl._(this._name);
+
+  /// Predefined ACL for the 'authenticated-read' ACL. Applies to both buckets
+  /// and objects.
+  static const PredefinedAcl authenticatedRead =
+      PredefinedAcl._('authenticatedRead');
+
+  /// Predefined ACL for the 'private' ACL. Applies to both buckets
+  /// and objects.
+  static const PredefinedAcl private = PredefinedAcl._('private');
+
+  /// Predefined ACL for the 'project-private' ACL. Applies to both buckets
+  /// and objects.
+  static const PredefinedAcl projectPrivate = PredefinedAcl._('projectPrivate');
+
+  /// Predefined ACL for the 'public-read' ACL. Applies to both buckets
+  /// and objects.
+  static const PredefinedAcl publicRead = PredefinedAcl._('publicRead');
+
+  /// Predefined ACL for the 'public-read-write' ACL. Applies only to buckets.
+  static const PredefinedAcl publicReadWrite =
+      PredefinedAcl._('publicReadWrite');
+
+  /// Predefined ACL for the 'bucket-owner-full-control' ACL. Applies only to
+  /// objects.
+  static const PredefinedAcl bucketOwnerFullControl =
+      PredefinedAcl._('bucketOwnerFullControl');
+
+  /// Predefined ACL for the 'bucket-owner-read' ACL. Applies only to
+  /// objects.
+  static const PredefinedAcl bucketOwnerRead =
+      PredefinedAcl._('bucketOwnerRead');
+
+  @override
+  String toString() => 'PredefinedAcl($_name)';
+}
+
+/// Information on a bucket.
+abstract class BucketInfo {
+  /// Name of the bucket.
+  String get bucketName;
+
+  /// Entity tag for the bucket.
+  String get etag;
+
+  /// When this bucket was created.
+  DateTime get created;
+
+  /// Bucket ID.
+  String get id;
+
+  /// ACL of the bucket.
+  Acl get acl;
+}
+
+/// Access to Cloud Storage.
+abstract class Storage {
+  /// List of required OAuth2 scopes for Cloud Storage operation.
+  // ignore: constant_identifier_names
+  static const List<String> SCOPES = [
+    storage_api.StorageApi.devstorageFullControlScope
+  ];
+
+  /// Initializes access to cloud storage.
+  factory Storage(http.Client client, String project) = _StorageImpl;
+
+  /// Create a cloud storage bucket.
+  ///
+  /// Creates a cloud storage bucket named [bucketName].
+  ///
+  /// The bucket ACL can be set by passing [predefinedAcl] or [acl]. If both
+  /// are passed the entries from [acl] will be followed by the expansion of
+  /// [predefinedAcl].
+  ///
+  /// Returns a [Future] which completes when the bucket has been created.
+  Future createBucket(String bucketName,
+      {PredefinedAcl? predefinedAcl, Acl? acl});
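+
+  // A minimal sketch (the bucket name is a placeholder) of creating a
+  // bucket with one of the predefined ACLs:
+  //
+  //     await storage.createBucket('my-bucket',
+  //         predefinedAcl: PredefinedAcl.projectPrivate);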
+
+  /// Delete a cloud storage bucket.
+  ///
+  /// Deletes the cloud storage bucket named [bucketName].
+  ///
+  /// If the bucket is not empty the operation will fail.
+  ///
+  /// The returned [Future] completes when the operation is finished.
+  Future deleteBucket(String bucketName);
+
+  /// Access bucket object operations.
+  ///
+  /// Instantiates a `Bucket` object referring to the bucket named [bucketName].
+  ///
+  /// When an object is created using the resulting `Bucket` an ACL will always
+  /// be set. If the object creation does not pass any explicit ACL information
+  /// a default ACL will be used.
+  ///
+  /// If the arguments [defaultPredefinedObjectAcl] or [defaultObjectAcl] are
+  /// passed they define the default ACL. If both are passed the entries from
+  /// [defaultObjectAcl] will be followed by the expansion of
+  /// [defaultPredefinedObjectAcl] when an object is created.
+  ///
+  /// Otherwise the default object ACL attached to the bucket will be used.
+  ///
+  /// Returns a `Bucket` instance.
+  Bucket bucket(String bucketName,
+      {PredefinedAcl? defaultPredefinedObjectAcl, Acl? defaultObjectAcl});
+
+  /// Check whether a cloud storage bucket exists.
+  ///
+  /// Checks whether the bucket named [bucketName] exists.
+  ///
+  /// Returns a [Future] which completes with `true` if the bucket exists.
+  Future<bool> bucketExists(String bucketName);
+
+  /// Get information on a bucket.
+  ///
+  /// Provides metadata information for the bucket named [bucketName].
+  ///
+  /// Returns a [Future] which completes with a `BucketInfo` object.
+  Future<BucketInfo> bucketInfo(String bucketName);
+
+  /// List names of all buckets.
+  ///
+  /// Returns a [Stream] of bucket names.
+  Stream<String> listBucketNames();
+
+  /// Start paging through names of all buckets.
+  ///
+  /// The maximum number of buckets in each page is specified in [pageSize].
+  ///
+  /// Returns a [Future] which completes with a `Page` object holding the
+  /// first page. Use the `Page` object to move to the next page of buckets.
+  Future<Page<String>> pageBucketNames({int pageSize = 50});
+
+  /// Copy an object.
+  ///
+  /// Copy object [src] to object [dest].
+  ///
+  /// The names of [src] and [dest] must be absolute.
+  ///
+  /// [metadata] can be used to overwrite metadata properties.
+  Future copyObject(String src, String dest, {ObjectMetadata? metadata});
+}
+
+/// Information on a specific object.
+///
+/// This class provides access to information on an object. This includes
+/// both the properties which are provided by Cloud Storage (such as the
+/// MD5 hash) and the properties which can be changed (such as content type).
+///
+/// The properties provided by Cloud Storage are direct properties on this
+/// object.
+///
+/// The mutable properties are properties on the `metadata` property.
+abstract class ObjectInfo {
+  /// Name of the object.
+  String get name;
+
+  /// Length of the data.
+  int get length;
+
+  /// When this object was updated.
+  DateTime get updated;
+
+  /// Entity tag for the object.
+  String get etag;
+
+  /// MD5 hash of the object.
+  List<int> get md5Hash;
+
+  /// CRC32c checksum, as described in RFC 4960.
+  int get crc32CChecksum;
+
+  /// URL for direct download.
+  Uri get downloadLink;
+
+  /// Object generation.
+  ObjectGeneration get generation;
+
+  /// Additional metadata.
+  ObjectMetadata get metadata;
+}
+
+/// Generational information on an object.
+class ObjectGeneration {
+  /// Object generation.
+  final String objectGeneration;
+
+  /// Metadata generation.
+  final int metaGeneration;
+
+  const ObjectGeneration(this.objectGeneration, this.metaGeneration);
+}
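+
+// A minimal usage sketch (the object name, `bucket`, and `bytes` are
+// placeholders): custom metadata and a content type can be attached when
+// writing an object:
+//
+//     final md = ObjectMetadata(
+//         contentType: 'image/png', custom: {'kind': 'thumbnail'});
+//     await bucket.writeBytes('thumb.png', bytes, metadata: md);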
+
+/// Access to object metadata.
+abstract class ObjectMetadata {
+  factory ObjectMetadata(
+      {Acl? acl,
+      String? contentType,
+      String? contentEncoding,
+      String? cacheControl,
+      String? contentDisposition,
+      String? contentLanguage,
+      Map<String, String>? custom}) = _ObjectMetadata;
+
+  /// ACL.
+  Acl? get acl;
+
+  /// `Content-Type` for this object.
+  String? get contentType;
+
+  /// `Content-Encoding` for this object.
+  String? get contentEncoding;
+
+  /// `Cache-Control` for this object.
+  String? get cacheControl;
+
+  /// `Content-Disposition` for this object.
+  String? get contentDisposition;
+
+  /// `Content-Language` for this object.
+  ///
+  /// The value of this field must conform to RFC 3282.
+  String? get contentLanguage;
+
+  /// Custom metadata.
+  Map<String, String>? get custom;
+
+  /// Create a copy of this object with some values replaced.
+  ///
+  // TODO: This cannot be used to set values to null.
+  ObjectMetadata replace(
+      {Acl? acl,
+      String? contentType,
+      String? contentEncoding,
+      String? cacheControl,
+      String? contentDisposition,
+      String? contentLanguage,
+      Map<String, String>? custom});
+}
+
+/// Result from listing objects in a bucket.
+///
+/// Listing operates like a directory listing, despite the object
+/// namespace being flat.
+///
+/// See [Bucket.list] for information on how the hierarchical structure
+/// is determined.
+class BucketEntry {
+  /// Whether this is information on an object.
+  final bool isObject;
+
+  /// Name of object or directory.
+  final String name;
+
+  BucketEntry._object(this.name) : isObject = true;
+
+  BucketEntry._directory(this.name) : isObject = false;
+
+  /// Whether this is a prefix.
+  bool get isDirectory => !isObject;
+}
+
+/// Access to operations on a specific cloud storage bucket.
+abstract class Bucket {
+  /// Name of this bucket.
+  String get bucketName;
+
+  /// Absolute name of an object in this bucket. This includes the gs:// prefix.
+  String absoluteObjectName(String objectName);
+
+  /// Create a new object.
+  ///
+  /// Create an object named [objectName] in the bucket.
+  ///
+  /// If an object named [objectName] already exists this object will be
+  /// replaced.
+  ///
+  /// If the length of the data to write is known in advance this can be passed
+  /// as [length]. This can help to optimize the upload process.
+  ///
+  /// Additional metadata on the object can be passed either through the
+  /// `metadata` argument or through the specific named arguments
+  /// (such as `contentType`). Values passed through the specific named
+  /// arguments take precedence over the values in `metadata`.
+  ///
+  /// If [contentType] is not passed the default value of
+  /// `application/octet-stream` will be used.
+  ///
+  /// It is possible to set one of the predefined ACLs on the created object
+  /// using the [predefinedAcl] argument. If the [metadata] argument contains
+  /// an ACL as well, this ACL will be followed by the expansion of
+  /// [predefinedAcl].
+  ///
+  /// Returns a `StreamSink` where the object content can be written. When
+  /// the object content has been written the `StreamSink` completes with
+  /// an `ObjectInfo` instance with the information on the object created.
+  StreamSink<List<int>> write(String objectName,
+      {int? length,
+      ObjectMetadata? metadata,
+      Acl? acl,
+      PredefinedAcl? predefinedAcl,
+      String? contentType});
+
+  /// Create a new object in the bucket with specified content.
+  ///
+  /// Writes [bytes] to the created object.
+  ///
+  /// See [write] for more information on the additional arguments.
+  ///
+  /// Returns a `Future` which completes with an `ObjectInfo` instance when
+  /// the object is written.
+  Future<ObjectInfo> writeBytes(String name, List<int> bytes,
+      {ObjectMetadata? metadata,
+      Acl?
acl, + PredefinedAcl? predefinedAcl, + String? contentType}); + + /// Read object content as byte stream. + /// + /// If [offset] is provided, [length] must also be provided. + /// + /// If [length] is provided, it must be greater than `0`. + /// + /// If there is a problem accessing the file, a [DetailedApiRequestError] is + /// thrown. + Stream> read(String objectName, {int? offset, int? length}); + + /// Lookup object metadata. + /// + // TODO: More documentation + Future info(String name); + + /// Delete an object. + /// + // TODO: More documentation + Future delete(String name); + + /// Update object metadata. + /// + // TODO: More documentation + Future updateMetadata(String objectName, ObjectMetadata metadata); + + /// List objects in the bucket. + /// + /// Listing operates like a directory listing, despite the object + /// namespace being flat. Unless [delimiter] is specified, the character `/` + /// is being used to separate object names into directory components. + /// To list objects recursively, the [delimiter] can be set to empty string. + /// + /// Retrieves a list of objects and directory components starting + /// with [prefix]. + /// + /// Returns a [Stream] of [BucketEntry]. Each element of the stream + /// represents either an object or a directory component. + Stream list({String? prefix, String? delimiter}); + + /// Start paging through objects in the bucket. + /// + /// The maximum number of objects in each page is specified in [pageSize]. + /// + /// See [list] for more information on the other arguments. + /// + /// Returns a `Future` which completes with a `Page` object holding the + /// first page. Use the `Page` object to move to the next page. + Future> page( + {String? prefix, String? delimiter, int pageSize = 50}); +} diff --git a/pkgs/gcloud/pubspec.yaml b/pkgs/gcloud/pubspec.yaml new file mode 100644 index 00000000..1e1e9367 --- /dev/null +++ b/pkgs/gcloud/pubspec.yaml @@ -0,0 +1,26 @@ +name: gcloud +version: 0.8.14 +description: >- + High level idiomatic Dart API for Google Cloud Storage, Pub-Sub and Datastore. +repository: https://github.com/dart-lang/labs/tree/main/pkgs/gcloud + +topics: + - cloud + - gcp + +environment: + sdk: '>=2.19.0 <4.0.0' + +dependencies: + _discoveryapis_commons: ^1.0.0 + googleapis: '>=3.0.0 <14.0.0' + http: '>=0.13.5 <2.0.0' + meta: ^1.3.0 + retry: ^3.1.1 + +dev_dependencies: + dart_flutter_team_lints: ^1.0.0 + googleapis_auth: ^1.1.0 + http_parser: ^4.0.0 + mime: ^1.0.0 + test: ^1.17.5 diff --git a/pkgs/gcloud/test/common.dart b/pkgs/gcloud/test/common.dart new file mode 100644 index 00000000..4179a410 --- /dev/null +++ b/pkgs/gcloud/test/common.dart @@ -0,0 +1,240 @@ +// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file +// for details. All rights reserved. Use of this source code is governed by a +// BSD-style license that can be found in the LICENSE file. 
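+
+// Shared test helpers: `MockClient` serves canned responses for handlers
+// registered per HTTP method and path pattern, and `TraceClient` (further
+// below) logs live traffic when debugging against a real backend.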
+ +// ignore_for_file: only_throw_errors + +import 'dart:async'; +import 'dart:convert'; + +import 'package:http/http.dart' as http; +import 'package:http/testing.dart' as http_testing; +import 'package:http_parser/http_parser.dart' as http_parser; +import 'package:mime/mime.dart' as mime; +import 'package:test/test.dart'; + +const _contentTypeJsonUtf8 = 'application/json; charset=utf-8'; + +const _responseHeaders = {'content-type': _contentTypeJsonUtf8}; + +class MockClient extends http.BaseClient { + static const bytes = [1, 2, 3, 4, 5]; + + final _bytesHeaderRegexp = RegExp(r'bytes=(\d+)-(\d+)'); + + final String hostname; + final String rootPath; + final Uri rootUri; + + Map> mocks = {}; + late http_testing.MockClient client; + + MockClient(this.hostname, this.rootPath) + : rootUri = Uri.parse('https://$hostname$rootPath') { + client = http_testing.MockClient(handler); + } + + void register( + String method, Pattern path, http_testing.MockClientHandler handler) { + var map = mocks.putIfAbsent(method, () => {}); + if (path is RegExp) { + map[RegExp('$rootPath${path.pattern}')] = handler; + } else { + map['$rootPath$path'] = handler; + } + } + + void registerUpload( + String method, Pattern path, http_testing.MockClientHandler handler) { + var map = mocks.putIfAbsent(method, () => {}); + map['/upload$rootPath$path'] = handler; + } + + void registerResumableUpload( + String method, Pattern path, http_testing.MockClientHandler handler) { + var map = mocks.putIfAbsent(method, () => {}); + map['/resumable/upload$rootPath$path'] = handler; + } + + void clear() { + mocks = {}; + } + + Future handler(http.Request request) { + expect( + request.url.host, + anyOf(rootUri.host, 'storage.googleapis.com'), + ); + var path = request.url.path; + if (mocks[request.method] == null) { + throw 'No mock handler for method ${request.method} found. ' + 'Request URL was: ${request.url}'; + } + http_testing.MockClientHandler? mockHandler; + mocks[request.method]! + .forEach((pattern, http_testing.MockClientHandler handler) { + if (pattern.matchAsPrefix(path) != null) { + mockHandler = handler; + } + }); + if (mockHandler == null) { + throw 'No mock handler for method ${request.method} and path ' + '[$path] found. 
Request URL was: ${request.url}'; + } + return mockHandler!(request); + } + + @override + Future send(http.BaseRequest request) { + return client.send(request); + } + + Future respond(dynamic response) { + return Future.value( + http.Response(jsonEncode(response), 200, headers: _responseHeaders)); + } + + Future respondEmpty() { + return Future.value(http.Response('{}', 200, headers: _responseHeaders)); + } + + Future respondInitiateResumableUpload(String project) { + final headers = Map.from(_responseHeaders); + headers['location'] = 'https://$hostname/resumable/upload$rootPath' + 'b/$project/o?uploadType=resumable&alt=json&' + 'upload_id=AEnB2UqucpaWy7d5cr5iVQzmbQcQlLDIKiClrm0SAX3rJ7UN' + 'Mu5bEoC9b4teJcJUKpqceCUeqKzuoP_jz2ps_dV0P0nT8OTuZQ'; + return Future.value(http.Response('', 200, headers: headers)); + } + + Future respondContinueResumableUpload() { + return Future.value(http.Response('', 308, headers: _responseHeaders)); + } + + Future respondBytes(http.Request request) async { + expect(request.url.queryParameters['alt'], 'media'); + + var myBytes = bytes; + var headers = Map.from(_responseHeaders); + + var range = request.headers['range']; + if (range != null) { + var match = _bytesHeaderRegexp.allMatches(range).single; + + var start = int.parse(match[1]!); + var end = int.parse(match[2]!); + + myBytes = bytes.sublist(start, end + 1); + headers['content-length'] = myBytes.length.toString(); + headers['content-range'] = 'bytes $start-$end/'; + } + + return http.Response.bytes(myBytes, 200, headers: headers); + } + + Future respondError(int statusCode) { + var error = { + 'error': {'code': statusCode, 'message': 'error'} + }; + return Future.value(http.Response(jsonEncode(error), statusCode, + headers: _responseHeaders)); + } + + Future processNormalMediaUpload(http.Request request) { + var completer = Completer(); + + var contentType = + http_parser.MediaType.parse(request.headers['content-type']!); + expect(contentType.mimeType, 'multipart/related'); + var boundary = contentType.parameters['boundary']; + + var partCount = 0; + String? json; + Stream.fromIterable([ + request.bodyBytes, + [13, 10] + ]) + .transform(mime.MimeMultipartTransformer(boundary!)) + .listen((mime.MimeMultipart mimeMultipart) { + var contentType = mimeMultipart.headers['content-type']!; + partCount++; + if (partCount == 1) { + // First part in the object JSON. + expect(contentType, 'application/json; charset=utf-8'); + mimeMultipart + .transform(utf8.decoder) + .fold('', (p, e) => '$p$e') + .then((j) => json = j); + } else if (partCount == 2) { + // Second part is the base64 encoded bytes. + mimeMultipart + .transform(ascii.decoder) + .fold('', (p, e) => '$p$e') + .then(base64.decode) + .then((bytes) { + completer.complete(NormalMediaUpload(json!, bytes, contentType)); + }); + } else { + // Exactly two parts expected. + throw 'Unexpected part count'; + } + }); + + return completer.future; + } +} + +class NormalMediaUpload { + final String json; + final List bytes; + final String contentType; + NormalMediaUpload(this.json, this.bytes, this.contentType); +} + +// Implementation of http.Client which traces all requests and responses. +// Mainly useful for local testing. 
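+// For example, `TraceClient(http.Client())` wraps a regular client and
+// prints each request and response body to stdout.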
+class TraceClient extends http.BaseClient {
+  final http.Client client;
+
+  TraceClient(this.client);
+
+  @override
+  Future<http.StreamedResponse> send(http.BaseRequest request) {
+    print(request);
+    return request.finalize().toBytes().then((body) {
+      print('--- START REQUEST ---');
+      print(utf8.decode(body));
+      print('--- END REQUEST ---');
+      var r = RequestImpl(request.method, request.url, body);
+      r.headers.addAll(request.headers);
+      return client.send(r).then((http.StreamedResponse rr) {
+        return rr.stream.toBytes().then((body) {
+          print('--- START RESPONSE ---');
+          print(utf8.decode(body));
+          print('--- END RESPONSE ---');
+          return http.StreamedResponse(
+              http.ByteStream.fromBytes(body), rr.statusCode,
+              headers: rr.headers);
+        });
+      });
+    });
+  }
+
+  @override
+  void close() {
+    client.close();
+  }
+}
+
+// http.BaseRequest implementation used by the TraceClient.
+class RequestImpl extends http.BaseRequest {
+  final List<int> _body;
+
+  RequestImpl(String method, Uri url, this._body) : super(method, url);
+
+  @override
+  http.ByteStream finalize() {
+    super.finalize();
+    return http.ByteStream.fromBytes(_body);
+  }
+}
diff --git a/pkgs/gcloud/test/common_e2e.dart b/pkgs/gcloud/test/common_e2e.dart
new file mode 100644
index 00000000..9c90d44a
--- /dev/null
+++ b/pkgs/gcloud/test/common_e2e.dart
@@ -0,0 +1,47 @@
+// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+import 'dart:async';
+import 'dart:io';
+
+import 'package:googleapis_auth/auth_io.dart' as auth;
+import 'package:http/http.dart' as http;
+
+import 'common.dart';
+
+const testProject = 'test-project';
+
+// Environment variable for specifying the cloud project to use for the
+// end-to-end tests.
+const projectEnv = 'GCLOUD_E2E_TEST_PROJECT';
+
+// Used for storage e2e tests:
+//
+// List operations on buckets are eventually consistent. Bucket deletion is
+// also dependent on list operations to ensure the bucket is empty before
+// deletion.
+//
+// So this can make tests flaky. The following delay is introduced as an
+// attempt to account for that.
+const storageListDelay = Duration(seconds: 5);
+
+Future withAuthClient(
+  List<String> scopes,
+  Future Function(String project, http.Client client) callback, {
+  bool trace = false,
+}) async {
+  var project = Platform.environment[projectEnv];
+
+  if (project == null) {
+    throw StateError('Environment variable $projectEnv is not set.');
+  }
+
+  http.Client client = await auth.clientViaApplicationDefaultCredentials(
+    scopes: scopes,
+  );
+  if (trace) {
+    client = TraceClient(client);
+  }
+  return await callback(project, client);
+}
diff --git a/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart b/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart
new file mode 100644
index 00000000..3163afef
--- /dev/null
+++ b/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart
@@ -0,0 +1,1131 @@
+// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+ +/// NOTE: In order to run these tests, the following datastore indices must +/// exist: +/// $ cat index.yaml +/// indexes: +/// - kind: TestQueryKind +/// ancestor: no +/// properties: +/// - name: indexedProp +/// direction: asc +/// - name: blobPropertyIndexed +/// direction: asc +/// +/// - kind: TestQueryKind +/// ancestor: no +/// properties: +/// - name: listproperty +/// - name: test_property +/// direction: desc +/// $ gcloud datastore create-indexes index.yaml +/// +/// Now, wait for indexing done +library; + +import 'dart:async'; + +import 'package:gcloud/common.dart'; +import 'package:gcloud/datastore.dart'; +import 'package:gcloud/src/datastore_impl.dart' as datastore_impl; +import 'package:http/http.dart'; +import 'package:test/test.dart'; + +import '../../common_e2e.dart'; +import '../error_matchers.dart'; +import 'utils.dart'; + +Future> consumePages(FirstPageProvider provider) { + return StreamFromPages(provider).stream.toList(); +} + +void runTests(Datastore datastore, String? namespace) { + final partition = Partition(namespace); + + Future withTransaction(FutureOr Function(Transaction t) f, + {bool xg = false}) { + return datastore.beginTransaction(crossEntityGroup: xg).then(f); + } + + Future> insert(List entities, List autoIdEntities, + {bool transactional = true}) { + if (transactional) { + return withTransaction((Transaction transaction) { + return datastore + .commit( + inserts: entities, + autoIdInserts: autoIdEntities, + transaction: transaction) + .then((result) { + if (autoIdEntities.isNotEmpty) { + expect( + result.autoIdInsertKeys.length, equals(autoIdEntities.length)); + } + return result.autoIdInsertKeys; + }); + }, xg: true); + } else { + return datastore + .commit(inserts: entities, autoIdInserts: autoIdEntities) + .then((result) { + if (autoIdEntities.isNotEmpty) { + expect(result.autoIdInsertKeys.length, equals(autoIdEntities.length)); + } + return result.autoIdInsertKeys; + }); + } + } + + Future delete(List keys, {bool transactional = true}) { + if (transactional) { + return withTransaction((Transaction t) { + return datastore + .commit(deletes: keys, transaction: t) + .then((result) => null); + }, xg: true); + } else { + return datastore.commit(deletes: keys).then((_) => _); + } + } + + Future> lookup(List keys, {bool transactional = true}) { + if (transactional) { + return withTransaction((Transaction transaction) { + return datastore.lookup(keys, transaction: transaction); + }, xg: true); + } else { + return datastore.lookup(keys); + } + } + + bool isValidKey(Key key, {bool ignoreIds = false}) { + if (key.elements.isEmpty) return false; + + for (var element in key.elements) { + if (!ignoreIds) { + if (element.id == null || + (element.id is! String && element.id is! 
int)) { + return false; + } + } + } + return true; + } + + bool compareKey(Key a, Key b, {bool ignoreIds = false}) { + if (a.partition != b.partition) return false; + if (a.elements.length != b.elements.length) return false; + for (var i = 0; i < a.elements.length; i++) { + if (a.elements[i].kind != b.elements[i].kind) return false; + if (!ignoreIds && a.elements[i].id != b.elements[i].id) return false; + } + return true; + } + + bool compareEntity(Entity a, Entity b, {bool ignoreIds = false}) { + if (!compareKey(a.key, b.key, ignoreIds: ignoreIds)) return false; + if (a.properties.length != b.properties.length) return false; + for (var key in a.properties.keys) { + if (!b.properties.containsKey(key)) return false; + if (a.properties[key] != null && a.properties[key] is List) { + var aList = a.properties[key] as List; + var bList = b.properties[key] as List; + if (aList.length != bList.length) return false; + for (var i = 0; i < aList.length; i++) { + if (aList[i] != bList[i]) return false; + } + } else if (a.properties[key] is BlobValue) { + if (b.properties[key] is BlobValue) { + var b1 = (a.properties[key] as BlobValue).bytes; + var b2 = (b.properties[key] as BlobValue).bytes; + if (b1.length != b2.length) return false; + for (var i = 0; i < b1.length; i++) { + if (b1[i] != b2[i]) return false; + } + return true; + } + return false; + } else { + if (a.properties[key] != b.properties[key]) { + return false; + } + } + } + return true; + } + + group('e2e_datastore', () { + group('insert', () { + Future> testInsert(List entities, + {bool transactional = false, bool xg = false, bool unnamed = true}) { + Future> test(Transaction? transaction) { + return (transaction == null + ? datastore.commit(autoIdInserts: entities) + : datastore.commit( + autoIdInserts: entities, transaction: transaction)) + .then((CommitResult result) { + expect(result.autoIdInsertKeys.length, equals(entities.length)); + + for (var i = 0; i < result.autoIdInsertKeys.length; i++) { + var key = result.autoIdInsertKeys[i]; + expect(isValidKey(key), isTrue); + if (unnamed) { + expect( + compareKey(key, entities[i].key, ignoreIds: true), isTrue); + } else { + expect(compareKey(key, entities[i].key), isTrue); + } + } + return result.autoIdInsertKeys; + }); + } + + if (transactional) { + return withTransaction(test, xg: xg); + } + return test(null); + } + + FutureOr testInsertNegative(List entities, + {bool transactional = false, bool xg = false}) { + void test(Transaction? transaction) { + expect( + transaction == null + ? 
datastore.commit(autoIdInserts: entities) + : datastore.commit( + autoIdInserts: entities, transaction: transaction), + throwsA(isApplicationError)); + } + + if (transactional) { + return withTransaction(test, xg: xg); + } + test(null); + } + + var unnamedEntities1 = buildEntities(42, 43, partition: partition); + var unnamedEntities5 = buildEntities(1, 6, partition: partition); + var unnamedEntities26 = buildEntities(6, 32, partition: partition); + var named20000 = buildEntities(1000, 21001, + idFunction: (i) => 'named_${i}_of_10000', partition: partition); + + test('insert', () { + return testInsert(unnamedEntities5, transactional: false).then((keys) { + return delete(keys).then((_) { + return lookup(keys).then((List entities) { + for (var e in entities) { + expect(e, isNull); + } + }); + }); + }); + }); + + test('insert_transactional', () { + return testInsert(unnamedEntities1, transactional: true).then((keys) { + return delete(keys).then((_) { + return lookup(keys).then((List entities) { + for (var e in entities) { + expect(e, isNull); + } + }); + }); + }); + }); + + test('insert_transactional_xg', () { + return testInsert(unnamedEntities5, transactional: true, xg: true) + .then((keys) { + return delete(keys).then((_) { + return lookup(keys).then((List entities) { + for (var e in entities) { + expect(e, isNull); + } + }); + }); + }); + }); + + test('negative_insert__incomplete_path', () { + expect(() => datastore.commit(inserts: unnamedEntities1), + throwsA(isApplicationError)); + }); + + test('negative_insert_transactional_xg', () { + return testInsertNegative(unnamedEntities26, + transactional: true, xg: true); + }, + skip: 'With Firestore in Datastore mode, transactions are no longer ' + 'limited to 25 entity groups'); + + test('negative_insert_20000_entities', () async { + // Maybe it should not be a [DataStoreError] here? + // FIXME/TODO: This was adapted + expect( + datastore.commit(inserts: named20000), throwsA(isApplicationError)); + }); + + // TODO: test invalid inserts (like entities without key, ...) + }); + + group('allocate_ids', () { + test('allocate_ids_query', () { + void compareResult(List keys, List completedKeys) { + expect(completedKeys.length, equals(keys.length)); + for (var i = 0; i < keys.length; i++) { + var insertedKey = keys[i]; + var completedKey = completedKeys[i]; + + expect(completedKey.elements.length, + equals(insertedKey.elements.length)); + for (var j = 0; j < insertedKey.elements.length - 1; j++) { + expect(completedKey.elements[j], equals(insertedKey.elements[j])); + } + for (var j = insertedKey.elements.length - 1; + j < insertedKey.elements.length; + j++) { + expect(completedKey.elements[j].kind, + equals(insertedKey.elements[j].kind)); + expect(completedKey.elements[j].id, isNotNull); + expect(completedKey.elements[j].id, isInt); + } + } + } + + var keys = buildKeys(1, 4, partition: partition); + return datastore.allocateIds(keys).then((List completedKeys) { + compareResult(keys, completedKeys); + // TODO: Make sure we can insert these keys + // FIXME: Insert currently doesn't through if entities already exist! 
+ }); + }); + }); + + group('lookup', () { + Future testLookup(List keysToLookup, List entitiesToLookup, + {bool transactional = false, + bool xg = false, + bool negative = false, + bool named = false}) { + expect(keysToLookup.length, equals(entitiesToLookup.length)); + for (var i = 0; i < keysToLookup.length; i++) { + expect( + compareKey(keysToLookup[i], entitiesToLookup[i].key, + ignoreIds: !named), + isTrue); + } + + Future test(Transaction? transaction) { + return datastore.lookup(keysToLookup).then((List entities) { + expect(entities.length, equals(keysToLookup.length)); + if (negative) { + for (var i = 0; i < entities.length; i++) { + expect(entities[i], isNull); + } + } else { + for (var i = 0; i < entities.length; i++) { + expect(compareKey(entities[i]!.key, keysToLookup[i]), isTrue); + expect( + compareEntity(entities[i]!, entitiesToLookup[i], + ignoreIds: !named), + isTrue); + } + } + if (transaction != null) { + return datastore + .commit(transaction: transaction) + .then((_) => null); + } + return null; + }); + } + + if (transactional) { + return withTransaction(test, xg: xg); + } + return test(null); + } + + var unnamedEntities1 = buildEntities(42, 43, partition: partition); + var unnamedEntities5 = buildEntities(1, 6, partition: partition); + var unnamedEntities20 = buildEntities(6, 26, partition: partition); + var entitiesWithAllPropertyTypes = + buildEntityWithAllProperties(1, 6, partition: partition); + + test('lookup', () { + return insert([], unnamedEntities20, transactional: false).then((keys) { + for (var key in keys) { + expect(isValidKey(key), isTrue); + } + return testLookup(keys, unnamedEntities20).then((_) { + return delete(keys, transactional: false); + }); + }); + }); + + test('lookup_with_all_properties', () { + return insert(entitiesWithAllPropertyTypes, [], transactional: false) + .then((_) { + var keys = entitiesWithAllPropertyTypes.map((e) => e.key).toList(); + return testLookup(keys, entitiesWithAllPropertyTypes).then((_) { + return delete(keys, transactional: false); + }); + }); + }); + + test('lookup_transactional', () { + return insert([], unnamedEntities1).then((keys) { + for (var key in keys) { + expect(isValidKey(key), isTrue); + } + return testLookup(keys, unnamedEntities1, transactional: true) + .then((_) => delete(keys)); + }); + }); + + test('lookup_transactional_xg', () { + return insert([], unnamedEntities5).then((keys) { + for (var key in keys) { + expect(isValidKey(key), isTrue); + } + return testLookup(keys, unnamedEntities5, + transactional: true, xg: true) + .then((_) { + return delete(keys); + }); + }); + }); + + // TODO: ancestor lookups, string id lookups + }); + + group('delete', () { + Future testDelete(List keys, + {bool transactional = false, bool xg = false}) { + Future test(Transaction? 
transaction) { + return datastore.commit(deletes: keys).then((_) { + if (transaction != null) { + return datastore.commit(transaction: transaction); + } + return null; + }); + } + + if (transactional) { + return withTransaction(test, xg: xg); + } + return test(null); + } + + var unnamedEntities99 = buildEntities(6, 106, partition: partition); + + test('delete', () { + return insert([], unnamedEntities99, transactional: false).then((keys) { + for (var key in keys) { + expect(isValidKey(key), isTrue); + } + return lookup(keys, transactional: false).then((entities) { + for (var e in entities) { + expect(e, isNotNull); + } + return testDelete(keys).then((_) { + return lookup(keys, transactional: false).then((entities) { + for (var e in entities) { + expect(e, isNull); + } + }); + }); + }); + }); + }); + + // This should not work with [unnamedEntities20], but is working! + // FIXME TODO FIXME : look into this. + test('delete_transactional', () { + return insert([], unnamedEntities99, transactional: false).then((keys) { + for (var key in keys) { + expect(isValidKey(key), isTrue); + } + return lookup(keys, transactional: false).then((entities) { + for (var e in entities) { + expect(e, isNotNull); + } + return testDelete(keys, transactional: true).then((_) { + return lookup(keys, transactional: false).then((entities) { + for (var e in entities) { + expect(e, isNull); + } + }); + }); + }); + }); + }); + + test('delete_transactional_xg', () { + return insert([], unnamedEntities99, transactional: false).then((keys) { + for (var key in keys) { + expect(isValidKey(key), isTrue); + } + return lookup(keys, transactional: false).then((entities) { + expect(entities.length, equals(unnamedEntities99.length)); + for (var e in entities) { + expect(e, isNotNull); + } + return testDelete(keys, transactional: true, xg: true).then((_) { + return lookup(keys, transactional: false).then((entities) { + expect(entities.length, equals(unnamedEntities99.length)); + for (var e in entities) { + expect(e, isNull); + } + }); + }); + }); + }); + }); + + // TODO: ancestor deletes, string id deletes + }); + + group('rollback', () { + Future testRollback(List keys, {bool xg = false}) { + return withTransaction((Transaction transaction) { + return datastore + .lookup(keys, transaction: transaction) + .then((List entities) { + return datastore.rollback(transaction); + }); + }, xg: xg); + } + + var namedEntities1 = + buildEntities(42, 43, idFunction: (i) => 'i$i', partition: partition); + var namedEntities5 = + buildEntities(1, 6, idFunction: (i) => 'i$i', partition: partition); + + var namedEntities1Keys = namedEntities1.map((e) => e.key).toList(); + var namedEntities5Keys = namedEntities5.map((e) => e.key).toList(); + + test('rollback', () { + return testRollback(namedEntities1Keys); + }); + + test('rollback_xg', () { + return testRollback(namedEntities5Keys, xg: true); + }); + }); + + group('empty_commit', () { + Future testEmptyCommit(List keys, + {bool transactional = false, bool xg = false}) { + Future test(Transaction? transaction) { + return (transaction == null + ? 
+                ? datastore.lookup(keys)
+                : datastore.lookup(keys, transaction: transaction))
+            .then((List<Entity?> entities) {
+          if (transaction == null) return datastore.commit();
+          return datastore.commit(transaction: transaction);
+        });
+      }
+
+      if (transactional) {
+        return withTransaction(test, xg: xg);
+      } else {
+        return test(null);
+      }
+    }
+
+    var namedEntities1 =
+        buildEntities(42, 43, idFunction: (i) => 'i$i', partition: partition);
+    var namedEntities5 =
+        buildEntities(1, 6, idFunction: (i) => 'i$i', partition: partition);
+    var namedEntities20 =
+        buildEntities(6, 26, idFunction: (i) => 'i$i', partition: partition);
+
+    var namedEntities1Keys = namedEntities1.map((e) => e.key).toList();
+    var namedEntities5Keys = namedEntities5.map((e) => e.key).toList();
+    var namedEntities20Keys = namedEntities20.map((e) => e.key).toList();
+
+    test('empty_commit', () {
+      return testEmptyCommit(namedEntities20Keys);
+    });
+
+    test('empty_commit_transactional', () {
+      return testEmptyCommit(namedEntities1Keys);
+    });
+
+    test('empty_commit_transactional_xg', () {
+      return testEmptyCommit(namedEntities5Keys);
+    });
+
+    /* Disabled until we validate if the server has started to support
+     * more than 5 concurrent commits to different entity groups.
+    test('negative_empty_commit_xg', () {
+      expect(testEmptyCommit(
+          namedEntities20Keys, transactional: true, xg: true),
+          throwsA(isApplicationError));
+    });
+    */
+  });
+
+  group('conflicting_transaction', () {
+    Future testConflictingTransaction(List<Entity> entities,
+        {bool xg = false}) {
+      Future test(List<Entity?> entities, Transaction transaction, value) {
+        // Change entities:
+        var changedEntities = List<Entity?>.filled(entities.length, null);
+        for (var i = 0; i < entities.length; i++) {
+          var entity = entities[i]!;
+          var newProperties = Map<String, Object?>.from(entity.properties);
+          for (var prop in newProperties.keys) {
+            newProperties[prop] = '${newProperties[prop]}conflict$value';
+          }
+          changedEntities[i] = Entity(entity.key, newProperties);
+        }
+        return datastore.commit(
+            inserts: changedEntities.cast<Entity>(),
+            transaction: transaction);
+      }
+
+      // Insert first
+      return insert(entities, [], transactional: true).then((_) {
+        var keys = entities.map((e) => e.key).toList();
+
+        var numTransactions = 10;
+
+        // Start transactions
+        var transactions = <Future<Transaction>>[];
+        for (var i = 0; i < numTransactions; i++) {
+          transactions.add(datastore.beginTransaction(crossEntityGroup: xg));
+        }
+        return Future.wait(transactions)
+            .then((List<Transaction> transactions) {
+          // Do a lookup for the entities in every transaction
+          var lookups = <Future<List<Entity?>>>[];
+          for (var transaction in transactions) {
+            lookups.add(datastore.lookup(keys, transaction: transaction));
+          }
+          return Future.wait(lookups).then((List<List<Entity?>> results) {
+            // Do a conflicting commit in every transaction.
+            var commits = <Future>[];
+            for (var i = 0; i < transactions.length; i++) {
+              var transaction = transactions[i];
+              commits.add(test(results[i], transaction, i));
+            }
+            return Future.wait(commits);
+          });
+        });
+      });
+    }
+
+    var namedEntities1 =
+        buildEntities(42, 43, idFunction: (i) => 'i$i', partition: partition);
+    var namedEntities5 =
+        buildEntities(1, 6, idFunction: (i) => 'i$i', partition: partition);
+
+    test('conflicting_transaction', () {
+      expect(testConflictingTransaction(namedEntities1),
+          throwsA(isTransactionAbortedError));
+    });
+
+    test('conflicting_transaction_xg', () {
+      expect(testConflictingTransaction(namedEntities5, xg: true),
+          throwsA(isTransactionAbortedError));
+    });
+  });
+
+  group('query', () {
+    Future<List<Entity>> testQuery(String kind,
+        {List<Filter>? filters,
+        List<Order>? orders,
+        bool transactional = false,
+        bool xg = false,
+        int? offset,
+        int? limit}) {
+      Future<List<Entity>> test(Transaction? transaction) {
+        var query = Query(
+            kind: kind,
+            filters: filters,
+            orders: orders,
+            offset: offset,
+            limit: limit);
+        return consumePages(
+                (_) => datastore.query(query, partition: partition))
+            .then((List<Entity> entities) {
+          if (transaction != null) {
+            return datastore
+                .commit(transaction: transaction)
+                .then((_) => entities);
+          }
+          return entities;
+        });
+      }
+
+      if (transactional) {
+        return withTransaction(test, xg: xg);
+      }
+      return test(null);
+    }
+
+    Future testQueryAndCompare(String kind, List<Entity> expectedEntities,
+        {List<Filter>? filters,
+        List<Order>? orders,
+        bool transactional = false,
+        bool xg = false,
+        bool correctOrder = true,
+        int? offset,
+        int? limit}) {
+      return testQuery(kind,
+              filters: filters,
+              orders: orders,
+              transactional: transactional,
+              xg: xg,
+              offset: offset,
+              limit: limit)
+          .then((List<Entity> entities) {
+        expect(entities.length, equals(expectedEntities.length));
+
+        if (correctOrder) {
+          for (var i = 0; i < entities.length; i++) {
+            expect(compareEntity(entities[i], expectedEntities[i]), isTrue);
+          }
+        } else {
+          for (var i = 0; i < entities.length; i++) {
+            var found = false;
+            for (var j = 0; j < expectedEntities.length; j++) {
+              if (compareEntity(entities[i], expectedEntities[j])) {
+                found = true;
+              }
+            }
+            expect(found, isTrue);
+          }
+        }
+      });
+    }
+
+    Future testOffsetLimitQuery(String kind, List<Entity> expectedEntities,
+        {List<Order>? orders, bool transactional = false, bool xg = false}) {
+      // We query for all subsets of expectedEntities.
+      // NOTE: This is O(0.5 * n^2) queries, but n is currently only 6.
+      var queryTests = <Future Function()>[];
+      for (var start = 0; start < expectedEntities.length; start++) {
+        for (var end = start; end < expectedEntities.length; end++) {
+          var offset = start;
+          var limit = end - start;
+          var entities = expectedEntities.sublist(offset, offset + limit);
+          queryTests.add(() {
+            return testQueryAndCompare(kind, entities,
+                transactional: transactional,
+                xg: xg,
+                orders: orders,
+                offset: offset,
+                limit: limit);
+          });
+        }
+      }
+      // Query with limit higher than the number of results.
+ queryTests.add(() { + return testQueryAndCompare(kind, expectedEntities, + transactional: transactional, + xg: xg, + orders: orders, + offset: 0, + limit: expectedEntities.length * 10); + }); + + return Future.forEach(queryTests, (f) => f()); + } + + const testQueryKind = 'TestQueryKind'; + var stringNamedEntities = buildEntities(1, 6, + idFunction: (i) => 'str$i', + kind: testQueryKind, + partition: partition); + var stringNamedKeys = stringNamedEntities.map((e) => e.key).toList(); + + var queryKey = testPropertyKeyPrefix; + var queryUpperbound = '${testPropertyValuePrefix}4'; + var queryLowerBound = '${testPropertyValuePrefix}1'; + var queryListEntry = '${testListValue}2'; + var queryIndexValue = '${testIndexedPropertyValuePrefix}1'; + + int reverseOrderFunction(Entity a, Entity b) { + // Reverse the order + return -1 * + (a.properties[queryKey] as String) + .compareTo(b.properties[queryKey].toString()); + } + + bool filterFunction(Entity entity) { + var value = entity.properties[queryKey] as String; + return value.compareTo(queryUpperbound) == -1 && + value.compareTo(queryLowerBound) == 1; + } + + bool listFilterFunction(Entity entity) { + var values = entity.properties[testListProperty] as List; + return values.contains(queryListEntry); + } + + bool indexFilterMatches(Entity entity) { + return entity.properties[testIndexedProperty] == queryIndexValue; + } + + var sorted = stringNamedEntities.toList()..sort(reverseOrderFunction); + var filtered = stringNamedEntities.where(filterFunction).toList(); + var sortedAndFiltered = sorted.where(filterFunction).toList(); + var sortedAndListFiltered = sorted.where(listFilterFunction).toList(); + var indexedEntity = sorted.where(indexFilterMatches).toList(); + assert(indexedEntity.length == 1); + + var filters = [ + Filter(FilterRelation.GreaterThan, queryKey, queryLowerBound), + Filter(FilterRelation.LessThan, queryKey, queryUpperbound), + ]; + var listFilters = [ + Filter(FilterRelation.Equal, testListProperty, queryListEntry) + ]; + var indexedPropertyFilter = [ + Filter(FilterRelation.Equal, testIndexedProperty, queryIndexValue), + Filter( + FilterRelation.Equal, testBlobIndexedProperty, testBlobIndexedValue) + ]; + var unIndexedPropertyFilter = [ + Filter(FilterRelation.Equal, testUnindexedProperty, queryIndexValue) + ]; + + var orders = [Order(OrderDirection.Descending, queryKey)]; + + test('query', () async { + await insert(stringNamedEntities, []); + await waitUntilEntitiesReady(datastore, stringNamedKeys, partition); + + // EntityKind query + await testQueryAndCompare(testQueryKind, stringNamedEntities, + transactional: false, correctOrder: false); + await testQueryAndCompare(testQueryKind, stringNamedEntities, + transactional: true, correctOrder: false); + await testQueryAndCompare(testQueryKind, stringNamedEntities, + transactional: true, correctOrder: false, xg: true); + + // EntityKind query with order + await testQueryAndCompare(testQueryKind, sorted, + transactional: false, orders: orders); + await testQueryAndCompare(testQueryKind, sorted, + transactional: true, orders: orders); + await testQueryAndCompare(testQueryKind, sorted, + transactional: false, xg: true, orders: orders); + + // EntityKind query with filter + await testQueryAndCompare(testQueryKind, filtered, + transactional: false, filters: filters); + await testQueryAndCompare(testQueryKind, filtered, + transactional: true, filters: filters); + await testQueryAndCompare(testQueryKind, filtered, + transactional: false, xg: true, filters: filters); + + // EntityKind query 
with filter + order
+      await testQueryAndCompare(testQueryKind, sortedAndFiltered,
+          transactional: false, filters: filters, orders: orders);
+      await testQueryAndCompare(testQueryKind, sortedAndFiltered,
+          transactional: true, filters: filters, orders: orders);
+      await testQueryAndCompare(testQueryKind, sortedAndFiltered,
+          transactional: false, xg: true, filters: filters, orders: orders);
+
+      // EntityKind query with IN filter + order
+      await testQueryAndCompare(testQueryKind, sortedAndListFiltered,
+          transactional: false, filters: listFilters, orders: orders);
+      await testQueryAndCompare(testQueryKind, sortedAndListFiltered,
+          transactional: true, filters: listFilters, orders: orders);
+      await testQueryAndCompare(testQueryKind, sortedAndListFiltered,
+          transactional: false,
+          xg: true,
+          filters: listFilters,
+          orders: orders);
+
+      // Limit & Offset test
+      await testOffsetLimitQuery(testQueryKind, sorted,
+          transactional: false, orders: orders);
+      await testOffsetLimitQuery(testQueryKind, sorted,
+          transactional: true, orders: orders);
+      await testOffsetLimitQuery(testQueryKind, sorted,
+          transactional: false, xg: true, orders: orders);
+
+      // Query for indexed property
+      await testQueryAndCompare(testQueryKind, indexedEntity,
+          transactional: false, filters: indexedPropertyFilter);
+      await testQueryAndCompare(testQueryKind, indexedEntity,
+          transactional: true, filters: indexedPropertyFilter);
+      await testQueryAndCompare(testQueryKind, indexedEntity,
+          transactional: false, xg: true, filters: indexedPropertyFilter);
+
+      // Query for un-indexed property
+      await testQueryAndCompare(testQueryKind, [],
+          transactional: false, filters: unIndexedPropertyFilter);
+      await testQueryAndCompare(testQueryKind, [],
+          transactional: true, filters: unIndexedPropertyFilter);
+      await testQueryAndCompare(testQueryKind, [],
+          transactional: false, xg: true, filters: unIndexedPropertyFilter);
+
+      // Delete results
+      await delete(stringNamedKeys, transactional: true);
+
+      // Wait until the entity deletes are reflected in the indices.
+      await waitUntilEntitiesGone(datastore, stringNamedKeys, partition);
+
+      // Make sure queries don't return results
+      await testQueryAndCompare(testQueryKind, [], transactional: false);
+      await testQueryAndCompare(testQueryKind, [], transactional: true);
+      await testQueryAndCompare(testQueryKind, [],
+          transactional: true, xg: true);
+      await testQueryAndCompare(testQueryKind, [],
+          transactional: false, filters: filters, orders: orders);
+
+      // TODO: query by multiple keys, multiple sort orders, ...
+    });
+
+    test('ancestor_query', () async {
+      /*
+       * This test creates the following hierarchy:
+       *   RootKind:1 -- This defines the entity group (no entity with that key)
+       *    + SubKind:1 -- This is a subpath (no entity with that key)
+       *       + SubSubKind:1 -- This is a real entity of kind SubSubKind
+       *       + SubSubKind2:1 -- This is a real entity of kind SubSubKind2
+       */
+      var rootKey = Key([KeyElement('RootKind', 1)], partition: partition);
+      var subKey = Key.fromParent('SubKind', 1, parent: rootKey);
+      var subSubKey = Key.fromParent('SubSubKind', 1, parent: subKey);
+      var subSubKey2 = Key.fromParent('SubSubKind2', 1, parent: subKey);
+      var properties = {'foo': 'bar'};
+
+      var entity = Entity(subSubKey, properties);
+      var entity2 = Entity(subSubKey2, properties);
+
+      var orders = [Order(OrderDirection.Ascending, '__key__')];
+
+      await datastore.commit(inserts: [entity, entity2]);
+
+      // FIXME/TODO: Ancestor queries should be strongly consistent.
+      // We should not need to wait for them.
+ await waitUntilEntitiesReady( + datastore, [subSubKey, subSubKey2], partition); + + // Test that lookup only returns inserted entities. + await datastore.lookup([rootKey, subKey, subSubKey, subSubKey2]).then( + (List entities) { + expect(entities.length, 4); + expect(entities[0], isNull); + expect(entities[1], isNull); + expect(entities[2], isNotNull); + expect(entities[3], isNotNull); + expect(compareEntity(entity, entities[2]!), isTrue); + expect(compareEntity(entity2, entities[3]!), isTrue); + }); + + // Query by ancestor. + // - by [rootKey] + { + var ancestorQuery = Query(ancestorKey: rootKey, orders: orders); + await consumePages( + (_) => datastore.query(ancestorQuery, partition: partition)) + .then((results) { + expect(results.length, 2); + expect(compareEntity(entity, results[0]), isTrue); + expect(compareEntity(entity2, results[1]), isTrue); + }); + } + + // - by [subKey] + { + var ancestorQuery = Query(ancestorKey: subKey, orders: orders); + await consumePages( + (_) => datastore.query(ancestorQuery, partition: partition)) + .then((results) { + expect(results.length, 2); + expect(compareEntity(entity, results[0]), isTrue); + expect(compareEntity(entity2, results[1]), isTrue); + }); + } + + // - by [subSubKey] + { + var ancestorQuery = Query(ancestorKey: subSubKey); + await consumePages( + (_) => datastore.query(ancestorQuery, partition: partition)) + .then((results) { + expect(results.length, 1); + expect(compareEntity(entity, results[0]), isTrue); + }); + } + + // - by [subSubKey2] + { + var ancestorQuery = Query(ancestorKey: subSubKey2); + await consumePages( + (_) => datastore.query(ancestorQuery, partition: partition)) + .then((results) { + expect(results.length, 1); + expect(compareEntity(entity2, results[0]), isTrue); + }); + } + + // Query by ancestor and kind. 
+      // - by [rootKey] + 'SubSubKind'
+      {
+        var query = Query(ancestorKey: rootKey, kind: 'SubSubKind');
+        await consumePages(
+                (_) => datastore.query(query, partition: partition))
+            .then((List<Entity> results) {
+          expect(results.length, 1);
+          expect(compareEntity(entity, results[0]), isTrue);
+        });
+      }
+      // - by [rootKey] + 'SubSubKind2'
+      {
+        var query = Query(ancestorKey: rootKey, kind: 'SubSubKind2');
+        await consumePages(
+                (_) => datastore.query(query, partition: partition))
+            .then((List<Entity> results) {
+          expect(results.length, 1);
+          expect(compareEntity(entity2, results[0]), isTrue);
+        });
+      }
+      // - by [subSubKey] + 'SubSubKind'
+      {
+        var query = Query(ancestorKey: subSubKey, kind: 'SubSubKind');
+        await consumePages(
+                (_) => datastore.query(query, partition: partition))
+            .then((List<Entity> results) {
+          expect(results.length, 1);
+          expect(compareEntity(entity, results[0]), isTrue);
+        });
+      }
+      // - by [subSubKey2] + 'SubSubKind2'
+      {
+        var query = Query(ancestorKey: subSubKey2, kind: 'SubSubKind2');
+        await consumePages(
+                (_) => datastore.query(query, partition: partition))
+            .then((List<Entity> results) {
+          expect(results.length, 1);
+          expect(compareEntity(entity2, results[0]), isTrue);
+        });
+      }
+      // - by [subSubKey] + 'SubSubKind2'
+      {
+        var query = Query(ancestorKey: subSubKey, kind: 'SubSubKind2');
+        await consumePages(
+                (_) => datastore.query(query, partition: partition))
+            .then((List<Entity> results) {
+          expect(results.length, 0);
+        });
+      }
+      // - by [subSubKey2] + 'SubSubKind'
+      {
+        var query = Query(ancestorKey: subSubKey2, kind: 'SubSubKind');
+        await consumePages(
+                (_) => datastore.query(query, partition: partition))
+            .then((List<Entity> results) {
+          expect(results.length, 0);
+        });
+      }
+
+      // Cleanup
+      {
+        await datastore.commit(deletes: [subSubKey, subSubKey2]);
+      }
+    });
+  });
+  });
+}
+
+Future cleanupDB(Datastore db, String? namespace) {
+  Future<List<String?>> getKinds(String? namespace) {
+    var partition = Partition(namespace);
+    var q = Query(kind: '__kind__');
+    return consumePages((_) => db.query(q, partition: partition))
+        .then((List<Entity> entities) {
+      return entities
+          .map((Entity e) => e.key.elements.last.id as String?)
+          .where((String? kind) => !kind!.contains('__'))
+          .toList();
+    });
+  }
+
+  // cleanup() will call itself again as long as the DB is not clean.
+  Future cleanup(String? namespace, String? kind) {
+    var partition = Partition(namespace);
+    var q = Query(kind: kind, limit: 500);
+    return consumePages((_) => db.query(q, partition: partition))
+        .then((List<Entity> entities) {
+      if (entities.isEmpty) return null;
+
+      print('[cleanupDB]: Removing ${entities.length} left-over entities');
+      var deletes = entities.map((e) => e.key).toList();
+      return db.commit(deletes: deletes).then((_) => cleanup(namespace, kind));
+    });
+  }
+
+  return getKinds(namespace).then((List<String?> kinds) {
+    return Future.forEach(kinds, (String? kind) {
+      return cleanup(namespace, kind);
+    });
+  });
+}
+
+Future waitUntilEntitiesReady(Datastore db, List<Key> keys, Partition p) {
+  return waitUntilEntitiesHelper(db, keys, true, p);
+}
+
+Future waitUntilEntitiesGone(Datastore db, List<Key> keys, Partition p) {
+  return waitUntilEntitiesHelper(db, keys, false, p);
+}
+
+Future waitUntilEntitiesHelper(
+    Datastore db, List<Key> keys, bool positive, Partition p) {
+  var keysByKind = <String, List<Key>>{};
+  for (var key in keys) {
+    keysByKind.putIfAbsent(key.elements.last.kind, () => []).add(key);
+  }
+
+  Future waitForKeys(String kind, List<Key>? keys) {
+    var q = Query(kind: kind);
+    return consumePages((_) => db.query(q, partition: p)).then((entities) {
+      for (var key in keys!) {
+        var found = false;
+        for (var entity in entities) {
+          if (key == entity.key) found = true;
+        }
+        if (positive) {
+          if (!found) return waitForKeys(kind, keys);
+        } else {
+          if (found) return waitForKeys(kind, keys);
+        }
+      }
+      return null;
+    });
+  }
+
+  return Future.forEach(keysByKind.keys.toList(), (String kind) {
+    return waitForKeys(kind, keysByKind[kind]);
+  });
+}
+
+Future main() async {
+  late Datastore datastore;
+  late Client client;
+
+  var scopes = datastore_impl.DatastoreImpl.scopes;
+  await withAuthClient(scopes, (String project, Client httpClient) {
+    datastore = datastore_impl.DatastoreImpl(httpClient, project);
+    client = httpClient;
+    return cleanupDB(datastore, null);
+  });
+
+  tearDownAll(() async {
+    await cleanupDB(datastore, null);
+    client.close();
+  });
+
+  runTests(datastore, null);
+}
diff --git a/pkgs/gcloud/test/datastore/e2e/utils.dart b/pkgs/gcloud/test/datastore/e2e/utils.dart
new file mode 100644
index 00000000..94c68eed
--- /dev/null
+++ b/pkgs/gcloud/test/datastore/e2e/utils.dart
@@ -0,0 +1,111 @@
+// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+import 'package:gcloud/datastore.dart';
+
+const _testKind = 'TestKind';
+const testPropertyKeyPrefix = 'test_property';
+const testListProperty = 'listproperty';
+const testListValue = 'listvalue';
+const testPropertyValuePrefix = 'test_property';
+
+const testIndexedProperty = 'indexedProp';
+const testIndexedPropertyValuePrefix = 'indexedValue';
+const testUnindexedProperty = 'unindexedProp';
+const testBlobIndexedProperty = 'blobPropertyIndexed';
+final testBlobIndexedValue = BlobValue([0xaa, 0xaa, 0xff, 0xff]);
+
+Key buildKey(
+  int i, {
+  Object Function(int)? idFunction,
+  String kind = _testKind,
+  Partition? p,
+}) {
+  var path = [KeyElement(kind, idFunction == null ? null : idFunction(i))];
+  return Key(path, partition: p ?? Partition.DEFAULT);
+}
+
+Map<String, Object?> buildProperties(int i) {
+  var listValues = [
+    'foo',
+    '$testListValue$i',
+  ];
+
+  return {
+    testPropertyKeyPrefix: '$testPropertyValuePrefix$i',
+    testListProperty: listValues,
+    testIndexedProperty: '$testIndexedPropertyValuePrefix$i',
+    testUnindexedProperty: '$testIndexedPropertyValuePrefix$i',
+    testBlobIndexedProperty: testBlobIndexedValue,
+  };
+}
+
+List<Key> buildKeys(
+  int from,
+  int to, {
+  Object Function(int)? idFunction,
+  String kind = _testKind,
+  Partition? partition,
+}) {
+  var keys = <Key>[];
+  for (var i = from; i < to; i++) {
+    keys.add(buildKey(i, idFunction: idFunction, kind: kind, p: partition));
+  }
+  return keys;
+}
+
+List<Entity> buildEntities(
+  int from,
+  int to, {
+  Object Function(int)? idFunction,
+  String kind = _testKind,
+  Partition? partition,
+}) {
+  var entities = <Entity>[];
+  var unIndexedProperties = <String>{};
+  for (var i = from; i < to; i++) {
+    var key = buildKey(i, idFunction: idFunction, kind: kind, p: partition);
+    var properties = buildProperties(i);
+    unIndexedProperties.add(testUnindexedProperty);
+    entities.add(
+        Entity(key, properties, unIndexedProperties: unIndexedProperties));
+  }
+  return entities;
+}
+
+List<Entity> buildEntityWithAllProperties(int from, int to,
+    {String kind = _testKind, Partition? partition}) {
+  var us42 = const Duration(microseconds: 42);
+  var unIndexed = {'blobProperty'};
+
+  Map<String, Object?> buildProperties(int i) {
+    return {
+      'nullValue': null,
+      'boolProperty': true,
+      'intProperty': 42,
+      'doubleProperty': 4.2,
+      'stringProperty': 'foobar',
+      'blobProperty': BlobValue([0xff, 0xff, 0xaa, 0xaa]),
+      'blobPropertyIndexed': BlobValue([0xaa, 0xaa, 0xff, 0xff]),
+      'dateProperty':
+          DateTime.fromMillisecondsSinceEpoch(1, isUtc: true).add(us42),
+      'keyProperty': buildKey(1, idFunction: (i) => 's$i', kind: kind),
+      'listProperty': [
+        42,
+        4.2,
+        'foobar',
+        buildKey(1, idFunction: (i) => 's$i', kind: 'TestKind'),
+      ],
+    };
+  }
+
+  var entities = <Entity>[];
+  for (var i = from; i < to; i++) {
+    var key =
+        buildKey(i, idFunction: (i) => 'allprop$i', kind: kind, p: partition);
+    var properties = buildProperties(i);
+    entities.add(Entity(key, properties, unIndexedProperties: unIndexed));
+  }
+  return entities;
+}
diff --git a/pkgs/gcloud/test/datastore/error_matchers.dart b/pkgs/gcloud/test/datastore/error_matchers.dart
new file mode 100644
index 00000000..76be4ff4
--- /dev/null
+++ b/pkgs/gcloud/test/datastore/error_matchers.dart
@@ -0,0 +1,19 @@
+// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+import 'dart:io';
+
+import 'package:gcloud/datastore.dart';
+import 'package:test/test.dart';
+
+const isApplicationError = TypeMatcher<ApplicationError>();
+
+const isDataStoreError = TypeMatcher<DatastoreError>();
+const isTransactionAbortedError = TypeMatcher<TransactionAbortedError>();
+const isNeedIndexError = TypeMatcher<NeedIndexError>();
+const isTimeoutError = TypeMatcher<TimeoutError>();
+
+const isInt = TypeMatcher<int>();
+
+const isSocketException = TypeMatcher<SocketException>();
diff --git a/pkgs/gcloud/test/db/db_test.dart b/pkgs/gcloud/test/db/db_test.dart
new file mode 100644
index 00000000..6039ab30
--- /dev/null
+++ b/pkgs/gcloud/test/db/db_test.dart
@@ -0,0 +1,106 @@
+// Copyright (c) 2015, the Dart project authors. Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
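+
+// The unit tests below run [DatastoreDB] against a stubbed-out HTTP client,
+// so they need no network access: key and partition handling is purely local.
+// A minimal sketch of the pattern (the `_FakeHttpClient` helper is defined
+// further down in this file, it is not a package:http API):
+//
+//   final ds = datastore.Datastore(_FakeHttpClient(), '');
+//   final db = DatastoreDB(ds); // any attempted RPC would throw in the fake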
+
+// ignore_for_file: unreachable_from_main
+
+import 'dart:mirrors' show reflectClass;
+
+import 'package:gcloud/datastore.dart' as datastore;
+import 'package:gcloud/db.dart';
+import 'package:http/http.dart' as http;
+import 'package:test/test.dart';
+
+@Kind()
+class Foobar extends Model {}
+
+class _FakeHttpClient extends http.BaseClient {
+  @override
+  Future<http.StreamedResponse> send(http.BaseRequest request) {
+    throw UnimplementedError('FakeHttpClient cannot make requests');
+  }
+}
+
+void main() {
+  group('db', () {
+    final ds = datastore.Datastore(_FakeHttpClient(), '');
+    test('default-partition', () {
+      var db = DatastoreDB(ds);
+
+      // Test defaultPartition
+      expect(db.defaultPartition.namespace, isNull);
+
+      // Test emptyKey
+      expect(db.emptyKey.partition.namespace, isNull);
+
+      // Test emptyKey.append()
+      var key = db.emptyKey.append(Foobar, id: 42);
+      expect(key.parent, db.emptyKey);
+      expect(key.partition.namespace, isNull);
+      expect(key.id, 42);
+      expect(key.type, equals(Foobar));
+    });
+
+    test('non-default-partition', () {
+      var nsDb =
+          DatastoreDB(ds, defaultPartition: Partition('foobar-namespace'));
+
+      // Test defaultPartition
+      expect(nsDb.defaultPartition.namespace, 'foobar-namespace');
+
+      // Test emptyKey
+      expect(nsDb.emptyKey.partition.namespace, 'foobar-namespace');
+
+      // Test emptyKey.append()
+      var key = nsDb.emptyKey.append(Foobar, id: 42);
+      expect(key.parent, nsDb.emptyKey);
+      expect(key.partition.namespace, 'foobar-namespace');
+      expect(key.id, 42);
+      expect(key.type, equals(Foobar));
+    });
+
+    test('hasDefaultConstructor', () {
+      expect(hasDefaultConstructor(Empty), isTrue);
+      expect(hasDefaultConstructor(OnlyNamedConstructor), isFalse);
+      expect(hasDefaultConstructor(DefaultAndNamedConstructor), isTrue);
+      expect(hasDefaultConstructor(RequiredArguments), isFalse);
+      expect(hasDefaultConstructor(OnlyPositionalArguments), isTrue);
+      expect(hasDefaultConstructor(OnlyNamedArguments), isTrue);
+      // TODO: Figure out how mirrors can detect 'required' named parameters.
+      // expect(hasDefaultConstructor(RequiredNamedArguments), isFalse);
+      expect(hasDefaultConstructor(DefaultArgumentValues), isTrue);
+    });
+  });
+}
+
+bool hasDefaultConstructor(Type type) =>
+    ModelDBImpl.hasDefaultConstructor(reflectClass(type));
+
+class Empty {
+  const Empty();
+}
+
+class OnlyNamedConstructor {
+  const OnlyNamedConstructor.named();
+}
+
+class DefaultAndNamedConstructor {
+  const DefaultAndNamedConstructor();
+  const DefaultAndNamedConstructor.named();
+}
+
+class RequiredArguments {
+  const RequiredArguments(int arg);
+}
+
+class OnlyPositionalArguments {
+  const OnlyPositionalArguments([int? arg, int? arg2]);
+}
+
+class OnlyNamedArguments {
+  const OnlyNamedArguments({int? arg, int? arg2});
+}
+
+class DefaultArgumentValues {
+  const DefaultArgumentValues([int arg1 = 1, int arg2 = 2]);
+}
diff --git a/pkgs/gcloud/test/db/e2e/db_test_impl.dart b/pkgs/gcloud/test/db/e2e/db_test_impl.dart
new file mode 100644
index 00000000..16a25e89
--- /dev/null
+++ b/pkgs/gcloud/test/db/e2e/db_test_impl.dart
@@ -0,0 +1,747 @@
+// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+// ignore_for_file: avoid_dynamic_calls
+
+/// NOTE: In order to run these tests, the following datastore indices must
+/// exist:
+/// $ cat index.yaml
+/// indexes:
+/// - kind: User
+///   ancestor: no
+///   properties:
+///   - name: name
+///     direction: asc
+///   - name: nickname
+///     direction: desc
+///
+/// - kind: User
+///   ancestor: no
+///   properties:
+///   - name: name
+///     direction: desc
+///   - name: nickname
+///     direction: desc
+///
+/// - kind: User
+///   ancestor: no
+///   properties:
+///   - name: name
+///     direction: desc
+///   - name: nickname
+///     direction: asc
+///
+/// - kind: User
+///   ancestor: no
+///   properties:
+///   - name: language
+///     direction: asc
+///   - name: name
+///     direction: asc
+///
+/// $ gcloud datastore create-indexes index.yaml
+///
+/// Then wait for the index creation to finish.
+library;
+
+import 'dart:async';
+
+import 'package:gcloud/db.dart' as db;
+import 'package:gcloud/src/datastore_impl.dart' as datastore_impl;
+import 'package:test/test.dart';
+
+import '../../common_e2e.dart';
+import '../../datastore/e2e/datastore_test_impl.dart' as datastore_test;
+
+@db.Kind()
+class Person extends db.Model {
+  @db.StringProperty()
+  String? name;
+
+  @db.IntProperty()
+  int? age;
+
+  @db.ModelKeyProperty(propertyName: 'mangledWife')
+  db.Key? wife;
+
+  @override
+  // ignore: hash_and_equals
+  bool operator ==(Object other) => sameAs(other);
+
+  bool sameAs(Object other) {
+    return other is Person &&
+        id == other.id &&
+        parentKey == other.parentKey &&
+        name == other.name &&
+        age == other.age &&
+        wife == other.wife;
+  }
+
+  @override
+  String toString() => 'Person(id: $id, name: $name, age: $age)';
+}
+
+@db.Kind(idType: db.IdType.String)
+class PersonStringId extends db.Model {
+  String? get name => id;
+
+  @db.IntProperty()
+  int? age;
+
+  @db.ModelKeyProperty(propertyName: 'mangledWife')
+  db.Key? wife;
+
+  @override
+  // ignore: hash_and_equals
+  bool operator ==(Object other) => sameAs(other);
+
+  bool sameAs(Object other) {
+    return other is PersonStringId &&
+        id == other.id &&
+        parentKey == other.parentKey &&
+        age == other.age &&
+        wife == other.wife;
+  }
+
+  @override
+  String toString() => 'PersonStringId(id/name: $name, age: $age)';
+}
+
+@db.Kind()
+class User extends Person {
+  @db.StringProperty()
+  String? nickname;
+
+  @db.StringListProperty(propertyName: 'language')
+  List<String>? languages = const [];
+
+  @override
+  bool sameAs(Object other) {
+    if (!(super.sameAs(other) &&
+        other is User &&
+        nickname == other.nickname)) {
+      return false;
+    }
+
+    var user = other;
+    if (languages == null) {
+      if (user.languages == null) return true;
+      return false;
+    }
+    if (languages!.length != user.languages?.length) {
+      return false;
+    }
+
+    for (var i = 0; i < languages!.length; i++) {
+      if (languages![i] != user.languages![i]) {
+        return false;
+      }
+    }
+    return true;
+  }
+
+  @override
+  String toString() =>
+      'User(${super.toString()}, nickname: $nickname, languages: $languages)';
+}
+
+@db.Kind()
+class ExpandoPerson extends db.ExpandoModel {
+  @db.StringProperty()
+  String? name;
+
+  @db.StringProperty(propertyName: 'NN')
+  String? nickname;
+
+  @override
+  // ignore: hash_and_equals
+  bool operator ==(Object other) {
+    if (other is ExpandoPerson && id == other.id && name == other.name) {
+      if (additionalProperties.length != other.additionalProperties.length) {
+        return false;
+      }
+      for (var key in additionalProperties.keys) {
+        if (additionalProperties[key] != other.additionalProperties[key]) {
+          return false;
+        }
+      }
+      return true;
+    }
+    return false;
+  }
+}
+
+Future sleep(Duration duration) => Future.delayed(duration);
+
+void runTests(db.DatastoreDB store, String? namespace) {
+  var partition = namespace != null
+      ? store.newPartition(namespace)
+      : store.defaultPartition;
+
+  void compareModels(List<db.Model> expectedModels, List<db.Model?> models,
+      {bool anyOrder = false}) {
+    expect(models.length, equals(expectedModels.length));
+    if (anyOrder) {
+      // Do expensive O(n^2) search.
+      for (var searchModel in expectedModels) {
+        var found = false;
+        for (var m in models) {
+          if (m == searchModel) {
+            found = true;
+            break;
+          }
+        }
+        expect(found, isTrue);
+      }
+    } else {
+      for (var i = 0; i < expectedModels.length; i++) {
+        expect(models[i], equals(expectedModels[i]));
+      }
+    }
+  }
+
+  Future testInsertLookupDelete(List<db.Model> objects,
+      {bool transactional = false}) {
+    var keys = objects.map((db.Model obj) => obj.key).toList();
+
+    if (transactional) {
+      return store.withTransaction((db.Transaction commitTransaction) {
+        commitTransaction.queueMutations(inserts: objects);
+        return commitTransaction.commit();
+      }).then((_) {
+        return store.withTransaction((db.Transaction deleteTransaction) {
+          return deleteTransaction
+              .lookup(keys)
+              .then((List<db.Model?> models) {
+            compareModels(objects, models);
+            deleteTransaction.queueMutations(deletes: keys);
+            return deleteTransaction.commit();
+          });
+        });
+      });
+    } else {
+      return store.commit(inserts: objects).then(expectAsync1((_) {
+        return store.lookup(keys).then(expectAsync1((List<db.Model?> models) {
+          compareModels(objects, models);
+          return store.commit(deletes: keys).then(expectAsync1((_) {
+            return store
+                .lookup(keys)
+                .then(expectAsync1((List<db.Model?> models) {
+              for (var i = 0; i < models.length; i++) {
+                expect(models[i], isNull);
+              }
+            }));
+          }));
+        }));
+      }));
+    }
+  }
+
+  group('key', () {
+    test('equal_and_hashcode', () {
+      var k1 = store.emptyKey.append(User, id: 10).append(Person, id: 12);
+      var k2 = store.defaultPartition.emptyKey
+          .append(User, id: 10)
+          .append(Person, id: 12);
+      expect(k1, equals(k2));
+      expect(k1.hashCode, equals(k2.hashCode));
+    });
+  });
+
+  group('e2e_db', () {
+    group('insert_lookup_delete', () {
+      test('persons', () {
+        var root = partition.emptyKey;
+        var persons = <Person>[];
+        for (var i = 1; i <= 10; i++) {
+          persons.add(Person()
+            ..id = i
+            ..parentKey = root
+            ..age = 42 + i
+            ..name = 'user$i');
+        }
+        persons.first.wife = persons.last.key;
+        return testInsertLookupDelete(persons);
+      });
+      test('PersonStringId', () {
+        var root = partition.emptyKey;
+        var persons = <PersonStringId>[];
+        for (var i = 1; i <= 10; i++) {
+          persons.add(PersonStringId()
+            ..id = 'user$i'
+            ..parentKey = root
+            ..age = 42 + i);
+        }
+        persons.first.wife = persons.last.key;
+        return testInsertLookupDelete(persons);
+      });
+      test('users', () {
+        var root = partition.emptyKey;
+        var users = <User>[];
+        for (var i = 1; i <= 10; i++) {
+          users.add(User()
+            ..id = i
+            ..parentKey = root
+            ..age = 42 + i
+            ..name = 'user$i'
+            ..nickname = 'nickname${i % 3}');
+        }
+        return testInsertLookupDelete(users);
+      });
+      test('expando_insert', () {
+        var root = partition.emptyKey;
+        var expandoPersons = <ExpandoPerson>[];
+        for (var i = 1; i <= 10; i++) {
+          dynamic expandoPerson = ExpandoPerson()
+            ..parentKey = root
+            ..id = i
+            ..name = 'user$i';
+          expandoPerson.foo = 'foo$i';
+          expandoPerson.bar = i;
+          expect(expandoPerson.additionalProperties['foo'], equals('foo$i'));
+          expect(expandoPerson.additionalProperties['bar'], equals(i));
+          expandoPersons.add(expandoPerson as ExpandoPerson);
+        }
+        return testInsertLookupDelete(expandoPersons);
+      });
+      test('transactional_insert', () {
+        var root = partition.emptyKey;
+        var models = <db.Model>[];
+
+        models.add(Person()
+          ..id = 1
+          ..parentKey = root
+          ..age = 1
+          ..name = 'user1');
+        models.add(User()
+          ..id = 2
+          ..parentKey = root
+          ..age = 2
+          ..name = 'user2'
+          ..nickname = 'nickname2');
+        dynamic expandoPerson = ExpandoPerson()
+          ..parentKey = root
+          ..id = 3
+          ..name = 'user1';
+        expandoPerson.foo = 'foo1';
+        expandoPerson.bar = 2;
+
+        return testInsertLookupDelete(models, transactional: true);
+      });
+
+      test('parent_key', () {
+        var root = partition.emptyKey;
+        var users = <User>[];
+        for (var i = 333; i <= 334; i++) {
+          users.add(User()
+            ..id = i
+            ..parentKey = root
+            ..age = 42 + i
+            ..name = 'user$i'
+            ..nickname = 'nickname${i % 3}');
+        }
+        var persons = <Person>[];
+        for (var i = 335; i <= 336; i++) {
+          persons.add(Person()
+            ..id = i
+            ..parentKey = root
+            ..age = 42 + i
+            ..name = 'person$i');
+        }
+
+        // We test that we can insert + lookup
+        // users[0], (persons[0] + users[0] as parent)
+        // persons[1], (users[1] + persons[1] as parent)
+        persons[0].parentKey = users[0].key;
+        users[1].parentKey = persons[1].key;
+
+        return testInsertLookupDelete([...users, ...persons]);
+      });
+
+      test('auto_ids', () {
+        var root = partition.emptyKey;
+        var persons = <Person>[];
+        persons.add(Person()
+          ..id = 42
+          ..parentKey = root
+          ..age = 80
+          ..name = 'user80');
+        // Auto id person with parentKey
+        persons.add(Person()
+          ..parentKey = root
+          ..age = 81
+          ..name = 'user81');
+        // Auto id person with non-root parentKey
+        var fatherKey = persons.first.key;
+        persons.add(Person()
+          ..parentKey = fatherKey
+          ..age = 82
+          ..name = 'user82');
+        persons.add(Person()
+          ..id = 43
+          ..parentKey = root
+          ..age = 83
+          ..name = 'user83');
+        return store.commit(inserts: persons).then(expectAsync1((_) {
+          // At this point, autoIds are allocated and are reflected in the
+          // models (as well as parentKey if it was empty).
+
+          var keys = persons.map((db.Model obj) => obj.key).toList();
+
+          for (var i = 0; i < persons.length; i++) {
+            expect(persons[i].age, equals(80 + i));
+            expect(persons[i].name, equals('user${80 + i}'));
+          }
+
+          expect(persons[0].id, equals(42));
+          expect(persons[0].parentKey, equals(root));
+
+          expect(persons[1].id, isNotNull);
+          expect(persons[1].id is int, isTrue);
+          expect(persons[1].parentKey, equals(root));
+
+          expect(persons[2].id, isNotNull);
+          expect(persons[2].id is int, isTrue);
+          expect(persons[2].parentKey, equals(fatherKey));
+
+          expect(persons[3].id, equals(43));
+          expect(persons[3].parentKey, equals(root));
+
+          expect(persons[1].id != persons[2].id, isTrue);
+          // NOTE: We can't make assumptions about the id of persons[3],
+          // because an id doesn't need to be globally unique, only under
+          // entities with the same parent.
+
+          return store
+              .lookup(keys)
+              .then(expectAsync1((List<db.Model?> models) {
+            // Since the id/parentKey fields are set after commit and a lookup
+            // returns new model instances, we can do full model comparison
+            // here.
+            compareModels(persons, models);
+            return store.commit(deletes: keys).then(expectAsync1((_) {
+              return store
+                  .lookup(keys)
+                  .then(expectAsync1((List<db.Model?> models) {
+                for (var i = 0; i < models.length; i++) {
+                  expect(models[i], isNull);
+                }
+              }));
+            }));
+          }));
+        }));
+      });
+    });
+
+    test('query', () {
+      var root = partition.emptyKey;
+      var users = <User>[];
+      for (var i = 1; i <= 10; i++) {
+        var languages = <String>[];
+        if (i == 9) {
+          languages = ['foo'];
+        } else if (i == 10) {
+          languages = ['foo', 'bar'];
+        }
+        users.add(User()
+          ..id = i
+          ..parentKey = root
+          ..wife = root.append(User, id: 42 + i)
+          ..age = 42 + i
+          ..name = 'user$i'
+          ..nickname = 'nickname${i % 3}'
+          ..languages = languages);
+      }
+
+      var expandoPersons = <ExpandoPerson>[];
+      for (var i = 1; i <= 3; i++) {
+        dynamic expandoPerson = ExpandoPerson()
+          ..parentKey = root
+          ..id = i
+          ..name = 'user$i'
+          ..nickname = 'nickuser$i';
+        expandoPerson.foo = 'foo$i';
+        expandoPerson.bar = i;
+        expect(expandoPerson.additionalProperties['foo'], equals('foo$i'));
+        expect(expandoPerson.additionalProperties['bar'], equals(i));
+        expandoPersons.add(expandoPerson as ExpandoPerson);
+      }
+
+      var lowerBound = 'user2';
+
+      var usersSortedNameDescNicknameAsc = List<User>.from(users);
+      usersSortedNameDescNicknameAsc.sort((User a, User b) {
+        var result = b.name!.compareTo(a.name!);
+        if (result == 0) return a.nickname!.compareTo(b.nickname!);
+        return result;
+      });
+
+      var usersSortedNameDescNicknameDesc = List<User>.from(users);
+      usersSortedNameDescNicknameDesc.sort((User a, User b) {
+        var result = b.name!.compareTo(a.name!);
+        if (result == 0) return b.nickname!.compareTo(a.nickname!);
+        return result;
+      });
+
+      var usersSortedAndFilteredNameDescNicknameAsc =
+          usersSortedNameDescNicknameAsc.where((User u) {
+        return lowerBound.compareTo(u.name!) <= 0;
+      }).toList();
+
+      var usersSortedAndFilteredNameDescNicknameDesc =
+          usersSortedNameDescNicknameDesc.where((User u) {
+        return lowerBound.compareTo(u.name!) <= 0;
+      }).toList();
+
+      var fooUsers =
+          users.where((User u) => u.languages!.contains('foo')).toList();
+      var barUsers =
+          users.where((User u) => u.languages!.contains('bar')).toList();
+      var usersWithWife = users
+          .where((User u) => u.wife == root.append(User, id: 42 + 3))
+          .toList();
+
+      var allInserts = <db.Model>[...users, ...expandoPersons];
+      var allKeys = allInserts.map((db.Model model) => model.key).toList();
+      return store.commit(inserts: allInserts).then((_) {
+        return Future.wait([
+          waitUntilEntitiesReady(
+              store, users.map((u) => u.key).toList(), partition),
+          waitUntilEntitiesReady(
+              store, expandoPersons.map((u) => u.key).toList(), partition),
+        ]).then((_) {
+          var tests = [
+            // Queries for [Person] return no results, we only have [User]
+            // objects.
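+            // (The type parameter on `store.query<T>()` picks the kind that
+            // is queried, so a `Person` query never returns `User` models,
+            // even though `User` extends `Person`.)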
+            () {
+              return store
+                  .query<Person>(partition: partition)
+                  .run()
+                  .toList()
+                  .then((List<Person> models) {
+                compareModels([], models);
+              });
+            },
+
+            // All users query
+            () {
+              return store
+                  .query<User>(partition: partition)
+                  .run()
+                  .toList()
+                  .then((List<User> models) {
+                compareModels(users, models, anyOrder: true);
+              });
+            },
+
+            // Sorted query
+            () async {
+              var query = store.query<User>(partition: partition)
+                ..order('-name')
+                ..order('nickname');
+              var models = await runQueryWithExponentialBackoff(
+                  query, usersSortedNameDescNicknameAsc.length);
+              compareModels(usersSortedNameDescNicknameAsc, models);
+            },
+            () async {
+              var query = store.query<User>(partition: partition)
+                ..order('-name')
+                ..order('-nickname');
+              var models = await runQueryWithExponentialBackoff(
+                  query, usersSortedNameDescNicknameDesc.length);
+              compareModels(usersSortedNameDescNicknameDesc, models);
+            },
+
+            // Sorted query with filter
+            () async {
+              var query = store.query<User>(partition: partition)
+                ..filter('name >=', lowerBound)
+                ..order('-name')
+                ..order('nickname');
+              var models = await runQueryWithExponentialBackoff(
+                  query, usersSortedAndFilteredNameDescNicknameAsc.length);
+              compareModels(usersSortedAndFilteredNameDescNicknameAsc, models);
+            },
+            () async {
+              var query = store.query<User>(partition: partition)
+                ..filter('name >=', lowerBound)
+                ..order('-name')
+                ..order('-nickname');
+              var models = await runQueryWithExponentialBackoff(
+                  query, usersSortedAndFilteredNameDescNicknameDesc.length);
+              compareModels(
+                  usersSortedAndFilteredNameDescNicknameDesc, models);
+            },
+
+            // Filter lists
+            () async {
+              var query = store.query<User>(partition: partition)
+                ..filter('languages =', 'foo')
+                ..order('name');
+              var models =
+                  await runQueryWithExponentialBackoff(query, fooUsers.length);
+              compareModels(fooUsers, models, anyOrder: true);
+            },
+            () async {
+              var query = store.query<User>(partition: partition)
+                ..filter('languages =', 'bar')
+                ..order('name');
+              var models =
+                  await runQueryWithExponentialBackoff(query, barUsers.length);
+              compareModels(barUsers, models, anyOrder: true);
+            },
+
+            // Filter equals
+            () async {
+              var wifeKey =
+                  root.append(User, id: usersWithWife.first.wife!.id);
+              var query = store.query<User>(partition: partition)
+                ..filter('wife =', wifeKey);
+              var models = await runQueryWithExponentialBackoff(
+                  query, usersWithWife.length);
+              compareModels(usersWithWife, models, anyOrder: true);
+            },
+
+            // Simple limit/offset test.
+            () async {
+              var query = store.query<User>(partition: partition)
+                ..order('-name')
+                ..order('nickname')
+                ..offset(3)
+                ..limit(4);
+              var expectedModels =
+                  usersSortedAndFilteredNameDescNicknameAsc.sublist(3, 7);
+              var models = await runQueryWithExponentialBackoff(
+                  query, expectedModels.length);
+              compareModels(expectedModels, models);
+            },
+
+            // Expando queries: Filter on normal property.
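+            // Expando models keep undeclared attributes in
+            // `additionalProperties`, and a filter on such an expanded name
+            // matches them just like a declared property. Roughly, as a
+            // sketch using the `foo` value assigned above:
+            //
+            //   store.query<ExpandoPerson>(partition: partition)
+            //     ..filter('foo =', 'foo3');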
+            () async {
+              var query = store.query<ExpandoPerson>(partition: partition)
+                ..filter('name =', expandoPersons.last.name);
+              var models = await runQueryWithExponentialBackoff(query, 1);
+              compareModels([expandoPersons.last], models);
+            },
+            // Expando queries: Filter on expanded String property
+            () async {
+              var query = store.query<ExpandoPerson>(partition: partition)
+                ..filter('foo =', (expandoPersons.last as dynamic).foo);
+              var models = await runQueryWithExponentialBackoff(query, 1);
+              compareModels([expandoPersons.last], models);
+            },
+            // Expando queries: Filter on expanded int property
+            () async {
+              var query = store.query<ExpandoPerson>(partition: partition)
+                ..filter('bar =', (expandoPersons.last as dynamic).bar);
+              var models = await runQueryWithExponentialBackoff(query, 1);
+              compareModels([expandoPersons.last], models);
+            },
+            // Expando queries: Filter normal property with different
+            // propertyName (datastore name is 'NN').
+            () async {
+              var query = store.query<ExpandoPerson>(partition: partition)
+                ..filter('nickname =', expandoPersons.last.nickname);
+              var models = await runQueryWithExponentialBackoff(query, 1);
+              compareModels([expandoPersons.last], models);
+            },
+
+            // Delete results
+            () => store.commit(deletes: allKeys),
+
+            // Wait until the entity deletes are reflected in the indices.
+            () => Future.wait([
+                  waitUntilEntitiesGone(
+                      store, users.map((u) => u.key).toList(), partition),
+                  waitUntilEntitiesGone(store,
+                      expandoPersons.map((u) => u.key).toList(), partition),
+                ]),
+
+            // Make sure queries don't return results
+            () => store.lookup(allKeys).then((List<db.Model?> models) {
+                  expect(models.length, equals(allKeys.length));
+                  for (var model in models) {
+                    expect(model, isNull);
+                  }
+                }),
+          ];
+          return Future.forEach(tests, (dynamic f) => f());
+        });
+      });
+    });
+  });
+}
+
+Future<List<db.Model>> runQueryWithExponentialBackoff(
+    db.Query query, int expectedResults) async {
+  for (var i = 0; i <= 6; i++) {
+    if (i > 0) {
+      // Wait for 0.4s, 0.8s, ..., 12.8s
+      var duration = Duration(milliseconds: 100 * (2 << i));
+      print('Running the query returned fewer results than expected. '
+          'Using exponential backoff: Sleeping for $duration.');
+      await sleep(duration);
+    }
+
+    var models = await query.run().toList();
+    if (models.length >= expectedResults) {
+      return models;
+    }
+  }
+
+  throw Exception(
+      'Tried running a query with exponential backoff, giving up now.');
+}
+
+Future waitUntilEntitiesReady(
+    db.DatastoreDB mdb, List<db.Key> keys, db.Partition partition) {
+  return waitUntilEntitiesHelper(mdb, keys, true, partition);
+}
+
+Future waitUntilEntitiesGone(
+    db.DatastoreDB mdb, List<db.Key> keys, db.Partition partition) {
+  return waitUntilEntitiesHelper(mdb, keys, false, partition);
+}
+
+Future waitUntilEntitiesHelper(
+  db.DatastoreDB mdb,
+  List<db.Key> keys,
+  bool positive,
+  db.Partition partition,
+) async {
+  var done = false;
+  while (!done) {
+    final models = await mdb.query(partition: partition).run().toList();
+
+    done = true;
+    for (var key in keys) {
+      var found = false;
+      for (var model in models) {
+        if (key == model.key) found = true;
+      }
+      if (positive) {
+        if (!found) {
+          done = false;
+        }
+      } else {
+        if (found) {
+          done = false;
+        }
+      }
+    }
+  }
+}
+
+Future main() async {
+  late db.DatastoreDB store;
+
+  var scopes = datastore_impl.DatastoreImpl.scopes;
+  await withAuthClient(scopes, (String project, httpClient) {
+    var datastore = datastore_impl.DatastoreImpl(httpClient, project);
+    return datastore_test.cleanupDB(datastore, null).then((_) {
+      store = db.DatastoreDB(datastore);
+    });
+  });
+
+  runTests(store, null);
+}
diff --git a/pkgs/gcloud/test/db/e2e/metamodel_test_impl.dart b/pkgs/gcloud/test/db/e2e/metamodel_test_impl.dart
new file mode 100644
index 00000000..9ff1721c
--- /dev/null
+++ b/pkgs/gcloud/test/db/e2e/metamodel_test_impl.dart
@@ -0,0 +1,104 @@
+// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+// ignore_for_file: avoid_dynamic_calls
+
+import 'dart:async';
+
+import 'package:gcloud/datastore.dart';
+import 'package:gcloud/db.dart' as db;
+import 'package:gcloud/db/metamodel.dart';
+import 'package:test/test.dart';
+
+List<Entity> buildEntitiesWithDifferentNamespaces() {
+  Key newKey(String? namespace, String kind, int id) {
+    var partition = Partition(namespace);
+    return Key([KeyElement(kind, id)], partition: partition);
+  }
+
+  Entity newEntity(String? namespace, String kind, {int id = 1}) {
+    return Entity(newKey(namespace, kind, id), {'ping': 'pong'});
+  }
+
+  return [
+    newEntity(null, 'NullKind', id: 1),
+    newEntity(null, 'NullKind', id: 2),
+    newEntity(null, 'NullKind2', id: 1),
+    newEntity(null, 'NullKind2', id: 2),
+    newEntity('FooNamespace', 'FooKind', id: 1),
+    newEntity('FooNamespace', 'FooKind', id: 2),
+    newEntity('FooNamespace', 'FooKind2', id: 1),
+    newEntity('FooNamespace', 'FooKind2', id: 2),
+    newEntity('BarNamespace', 'BarKind', id: 1),
+    newEntity('BarNamespace', 'BarKind', id: 2),
+    newEntity('BarNamespace', 'BarKind2', id: 1),
+    newEntity('BarNamespace', 'BarKind2', id: 2),
+  ];
+}
+
+Future sleep(Duration duration) {
+  var completer = Completer();
+  Timer(duration, completer.complete);
+  return completer.future;
+}
+
+void runTests(Datastore datastore, db.DatastoreDB store) {
+  // Shorten this name, so we don't have to break lines at 80 chars.
+  final cond = predicate;
+
+  group('e2e_db_metamodel', () {
+    // NOTE: This test cannot safely be run concurrently, since it's using
+    // fixed keys (i.e. fixed partition + fixed Ids).
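+    // (For context: `Namespace` and `Kind` come from
+    // `package:gcloud/db/metamodel.dart` and map onto the datastore's
+    // special `__namespace__` and `__kind__` kinds, so plain queries
+    // against them enumerate what currently exists, e.g.:
+    //
+    //   store.query<Namespace>().run();
+    //   store.query<Kind>(partition: somePartition).run();
+    // )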
+    test('namespaces__insert_lookup_delete', () {
+      var entities = buildEntitiesWithDifferentNamespaces();
+      var keys = entities.map((e) => e.key).toList();
+
+      return datastore.commit(inserts: entities).then((_) {
+        return sleep(const Duration(seconds: 10)).then((_) {
+          var namespaceQuery = store.query<Namespace>();
+          return namespaceQuery.run().toList().then((namespaces) {
+            expect(namespaces.length, greaterThanOrEqualTo(3));
+            expect(
+                namespaces, contains(cond((dynamic ns) => ns.name == null)));
+            expect(namespaces,
+                contains(cond((dynamic ns) => ns.name == 'FooNamespace')));
+            expect(namespaces,
+                contains(cond((dynamic ns) => ns.name == 'BarNamespace')));
+
+            var futures = <Future>[];
+            for (var namespace in namespaces) {
+              if (!(namespace.name == 'FooNamespace' ||
+                  namespace.name == 'BarNamespace')) {
+                continue;
+              }
+              var partition = store.newPartition(namespace.name!);
+              var kindQuery = store.query<Kind>(partition: partition);
+              futures.add(kindQuery.run().toList().then((List<Kind> kinds) {
+                expect(kinds.length, greaterThanOrEqualTo(2));
+                if (namespace.name == null) {
+                  expect(kinds,
+                      contains(cond((dynamic k) => k.name == 'NullKind')));
+                  expect(kinds,
+                      contains(cond((dynamic k) => k.name == 'NullKind2')));
+                } else if (namespace.name == 'FooNamespace') {
+                  expect(kinds,
+                      contains(cond((dynamic k) => k.name == 'FooKind')));
+                  expect(kinds,
+                      contains(cond((dynamic k) => k.name == 'FooKind2')));
+                } else if (namespace.name == 'BarNamespace') {
+                  expect(kinds,
+                      contains(cond((dynamic k) => k.name == 'BarKind')));
+                  expect(kinds,
+                      contains(cond((dynamic k) => k.name == 'BarKind2')));
+                }
+              }));
+            }
+            return Future.wait(futures).then((_) {
+              expect(datastore.commit(deletes: keys), completes);
+            });
+          });
+        });
+      });
+    });
+  });
+}
diff --git a/pkgs/gcloud/test/db/model_db_test.dart b/pkgs/gcloud/test/db/model_db_test.dart
new file mode 100644
index 00000000..f2506469
--- /dev/null
+++ b/pkgs/gcloud/test/db/model_db_test.dart
@@ -0,0 +1,54 @@
+// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+import 'dart:async';
+
+import 'package:gcloud/db.dart';
+import 'package:test/test.dart';
+
+import 'model_dbs/duplicate_fieldname.dart' as test4;
+import 'model_dbs/duplicate_kind.dart' as test1;
+import 'model_dbs/duplicate_property.dart' as test2;
+import 'model_dbs/multiple_annotations.dart' as test3;
+import 'model_dbs/no_default_constructor.dart' as test5;
+
+void main() {
+  // These unused imports make sure that [ModelDBImpl.fromLibrary()] will find
+  // all the Model/ModelDescription classes.
+ // + // ignore: unnecessary_null_comparison + assert([test1.A, test2.A, test3.A, test4.A, test5.A] != null); + + ModelDBImpl newModelDB(Symbol symbol) => ModelDBImpl.fromLibrary(symbol); + + group('model_db', () { + group('from_library', () { + test('duplicate_kind', () { + expect(Future.sync(() { + newModelDB(#gcloud.db.model_test.duplicate_kind); + }), throwsA(isStateError)); + }); + test('duplicate_property', () { + expect(Future.sync(() { + newModelDB(#gcloud.db.model_test.duplicate_property); + }), throwsA(isStateError)); + }); + test('multiple_annotations', () { + expect(Future.sync(() { + newModelDB(#gcloud.db.model_test.multiple_annotations); + }), throwsA(isStateError)); + }); + test('duplicate_fieldname', () { + expect(Future.sync(() { + newModelDB(#gcloud.db.model_test.duplicate_fieldname); + }), throwsA(isStateError)); + }); + test('no_default_constructor', () { + expect(Future.sync(() { + newModelDB(#gcloud.db.model_test.no_default_constructor); + }), throwsA(isStateError)); + }); + }); + }); +} diff --git a/pkgs/gcloud/test/db/model_dbs/duplicate_fieldname.dart b/pkgs/gcloud/test/db/model_dbs/duplicate_fieldname.dart new file mode 100644 index 00000000..199aabd5 --- /dev/null +++ b/pkgs/gcloud/test/db/model_dbs/duplicate_fieldname.dart @@ -0,0 +1,22 @@ +// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file +// for details. All rights reserved. Use of this source code is governed by a +// BSD-style license that can be found in the LICENSE file. + +// ignore: unnecessary_library_directive +library gcloud.db.model_test.duplicate_fieldname; + +import 'package:gcloud/db.dart' as db; + +@db.Kind() +class A extends db.Model { + @db.IntProperty() + int? foo; +} + +@db.Kind() +class B extends A { + @override + @db.IntProperty(propertyName: 'bar') + // ignore: overridden_fields + int? foo; +} diff --git a/pkgs/gcloud/test/db/model_dbs/duplicate_kind.dart b/pkgs/gcloud/test/db/model_dbs/duplicate_kind.dart new file mode 100644 index 00000000..84fb9b2f --- /dev/null +++ b/pkgs/gcloud/test/db/model_dbs/duplicate_kind.dart @@ -0,0 +1,14 @@ +// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file +// for details. All rights reserved. Use of this source code is governed by a +// BSD-style license that can be found in the LICENSE file. + +// ignore: unnecessary_library_directive +library gcloud.db.model_test.duplicate_kind; + +import 'package:gcloud/db.dart' as db; + +@db.Kind() +class A extends db.Model {} + +@db.Kind(name: 'A') +class B extends db.Model {} diff --git a/pkgs/gcloud/test/db/model_dbs/duplicate_property.dart b/pkgs/gcloud/test/db/model_dbs/duplicate_property.dart new file mode 100644 index 00000000..2c3c9aba --- /dev/null +++ b/pkgs/gcloud/test/db/model_dbs/duplicate_property.dart @@ -0,0 +1,17 @@ +// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file +// for details. All rights reserved. Use of this source code is governed by a +// BSD-style license that can be found in the LICENSE file. + +// ignore: unnecessary_library_directive +library gcloud.db.model_test.duplicate_property; + +import 'package:gcloud/db.dart' as db; + +@db.Kind() +class A extends db.Model { + @db.IntProperty() + int? foo; + + @db.IntProperty(propertyName: 'foo') + int? 
bar; +} diff --git a/pkgs/gcloud/test/db/model_dbs/multiple_annotations.dart b/pkgs/gcloud/test/db/model_dbs/multiple_annotations.dart new file mode 100644 index 00000000..1869a4ed --- /dev/null +++ b/pkgs/gcloud/test/db/model_dbs/multiple_annotations.dart @@ -0,0 +1,12 @@ +// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file +// for details. All rights reserved. Use of this source code is governed by a +// BSD-style license that can be found in the LICENSE file. + +// ignore: unnecessary_library_directive +library gcloud.db.model_test.multiple_annotations; + +import 'package:gcloud/db.dart' as db; + +@db.Kind() +@db.Kind() +class A extends db.Model {} diff --git a/pkgs/gcloud/test/db/model_dbs/no_default_constructor.dart b/pkgs/gcloud/test/db/model_dbs/no_default_constructor.dart new file mode 100644 index 00000000..11809497 --- /dev/null +++ b/pkgs/gcloud/test/db/model_dbs/no_default_constructor.dart @@ -0,0 +1,13 @@ +// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file +// for details. All rights reserved. Use of this source code is governed by a +// BSD-style license that can be found in the LICENSE file. + +// ignore: unnecessary_library_directive +library gcloud.db.model_test.no_default_constructor; + +import 'package:gcloud/db.dart' as db; + +@db.Kind() +class A extends db.Model { + A(int i); +} diff --git a/pkgs/gcloud/test/db/properties_test.dart b/pkgs/gcloud/test/db/properties_test.dart new file mode 100644 index 00000000..25a7145f --- /dev/null +++ b/pkgs/gcloud/test/db/properties_test.dart @@ -0,0 +1,286 @@ +// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file +// for details. All rights reserved. Use of this source code is governed by a +// BSD-style license that can be found in the LICENSE file. 
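+
+// The property tests below exercise validate/encode/decode for each
+// `Property` subtype against hand-written `Key`/`ModelDB` mocks (defined at
+// the end of this file), so no datastore connection is involved. A typical
+// round-trip, as a sketch (`modelDB` here stands for the mock instance):
+//
+//   const prop = IntProperty(required: false);
+//   prop.validate(modelDB, 42);             // -> true
+//   prop.encodeValue(modelDB, 42);          // -> 42
+//   prop.decodePrimitiveValue(modelDB, 42); // -> 42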
+ +// ignore_for_file: only_throw_errors, unreachable_from_main + +import 'dart:typed_data'; + +import 'package:gcloud/datastore.dart' as datastore; +import 'package:gcloud/db.dart'; +import 'package:test/test.dart'; + +void main() { + group('properties', () { + var datastoreKey = datastore.Key([datastore.KeyElement('MyKind', 42)], + partition: datastore.Partition('foonamespace')); + var dbKey = KeyMock(datastoreKey); + var modelDBMock = ModelDBMock(datastoreKey, dbKey); + + test('bool_property', () { + var prop = const BoolProperty(required: true); + expect(prop.validate(modelDBMock, null), isFalse); + + prop = const BoolProperty(required: false); + expect(prop.validate(modelDBMock, null), isTrue); + expect(prop.validate(modelDBMock, true), isTrue); + expect(prop.validate(modelDBMock, false), isTrue); + expect(prop.encodeValue(modelDBMock, null), equals(null)); + expect(prop.encodeValue(modelDBMock, true), equals(true)); + expect(prop.encodeValue(modelDBMock, false), equals(false)); + expect(prop.decodePrimitiveValue(modelDBMock, null), equals(null)); + expect(prop.decodePrimitiveValue(modelDBMock, true), equals(true)); + expect(prop.decodePrimitiveValue(modelDBMock, false), equals(false)); + }); + + test('int_property', () { + var prop = const IntProperty(required: true); + expect(prop.validate(modelDBMock, null), isFalse); + + prop = const IntProperty(required: false); + expect(prop.validate(modelDBMock, null), isTrue); + expect(prop.validate(modelDBMock, 33), isTrue); + expect(prop.encodeValue(modelDBMock, null), equals(null)); + expect(prop.encodeValue(modelDBMock, 42), equals(42)); + expect(prop.decodePrimitiveValue(modelDBMock, null), equals(null)); + expect(prop.decodePrimitiveValue(modelDBMock, 99), equals(99)); + }); + + test('double_property', () { + var prop = const DoubleProperty(required: true); + expect(prop.validate(modelDBMock, null), isFalse); + + prop = const DoubleProperty(required: false); + expect(prop.validate(modelDBMock, null), isTrue); + expect(prop.validate(modelDBMock, 33.0), isTrue); + expect(prop.encodeValue(modelDBMock, null), equals(null)); + expect(prop.encodeValue(modelDBMock, 42.3), equals(42.3)); + expect(prop.decodePrimitiveValue(modelDBMock, null), equals(null)); + expect(prop.decodePrimitiveValue(modelDBMock, 99.1), equals(99.1)); + }); + + test('string_property', () { + var prop = const StringProperty(required: true); + expect(prop.validate(modelDBMock, null), isFalse); + + prop = const StringProperty(required: false); + expect(prop.validate(modelDBMock, null), isTrue); + expect(prop.validate(modelDBMock, 'foobar'), isTrue); + expect(prop.encodeValue(modelDBMock, null), equals(null)); + expect(prop.encodeValue(modelDBMock, 'foo'), equals('foo')); + expect(prop.decodePrimitiveValue(modelDBMock, null), equals(null)); + expect(prop.decodePrimitiveValue(modelDBMock, 'bar'), equals('bar')); + }); + + test('blob_property', () { + var prop = const BlobProperty(required: true); + expect(prop.validate(modelDBMock, null), isFalse); + + prop = const BlobProperty(required: false); + expect(prop.validate(modelDBMock, null), isTrue); + expect(prop.validate(modelDBMock, [1, 2]), isTrue); + expect(prop.encodeValue(modelDBMock, null), equals(null)); + expect( + (prop.encodeValue(modelDBMock, []) as datastore.BlobValue).bytes, + equals([])); + expect( + (prop.encodeValue(modelDBMock, [1, 2]) as datastore.BlobValue).bytes, + equals([1, 2])); + expect( + (prop.encodeValue(modelDBMock, Uint8List.fromList([1, 2])) + as datastore.BlobValue) + .bytes, + equals([1, 2])); + 
expect(prop.decodePrimitiveValue(modelDBMock, null), equals(null)); + expect(prop.decodePrimitiveValue(modelDBMock, datastore.BlobValue([])), + equals([])); + expect( + prop.decodePrimitiveValue(modelDBMock, datastore.BlobValue([5, 6])), + equals([5, 6])); + expect( + prop.decodePrimitiveValue( + modelDBMock, datastore.BlobValue(Uint8List.fromList([5, 6]))), + equals([5, 6])); + }); + + test('datetime_property', () { + var utc99 = DateTime.fromMillisecondsSinceEpoch(99, isUtc: true); + + var prop = const DateTimeProperty(required: true); + expect(prop.validate(modelDBMock, null), isFalse); + + prop = const DateTimeProperty(required: false); + expect(prop.validate(modelDBMock, null), isTrue); + expect(prop.validate(modelDBMock, utc99), isTrue); + expect(prop.encodeValue(modelDBMock, null), equals(null)); + expect(prop.encodeValue(modelDBMock, utc99), equals(utc99)); + expect(prop.decodePrimitiveValue(modelDBMock, null), equals(null)); + expect(prop.decodePrimitiveValue(modelDBMock, 99 * 1000), equals(utc99)); + expect( + prop.decodePrimitiveValue(modelDBMock, 99 * 1000 + 1), equals(utc99)); + expect(prop.decodePrimitiveValue(modelDBMock, utc99), equals(utc99)); + }); + + test('list_property', () { + var prop = const ListProperty(BoolProperty()); + + expect(prop.validate(modelDBMock, null), isFalse); + expect(prop.validate(modelDBMock, []), isTrue); + expect(prop.validate(modelDBMock, [true]), isTrue); + expect(prop.validate(modelDBMock, [true, false]), isTrue); + expect(prop.validate(modelDBMock, [true, false, 1]), isFalse); + expect(prop.encodeValue(modelDBMock, []), equals(null)); + expect(prop.encodeValue(modelDBMock, [true]), equals(true)); + expect( + prop.encodeValue(modelDBMock, [true, false]), equals([true, false])); + expect(prop.encodeValue(modelDBMock, true, forComparison: true), + equals(true)); + expect(prop.encodeValue(modelDBMock, false, forComparison: true), + equals(false)); + expect(prop.encodeValue(modelDBMock, null, forComparison: true), + equals(null)); + expect(prop.decodePrimitiveValue(modelDBMock, null), equals([])); + expect(prop.decodePrimitiveValue(modelDBMock, []), equals([])); + expect(prop.decodePrimitiveValue(modelDBMock, true), equals([true])); + expect(prop.decodePrimitiveValue(modelDBMock, [true, false]), + equals([true, false])); + }); + + test('composed_list_property', () { + var prop = const ListProperty(CustomProperty()); + + var c1 = Custom()..customValue = 'c1'; + var c2 = Custom()..customValue = 'c2'; + + expect(prop.validate(modelDBMock, null), isFalse); + expect(prop.validate(modelDBMock, []), isTrue); + expect(prop.validate(modelDBMock, [c1]), isTrue); + expect(prop.validate(modelDBMock, [c1, c2]), isTrue); + expect(prop.validate(modelDBMock, [c1, c2, 1]), isFalse); + expect(prop.encodeValue(modelDBMock, []), equals(null)); + expect(prop.encodeValue(modelDBMock, [c1]), equals(c1.customValue)); + expect(prop.encodeValue(modelDBMock, [c1, c2]), + equals([c1.customValue, c2.customValue])); + expect(prop.decodePrimitiveValue(modelDBMock, null), equals([])); + expect(prop.decodePrimitiveValue(modelDBMock, []), equals([])); + expect( + prop.decodePrimitiveValue(modelDBMock, c1.customValue), equals([c1])); + expect( + prop.decodePrimitiveValue( + modelDBMock, [c1.customValue, c2.customValue]), + equals([c1, c2])); + }); + + test('modelkey_property', () { + var prop = const ModelKeyProperty(required: true); + expect(prop.validate(modelDBMock, null), isFalse); + + prop = const ModelKeyProperty(required: false); + expect(prop.validate(modelDBMock, null), 
isTrue);
+      expect(prop.validate(modelDBMock, dbKey), isTrue);
+      expect(prop.validate(modelDBMock, datastoreKey), isFalse);
+      expect(prop.encodeValue(modelDBMock, null), equals(null));
+      expect(prop.encodeValue(modelDBMock, dbKey), equals(datastoreKey));
+      expect(prop.decodePrimitiveValue(modelDBMock, null), equals(null));
+      expect(
+          prop.decodePrimitiveValue(modelDBMock, datastoreKey), equals(dbKey));
+    });
+  });
+}
+
+class Custom {
+  String? customValue;
+
+  @override
+  int get hashCode => customValue.hashCode;
+
+  @override
+  bool operator ==(Object other) {
+    return other is Custom && other.customValue == customValue;
+  }
+}
+
+class CustomProperty extends StringProperty {
+  const CustomProperty(
+      {String? propertyName, bool required = false, bool indexed = true});
+
+  @override
+  bool validate(ModelDB db, Object? value) {
+    if (required && value == null) return false;
+    return value == null || value is Custom;
+  }
+
+  @override
+  Object? decodePrimitiveValue(ModelDB db, Object? value) {
+    if (value == null) return null;
+    return Custom()..customValue = value as String;
+  }
+
+  @override
+  Object? encodeValue(ModelDB db, Object? value, {bool forComparison = false}) {
+    if (value == null) return null;
+    return (value as Custom).customValue;
+  }
+}
+
+class KeyMock implements Key {
+  final datastore.Key _datastoreKey;
+
+  KeyMock(this._datastoreKey);
+
+  @override
+  Object id = 1;
+  @override
+  Type? type;
+  @override
+  Key get parent => this;
+  @override
+  bool get isEmpty => false;
+  @override
+  Partition get partition => throw UnimplementedError('not mocked');
+  datastore.Key get datastoreKey => _datastoreKey;
+  @override
+  Key<T> append<T>(Type modelType, {T? id}) =>
+      throw UnimplementedError('not mocked');
+  @override
+  Key<U> cast<U>() => Key<U>(parent, type, id as U);
+  @override
+  // ignore: hash_and_equals
+  int get hashCode => 1;
+}
+
+class ModelDBMock implements ModelDB {
+  final datastore.Key _datastoreKey;
+  final Key _dbKey;
+  ModelDBMock(this._datastoreKey, this._dbKey);
+
+  @override
+  Key fromDatastoreKey(datastore.Key datastoreKey) {
+    if (!identical(_datastoreKey, datastoreKey)) {
+      throw 'Broken test';
+    }
+    return _dbKey;
+  }
+
+  @override
+  datastore.Key toDatastoreKey(Key key) {
+    if (!identical(_dbKey, key)) {
+      throw 'Broken test';
+    }
+    return _datastoreKey;
+  }
+
+  @override
+  T? fromDatastoreEntity<T extends Model>(datastore.Entity? entity) => null;
+  @override
+  datastore.Entity toDatastoreEntity(Model model) =>
+      throw UnimplementedError('not mocked');
+  @override
+  String? fieldNameToPropertyName(String kind, String fieldName) => null;
+  @override
+  String kindName(Type type) => throw UnimplementedError('not mocked');
+  @override
+  Object? toDatastoreValue(String kind, String fieldName, Object? value,
+          {bool forComparison = false}) =>
+      null;
+}
diff --git a/pkgs/gcloud/test/db_all_e2e_test.dart b/pkgs/gcloud/test/db_all_e2e_test.dart
new file mode 100644
index 00000000..af37670f
--- /dev/null
+++ b/pkgs/gcloud/test/db_all_e2e_test.dart
@@ -0,0 +1,71 @@
+// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
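+
+// Chains the datastore, db and metamodel e2e suites into a single file so
+// they run sequentially against one namespace; per .status this avoids
+// concurrent tests touching the same data, with sleeps in between to allow
+// for eventual consistency.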
+
+@Tags(['e2e'])
+@Timeout(Duration(seconds: 120))
+
+library gcloud.test.db_all_test;
+
+import 'dart:async';
+import 'dart:io';
+
+import 'package:gcloud/datastore.dart';
+import 'package:gcloud/db.dart' as db;
+import 'package:gcloud/src/datastore_impl.dart' as datastore_impl;
+import 'package:http/http.dart';
+import 'package:test/test.dart';
+
+import 'common_e2e.dart';
+import 'datastore/e2e/datastore_test_impl.dart' as datastore_test;
+import 'db/e2e/db_test_impl.dart' as db_test;
+import 'db/e2e/metamodel_test_impl.dart' as db_metamodel_test;
+
+Future main() async {
+  var scopes = datastore_impl.DatastoreImpl.scopes;
+  var now = DateTime.now().millisecondsSinceEpoch;
+  var namespace = '${Platform.operatingSystem}$now';
+
+  late Datastore datastore;
+  late db.DatastoreDB datastoreDB;
+  Client? client;
+
+  await withAuthClient(scopes, (String project, httpClient) async {
+    datastore =
+        Datastore.withRetry(datastore_impl.DatastoreImpl(httpClient, project));
+    datastoreDB = db.DatastoreDB(datastore);
+    client = httpClient;
+  });
+
+  tearDownAll(() async {
+    client?.close();
+  });
+
+  group('datastore_test', () {
+    tearDown(() async {
+      await datastore_test.cleanupDB(datastore, namespace);
+    });
+
+    datastore_test.runTests(datastore, namespace);
+  });
+
+  test('sleep-between-test-suites', () {
+    expect(Future.delayed(const Duration(seconds: 10)), completes);
+  });
+
+  group('db_test', () {
+    db_test.runTests(datastoreDB, namespace);
+  });
+
+  test('sleep-between-test-suites', () {
+    expect(Future.delayed(const Duration(seconds: 10)), completes);
+  });
+
+  group('db_metamodel_test', () {
+    tearDown(() async {
+      await datastore_test.cleanupDB(datastore, namespace);
+    });
+
+    db_metamodel_test.runTests(datastore, datastoreDB);
+  });
+}
diff --git a/pkgs/gcloud/test/index.yaml b/pkgs/gcloud/test/index.yaml
new file mode 100644
index 00000000..96048adb
--- /dev/null
+++ b/pkgs/gcloud/test/index.yaml
@@ -0,0 +1,47 @@
+# To run tests for datastore, the following index.yaml must be declared for
+# the project using:
+# $ gcloud --project "$GCLOUD_E2E_TEST_PROJECT" datastore indexes create test/index.yaml
+indexes:
+# Needed by tests in: test/db/e2e/db_test_impl.dart
+- kind: User
+  ancestor: no
+  properties:
+  - name: name
+    direction: asc
+  - name: nickname
+    direction: desc
+- kind: User
+  ancestor: no
+  properties:
+  - name: name
+    direction: desc
+  - name: nickname
+    direction: desc
+- kind: User
+  ancestor: no
+  properties:
+  - name: name
+    direction: desc
+  - name: nickname
+    direction: asc
+- kind: User
+  ancestor: no
+  properties:
+  - name: language
+    direction: asc
+  - name: name
+    direction: asc
+# Needed by tests in: test/datastore/e2e/datastore_test_impl.dart
+- kind: TestQueryKind
+  ancestor: no
+  properties:
+  - name: indexedProp
+    direction: asc
+  - name: blobPropertyIndexed
+    direction: asc
+- kind: TestQueryKind
+  ancestor: no
+  properties:
+  - name: listproperty
+  - name: test_property
+    direction: desc
diff --git a/pkgs/gcloud/test/pubsub/pubsub_e2e_test.dart b/pkgs/gcloud/test/pubsub/pubsub_e2e_test.dart
new file mode 100644
index 00000000..b64d2319
--- /dev/null
+++ b/pkgs/gcloud/test/pubsub/pubsub_e2e_test.dart
@@ -0,0 +1,196 @@
+// Copyright (c) 2015, the Dart project authors. Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
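+
+// Runs against the real Pub/Sub API. Assumes the setup used by
+// withAuthClient in common_e2e.dart (an e2e test project plus credentials;
+// test/index.yaml shows the $GCLOUD_E2E_TEST_PROJECT project flag). One way
+// to run just this suite, given that setup, is:
+//
+//   dart test --platform vm -t e2e test/pubsub/pubsub_e2e_test.dart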
+ +// ignore_for_file: only_throw_errors + +@Tags(['e2e']) +@Timeout(Duration(seconds: 120)) +library; + +import 'package:gcloud/pubsub.dart'; +import 'package:http/http.dart'; +import 'package:test/test.dart'; + +import '../common_e2e.dart'; + +void main() { + late PubSub pubsub; + late String project; + late String prefix; + late Client client; + + setUpAll(() async { + // Generate a unique prefix for all names generated by the tests. + var id = DateTime.now().millisecondsSinceEpoch; + prefix = 'dart-e2e-test-$id'; + await withAuthClient(PubSub.SCOPES, (p, httpClient) async { + // Share the same pubsub connection for all tests. + pubsub = PubSub(httpClient, p); + project = p; + client = httpClient; + }); + }); + + tearDownAll(() async { + var leftovers = false; + var cleanupErrors = false; + + print('checking for leftover subscriptions'); + try { + // Try to delete any leftover subscriptions from the tests. + var subscriptions = await pubsub.listSubscriptions().toList(); + for (var subscription in subscriptions) { + if (subscription.name.startsWith(prefix)) { + try { + print('WARNING: Removing leftover subscription ' + '${subscription.name}'); + leftovers = true; + await pubsub.deleteSubscription(subscription.name); + } catch (e, s) { + print('Error during test cleanup of subscription ' + '${subscription.name} ($e)'); + print(s); + cleanupErrors = true; + } + } + } + } catch (e, s) { + print('Error checking for leftover subscriptions ($e)'); + print(s); + cleanupErrors = true; + } + + // Try to delete any leftover topics from the tests. + print('checking for leftover topics'); + try { + var topics = await pubsub.listTopics().toList(); + for (var topic in topics) { + if (topic.name.startsWith(prefix)) { + try { + print('WARNING: Removing leftover topic ${topic.name}'); + leftovers = true; + await pubsub.deleteTopic(topic.name); + } catch (e, s) { + print('Error during test cleanup of topic ${topic.name} ($e)'); + print(s); + cleanupErrors = true; + } + } + } + } catch (e, s) { + print('Error checking for leftover topics ($e)'); + print(s); + cleanupErrors = true; + } + + if (leftovers) { + throw 'Test terminated with leftover topics and/or subscriptions'; + } + + if (cleanupErrors) { + throw 'Test encountered errors while checking for leftovers'; + } + + client.close(); + }); + + String generateTopicName() { + var id = DateTime.now().millisecondsSinceEpoch; + return '$prefix-topic-$id'; + } + + String generateSubscriptionName() { + var id = DateTime.now().millisecondsSinceEpoch; + return '$prefix-subscription-$id'; + } + + group('topic', () { + test('create-lookup-delete', () async { + var topicName = generateTopicName(); + var topic = await pubsub.createTopic(topicName); + expect(topic.name, topicName); + topic = await pubsub.lookupTopic(topicName); + expect(topic.name, topicName); + expect(topic.project, project); + expect(topic.absoluteName, 'projects/$project/topics/$topicName'); + expect(await pubsub.deleteTopic(topicName), isNull); + }); + + test('create-list-delete', () async { + const count = 5; + + var topicPrefix = generateTopicName(); + + String name(i) => '$topicPrefix-$i'; + + for (var i = 0; i < count; i++) { + await pubsub.createTopic(name(i)); + } + var topics = await pubsub.listTopics().map((t) => t.name).toList(); + for (var i = 0; i < count; i++) { + expect(topics.contains(name(i)), isTrue); + await pubsub.deleteTopic(name(i)); + } + }); + }); + + group('subscription', () { + test('create-lookup-delete', () async { + var topicName = generateTopicName(); + var 
subscriptionName = generateSubscriptionName(); + await pubsub.createTopic(topicName); + var subscription = + await pubsub.createSubscription(subscriptionName, topicName); + expect(subscription.name, subscriptionName); + subscription = await pubsub.lookupSubscription(subscriptionName); + expect(subscription.name, subscriptionName); + expect(subscription.project, project); + expect(subscription.absoluteName, + 'projects/$project/subscriptions/$subscriptionName'); + expect(subscription.isPull, isTrue); + expect(subscription.isPush, isFalse); + expect(await pubsub.deleteSubscription(subscriptionName), isNull); + expect(await pubsub.deleteTopic(topicName), isNull); + }); + + test('create-list-delete', () async { + const count = 5; + var topicName = generateTopicName(); + await pubsub.createTopic(topicName); + + var subscriptionPrefix = generateSubscriptionName(); + + String name(i) => '$subscriptionPrefix-$i'; + + for (var i = 0; i < count; i++) { + await pubsub.createSubscription(name(i), topicName); + } + var subscriptions = + await pubsub.listSubscriptions().map((t) => t.name).toList(); + for (var i = 0; i < count; i++) { + expect(subscriptions.contains(name(i)), isTrue); + await pubsub.deleteSubscription(name(i)); + } + await pubsub.deleteTopic(topicName); + }); + + test('push-pull', () async { + var topicName = generateTopicName(); + var subscriptionName = generateSubscriptionName(); + var topic = await pubsub.createTopic(topicName); + var subscription = + await pubsub.createSubscription(subscriptionName, topicName); + expect(await subscription.pull(), isNull); + + expect(await topic.publishString('Hello, world!'), isNull); + var pullEvent = await subscription.pull(); + expect(pullEvent, isNotNull); + expect(pullEvent!.message.asString, 'Hello, world!'); + expect(await pullEvent.acknowledge(), isNull); + + await pubsub.deleteSubscription(subscriptionName); + await pubsub.deleteTopic(topicName); + }, timeout: const Timeout(Duration(minutes: 2))); + }); +} diff --git a/pkgs/gcloud/test/pubsub/pubsub_test.dart b/pkgs/gcloud/test/pubsub/pubsub_test.dart new file mode 100644 index 00000000..8994b133 --- /dev/null +++ b/pkgs/gcloud/test/pubsub/pubsub_test.dart @@ -0,0 +1,1079 @@ +// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file +// for details. All rights reserved. Use of this source code is governed by a +// BSD-style license that can be found in the LICENSE file. 
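+
+// Pure unit tests: every request the PubSub client issues is intercepted by
+// MockClient from test/common.dart, matched on HTTP method and path, and
+// answered with a canned JSON response; nothing here touches the network.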
+ +// ignore_for_file: only_throw_errors + +import 'dart:async'; +import 'dart:convert'; + +import 'package:gcloud/pubsub.dart'; +import 'package:googleapis/pubsub/v1.dart' as pubsub; +import 'package:http/http.dart' as http; +import 'package:test/test.dart'; + +import '../common.dart'; +import '../common_e2e.dart'; + +const _hostName = 'pubsub.googleapis.com'; +const _rootPath = '/v1/'; + +MockClient mockClient() => MockClient(_hostName, _rootPath); + +void main() { + group('api', () { + var badTopicNames = [ + 'projects/', + 'projects/topics', + 'projects/$testProject', + 'projects/$testProject/', + 'projects/$testProject/topics', + 'projects/$testProject/topics/' + ]; + + var badSubscriptionNames = [ + 'projects/', + 'projects/subscriptions', + 'projects/$testProject', + 'projects/$testProject/', + 'projects/$testProject/subscriptions', + 'projects/$testProject/subscriptions/' + ]; + + group('topic', () { + var name = 'test-topic'; + var absoluteName = 'projects/$testProject/topics/test-topic'; + + test('create', () { + var mock = mockClient(); + mock.register( + 'PUT', + 'projects/$testProject/topics/test-topic', + expectAsync1((http.Request request) { + expect(request.body, '{}'); + return mock.respond(pubsub.Topic()..name = absoluteName); + }, count: 2)); + + var api = PubSub(mock, testProject); + return api.createTopic(name).then(expectAsync1((topic) { + expect(topic.name, name); + expect(topic.project, testProject); + expect(topic.absoluteName, absoluteName); + return api.createTopic(absoluteName).then(expectAsync1((topic) { + expect(topic.name, name); + expect(topic.absoluteName, absoluteName); + })); + })); + }); + + test('create-error', () { + var mock = mockClient(); + var api = PubSub(mock, testProject); + for (var name in badTopicNames) { + expect(() => api.createTopic(name), throwsArgumentError); + } + for (var name in badSubscriptionNames) { + expect(() => api.createTopic(name), throwsArgumentError); + } + }); + + test('delete', () { + var mock = mockClient(); + mock.register( + 'DELETE', + 'projects/test-project/topics/test-topic', + expectAsync1((request) { + expect(request.body.length, 0); + return mock.respondEmpty(); + }, count: 2)); + + var api = PubSub(mock, testProject); + return api.deleteTopic(name).then(expectAsync1((result) { + expect(result, isNull); + return api.deleteTopic(absoluteName).then(expectAsync1((topic) { + expect(result, isNull); + })); + })); + }); + + test('delete-error', () { + var mock = mockClient(); + var api = PubSub(mock, testProject); + for (var name in badTopicNames) { + expect(() => api.deleteTopic(name), throwsArgumentError); + } + for (var name in badSubscriptionNames) { + expect(() => api.deleteTopic(name), throwsArgumentError); + } + }); + + test('lookup', () { + var mock = mockClient(); + mock.register( + 'GET', + 'projects/test-project/topics/test-topic', + expectAsync1((request) { + expect(request.body.length, 0); + return mock.respond(pubsub.Topic()..name = absoluteName); + }, count: 2)); + + var api = PubSub(mock, testProject); + return api.lookupTopic(name).then(expectAsync1((topic) { + expect(topic.name, name); + expect(topic.project, testProject); + expect(topic.absoluteName, absoluteName); + return api.lookupTopic(absoluteName).then(expectAsync1((topic) { + expect(topic.name, name); + expect(topic.absoluteName, absoluteName); + })); + })); + }); + + test('lookup-error', () { + var mock = mockClient(); + var api = PubSub(mock, testProject); + for (var name in badTopicNames) { + expect(() => api.lookupTopic(name), 
throwsArgumentError); + } + for (var name in badSubscriptionNames) { + expect(() => api.lookupTopic(name), throwsArgumentError); + } + }); + + group('query', () { + void addTopics( + pubsub.ListTopicsResponse response, int first, int count) { + response.topics = []; + for (var i = 0; i < count; i++) { + response.topics!.add(pubsub.Topic()..name = 'topic-${first + i}'); + } + } + + // Mock that expect/generates [n] topics in pages of page size + // [pageSize]. + void registerQueryMock( + MockClient mock, + int n, + int pageSize, [ + int? totalCalls, + ]) { + var totalPages = (n + pageSize - 1) ~/ pageSize; + // No items still generate one request. + if (totalPages == 0) totalPages = 1; + // Can pass in total calls if this mock is overwritten before all + // expected pages are done, e.g. when testing errors. + totalCalls ??= totalPages; + var pageCount = 0; + mock.register( + 'GET', + 'projects/$testProject/topics', + expectAsync1((request) { + pageCount++; + expect(request.url.queryParameters['pageSize'], '$pageSize'); + expect(request.body.length, 0); + if (pageCount > 1) { + expect(request.url.queryParameters['pageToken'], 'next-page'); + } + + var response = pubsub.ListTopicsResponse(); + var first = (pageCount - 1) * pageSize + 1; + if (pageCount < totalPages) { + response.nextPageToken = 'next-page'; + addTopics(response, first, pageSize); + } else { + addTopics(response, first, n - (totalPages - 1) * pageSize); + } + return mock.respond(response); + }, count: totalCalls)); + } + + group('list', () { + Future q(int count) { + var mock = mockClient(); + registerQueryMock(mock, count, 50); + + var api = PubSub(mock, testProject); + return api + .listTopics() + .listen(expectAsync1((_) {}, count: count)) + .asFuture(); + } + + test('simple', () { + return q(0) + .then((_) => q(1)) + .then((_) => q(1)) + .then((_) => q(49)) + .then((_) => q(50)) + .then((_) => q(51)) + .then((_) => q(99)) + .then((_) => q(100)) + .then((_) => q(101)) + .then((_) => q(170)); + }); + + test('immediate-pause-resume', () { + var mock = mockClient(); + registerQueryMock(mock, 70, 50); + + var api = PubSub(mock, testProject); + api.listTopics().listen(expectAsync1(((_) {}), count: 70), + onDone: expectAsync0(() {})) + ..pause() + ..resume() + ..pause() + ..resume(); + }); + + test('pause-resume', () { + var mock = mockClient(); + registerQueryMock(mock, 70, 50); + + var api = PubSub(mock, testProject); + var count = 0; + late StreamSubscription subscription; + subscription = api.listTopics().listen( + expectAsync1(((_) { + subscription + ..pause() + ..resume() + ..pause(); + if ((count % 2) == 0) { + subscription.resume(); + } else { + scheduleMicrotask(() => subscription.resume()); + } + return; + }), count: 70), + onDone: expectAsync0(() {})) + ..pause(); + scheduleMicrotask(() => subscription.resume()); + addTearDown(() => subscription.cancel()); + }); + + test('immediate-cancel', () { + var mock = mockClient(); + registerQueryMock(mock, 70, 50, 1); + + var api = PubSub(mock, testProject); + api + .listTopics() + .listen((_) => throw 'Unexpected', + onDone: () => throw 'Unexpected') + .cancel(); + }); + + test('cancel', () { + var mock = mockClient(); + registerQueryMock(mock, 170, 50, 1); + + var api = PubSub(mock, testProject); + late StreamSubscription subscription; + subscription = api.listTopics().listen( + expectAsync1((_) => subscription.cancel()), + onDone: () => throw 'Unexpected'); + }); + + test('error', () { + void runTest(bool withPause) { + // Test error on first GET request. 
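+              // The mock answers the first request with HTTP 500, which the
+              // listTopics() stream must surface via onError before closing
+              // normally (onDone fires as well).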
+ var mock = mockClient(); + mock.register('GET', 'projects/$testProject/topics', + expectAsync1((request) { + return mock.respondError(500); + })); + var api = PubSub(mock, testProject); + StreamSubscription subscription; + subscription = api.listTopics().listen((_) => throw 'Unexpected', + onDone: expectAsync0(() {}), + onError: + expectAsync1((e) => e is pubsub.DetailedApiRequestError)); + if (withPause) { + subscription.pause(); + scheduleMicrotask(() => subscription.resume()); + } + addTearDown(() => subscription.cancel()); + } + + runTest(false); + runTest(true); + }); + + test('error-2', () { + // Test error on second GET request. + void runTest(bool withPause) { + var mock = mockClient(); + registerQueryMock(mock, 51, 50, 1); + + var api = PubSub(mock, testProject); + + var count = 0; + late StreamSubscription subscription; + subscription = api.listTopics().listen( + expectAsync1((_) { + count++; + if (count == 50) { + if (withPause) { + subscription.pause(); + scheduleMicrotask(() => subscription.resume()); + } + mock.clear(); + mock.register('GET', 'projects/$testProject/topics', + expectAsync1((request) { + return mock.respondError(500); + })); + } + return; + }, count: 50), + onDone: expectAsync0(() {}), + onError: expectAsync1( + (e) => e is pubsub.DetailedApiRequestError), + ); + addTearDown(() => subscription.cancel()); + } + + runTest(false); + runTest(true); + }); + }); + + group('page', () { + test('empty', () { + var mock = mockClient(); + registerQueryMock(mock, 0, 50); + + var api = PubSub(mock, testProject); + return api.pageTopics().then(expectAsync1((page) { + expect(page.items.length, 0); + expect(page.isLast, isTrue); + expect(() => page.next(), throwsStateError); + + mock.clear(); + registerQueryMock(mock, 0, 20); + return api.pageTopics(pageSize: 20).then(expectAsync1((page) { + expect(page.items.length, 0); + expect(page.isLast, isTrue); + expect(() => page.next(), throwsStateError); + })); + })); + }); + + test('single', () { + var mock = mockClient(); + registerQueryMock(mock, 10, 50); + + var api = PubSub(mock, testProject); + return api.pageTopics().then(expectAsync1((page) { + expect(page.items.length, 10); + expect(page.isLast, isTrue); + expect(() => page.next(), throwsStateError); + + mock.clear(); + registerQueryMock(mock, 20, 20); + return api.pageTopics(pageSize: 20).then(expectAsync1((page) { + expect(page.items.length, 20); + expect(page.isLast, isTrue); + expect(() => page.next(), throwsStateError); + })); + })); + }); + + test('multiple', () { + Future runTest(int n, int pageSize) { + var totalPages = (n + pageSize - 1) ~/ pageSize; + var pageCount = 0; + + var completer = Completer(); + var mock = mockClient(); + registerQueryMock(mock, n, pageSize); + + void handlePage(Page page) { + pageCount++; + expect(page.isLast, pageCount == totalPages); + expect(page.items.length, + page.isLast ? 
n - (totalPages - 1) * pageSize : pageSize); + if (!page.isLast) { + page.next().then(expectAsync1(handlePage)); + } else { + expect(() => page.next(), throwsStateError); + expect(pageCount, totalPages); + completer.complete(); + } + } + + var api = PubSub(mock, testProject); + api.pageTopics(pageSize: pageSize).then(expectAsync1(handlePage)); + + return completer.future; + } + + return runTest(70, 50) + .then((_) => runTest(99, 1)) + .then((_) => runTest(99, 50)) + .then((_) => runTest(99, 98)) + .then((_) => runTest(99, 99)) + .then((_) => runTest(99, 100)) + .then((_) => runTest(100, 1)) + .then((_) => runTest(100, 50)) + .then((_) => runTest(100, 100)) + .then((_) => runTest(101, 50)); + }); + }); + }); + }); + + group('subscription', () { + var name = 'test-subscription'; + var absoluteName = + 'projects/$testProject/subscriptions/test-subscription'; + var topicName = 'test-topic'; + var absoluteTopicName = 'projects/$testProject/topics/test-topic'; + + test('create', () { + var mock = mockClient(); + mock.register( + 'PUT', + 'projects/$testProject/subscriptions', + expectAsync1((request) { + var requestSubscription = jsonDecode(request.body) as Map; + expect(requestSubscription['topic'], absoluteTopicName); + return mock.respond(pubsub.Subscription()..name = absoluteName); + }, count: 2)); + + var api = PubSub(mock, testProject); + return api + .createSubscription(name, topicName) + .then(expectAsync1((subscription) { + expect(subscription.name, name); + expect(subscription.absoluteName, absoluteName); + return api + .createSubscription(absoluteName, absoluteTopicName) + .then(expectAsync1((subscription) { + expect(subscription.name, name); + expect(subscription.project, testProject); + expect(subscription.absoluteName, absoluteName); + })); + })); + }); + + test('create-error', () { + var mock = mockClient(); + var api = PubSub(mock, testProject); + for (var name in badSubscriptionNames) { + expect(() => api.createSubscription(name, 'test-topic'), + throwsArgumentError); + } + for (var name in badTopicNames) { + expect(() => api.createSubscription('test-subscription', name), + throwsArgumentError); + } + }); + + test('delete', () { + var mock = mockClient(); + mock.register( + 'DELETE', + 'projects/$testProject/subscriptions', + expectAsync1((request) { + expect(request.body.length, 0); + return mock.respondEmpty(); + }, count: 2)); + + var api = PubSub(mock, testProject); + return api.deleteSubscription(name).then(expectAsync1((result) { + expect(result, isNull); + return api + .deleteSubscription(absoluteName) + .then(expectAsync1((topic) { + expect(result, isNull); + })); + })); + }); + + test('delete-error', () { + var mock = mockClient(); + var api = PubSub(mock, testProject); + for (var name in badSubscriptionNames) { + expect(() => api.deleteSubscription(name), throwsArgumentError); + } + for (var name in badTopicNames) { + expect(() => api.deleteSubscription(name), throwsArgumentError); + } + }); + + test('lookup', () { + var mock = mockClient(); + mock.register( + 'GET', + RegExp('projects/$testProject/subscriptions'), + expectAsync1((request) { + expect(request.body.length, 0); + return mock.respond(pubsub.Subscription()..name = absoluteName); + }, count: 2)); + + var api = PubSub(mock, testProject); + return api.lookupSubscription(name).then(expectAsync1((subscription) { + expect(subscription.name, name); + expect(subscription.absoluteName, absoluteName); + return api + .lookupSubscription(absoluteName) + .then(expectAsync1((subscription) { + expect(subscription.name, 
name); + expect(subscription.project, testProject); + expect(subscription.absoluteName, absoluteName); + })); + })); + }); + + test('lookup-error', () { + var mock = mockClient(); + var api = PubSub(mock, testProject); + for (var name in badSubscriptionNames) { + expect(() => api.lookupSubscription(name), throwsArgumentError); + } + for (var name in badTopicNames) { + expect(() => api.lookupSubscription(name), throwsArgumentError); + } + }); + + group('query', () { + void addSubscriptions( + pubsub.ListSubscriptionsResponse response, int first, int count) { + response.subscriptions = []; + for (var i = 0; i < count; i++) { + response.subscriptions! + .add(pubsub.Subscription()..name = 'subscription-${first + i}'); + } + } + + // Mock that expect/generates [n] subscriptions in pages of page size + // [pageSize]. + void registerQueryMock(MockClient mock, int n, int pageSize, + {String? topic, int? totalCalls}) { + var totalPages = (n + pageSize - 1) ~/ pageSize; + // No items still generate one request. + if (totalPages == 0) totalPages = 1; + // Can pass in total calls if this mock is overwritten before all + // expected pages are done, e.g. when testing errors. + totalCalls ??= totalPages; + var pageCount = 0; + mock.register( + 'GET', + 'projects/$testProject/subscriptions', + expectAsync1((request) { + pageCount++; + expect(request.url.queryParameters['pageSize'], '$pageSize'); + expect(request.body.length, 0); + if (pageCount > 1) { + expect(request.url.queryParameters['pageToken'], 'next-page'); + } + + var response = pubsub.ListSubscriptionsResponse(); + var first = (pageCount - 1) * pageSize + 1; + if (pageCount < totalPages) { + response.nextPageToken = 'next-page'; + addSubscriptions(response, first, pageSize); + } else { + addSubscriptions( + response, first, n - (totalPages - 1) * pageSize); + } + return mock.respond(response); + }, count: totalCalls)); + } + + group('list', () { + Future q(String? topic, int count) { + var mock = mockClient(); + registerQueryMock(mock, count, 50, topic: topic); + + var api = PubSub(mock, testProject); + return (topic == null + ? 
api.listSubscriptions() + : api.listSubscriptions(topic)) + .listen(expectAsync1((_) {}, count: count)) + .asFuture(); + } + + test('simple', () { + return q(null, 0) + .then((_) => q('topic', 0)) + .then((_) => q(null, 1)) + .then((_) => q('topic', 1)) + .then((_) => q(null, 10)) + .then((_) => q('topic', 10)) + .then((_) => q(null, 49)) + .then((_) => q('topic', 49)) + .then((_) => q(null, 50)) + .then((_) => q('topic', 50)) + .then((_) => q(null, 51)) + .then((_) => q('topic', 51)) + .then((_) => q(null, 99)) + .then((_) => q('topic', 99)) + .then((_) => q(null, 100)) + .then((_) => q('topic', 100)) + .then((_) => q(null, 101)) + .then((_) => q('topic', 101)) + .then((_) => q(null, 170)) + .then((_) => q('topic', 170)); + }); + + test('immediate-pause-resume', () { + var mock = mockClient(); + registerQueryMock(mock, 70, 50); + + var api = PubSub(mock, testProject); + api.listSubscriptions().listen(expectAsync1(((_) {}), count: 70), + onDone: expectAsync0(() {})) + ..pause() + ..resume() + ..pause() + ..resume(); + }); + + test('pause-resume', () { + var mock = mockClient(); + registerQueryMock(mock, 70, 50); + + var api = PubSub(mock, testProject); + var count = 0; + late StreamSubscription subscription; + subscription = api.listSubscriptions().listen( + expectAsync1(((_) { + subscription + ..pause() + ..resume() + ..pause(); + if ((count % 2) == 0) { + subscription.resume(); + } else { + scheduleMicrotask(() => subscription.resume()); + } + return; + }), count: 70), + onDone: expectAsync0(() {})) + ..pause(); + scheduleMicrotask(() => subscription.resume()); + addTearDown(() => subscription.cancel()); + }); + + test('immediate-cancel', () { + var mock = mockClient(); + registerQueryMock(mock, 70, 50, totalCalls: 1); + + var api = PubSub(mock, testProject); + api + .listSubscriptions() + .listen((_) => throw 'Unexpected', + onDone: () => throw 'Unexpected') + .cancel(); + }); + + test('cancel', () { + var mock = mockClient(); + registerQueryMock(mock, 170, 50, totalCalls: 1); + + var api = PubSub(mock, testProject); + late StreamSubscription subscription; + subscription = api.listSubscriptions().listen( + expectAsync1((_) => subscription.cancel()), + onDone: () => throw 'Unexpected'); + }); + + test('error', () { + void runTest(bool withPause) { + // Test error on first GET request. + var mock = mockClient(); + mock.register('GET', 'projects/$testProject/subscriptions', + expectAsync1((request) { + return mock.respondError(500); + })); + var api = PubSub(mock, testProject); + StreamSubscription subscription; + subscription = api.listSubscriptions().listen( + (_) => throw 'Unexpected', + onDone: expectAsync0(() {}), + onError: + expectAsync1((e) => e is pubsub.DetailedApiRequestError)); + addTearDown(() => subscription.cancel()); + if (withPause) { + subscription.pause(); + scheduleMicrotask(() => subscription.resume()); + } + } + + runTest(false); + runTest(true); + }); + + test('error-2', () { + void runTest(bool withPause) { + // Test error on second GET request. 
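+              // The first page of 50 items succeeds; the mock is then
+              // swapped out to return HTTP 500, so requesting the second
+              // page yields an error followed by a normal stream close.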
+ var mock = mockClient(); + registerQueryMock(mock, 51, 50, totalCalls: 1); + + var api = PubSub(mock, testProject); + + var count = 0; + late StreamSubscription subscription; + subscription = api.listSubscriptions().listen( + expectAsync1((_) { + count++; + if (count == 50) { + if (withPause) { + subscription.pause(); + scheduleMicrotask(() => subscription.resume()); + } + mock.clear(); + mock.register( + 'GET', 'projects/$testProject/subscriptions', + expectAsync1((request) { + return mock.respondError(500); + })); + } + return; + }, count: 50), + onDone: expectAsync0(() {}), + onError: expectAsync1( + (e) => e is pubsub.DetailedApiRequestError), + ); + addTearDown(() => subscription.cancel()); + } + + runTest(false); + runTest(true); + }); + }); + + group('page', () { + Future emptyTest(String? topic) { + var mock = mockClient(); + registerQueryMock(mock, 0, 50, topic: topic); + + var api = PubSub(mock, testProject); + return (topic == null + ? api.pageSubscriptions() + : api.pageSubscriptions(topic: topic)) + .then(expectAsync1((page) { + expect(page.items.length, 0); + expect(page.isLast, isTrue); + expect(() => page.next(), throwsStateError); + + mock.clear(); + registerQueryMock(mock, 0, 20, topic: topic); + return (topic == null + ? api.pageSubscriptions(pageSize: 20) + : api.pageSubscriptions(topic: topic, pageSize: 20)) + .then(expectAsync1((page) { + expect(page.items.length, 0); + expect(page.isLast, isTrue); + expect(() => page.next(), throwsStateError); + })); + })); + } + + test('empty', () { + emptyTest(null); + emptyTest('topic'); + }); + + Future singleTest(String? topic) { + var mock = mockClient(); + registerQueryMock(mock, 10, 50, topic: topic); + + var api = PubSub(mock, testProject); + return (topic == null + ? api.pageSubscriptions() + : api.pageSubscriptions(topic: topic)) + .then(expectAsync1((page) { + expect(page.items.length, 10); + expect(page.isLast, isTrue); + expect(() => page.next(), throwsStateError); + + mock.clear(); + registerQueryMock(mock, 20, 20, topic: topic); + return (topic == null + ? api.pageSubscriptions(pageSize: 20) + : api.pageSubscriptions(topic: topic, pageSize: 20)) + .then(expectAsync1((page) { + expect(page.items.length, 20); + expect(page.isLast, isTrue); + expect(() => page.next(), throwsStateError); + })); + })); + } + + test('single', () { + singleTest(null); + singleTest('topic'); + }); + + Future multipleTest(int n, int pageSize, String? topic) { + var totalPages = (n + pageSize - 1) ~/ pageSize; + var pageCount = 0; + + var completer = Completer(); + var mock = mockClient(); + registerQueryMock(mock, n, pageSize, topic: topic); + + void handlingPage(Page page) { + pageCount++; + expect(page.isLast, pageCount == totalPages); + expect(page.items.length, + page.isLast ? n - (totalPages - 1) * pageSize : pageSize); + if (!page.isLast) { + page.next().then(handlingPage); + } else { + expect(() => page.next(), throwsStateError); + expect(pageCount, totalPages); + completer.complete(); + } + } + + var api = PubSub(mock, testProject); + (topic == null + ? 
api.pageSubscriptions(pageSize: pageSize) + : api.pageSubscriptions(topic: topic, pageSize: pageSize)) + .then(handlingPage); + + return completer.future; + } + + test('multiple', () { + return multipleTest(70, 50, null) + .then((_) => multipleTest(99, 1, null)) + .then((_) => multipleTest(99, 50, null)) + .then((_) => multipleTest(99, 98, null)) + .then((_) => multipleTest(99, 99, null)) + .then((_) => multipleTest(99, 100, null)) + .then((_) => multipleTest(100, 1, null)) + .then((_) => multipleTest(100, 50, null)) + .then((_) => multipleTest(100, 100, null)) + .then((_) => multipleTest(101, 50, null)) + .then((_) => multipleTest(70, 50, 'topic')) + .then((_) => multipleTest(99, 1, 'topic')) + .then((_) => multipleTest(99, 50, 'topic')) + .then((_) => multipleTest(99, 98, 'topic')) + .then((_) => multipleTest(99, 99, 'topic')) + .then((_) => multipleTest(99, 100, 'topic')) + .then((_) => multipleTest(100, 1, 'topic')) + .then((_) => multipleTest(100, 50, 'topic')) + .then((_) => multipleTest(100, 100, 'topic')) + .then((_) => multipleTest(101, 50, 'topic')); + }); + }); + }); + }); + }); + + group('topic', () { + var name = 'test-topic'; + var absoluteName = 'projects/$testProject/topics/test-topic'; + var message = 'Hello, world!'; + var messageBytes = utf8.encode(message); + var messageBase64 = base64.encode(messageBytes); + var attributes = {'a': '1', 'b': 'text'}; + + void registerLookup(MockClient mock) { + mock.register('GET', absoluteName, expectAsync1((request) { + expect(request.body.length, 0); + return mock.respond(pubsub.Topic()..name = absoluteName); + })); + } + + void registerPublish( + MockClient mock, + int count, + Future Function(pubsub.PublishRequest) fn, + ) { + mock.register( + 'POST', + 'projects/test-project/topics/test-topic:publish', + expectAsync1((request) { + var publishRequest = + pubsub.PublishRequest.fromJson(jsonDecode(request.body) as Map); + return fn(publishRequest); + }, count: count)); + } + + test('publish', () { + var mock = mockClient(); + registerLookup(mock); + + var api = PubSub(mock, testProject); + return api.lookupTopic(name).then(expectAsync1((topic) { + mock.clear(); + registerPublish(mock, 4, (request) { + expect(request.messages!.length, 1); + expect(request.messages![0].data, messageBase64); + expect(request.messages![0].attributes, isNull); + return mock.respond(pubsub.PublishResponse()..messageIds = ['0']); + }); + + return topic.publishString(message).then(expectAsync1((result) { + expect(result, isNull); + return topic.publishBytes(messageBytes).then(expectAsync1((result) { + expect(result, isNull); + return topic + .publish(Message.withString(message)) + .then(expectAsync1((result) { + expect(result, isNull); + return topic + .publish(Message.withBytes(messageBytes)) + .then(expectAsync1((result) { + expect(result, isNull); + })); + })); + })); + })); + })); + }); + + test('publish-with-attributes', () { + var mock = mockClient(); + registerLookup(mock); + + var api = PubSub(mock, testProject); + return api.lookupTopic(name).then(expectAsync1((topic) { + mock.clear(); + registerPublish(mock, 4, (request) { + expect(request.messages!.length, 1); + expect(request.messages![0].data, messageBase64); + expect(request.messages![0].attributes, isNotNull); + expect(request.messages![0].attributes!.length, attributes.length); + expect(request.messages![0].attributes, attributes); + return mock.respond(pubsub.PublishResponse()..messageIds = ['0']); + }); + + return topic + .publishString(message, attributes: attributes) + 
.then(expectAsync1((result) { + expect(result, isNull); + return topic + .publishBytes(messageBytes, attributes: attributes) + .then(expectAsync1((result) { + expect(result, isNull); + return topic + .publish(Message.withString(message, attributes: attributes)) + .then(expectAsync1((result) { + expect(result, isNull); + return topic + .publish( + Message.withBytes(messageBytes, attributes: attributes)) + .then(expectAsync1((result) { + expect(result, isNull); + })); + })); + })); + })); + })); + }); + + test('delete', () { + var mock = mockClient(); + mock.register('GET', absoluteName, expectAsync1((request) { + expect(request.body.length, 0); + return mock.respond(pubsub.Topic()..name = absoluteName); + })); + + var api = PubSub(mock, testProject); + return api.lookupTopic(name).then(expectAsync1((topic) { + expect(topic.name, name); + expect(topic.absoluteName, absoluteName); + + mock.register('DELETE', absoluteName, expectAsync1((request) { + expect(request.body.length, 0); + return mock.respondEmpty(); + })); + + return topic.delete().then(expectAsync1((result) { + expect(result, isNull); + })); + })); + }); + }); + + group('subscription', () { + var name = 'test-subscription'; + var absoluteName = 'projects/$testProject/subscriptions/test-subscription'; + + test('delete', () { + var mock = mockClient(); + mock.register('GET', absoluteName, expectAsync1((request) { + expect(request.body.length, 0); + return mock.respond(pubsub.Topic()..name = absoluteName); + })); + + var api = PubSub(mock, testProject); + return api.lookupSubscription(name).then(expectAsync1((subscription) { + expect(subscription.name, name); + expect(subscription.absoluteName, absoluteName); + + mock.register('DELETE', absoluteName, expectAsync1((request) { + expect(request.body.length, 0); + return mock.respondEmpty(); + })); + + return subscription.delete().then(expectAsync1((result) { + expect(result, isNull); + })); + })); + }); + }); + + group('push', () { + var relativeSubscriptionName = 'sgjesse-managed-vm/test-push-subscription'; + var absoluteSubscriptionName = '/subscriptions/$relativeSubscriptionName'; + + test('event', () { + var requestBody = ''' +{ + "message": { + "data":"SGVsbG8sIHdvcmxkIDMwIG9mIDUwIQ==", + "labels": [ + { + "key":"messageNo", + "numValue":"30" + }, + { + "key":"test", + "strValue":"hello" + } + ] + }, + "subscription":"$absoluteSubscriptionName" +} +'''; + var event = PushEvent.fromJson(requestBody); + expect(event.message.asString, 'Hello, world 30 of 50!'); + expect(event.message.attributes['messageNo'], '30'); + expect(event.message.attributes['test'], 'hello'); + expect(event.subscriptionName, absoluteSubscriptionName); + }); + + test('event-short-subscription-name', () { + var requestBody = ''' +{ + "message": { + "data":"SGVsbG8sIHdvcmxkIDMwIG9mIDUwIQ==", + "labels": [ + { + "key":"messageNo", + "numValue":30 + }, + { + "key":"test", + "strValue":"hello" + } + ] + }, + "subscription":"$relativeSubscriptionName" +} +'''; + var event = PushEvent.fromJson(requestBody); + expect(event.message.asString, 'Hello, world 30 of 50!'); + expect(event.message.attributes['messageNo'], '30'); + expect(event.message.attributes['test'], 'hello'); + expect(event.subscriptionName, absoluteSubscriptionName); + }); + }); +} diff --git a/pkgs/gcloud/test/service_scope_test.dart b/pkgs/gcloud/test/service_scope_test.dart new file mode 100644 index 00000000..1dfdf4b8 --- /dev/null +++ b/pkgs/gcloud/test/service_scope_test.dart @@ -0,0 +1,238 @@ +// Copyright (c) 2014, the Dart project authors. 
Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+// ignore_for_file: only_throw_errors
+
+import 'dart:async';
+
+import 'package:gcloud/service_scope.dart' as ss;
+import 'package:test/test.dart';
+
+void main() {
+  test('no-service-scope', () {
+    expect(() => ss.register(1, 'foobar'), throwsA(isStateError));
+    expect(
+        () => ss.registerScopeExitCallback(() => null), throwsA(isStateError));
+    expect(() => ss.lookup(1), throwsA(isStateError));
+
+    var c = Completer.sync();
+    ss.fork(expectAsync0(() {
+      c.complete();
+      return Future.value();
+    }));
+
+    // Assert that after fork()ing we still don't have a service scope outside
+    // of the zone created by the fork()ing.
+    c.future.then(expectAsync1((_) {
+      expect(() => ss.register(1, 'foobar'), throwsA(isStateError));
+      expect(() => ss.registerScopeExitCallback(() => null),
+          throwsA(isStateError));
+      expect(() => ss.lookup(1), throwsA(isStateError));
+    }));
+  });
+
+  test('nonexistent-key', () {
+    return ss.fork(expectAsync0(() {
+      expect(ss.lookup(1), isNull);
+      return Future.value();
+    }));
+  });
+
+  test('error-on-double-insert', () {
+    // Ensure that inserting twice with the same key results in an error.
+    return ss.fork(expectAsync0(() => Future.sync(() {
+          ss.register(1, 'firstValue');
+          expect(() => ss.register(1, 'firstValue'), throwsA(isArgumentError));
+        })));
+  });
+
+  test('only-cleanup', () {
+    return ss.fork(expectAsync0(() => Future.sync(() {
+          ss.registerScopeExitCallback(expectAsync0(() => null));
+        })));
+  });
+
+  test('correct-insertion-and-cleanup-order', () {
+    // Ensure cleanup functions are called in the reverse order of inserting
+    // their entries.
+    var insertions = 0;
+    return ss.fork(expectAsync0(() => Future.sync(() {
+          var num = 10;
+
+          for (var i = 0; i < num; i++) {
+            var key = i;
+
+            insertions++;
+            ss.register(key, 'value$i');
+            ss.registerScopeExitCallback(expectAsync0(() {
+              expect(insertions, equals(i + 1));
+              insertions--;
+              return null;
+            }));
+
+            for (var j = 0; j <= num; j++) {
+              if (j <= i) {
+                expect(ss.lookup(key), 'value$i');
+              } else {
+                expect(ss.lookup(key), isNull);
+              }
+            }
+          }
+        })));
+  });
+
+  test('onion-cleanup', () {
+    // Ensures that a cleanup method can look up things registered before it.
+    return ss.fork(expectAsync0(() {
+      ss.registerScopeExitCallback(expectAsync0(() {
+        expect(ss.lookup(1), isNull);
+        expect(ss.lookup(2), isNull);
+        return null;
+      }));
+      ss.register(1, 'value1');
+      ss.registerScopeExitCallback(expectAsync0(() {
+        expect(ss.lookup(1), equals('value1'));
+        expect(ss.lookup(2), isNull);
+        return null;
+      }));
+      ss.register(2, 'value2', onScopeExit: expectAsync0(() {
+        expect(ss.lookup(1), equals('value1'));
+        expect(ss.lookup(2), isNull);
+        return null;
+      }));
+      ss.registerScopeExitCallback(expectAsync0(() {
+        expect(ss.lookup(1), 'value1');
+        expect(ss.lookup(2), 'value2');
+        return null;
+      }));
+      return Future.value();
+    }));
+  });
+
+  test('correct-insertion-and-cleanup-order--errors', () {
+    // Ensure that all cleanup functions will be called - even if some of them
+    // result in an error.
+    // Ensure the fork() error message contains all error messages from the
+    // failed cleanup() calls.
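+    // fork() gathers the individual cleanup errors into a single error,
+    // which the catchError handler below checks for each of the five
+    // 'xx<i>yy' messages thrown for even i.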
+    var insertions = 0;
+    return ss
+        .fork(() => Future.sync(() {
+              for (var i = 0; i < 10; i++) {
+                insertions++;
+                ss.register(i, 'value$i');
+                ss.registerScopeExitCallback(() {
+                  expect(insertions, equals(i + 1));
+                  insertions--;
+                  if (i.isEven) throw 'xx${i}yy';
+                  return null;
+                });
+              }
+            }))
+        .catchError(expectAsync2((e, _) {
+          for (var i = 0; i < 10; i++) {
+            expect('$e'.contains('xx${i}yy'), equals(i.isEven));
+          }
+        }));
+  });
+
+  test('service-scope-destroyed-after-callback-completes', () {
+    // Ensure that once the closure passed to fork() completes, the service
+    // scope is destroyed.
+    return ss.fork(
+      expectAsync0(
+        () => Future.sync(() {
+          var key = 1;
+          ss.register(key, 'firstValue');
+          ss.registerScopeExitCallback(Zone.current.bindCallback(() {
+            // Spawn an async task which will be run after the cleanups to
+            // ensure the service scope got destroyed.
+            Timer.run(expectAsync0(() {
+              expect(() => ss.lookup(key), throwsA(isStateError));
+              expect(() => ss.register(2, 'value'), throwsA(isStateError));
+              expect(() => ss.registerScopeExitCallback(() => null),
+                  throwsA(isStateError));
+            }));
+            return null;
+          }));
+          expect(ss.lookup(key), equals('firstValue'));
+        }),
+      ),
+    );
+  });
+
+  test('override-parent-value', () {
+    // Ensure that a value registered in a parent service scope can be
+    // overridden in a nested (fork()ed) service scope.
+    return ss.fork(expectAsync0(() => Future.sync(() {
+          var key = 1;
+          ss.register(key, 'firstValue');
+          expect(ss.lookup(key), equals('firstValue'));
+
+          return ss.fork(expectAsync0(() => Future.sync(() {
+                ss.register(key, 'secondValue');
+                expect(ss.lookup(key), equals('secondValue'));
+              })));
+        })));
+  });
+
+  test('fork-onError-handler', () {
+    // Ensure that an uncaught error thrown inside the fork()ed zone is
+    // delivered to the onError handler passed to fork().
+    ss.fork(expectAsync0(() {
+      Timer.run(() => throw StateError('foobar'));
+      return Future.value();
+    }), onError: expectAsync2((error, _) {
+      expect(error, isStateError);
+    }));
+  });
+
+  test('nested-fork-and-insert', () {
+    // Ensure that independently fork()ed service scopes can insert keys
+    // independently and they cannot see each other's values but can see
+    // parent service scope values.
+    var rootKey = 1;
+    var subKey = 2;
+    var subKey1 = 3;
+    var subKey2 = 4;
+
+    return ss.fork(expectAsync0(() {
+      var cleanupFork1 = 0;
+      var cleanupFork2 = 0;
+
+      ss.register(rootKey, 'root');
+      ss.registerScopeExitCallback(expectAsync0(() {
+        expect(cleanupFork1, equals(2));
+        expect(cleanupFork2, equals(2));
+        return null;
+      }));
+      expect(ss.lookup(rootKey), equals('root'));
+
+      Future spawnChild(Object ownSubKey, Object otherSubKey, int i,
+          ss.ScopeExitCallback cleanup) {
+        return ss.fork(expectAsync0(() => Future.sync(() {
+              ss.register(subKey, 'fork$i');
+              ss.registerScopeExitCallback(cleanup);
+              ss.register(ownSubKey, 'sub$i');
+              ss.registerScopeExitCallback(cleanup);
+
+              expect(ss.lookup(rootKey), equals('root'));
+              expect(ss.lookup(subKey), equals('fork$i'));
+              expect(ss.lookup(ownSubKey), equals('sub$i'));
+              expect(ss.lookup(otherSubKey), isNull);
+            })));
+      }
+
+      return Future.wait([
+        spawnChild(subKey1, subKey2, 1, () {
+          cleanupFork1++;
+          return null;
+        }),
+        spawnChild(subKey2, subKey1, 2, () {
+          cleanupFork2++;
+          return null;
+        }),
+      ]);
+    }));
+  });
+}
diff --git a/pkgs/gcloud/test/storage/e2e_test.dart b/pkgs/gcloud/test/storage/e2e_test.dart
new file mode 100644
index 00000000..fa9a317c
--- /dev/null
+++ b/pkgs/gcloud/test/storage/e2e_test.dart
@@ -0,0 +1,345 @@
+// Copyright (c) 2014, the Dart project authors.
Please see the AUTHORS file +// for details. All rights reserved. Use of this source code is governed by a +// BSD-style license that can be found in the LICENSE file. +// ignore_for_file: avoid_dynamic_calls + +@Tags(['e2e']) + +library gcloud.storage; + +import 'dart:async'; + +import 'package:gcloud/storage.dart'; +import 'package:googleapis/storage/v1.dart' as storage_api; +import 'package:test/test.dart'; + +import '../common_e2e.dart'; + +String generateBucketName() { + var id = DateTime.now().millisecondsSinceEpoch; + return 'dart-e2e-test-$id'; +} + +bool testDetailedApiError(Object e) => e is storage_api.DetailedApiRequestError; + +// Generate a list just above the limit when changing to resumable upload. +const int mb = 1024 * 1024; +const int maxNormalUpload = 1 * mb; +const int minResumableUpload = maxNormalUpload + 1; +final bytesResumableUpload = + List.generate(minResumableUpload, (e) => e & 255); + +void main() { + var didSetUp = false; + late Storage storage; + late String testBucketName; + late Bucket testBucket; + + setUpAll(() { + return withAuthClient(Storage.SCOPES, (String project, httpClient) { + testBucketName = generateBucketName(); + + // Share the same storage connection for all tests. + storage = Storage(httpClient, project); + + // Create a shared bucket for all object tests. + return storage.createBucket(testBucketName).then((_) { + testBucket = storage.bucket(testBucketName); + didSetUp = true; + }); + }); + }); + + tearDownAll(() async { + // Don't cleanup if setup failed + if (!didSetUp) { + return; + } + // Deleting a bucket relies on eventually consistent behaviour, hence + // the delay in attempt to prevent test flakiness. + await Future.delayed(storageListDelay); + await storage.deleteBucket(testBucketName); + }); + + group('bucket', () { + test('create-info-delete', () { + var bucketName = generateBucketName(); + return storage.createBucket(bucketName).then(expectAsync1((result) { + expect(result, isNull); + return storage.bucketInfo(bucketName).then(expectAsync1((info) { + expect(info.bucketName, bucketName); + expect(info.etag, isNotNull); + expect(info.id, isNotNull); + return storage.deleteBucket(bucketName).then(expectAsync1((result) { + expect(result, isNull); + })); + })); + })); + }); + + test('create-with-predefined-acl-delete', () async { + final cases = { + // See documentation: + // https://cloud.google.com/storage/docs/access-control/lists + PredefinedAcl.authenticatedRead: 2, + PredefinedAcl.private: 1, + PredefinedAcl.projectPrivate: 3, + PredefinedAcl.publicRead: 2, + PredefinedAcl.publicReadWrite: 2, + }; + for (var e in cases.entries) { + var predefinedAcl = e.key; + var expectedLength = e.value; + var bucketName = generateBucketName(); + // Sleep for 2 seconds to avoid bucket request limit, see: + // https://cloud.google.com/storage/quotas#buckets + await Future.delayed(Duration(seconds: 2)); + var r1 = await storage.createBucket(bucketName, + predefinedAcl: predefinedAcl); + expect(r1, isNull); + var info = await storage.bucketInfo(bucketName); + expect(info.bucketName, bucketName); + expect(info.acl.entries.length, expectedLength); + var r2 = await storage.deleteBucket(bucketName); + expect(r2, isNull); + } + }, skip: 'unable to test with uniform buckets enforced for account'); + + test('create-error', () { + storage.createBucket('goog-reserved').catchError(expectAsync1((e) { + expect(e, isNotNull); + }), test: testDetailedApiError); + }); + }); + + group('object', () { + // Run all object tests in the same bucket to try to avoid 
the rate-limit
+    // for creating and deleting buckets while testing.
+    Future withTestBucket(Future Function(Bucket bucket) fn) async {
+      try {
+        return await fn(testBucket);
+      } finally {
+        // TODO: Clean the bucket.
+      }
+    }
+
+    void testWithBucket(
+      String name,
+      FutureOr Function(Bucket bucket) fn,
+    ) {
+      test(name, () async {
+        try {
+          await fn(testBucket);
+        } finally {
+          // TODO: Clean the bucket.
+        }
+      });
+    }
+
+    group('create-read-delete', () {
+      void testCreateReadDelete(String name, List<int> bytes) {
+        testWithBucket(name, (bucket) async {
+          final info = await bucket.writeBytes('test', bytes);
+          expect(info, isNotNull);
+          final result = await bucket
+              .read('test')
+              .fold<List<int>>([], (p, e) => p..addAll(e));
+          expect(result, bytes);
+          await bucket.delete('test');
+        });
+      }
+
+      testCreateReadDelete('test-1', [1, 2, 3]);
+      testCreateReadDelete('test-2', bytesResumableUpload);
+    });
+
+    testWithBucket('create-copy-read-delete', (bucket) async {
+      final bytes = [1, 2, 3];
+      final info = await bucket.writeBytes('test-for-copy', bytes);
+      expect(info, isNotNull);
+
+      await storage.copyObject(
+        bucket.absoluteObjectName('test-for-copy'),
+        bucket.absoluteObjectName('test'),
+      );
+
+      final result =
+          await bucket.read('test').fold<List<int>>([], (p, e) => p..addAll(e));
+      expect(result, bytes);
+
+      await bucket.delete('test');
+      await bucket.delete('test-for-copy');
+    });
+
+    testWithBucket('create-copy-metadata-read-delete', (bucket) async {
+      final bytes = [1, 2, 3];
+      final info = await bucket.writeBytes(
+        'test-for-copy',
+        bytes,
+        metadata: ObjectMetadata(contentType: 'text/plain'),
+      );
+      expect(info, isNotNull);
+
+      await storage.copyObject(
+        bucket.absoluteObjectName('test-for-copy'),
+        bucket.absoluteObjectName('test'),
+        metadata: ObjectMetadata(contentType: 'application/octet'),
+      );
+
+      final r1 = await bucket.info('test-for-copy');
+      expect(r1.metadata.contentType, 'text/plain');
+      final r2 = await bucket.info('test');
+      expect(r2.metadata.contentType, 'application/octet');
+
+      final result =
+          await bucket.read('test').fold<List<int>>([], (p, e) => p..addAll(e));
+      expect(result, bytes);
+
+      await bucket.delete('test');
+      await bucket.delete('test-for-copy');
+    });
+
+    group('create-read-delete-streaming', () {
+      void testCreateReadDelete(String name, List<int> bytes) {
+        testWithBucket(name, (bucket) async {
+          await Stream.value(bytes).pipe(bucket.write('test'));
+          final result = await bucket
+              .read('test')
+              .fold<List<int>>([], (p, e) => p..addAll(e));
+          expect(result, bytes);
+          await bucket.delete('test');
+        });
+      }
+
+      testCreateReadDelete('test-1', [1, 2, 3, 5, 6, 7, 8, 9]);
+      testCreateReadDelete('test-2', bytesResumableUpload);
+    });
+
+    test('create-with-predefined-acl-delete', () {
+      return withTestBucket((Bucket bucket) {
+        Future test(
+            String objectName, PredefinedAcl predefinedAcl, expectedLength) {
+          return bucket
+              .writeBytes(objectName, [1, 2, 3], predefinedAcl: predefinedAcl)
+              .then(expectAsync1((result) {
+            expect(result, isNotNull);
+            return bucket.info(objectName).then(expectAsync1((info) {
+              var acl = info.metadata.acl;
+              expect(info.name, objectName);
+              expect(info.etag, isNotNull);
+              expect(acl!.entries.length, expectedLength);
+              return bucket.delete(objectName).then(expectAsync1((result) {
+                expect(result, isNull);
+              }));
+            }));
+          }));
+        }
+
+        return Future.forEach([
+          () => test('test-1', PredefinedAcl.authenticatedRead, 2),
+          () => test('test-2', PredefinedAcl.private, 1),
+          () => test('test-3', PredefinedAcl.projectPrivate, 4),
+          () => test('test-4', PredefinedAcl.publicRead,
+
+    test('create-with-predefined-acl-delete', () {
+      return withTestBucket((Bucket bucket) {
+        Future test(
+            String objectName, PredefinedAcl predefinedAcl, expectedLength) {
+          return bucket
+              .writeBytes(objectName, [1, 2, 3], predefinedAcl: predefinedAcl)
+              .then(expectAsync1((result) {
+            expect(result, isNotNull);
+            return bucket.info(objectName).then(expectAsync1((info) {
+              var acl = info.metadata.acl;
+              expect(info.name, objectName);
+              expect(info.etag, isNotNull);
+              expect(acl!.entries.length, expectedLength);
+              return bucket.delete(objectName).then(expectAsync1((result) {
+                expect(result, isNull);
+              }));
+            }));
+          }));
+        }
+
+        return Future.forEach([
+          () => test('test-1', PredefinedAcl.authenticatedRead, 2),
+          () => test('test-2', PredefinedAcl.private, 1),
+          () => test('test-3', PredefinedAcl.projectPrivate, 4),
+          () => test('test-4', PredefinedAcl.publicRead, 2),
+          () => test('test-5', PredefinedAcl.bucketOwnerFullControl, 2),
+          () => test('test-6', PredefinedAcl.bucketOwnerRead, 2)
+        ], (Function f) => f().then(expectAsync1((_) {})));
+      });
+    }, skip: 'unable to test with uniform buckets enforced for account');
+
+    test('create-with-acl-delete', () {
+      return withTestBucket((Bucket bucket) {
+        Future test(String objectName, Acl acl, expectedLength) {
+          return bucket
+              .writeBytes(objectName, [1, 2, 3], acl: acl)
+              .then(expectAsync1((result) {
+            expect(result, isNotNull);
+            return bucket.info(objectName).then(expectAsync1((info) {
+              var acl = info.metadata.acl;
+              expect(info.name, objectName);
+              expect(info.etag, isNotNull);
+              expect(acl!.entries.length, expectedLength);
+              return bucket.delete(objectName).then(expectAsync1((result) {
+                expect(result, isNull);
+              }));
+            }));
+          }));
+        }
+
+        var acl1 =
+            Acl([AclEntry(AclScope.allAuthenticated, AclPermission.WRITE)]);
+        var acl2 = Acl([
+          AclEntry(AclScope.allUsers, AclPermission.WRITE),
+          AclEntry(AccountScope('sgjesse@google.com'), AclPermission.WRITE)
+        ]);
+        var acl3 = Acl([
+          AclEntry(AclScope.allUsers, AclPermission.WRITE),
+          AclEntry(AccountScope('sgjesse@google.com'), AclPermission.WRITE),
+          AclEntry(GroupScope('misc@dartlang.org'), AclPermission.READ)
+        ]);
+        var acl4 = Acl([
+          AclEntry(AclScope.allUsers, AclPermission.WRITE),
+          AclEntry(AccountScope('sgjesse@google.com'), AclPermission.WRITE),
+          AclEntry(GroupScope('misc@dartlang.org'), AclPermission.READ),
+          AclEntry(DomainScope('dartlang.org'), AclPermission.FULL_CONTROL)
+        ]);
+
+        // The expected length of the returned ACL is one longer than the one
+        // used during creation, as an additional 'user-ID' ACL entry is added
+        // by cloud storage during creation.
+        return Future.forEach([
+          () => test('test-1', acl1, acl1.entries.length + 1),
+          () => test('test-2', acl2, acl2.entries.length + 1),
+          () => test('test-3', acl3, acl3.entries.length + 1),
+          () => test('test-4', acl4, acl4.entries.length + 1)
+        ], (Function f) => f().then(expectAsync1((_) {})));
+      });
+    }, skip: 'unable to test with uniform buckets enforced for account');
+
+    test('create-with-metadata-delete', () {
+      return withTestBucket((Bucket bucket) {
+        Future test(
+            String objectName, ObjectMetadata metadata, List<int> bytes) {
+          return bucket
+              .writeBytes(objectName, bytes, metadata: metadata)
+              .then(expectAsync1((result) {
+            expect(result, isNotNull);
+            return bucket.info(objectName).then(expectAsync1((info) {
+              expect(info.name, objectName);
+              expect(info.length, bytes.length);
+              expect(info.md5Hash, isNotNull);
+              expect(info.crc32CChecksum, isNotNull);
+              expect(info.generation.objectGeneration, isNotNull);
+              expect(info.generation.metaGeneration, 1);
+              expect(info.metadata.contentType, metadata.contentType);
+              expect(info.metadata.cacheControl, metadata.cacheControl);
+              expect(info.metadata.contentDisposition,
+                  metadata.contentDisposition);
+              expect(info.metadata.contentEncoding, metadata.contentEncoding);
+              expect(info.metadata.contentLanguage, metadata.contentLanguage);
+              expect(info.metadata.custom, metadata.custom);
+              return bucket.delete(objectName).then(expectAsync1((result) {
+                expect(result, isNull);
+              }));
+            }));
+          }));
+        }
+
+        var metadata1 = ObjectMetadata(contentType: 'text/plain');
+        var metadata2 = ObjectMetadata(
+            contentType: 'text/plain',
+            cacheControl: 'no-cache',
+            contentDisposition: 'attachment; filename="test.txt"',
+            contentEncoding: 'gzip',
+            contentLanguage: 'da',
+            custom: {'a': 'b', 'c': 'd'});
+
+        return Future.forEach([
+          () =>
test('test-1', metadata1, [65, 66, 67]), + () => test('test-2', metadata2, [65, 66, 67]), + () => test('test-3', metadata1, bytesResumableUpload), + () => test('test-4', metadata2, bytesResumableUpload) + ], (Function f) => f().then(expectAsync1((_) {}))); + }); + }); + }); +} diff --git a/pkgs/gcloud/test/storage/storage_test.dart b/pkgs/gcloud/test/storage/storage_test.dart new file mode 100644 index 00000000..83a5a3f0 --- /dev/null +++ b/pkgs/gcloud/test/storage/storage_test.dart @@ -0,0 +1,1114 @@ +// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file +// for details. All rights reserved. Use of this source code is governed by a +// BSD-style license that can be found in the LICENSE file. + +// ignore_for_file: only_throw_errors, avoid_catching_errors, +// ignore_for_file: avoid_dynamic_calls + +import 'dart:async'; +import 'dart:convert'; + +import 'package:gcloud/storage.dart'; +import 'package:googleapis/storage/v1.dart' as storage; +import 'package:http/http.dart' as http; +import 'package:test/test.dart'; + +import '../common.dart'; +import '../common_e2e.dart'; + +const _hostName = 'storage.googleapis.com'; +const _rootPath = '/storage/v1/'; + +MockClient mockClient() => MockClient(_hostName, _rootPath); + +void withMockClient(Function(MockClient client, Storage storage) function) { + var mock = mockClient(); + function(mock, Storage(mock, testProject)); +} + +Future withMockClientAsync( + Future Function(MockClient client, Storage storage) function) async { + var mock = mockClient(); + await function(mock, Storage(mock, testProject)); +} + +void main() { + group('bucket', () { + var bucketName = 'test-bucket'; + + test('create', () { + withMockClient((mock, api) { + mock.register('POST', 'b', expectAsync1((http.Request request) { + var requestBucket = + storage.Bucket.fromJson(jsonDecode(request.body) as Map); + expect(requestBucket.name, bucketName); + return mock.respond(storage.Bucket()..name = bucketName); + })); + + expect(api.createBucket(bucketName), completion(isNull)); + }); + }); + + test('create-with-predefined-acl', () { + var predefined = [ + [PredefinedAcl.authenticatedRead, 'authenticatedRead'], + [PredefinedAcl.private, 'private'], + [PredefinedAcl.projectPrivate, 'projectPrivate'], + [PredefinedAcl.publicRead, 'publicRead'], + [PredefinedAcl.publicReadWrite, 'publicReadWrite'] + ]; + + withMockClient((mock, api) { + var count = 0; + + mock.register( + 'POST', + 'b', + expectAsync1((http.Request request) { + var requestBucket = + storage.Bucket.fromJson(jsonDecode(request.body) as Map); + expect(requestBucket.name, bucketName); + expect(requestBucket.acl, isNull); + expect(request.url.queryParameters['predefinedAcl'], + predefined[count++][1]); + return mock.respond(storage.Bucket()..name = bucketName); + }, count: predefined.length)); + + var futures = []; + for (var i = 0; i < predefined.length; i++) { + futures.add(api.createBucket(bucketName, + predefinedAcl: predefined[i][0] as PredefinedAcl)); + } + return Future.wait(futures); + }); + }); + + test('create-with-acl', () { + var acl1 = Acl([ + AclEntry(AccountScope('user@example.com'), AclPermission.FULL_CONTROL), + ]); + var acl2 = Acl([ + AclEntry(AccountScope('user@example.com'), AclPermission.FULL_CONTROL), + AclEntry(GroupScope('group@example.com'), AclPermission.WRITE), + ]); + var acl3 = Acl([ + AclEntry(AccountScope('user@example.com'), AclPermission.FULL_CONTROL), + AclEntry(GroupScope('group@example.com'), AclPermission.WRITE), + AclEntry(DomainScope('example.com'), 
AclPermission.READ), + ]); + + var acls = [acl1, acl2, acl3]; + + withMockClient((mock, api) { + var count = 0; + + mock.register( + 'POST', + 'b', + expectAsync1((http.Request request) { + var requestBucket = + storage.Bucket.fromJson(jsonDecode(request.body) as Map); + expect(requestBucket.name, bucketName); + expect(request.url.queryParameters['predefinedAcl'], isNull); + expect(requestBucket.acl, isNotNull); + expect(requestBucket.acl!.length, count + 1); + expect(requestBucket.acl![0].entity, 'user-user@example.com'); + expect(requestBucket.acl![0].role, 'OWNER'); + if (count > 0) { + expect(requestBucket.acl![1].entity, 'group-group@example.com'); + expect(requestBucket.acl![1].role, 'WRITER'); + } + if (count > 2) { + expect(requestBucket.acl![2].entity, 'domain-example.com'); + expect(requestBucket.acl![2].role, 'READER'); + } + count++; + return mock.respond(storage.Bucket()..name = bucketName); + }, count: acls.length)); + + var futures = []; + for (var i = 0; i < acls.length; i++) { + futures.add(api.createBucket(bucketName, acl: acls[i])); + } + return Future.wait(futures); + }); + }); + + test('create-with-acl-and-predefined-acl', () { + var predefined = [ + [PredefinedAcl.authenticatedRead, 'authenticatedRead'], + [PredefinedAcl.private, 'private'], + [PredefinedAcl.projectPrivate, 'projectPrivate'], + [PredefinedAcl.publicRead, 'publicRead'], + [PredefinedAcl.publicReadWrite, 'publicReadWrite'] + ]; + + var acl1 = Acl([ + AclEntry(AccountScope('user@example.com'), AclPermission.FULL_CONTROL), + ]); + var acl2 = Acl([ + AclEntry(AccountScope('user@example.com'), AclPermission.FULL_CONTROL), + AclEntry(GroupScope('group@example.com'), AclPermission.WRITE), + ]); + var acl3 = Acl([ + AclEntry(AccountScope('user@example.com'), AclPermission.FULL_CONTROL), + AclEntry(GroupScope('group@example.com'), AclPermission.WRITE), + AclEntry(DomainScope('example.com'), AclPermission.READ), + ]); + + var acls = [acl1, acl2, acl3]; + + withMockClient((mock, api) { + var count = 0; + + mock.register( + 'POST', + 'b', + expectAsync1((http.Request request) { + var requestBucket = + storage.Bucket.fromJson(jsonDecode(request.body) as Map); + var predefinedIndex = count ~/ acls.length; + var aclIndex = count % acls.length; + expect(requestBucket.name, bucketName); + expect(request.url.queryParameters['predefinedAcl'], + predefined[predefinedIndex][1]); + expect(requestBucket.acl, isNotNull); + expect(requestBucket.acl!.length, aclIndex + 1); + expect(requestBucket.acl![0].entity, 'user-user@example.com'); + expect(requestBucket.acl![0].role, 'OWNER'); + if (aclIndex > 0) { + expect(requestBucket.acl![1].entity, 'group-group@example.com'); + expect(requestBucket.acl![1].role, 'WRITER'); + } + if (aclIndex > 2) { + expect(requestBucket.acl![2].entity, 'domain-example.com'); + expect(requestBucket.acl![2].role, 'READER'); + } + count++; + return mock.respond(storage.Bucket()..name = bucketName); + }, count: predefined.length * acls.length)); + + var futures = []; + for (var i = 0; i < predefined.length; i++) { + for (var j = 0; j < acls.length; j++) { + futures.add(api.createBucket(bucketName, + predefinedAcl: predefined[i][0] as PredefinedAcl, + acl: acls[j])); + } + } + return Future.wait(futures); + }); + }); + + test('delete', () { + withMockClient((mock, api) { + mock.register('DELETE', RegExp(r'b/[a-z/-]*$'), expectAsync1((request) { + expect(request.url.path, '${_rootPath}b/$bucketName'); + expect(request.body.length, 0); + return mock.respond(storage.Bucket()..name = bucketName); + })); + + 
expect(api.deleteBucket(bucketName), completion(isNull)); + }); + }); + + test('exists', () { + var exists = true; + + withMockClient((mock, api) { + mock.register( + 'GET', + RegExp(r'b/[a-z/-]*$'), + expectAsync1((request) { + expect(request.url.path, '${_rootPath}b/$bucketName'); + expect(request.body.length, 0); + if (exists) { + return mock.respond(storage.Bucket()..name = bucketName); + } else { + return mock.respondError(404); + } + }, count: 2)); + + return api.bucketExists(bucketName).then(expectAsync1((result) { + expect(result, isTrue); + exists = false; + expect(api.bucketExists(bucketName), completion(isFalse)); + })); + }); + }); + + test('stat', () { + withMockClient((mock, api) { + mock.register('GET', RegExp(r'b/[a-z/-]*$'), expectAsync1((request) { + expect(request.url.path, '${_rootPath}b/$bucketName'); + expect(request.body.length, 0); + return mock.respond(storage.Bucket() + ..name = bucketName + ..timeCreated = DateTime.utc(2014)); + })); + + return api.bucketInfo(bucketName).then(expectAsync1((result) { + expect(result.bucketName, bucketName); + expect(result.created, DateTime.utc(2014)); + })); + }); + }); + + group('list', () { + test('empty', () { + withMockClient((mock, api) { + mock.register('GET', 'b', expectAsync1((request) { + expect(request.body.length, 0); + return mock.respond(storage.Buckets()); + })); + + api + .listBucketNames() + .listen((_) => throw 'Unexpected', onDone: expectAsync0(() {})); + }); + }); + + test('immediate-cancel', () { + withMockClient((mock, api) { + api + .listBucketNames() + .listen((_) => throw 'Unexpected', + onDone: () => throw 'Unexpected') + .cancel(); + }); + }); + + test('list', () { + // TODO: Test list. + }); + + test('page', () { + // TODO: Test page. + }); + }); + + test('copy', () { + withMockClient((mock, api) { + mock.register( + 'POST', 'b/srcBucket/o/srcObject/copyTo/b/destBucket/o/destObject', + expectAsync1((request) { + return mock.respond(storage.Object()..name = 'destObject'); + })); + expect( + api.copyObject( + 'gs://srcBucket/srcObject', 'gs://destBucket/destObject'), + completion(isNull)); + }); + }); + + test('copy-invalid-args', () { + withMockClient((mock, api) { + expect(() => api.copyObject('a', 'b'), throwsA(isFormatException)); + expect(() => api.copyObject('a/b', 'c/d'), throwsA(isFormatException)); + expect(() => api.copyObject('gs://a/b', 'gs://c/'), + throwsA(isFormatException)); + expect(() => api.copyObject('gs://a/b', 'gs:///c'), + throwsA(isFormatException)); + }); + }); + }); + + group('object', () { + var bucketName = 'test-bucket'; + var objectName = 'test-object'; + + var bytesNormalUpload = [1, 2, 3]; + + // Generate a list just above the limit when changing to resumable upload. 
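+    // A payload of maxNormalUpload + 1 bytes, i.e. (1024 * 1024) + 1, is
+    // presumably the smallest input that forces Bucket.write off the
+    // single-request upload path: one byte past the 1 MiB cutoff is enough
+    // to flip the client onto the chunked, resumable protocol.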
+    const mb = 1024 * 1024;
+    const maxNormalUpload = 1 * mb;
+    const minResumableUpload = maxNormalUpload + 1;
+    var bytesResumableUpload =
+        List.generate(minResumableUpload, (e) => e & 255);
+
+    final isDetailedApiError = isA<storage.DetailedApiRequestError>();
+
+    void expectNormalUpload(MockClient mock, data, String objectName) {
+      var bytes = data.fold([], (p, e) => p..addAll(e));
+      mock.registerUpload('POST', 'b/$bucketName/o', expectAsync1((request) {
+        return mock
+            .processNormalMediaUpload(request)
+            .then(expectAsync1((mediaUpload) {
+          var object =
+              storage.Object.fromJson(jsonDecode(mediaUpload.json) as Map);
+          expect(object.name, objectName);
+          expect(mediaUpload.bytes, bytes);
+          expect(mediaUpload.contentType, 'application/octet-stream');
+          return mock.respond(storage.Object()..name = objectName);
+        }));
+      }));
+    }
+
+    void expectResumableUpload(MockClient mock, data, String objectName) {
+      var bytes = data.fold([], (p, e) => p..addAll(e));
+      expect(bytes.length, bytesResumableUpload.length);
+      var count = 0;
+      mock.registerResumableUpload('POST', 'b/$bucketName/o',
+          expectAsync1((request) {
+        var requestObject =
+            storage.Object.fromJson(jsonDecode(request.body) as Map);
+        expect(requestObject.name, objectName);
+        return mock.respondInitiateResumableUpload(testProject);
+      }));
+      mock.registerResumableUpload(
+          'PUT',
+          'b/$testProject/o',
+          expectAsync1((request) {
+            count++;
+            if (count == 1) {
+              expect(request.bodyBytes.length, mb);
+              return mock.respondContinueResumableUpload();
+            } else {
+              expect(request.bodyBytes.length, 1);
+              return mock.respond(storage.Object()..name = objectName);
+            }
+          }, count: 2));
+    }
+
+    void checkResult(result) {
+      expect(result.name, objectName);
+    }
+
+    Future pipeToSink(StreamSink<List<int>> sink, List<List<int>> data) {
+      sink.done.then(expectAsync1(checkResult));
+      sink.done.catchError((e) => throw 'Unexpected $e');
+      return Stream.fromIterable(data)
+          .pipe(sink)
+          .then(expectAsync1(checkResult))
+          .catchError((e) => throw 'Unexpected $e');
+    }
+
+    Future addStreamToSink(StreamSink<List<int>> sink, List<List<int>> data) {
+      sink.done.then(expectAsync1(checkResult));
+      sink.done.catchError((e) => throw 'Unexpected $e');
+      return sink
+          .addStream(Stream.fromIterable(data))
+          .then((_) => sink.close())
+          .then(expectAsync1(checkResult))
+          .catchError((e) => throw 'Unexpected $e');
+    }
+
+    Future addToSink(StreamSink<List<int>> sink, List<List<int>> data) {
+      sink.done.then(expectAsync1(checkResult));
+      sink.done.catchError((e) => throw 'Unexpected $e');
+      for (var bytes in data) {
+        sink.add(bytes);
+      }
+      return sink
+          .close()
+          .then(expectAsync1(checkResult))
+          .catchError((e) => throw 'Unexpected $e');
+    }
+
+    Future runTest(
+        MockClient mock, Storage api, List<List<int>> data, int length) {
+      var bucket = api.bucket(bucketName);
+
+      Future upload(
+          Future Function(StreamSink<List<int>> sink, List<List<int>> data) fn,
+          bool sendLength) {
+        mock.clear();
+        if (length <= maxNormalUpload) {
+          expectNormalUpload(mock, data, objectName);
+        } else {
+          expectResumableUpload(mock, data, objectName);
+        }
+        StreamSink<List<int>> sink;
+        if (sendLength) {
+          sink = bucket.write(objectName, length: length);
+        } else {
+          sink = bucket.write(objectName);
+        }
+        return fn(sink, data);
+      }
+
+      return upload(pipeToSink, true)
+          .then(expectAsync1((_) => upload(pipeToSink, false)))
+          .then(expectAsync1((_) => upload(addStreamToSink, true)))
+          .then(expectAsync1((_) => upload(addStreamToSink, false)))
+          .then(expectAsync1((_) => upload(addToSink, true)))
+          .then(expectAsync1((_) => upload(addToSink, false)));
+    }
+
+    test('write-short-1', () {
+      withMockClient((mock, api) {
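+        // runTest drives all six upload variants (pipe, addStream, and
+        // manual add, each with and without an up-front length) against the
+        // mock; with this 3-byte payload every variant should stay on the
+        // simple upload path, since 3 <= maxNormalUpload.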
runTest(mock, api, [bytesNormalUpload], bytesNormalUpload.length); + }); + }); + + test('write-short-2', () { + withMockClient((mock, api) { + runTest(mock, api, [bytesNormalUpload, bytesNormalUpload], + bytesNormalUpload.length * 2); + }); + }); + + test('write-long', () { + withMockClient((mock, api) { + runTest(mock, api, [bytesResumableUpload], bytesResumableUpload.length); + }); + }); + + test('write-short-error', () { + withMockClient((MockClient mock, api) { + Future test(int? length) { + mock.clear(); + mock.registerUpload('POST', 'b/$bucketName/o', + expectAsync1((request) { + return mock.respondError(500); + })); + + var bucket = api.bucket(bucketName); + var sink = bucket.write(bucketName, length: length); + expect(sink.done, throwsA(isDetailedApiError)); + return expectLater( + Stream.fromIterable([bytesNormalUpload]).pipe(sink), + throwsA(isDetailedApiError)); + } + + test(null) // Unknown length. + .then(expectAsync1((_) => test(1))) + .then(expectAsync1((_) => test(10))) + .then(expectAsync1((_) => test(maxNormalUpload))); + }); + }); + + // TODO: Mock the resumable upload timeout. + test('write-long-error', () { + withMockClient((mock, api) { + Future test(length) { + mock.clear(); + mock.registerResumableUpload('POST', 'b/$bucketName/o', + expectAsync1((request) { + return mock.respondInitiateResumableUpload(testProject); + })); + mock.registerResumableUpload( + 'PUT', + 'b/$testProject/o', + expectAsync1((request) { + return mock.respondError(502); + }, count: 3)); // Default 3 retries in googleapis library. + + var bucket = api.bucket(bucketName); + var sink = bucket.write(bucketName); + expect(sink.done, throwsA(isDetailedApiError)); + return expectLater( + Stream.fromIterable([bytesResumableUpload]).pipe(sink), + throwsA(isDetailedApiError)); + } + + test(null) // Unknown length. + .then(expectAsync1((_) => test(minResumableUpload))); + }); + }); + + test('write-long-wrong-length', () { + withMockClient((mock, api) { + Future test(List> data, int length) { + mock.clear(); + mock.registerResumableUpload('POST', 'b/$bucketName/o', + expectAsync1((request) { + return mock.respondInitiateResumableUpload(testProject); + })); + mock.registerResumableUpload('PUT', 'b/$testProject/o', + expectAsync1((request) { + return mock.respondContinueResumableUpload(); + })); // Default 3 retries in googleapis library. + + var bucket = api.bucket(bucketName); + var sink = bucket.write(bucketName, length: length); + expect(sink.done, + throwsA(anyOf(isA(), isA()))); + return expectLater(Stream>.fromIterable(data).pipe(sink), + throwsA(anyOf(isA(), isA()))); + } + + test([bytesResumableUpload], bytesResumableUpload.length + 1) + .then(expectAsync1((_) => test([ + bytesResumableUpload, + [1, 2] + ], bytesResumableUpload.length + 1))); + }); + }); + + test('write-add-error', () { + withMockClient((mock, api) { + var bucket = api.bucket(bucketName); + var sink = bucket.write(bucketName); + expect(sink.done, throwsArgumentError); + var stream = Stream.fromIterable([ + [1, 2, 3] + ]); + sink.addStream(stream).then((_) { + sink.addError(ArgumentError()); + expect(sink.close(), throwsArgumentError); + }); + }); + }); + + test('write-long-add-error', () { + withMockClient((mock, api) { + mock.registerResumableUpload('POST', 'b/$bucketName/o', + expectAsync1((request) { + return mock.respondInitiateResumableUpload(testProject); + })); + // The resumable upload will buffer until either close or a full chunk, + // so when we add an error the last byte is never sent. 
Therefore this + // PUT is only called once. + mock.registerResumableUpload('PUT', 'b/$testProject/o', + expectAsync1((request) { + expect(request.bodyBytes.length, 1024 * 1024); + return mock.respondContinueResumableUpload(); + })); + + var bucket = api.bucket(bucketName); + var sink = bucket.write(bucketName); + expect(sink.done, throwsArgumentError); + var stream = Stream.fromIterable([bytesResumableUpload]); + sink.addStream(stream).then((_) { + sink.addError(ArgumentError()); + expect(sink.close(), throwsArgumentError); + }); + }); + }); + + test('write-with-metadata-short', () { + var metadata = [ + ObjectMetadata(contentType: 'mime/type'), + ObjectMetadata(contentType: 'type/mime', cacheControl: 'control-cache'), + ObjectMetadata(cacheControl: 'control-cache'), + ObjectMetadata( + cacheControl: 'control-cache', contentDisposition: 'disp-content'), + ObjectMetadata( + contentDisposition: 'disp-content', + contentEncoding: 'encoding', + contentLanguage: 'language'), + ObjectMetadata(custom: {'x': 'y'}), + ObjectMetadata(custom: {'a': 'b', 'x': 'y'}) + ]; + + withMockClient((mock, api) { + var count = 0; + var bytes = [1, 2, 3]; + + mock.registerUpload( + 'POST', + 'b/$bucketName/o', + expectAsync1((request) { + return mock + .processNormalMediaUpload(request) + .then(expectAsync1((mediaUpload) { + var object = storage.Object.fromJson( + jsonDecode(mediaUpload.json) as Map); + var m = metadata[count]; + expect(object.name, objectName); + expect(mediaUpload.bytes, bytes); + var contentType = m.contentType ?? 'application/octet-stream'; + expect(mediaUpload.contentType, contentType); + expect(object.cacheControl, m.cacheControl); + expect(object.contentDisposition, m.contentDisposition); + expect(object.contentEncoding, m.contentEncoding); + expect(object.contentLanguage, m.contentLanguage); + expect(object.metadata, m.custom); + count++; + return mock.respond(storage.Object()..name = objectName); + })); + }, count: metadata.length)); + + var bucket = api.bucket(bucketName); + var futures = []; + for (var i = 0; i < metadata.length; i++) { + futures + .add(bucket.writeBytes(objectName, bytes, metadata: metadata[i])); + } + return Future.wait(futures); + }); + }); + + test('write-with-metadata-long', () { + var metadata = [ + ObjectMetadata(contentType: 'mime/type'), + ObjectMetadata(contentType: 'type/mime', cacheControl: 'control-cache'), + ObjectMetadata(cacheControl: 'control-cache'), + ObjectMetadata( + cacheControl: 'control-cache', contentDisposition: 'disp-content'), + ObjectMetadata( + contentDisposition: 'disp-content', + contentEncoding: 'encoding', + contentLanguage: 'language'), + ObjectMetadata(custom: {'x': 'y'}), + ObjectMetadata(custom: {'a': 'b', 'x': 'y'}) + ]; + + withMockClient((mock, api) { + var countInitial = 0; + var countData = 0; + + mock.registerResumableUpload( + 'POST', + 'b/$bucketName/o', + expectAsync1((request) { + var object = + storage.Object.fromJson(jsonDecode(request.body) as Map); + var m = metadata[countInitial]; + expect(object.name, objectName); + expect(object.cacheControl, m.cacheControl); + expect(object.contentDisposition, m.contentDisposition); + expect(object.contentEncoding, m.contentEncoding); + expect(object.contentLanguage, m.contentLanguage); + expect(object.metadata, m.custom); + countInitial++; + return mock.respondInitiateResumableUpload(testProject); + }, count: metadata.length)); + mock.registerResumableUpload( + 'PUT', + 'b/$testProject/o', + expectAsync1((request) { + var m = metadata[countData % metadata.length]; + var 
contentType = m.contentType ?? 'application/octet-stream'; + expect(request.headers['content-type'], contentType); + var firstPart = countData < metadata.length; + countData++; + if (firstPart) { + expect(request.bodyBytes.length, mb); + return mock.respondContinueResumableUpload(); + } else { + expect(request.bodyBytes.length, 1); + return mock.respond(storage.Object()..name = objectName); + } + }, count: metadata.length * 2)); + + var bucket = api.bucket(bucketName); + var futures = []; + for (var i = 0; i < metadata.length; i++) { + futures.add(bucket.writeBytes(objectName, bytesResumableUpload, + metadata: metadata[i])); + } + return Future.wait(futures); + }); + }); + + test('write-with-predefined-acl', () { + var predefined = [ + [PredefinedAcl.authenticatedRead, 'authenticatedRead'], + [PredefinedAcl.private, 'private'], + [PredefinedAcl.projectPrivate, 'projectPrivate'], + [PredefinedAcl.publicRead, 'publicRead'], + [PredefinedAcl.bucketOwnerFullControl, 'bucketOwnerFullControl'], + [PredefinedAcl.bucketOwnerRead, 'bucketOwnerRead'] + ]; + + withMockClient((mock, api) { + var count = 0; + var bytes = [1, 2, 3]; + + mock.registerUpload( + 'POST', + 'b/$bucketName/o', + expectAsync1((request) { + return mock + .processNormalMediaUpload(request) + .then(expectAsync1((mediaUpload) { + var object = storage.Object.fromJson( + jsonDecode(mediaUpload.json) as Map); + expect(object.name, objectName); + expect(mediaUpload.bytes, bytes); + expect(mediaUpload.contentType, 'application/octet-stream'); + expect(request.url.queryParameters['predefinedAcl'], + predefined[count++][1]); + expect(object.acl, isNull); + return mock.respond(storage.Object()..name = objectName); + })); + }, count: predefined.length)); + + var bucket = api.bucket(bucketName); + var futures = []; + for (var i = 0; i < predefined.length; i++) { + futures.add(bucket.writeBytes(objectName, bytes, + predefinedAcl: predefined[i][0] as PredefinedAcl)); + } + return Future.wait(futures); + }); + }); + + test('write-with-acl', () { + var acl1 = Acl([ + AclEntry(AccountScope('user@example.com'), AclPermission.FULL_CONTROL), + ]); + var acl2 = Acl([ + AclEntry(AccountScope('user@example.com'), AclPermission.FULL_CONTROL), + AclEntry(GroupScope('group@example.com'), AclPermission.WRITE), + ]); + var acl3 = Acl([ + AclEntry(AccountScope('user@example.com'), AclPermission.FULL_CONTROL), + AclEntry(GroupScope('group@example.com'), AclPermission.WRITE), + AclEntry(DomainScope('example.com'), AclPermission.READ), + ]); + + var acls = [acl1, acl2, acl3]; + + withMockClient((mock, api) { + var count = 0; + var bytes = [1, 2, 3]; + + mock.registerUpload( + 'POST', + 'b/$bucketName/o', + expectAsync1((request) { + return mock + .processNormalMediaUpload(request) + .then(expectAsync1((mediaUpload) { + var object = storage.Object.fromJson( + jsonDecode(mediaUpload.json) as Map); + expect(object.name, objectName); + expect(mediaUpload.bytes, bytes); + expect(mediaUpload.contentType, 'application/octet-stream'); + expect(request.url.queryParameters['predefinedAcl'], isNull); + expect(object.acl, isNotNull); + expect(object.acl!.length, count + 1); + expect(object.acl![0].entity, 'user-user@example.com'); + expect(object.acl![0].role, 'OWNER'); + if (count > 0) { + expect(object.acl![1].entity, 'group-group@example.com'); + expect(object.acl![1].role, 'OWNER'); + } + if (count > 2) { + expect(object.acl![2].entity, 'domain-example.com'); + expect(object.acl![2].role, 'READER'); + } + count++; + return mock.respond(storage.Object()..name = 
objectName); + })); + }, count: acls.length)); + + var bucket = api.bucket(bucketName); + var futures = []; + for (var i = 0; i < acls.length; i++) { + futures.add(bucket.writeBytes(objectName, bytes, acl: acls[i])); + } + return Future.wait(futures); + }); + }); + + test('write-with-acl-and-predefined-acl', () { + var predefined = [ + [PredefinedAcl.authenticatedRead, 'authenticatedRead'], + [PredefinedAcl.private, 'private'], + [PredefinedAcl.projectPrivate, 'projectPrivate'], + [PredefinedAcl.publicRead, 'publicRead'], + [PredefinedAcl.bucketOwnerFullControl, 'bucketOwnerFullControl'], + [PredefinedAcl.bucketOwnerRead, 'bucketOwnerRead'] + ]; + + var acl1 = Acl([ + AclEntry(AccountScope('user@example.com'), AclPermission.FULL_CONTROL), + ]); + var acl2 = Acl([ + AclEntry(AccountScope('user@example.com'), AclPermission.FULL_CONTROL), + AclEntry(GroupScope('group@example.com'), AclPermission.WRITE), + ]); + var acl3 = Acl([ + AclEntry(AccountScope('user@example.com'), AclPermission.FULL_CONTROL), + AclEntry(GroupScope('group@example.com'), AclPermission.WRITE), + AclEntry(DomainScope('example.com'), AclPermission.READ), + ]); + + var acls = [acl1, acl2, acl3]; + + withMockClient((mock, api) { + var count = 0; + var bytes = [1, 2, 3]; + + mock.registerUpload( + 'POST', + 'b/$bucketName/o', + expectAsync1((request) { + return mock + .processNormalMediaUpload(request) + .then(expectAsync1((mediaUpload) { + var predefinedIndex = count ~/ acls.length; + var aclIndex = count % acls.length; + var object = storage.Object.fromJson( + jsonDecode(mediaUpload.json) as Map); + expect(object.name, objectName); + expect(mediaUpload.bytes, bytes); + expect(mediaUpload.contentType, 'application/octet-stream'); + expect(request.url.queryParameters['predefinedAcl'], + predefined[predefinedIndex][1]); + expect(object.acl, isNotNull); + expect(object.acl!.length, aclIndex + 1); + expect(object.acl![0].entity, 'user-user@example.com'); + expect(object.acl![0].role, 'OWNER'); + if (aclIndex > 0) { + expect(object.acl![1].entity, 'group-group@example.com'); + expect(object.acl![1].role, 'OWNER'); + } + if (aclIndex > 2) { + expect(object.acl![2].entity, 'domain-example.com'); + expect(object.acl![2].role, 'READER'); + } + count++; + return mock.respond(storage.Object()..name = objectName); + })); + }, count: predefined.length * acls.length)); + + var bucket = api.bucket(bucketName); + var futures = []; + for (var i = 0; i < predefined.length; i++) { + for (var j = 0; j < acls.length; j++) { + futures.add(bucket.writeBytes(objectName, bytes, + acl: acls[j], + predefinedAcl: predefined[i][0] as PredefinedAcl)); + } + } + return Future.wait(futures); + }); + }); + + group('read', () { + test('success', () async { + await withMockClientAsync((MockClient mock, Storage api) async { + mock.register('GET', 'b/$bucketName/o/$objectName', + expectAsync1(mock.respondBytes)); + + var bucket = api.bucket(bucketName); + var data = []; + + await bucket.read(objectName).forEach(data.addAll); + expect(data, MockClient.bytes); + }); + }); + + test('with offset, without length', () async { + await withMockClientAsync((MockClient mock, Storage api) async { + var bucket = api.bucket(bucketName); + + await expectLater( + bucket.read(objectName, offset: 1).toList(), + throwsA( + isA().having( + (p0) => p0.message, + 'message', + 'length must have a value if offset is non-zero.', + ), + ), + ); + }); + }); + + test('with offset and length zero', () async { + await withMockClientAsync((MockClient mock, Storage api) async { + var bucket 
= api.bucket(bucketName); + + try { + await bucket.read(objectName, offset: 1, length: 0).toList(); + fail('An exception should be thrown'); + } on ArgumentError catch (e) { + expect(e.message, 'If provided, length must greater than zero.'); + } + }); + }); + + test('with invalid length', () async { + await withMockClientAsync((MockClient mock, Storage api) async { + var bucket = api.bucket(bucketName); + + try { + await bucket.read(objectName, length: -1).toList(); + fail('An exception should be thrown'); + } on ArgumentError catch (e) { + expect(e.message, 'If provided, length must greater than zero.'); + } + }); + }); + + test('with length', () async { + await withMockClientAsync((MockClient mock, Storage api) async { + mock.register('GET', 'b/$bucketName/o/$objectName', + expectAsync1(mock.respondBytes)); + + var bucket = api.bucket(bucketName); + var data = []; + + await bucket.read(objectName, length: 4).forEach(data.addAll); + expect(data, MockClient.bytes.sublist(0, 4)); + }); + }); + + test('with offset and length', () async { + await withMockClientAsync((MockClient mock, Storage api) async { + mock.register('GET', 'b/$bucketName/o/$objectName', + expectAsync1(mock.respondBytes)); + + var bucket = api.bucket(bucketName); + var data = []; + + await bucket + .read(objectName, offset: 1, length: 3) + .forEach(data.addAll); + expect(data, MockClient.bytes.sublist(1, 4)); + }); + }); + + test('file does not exist', () async { + await withMockClientAsync((MockClient mock, Storage api) async { + mock.register('GET', 'b/$bucketName/o/$objectName', + expectAsync1((request) { + expect(request.url.queryParameters['alt'], 'media'); + return mock.respondError(404); + })); + + var bucket = api.bucket(bucketName); + + try { + await bucket.read(objectName).toList(); + fail('An exception should be thrown'); + } on storage.DetailedApiRequestError catch (e) { + expect(e.status, 404); + } + }); + }); + }); + + test('stat', () { + withMockClient((mock, api) { + mock.register('GET', 'b/$bucketName/o/$objectName', + expectAsync1((request) { + expect(request.url.queryParameters['alt'], 'json'); + return mock.respond(storage.Object() + ..name = objectName + ..updated = DateTime.utc(2014) + ..contentType = 'mime/type'); + })); + + var api = Storage(mock, testProject); + var bucket = api.bucket(bucketName); + bucket.info(objectName).then(expectAsync1((stat) { + expect(stat.name, objectName); + expect(stat.updated, DateTime.utc(2014)); + expect(stat.metadata.contentType, 'mime/type'); + })); + }); + }); + + test('stat-acl', () { + withMockClient((mock, api) { + mock.register('GET', 'b/$bucketName/o/$objectName', + expectAsync1((request) { + expect(request.url.queryParameters['alt'], 'json'); + var acl1 = storage.ObjectAccessControl(); + acl1.entity = 'user-1234567890'; + acl1.role = 'OWNER'; + var acl2 = storage.ObjectAccessControl(); + acl2.entity = 'user-xxx@yyy.zzz'; + acl2.role = 'OWNER'; + var acl3 = storage.ObjectAccessControl(); + acl3.entity = 'xxx-1234567890'; + acl3.role = 'OWNER'; + return mock.respond(storage.Object() + ..name = objectName + ..acl = [acl1, acl2, acl3]); + })); + + var api = Storage(mock, testProject); + var bucket = api.bucket(bucketName); + bucket.info(objectName).then(expectAsync1((ObjectInfo info) { + expect(info.name, objectName); + expect(info.metadata.acl!.entries.length, 3); + expect(info.metadata.acl!.entries[0].scope is StorageIdScope, isTrue); + expect(info.metadata.acl!.entries[1].scope is AccountScope, isTrue); + expect(info.metadata.acl!.entries[2].scope is 
OpaqueScope, isTrue); + })); + }); + }); + + group('list', () { + test('empty', () { + withMockClient((mock, api) { + mock.register('GET', 'b/$bucketName/o', expectAsync1((request) { + expect(request.body.length, 0); + return mock.respond(storage.Objects()); + })); + + var bucket = api.bucket(bucketName); + bucket + .list() + .listen((_) => throw 'Unexpected', onDone: expectAsync0(() {})); + }); + }); + + test('immediate-cancel', () { + withMockClient((mock, api) { + var bucket = api.bucket(bucketName); + bucket + .list() + .listen((_) => throw 'Unexpected', + onDone: () => throw 'Unexpected') + .cancel(); + }); + }); + + test('list', () { + // TODO: Test list. + }); + + test('page', () { + // TODO: Test page. + }); + }); + }); + + group('acl', () { + var id = StorageIdScope('1234567890'); + var user = AccountScope('sgjesse@google.com'); + var group = GroupScope('dart'); + var domain = DomainScope('dartlang.org'); + + var userRead = AclEntry(user, AclPermission.READ); + var groupWrite = AclEntry(group, AclPermission.WRITE); + var domainFullControl = AclEntry(domain, AclPermission.FULL_CONTROL); + + test('compare-scope', () { + expect(id, StorageIdScope('1234567890')); + expect(user, AccountScope('sgjesse@google.com')); + expect(group, GroupScope('dart')); + expect(domain, DomainScope('dartlang.org')); + expect(AclScope.allAuthenticated, AllAuthenticatedScope()); + expect(AclScope.allUsers, AllUsersScope()); + }); + + test('compare-entries', () { + expect(userRead, AclEntry(user, AclPermission.READ)); + expect(groupWrite, AclEntry(group, AclPermission.WRITE)); + expect(domainFullControl, AclEntry(domain, AclPermission.FULL_CONTROL)); + }); + + test('compare-acls', () { + var acl = Acl([userRead, groupWrite, domainFullControl]); + expect( + acl, + Acl([ + AclEntry(user, AclPermission.READ), + AclEntry(group, AclPermission.WRITE), + AclEntry(domain, AclPermission.FULL_CONTROL) + ])); + expect( + acl, + isNot(equals(Acl([ + AclEntry(group, AclPermission.WRITE), + AclEntry(user, AclPermission.READ), + AclEntry(domain, AclPermission.FULL_CONTROL) + ])))); + }); + + test('compare-predefined-acls', () { + expect(PredefinedAcl.private, PredefinedAcl.private); + expect(PredefinedAcl.private, isNot(equals(PredefinedAcl.publicRead))); + }); + }); +}